4 * The contents of this file are subject to the terms of the
5 * Common Development and Distribution License (the "License").
6 * You may not use this file except in compliance with the License.
8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 * or http://www.opensolaris.org/os/licensing.
10 * See the License for the specific language governing permissions
11 * and limitations under the License.
13 * When distributing Covered Code, include this CDDL HEADER in each
14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 * If applicable, add the following below this CDDL HEADER, with the
16 * fields enclosed by brackets "[]" replaced with your own identifying
17 * information: Portions Copyright [yyyy] [name of copyright owner]
22 * Copyright 2007 Sun Microsystems, Inc. All rights reserved.
23 * Use is subject to license terms.
26 .ident "%Z%%M% %I% %E% SMI"
31 #include <sys/asm_linkage.h>
35 * Legacy kernel interfaces; they will go away (eventually).
! Map the historical Solaris kernel names (cas8, cas32, caslong, swapl,
! atomic_and_long, ...) onto the modern atomic_* implementations below
! as weak symbols, so old callers keep linking.
37 ANSI_PRAGMA_WEAK2(cas8,atomic_cas_8,function)
38 ANSI_PRAGMA_WEAK2(cas32,atomic_cas_32,function)
39 ANSI_PRAGMA_WEAK2(cas64,atomic_cas_64,function)
40 ANSI_PRAGMA_WEAK2(caslong,atomic_cas_ulong,function)
41 ANSI_PRAGMA_WEAK2(casptr,atomic_cas_ptr,function)
42 ANSI_PRAGMA_WEAK2(atomic_and_long,atomic_and_ulong,function)
43 ANSI_PRAGMA_WEAK2(atomic_or_long,atomic_or_ulong,function)
44 ANSI_PRAGMA_WEAK2(swapl,atomic_swap_32,function)
47 * Include the definitions for the libc weak aliases.
49 #include "../atomic_asm_weak.h"
53 * NOTE: If atomic_inc_8 and atomic_inc_8_nv are ever
54 * separated, you need to also edit the libc sparc platform
55 * specific mapfile and remove the NODYNSORT attribute
56 * from atomic_inc_8_nv.
! atomic_inc_8 family: atomically increment the byte at %o0; _nv
! variants return the new value.  All labels share one body.
! NOTE(review): the ENTRY(atomic_inc_8) line and the instruction body
! (presumably a tail-branch into the atomic_add_8 code) are missing
! from this extract -- confirm against the full atomic.s source.
59 ALTENTRY(atomic_inc_8_nv)
60 ALTENTRY(atomic_inc_uchar)
61 ALTENTRY(atomic_inc_uchar_nv)
64 SET_SIZE(atomic_inc_uchar_nv)
65 SET_SIZE(atomic_inc_uchar)
66 SET_SIZE(atomic_inc_8_nv)
67 SET_SIZE(atomic_inc_8)
70 * NOTE: If atomic_dec_8 and atomic_dec_8_nv are ever
71 * separated, you need to also edit the libc sparc platform
72 * specific mapfile and remove the NODYNSORT attribute
73 * from atomic_dec_8_nv.
! atomic_dec_8 family: atomically decrement the byte at %o0; _nv
! variants return the new value.
! NOTE(review): ENTRY(atomic_dec_8) and the instruction body are
! missing from this extract -- confirm against the full atomic.s.
76 ALTENTRY(atomic_dec_8_nv)
77 ALTENTRY(atomic_dec_uchar)
78 ALTENTRY(atomic_dec_uchar_nv)
81 SET_SIZE(atomic_dec_uchar_nv)
82 SET_SIZE(atomic_dec_uchar)
83 SET_SIZE(atomic_dec_8_nv)
84 SET_SIZE(atomic_dec_8)
87 * NOTE: If atomic_add_8 and atomic_add_8_nv are ever
88 * separated, you need to also edit the libc sparc platform
89 * specific mapfile and remove the NODYNSORT attribute
90 * from atomic_add_8_nv.
! atomic_add_8 / atomic_add_char (+_nv): atomically add the 8-bit
! delta in %o1 to the byte at %o0.  SPARC has no byte cas, so the
! byte is isolated inside its aligned 32-bit word (big-endian byte
! offset math below) and a word-sized cas retry loop is used.
! NOTE(review): the ENTRY line, the label "1:", the cas/cmp/bne
! retry sequence and the retl are missing from this extract --
! confirm against the full atomic.s before modifying.
93 ALTENTRY(atomic_add_8_nv)
94 ALTENTRY(atomic_add_char)
95 ALTENTRY(atomic_add_char_nv)
97 and %o0, 0x3, %o4 ! %o4 = byte offset, left-to-right
98 xor %o4, 0x3, %g1 ! %g1 = byte offset, right-to-left
99 sll %g1, 3, %g1 ! %g1 = bit offset, right-to-left
100 set 0xff, %o3 ! %o3 = mask
101 sll %o3, %g1, %o3 ! %o3 = shifted to bit offset
102 sll %o1, %g1, %o1 ! %o1 = shifted to bit offset
103 and %o1, %o3, %o1 ! %o1 = single byte value
104 andn %o0, 0x3, %o0 ! %o0 = word address
105 ld [%o0], %o2 ! read old value
107 add %o2, %o1, %o5 ! add value to the old value
108 and %o5, %o3, %o5 ! clear other bits
109 andn %o2, %o3, %o4 ! clear target bits
110 or %o4, %o5, %o5 ! insert the new value
114 mov %o5, %o2 ! %o2 = old value
118 srl %o5, %g1, %o0 ! %o0 = new value
119 SET_SIZE(atomic_add_char_nv)
120 SET_SIZE(atomic_add_char)
121 SET_SIZE(atomic_add_8_nv)
122 SET_SIZE(atomic_add_8)
125 * NOTE: If atomic_inc_16 and atomic_inc_16_nv are ever
126 * separated, you need to also edit the libc sparc platform
127 * specific mapfile and remove the NODYNSORT attribute
128 * from atomic_inc_16_nv.
! atomic_inc_16 family: atomically increment the 16-bit value at %o0;
! _nv variants return the new value.
! NOTE(review): ENTRY(atomic_inc_16) and the instruction body are
! missing from this extract -- confirm against the full atomic.s.
131 ALTENTRY(atomic_inc_16_nv)
132 ALTENTRY(atomic_inc_ushort)
133 ALTENTRY(atomic_inc_ushort_nv)
136 SET_SIZE(atomic_inc_ushort_nv)
137 SET_SIZE(atomic_inc_ushort)
138 SET_SIZE(atomic_inc_16_nv)
139 SET_SIZE(atomic_inc_16)
142 * NOTE: If atomic_dec_16 and atomic_dec_16_nv are ever
143 * separated, you need to also edit the libc sparc platform
144 * specific mapfile and remove the NODYNSORT attribute
145 * from atomic_dec_16_nv.
! atomic_dec_16 family: atomically decrement the 16-bit value at %o0;
! _nv variants return the new value.
! NOTE(review): ENTRY(atomic_dec_16) and the instruction body are
! missing from this extract -- confirm against the full atomic.s.
148 ALTENTRY(atomic_dec_16_nv)
149 ALTENTRY(atomic_dec_ushort)
150 ALTENTRY(atomic_dec_ushort_nv)
153 SET_SIZE(atomic_dec_ushort_nv)
154 SET_SIZE(atomic_dec_ushort)
155 SET_SIZE(atomic_dec_16_nv)
156 SET_SIZE(atomic_dec_16)
159 * NOTE: If atomic_add_16 and atomic_add_16_nv are ever
160 * separated, you need to also edit the libc sparc platform
161 * specific mapfile and remove the NODYNSORT attribute
162 * from atomic_add_16_nv.
! atomic_add_16 / atomic_add_short (+_nv): atomically add the 16-bit
! delta in %o1 to the halfword at %o0, by isolating it inside its
! aligned 32-bit word (big-endian halfword offset math) and using a
! word-sized cas retry loop.
! NOTE(review): the ENTRY line, the loop label, the cas/cmp/bne
! sequence and the retl are missing from this extract -- confirm
! against the full atomic.s before modifying.
165 ALTENTRY(atomic_add_16_nv)
166 ALTENTRY(atomic_add_short)
167 ALTENTRY(atomic_add_short_nv)
169 and %o0, 0x2, %o4 ! %o4 = byte offset, left-to-right
170 xor %o4, 0x2, %g1 ! %g1 = byte offset, right-to-left
171 sll %o4, 3, %o4 ! %o4 = bit offset, left-to-right
172 sll %g1, 3, %g1 ! %g1 = bit offset, right-to-left
173 sethi %hi(0xffff0000), %o3 ! %o3 = mask
174 srl %o3, %o4, %o3 ! %o3 = shifted to bit offset
175 sll %o1, %g1, %o1 ! %o1 = shifted to bit offset
176 and %o1, %o3, %o1 ! %o1 = single short value
177 andn %o0, 0x2, %o0 ! %o0 = word address
178 ! if low-order bit is 1, we will properly get an alignment fault here
179 ld [%o0], %o2 ! read old value
181 add %o1, %o2, %o5 ! add value to the old value
182 and %o5, %o3, %o5 ! clear other bits
183 andn %o2, %o3, %o4 ! clear target bits
184 or %o4, %o5, %o5 ! insert the new value
188 mov %o5, %o2 ! %o2 = old value
192 srl %o5, %g1, %o0 ! %o0 = new value
193 SET_SIZE(atomic_add_short_nv)
194 SET_SIZE(atomic_add_short)
195 SET_SIZE(atomic_add_16_nv)
196 SET_SIZE(atomic_add_16)
199 * NOTE: If atomic_inc_32 and atomic_inc_32_nv are ever
200 * separated, you need to also edit the libc sparc platform
201 * specific mapfile and remove the NODYNSORT attribute
202 * from atomic_inc_32_nv.
! atomic_inc_32 family (also uint/ulong on 32-bit sparc): atomically
! increment the 32-bit value at %o0; _nv variants return the new value.
! NOTE(review): ENTRY(atomic_inc_32) and the instruction body are
! missing from this extract -- confirm against the full atomic.s.
205 ALTENTRY(atomic_inc_32_nv)
206 ALTENTRY(atomic_inc_uint)
207 ALTENTRY(atomic_inc_uint_nv)
208 ALTENTRY(atomic_inc_ulong)
209 ALTENTRY(atomic_inc_ulong_nv)
212 SET_SIZE(atomic_inc_ulong_nv)
213 SET_SIZE(atomic_inc_ulong)
214 SET_SIZE(atomic_inc_uint_nv)
215 SET_SIZE(atomic_inc_uint)
216 SET_SIZE(atomic_inc_32_nv)
217 SET_SIZE(atomic_inc_32)
220 * NOTE: If atomic_dec_32 and atomic_dec_32_nv are ever
221 * separated, you need to also edit the libc sparc platform
222 * specific mapfile and remove the NODYNSORT attribute
223 * from atomic_dec_32_nv.
! atomic_dec_32 family (also uint/ulong on 32-bit sparc): atomically
! decrement the 32-bit value at %o0; _nv variants return the new value.
! NOTE(review): ENTRY(atomic_dec_32) and the instruction body are
! missing from this extract -- confirm against the full atomic.s.
226 ALTENTRY(atomic_dec_32_nv)
227 ALTENTRY(atomic_dec_uint)
228 ALTENTRY(atomic_dec_uint_nv)
229 ALTENTRY(atomic_dec_ulong)
230 ALTENTRY(atomic_dec_ulong_nv)
233 SET_SIZE(atomic_dec_ulong_nv)
234 SET_SIZE(atomic_dec_ulong)
235 SET_SIZE(atomic_dec_uint_nv)
236 SET_SIZE(atomic_dec_uint)
237 SET_SIZE(atomic_dec_32_nv)
238 SET_SIZE(atomic_dec_32)
241 * NOTE: If atomic_add_32 and atomic_add_32_nv are ever
242 * separated, you need to also edit the libc sparc platform
243 * specific mapfile and remove the NODYNSORT attribute
244 * from atomic_add_32_nv.
! atomic_add_32 family (also int/long/ptr on 32-bit sparc): atomically
! add %o1 to the 32-bit value at %o0; _nv variants return the new
! value (visible below as "add %o2, %o1, %o0" after the cas loop).
! NOTE(review): the ENTRY line, the ld, the cas/cmp/bne retry loop
! and the retl are missing from this extract -- confirm against the
! full atomic.s before modifying.
247 ALTENTRY(atomic_add_32_nv)
248 ALTENTRY(atomic_add_int)
249 ALTENTRY(atomic_add_int_nv)
250 ALTENTRY(atomic_add_ptr)
251 ALTENTRY(atomic_add_ptr_nv)
252 ALTENTRY(atomic_add_long)
253 ALTENTRY(atomic_add_long_nv)
263 add %o2, %o1, %o0 ! return new value
264 SET_SIZE(atomic_add_long_nv)
265 SET_SIZE(atomic_add_long)
266 SET_SIZE(atomic_add_ptr_nv)
267 SET_SIZE(atomic_add_ptr)
268 SET_SIZE(atomic_add_int_nv)
269 SET_SIZE(atomic_add_int)
270 SET_SIZE(atomic_add_32_nv)
271 SET_SIZE(atomic_add_32)
274 * NOTE: If atomic_inc_64 and atomic_inc_64_nv are ever
275 * separated, you need to also edit the libc sparc platform
276 * specific mapfile and remove the NODYNSORT attribute
277 * from atomic_inc_64_nv.
! atomic_inc_64: atomically increment the 64-bit value at %o0; _nv
! returns the new value.
! NOTE(review): ENTRY(atomic_inc_64) and the instruction body are
! missing from this extract -- confirm against the full atomic.s.
280 ALTENTRY(atomic_inc_64_nv)
283 SET_SIZE(atomic_inc_64_nv)
284 SET_SIZE(atomic_inc_64)
287 * NOTE: If atomic_dec_64 and atomic_dec_64_nv are ever
288 * separated, you need to also edit the libc sparc platform
289 * specific mapfile and remove the NODYNSORT attribute
290 * from atomic_dec_64_nv.
! atomic_dec_64: atomically decrement the 64-bit value at %o0; _nv
! returns the new value.
! NOTE(review): ENTRY(atomic_dec_64) and the instruction body are
! missing from this extract -- confirm against the full atomic.s.
293 ALTENTRY(atomic_dec_64_nv)
296 SET_SIZE(atomic_dec_64_nv)
297 SET_SIZE(atomic_dec_64)
300 * NOTE: If atomic_add_64 and atomic_add_64_nv are ever
301 * separated, you need to also edit the libc sparc platform
302 * specific mapfile and remove the NODYNSORT attribute
303 * from atomic_add_64_nv.
! atomic_add_64 (+_nv): 32-bit SPARC ABI passes the 64-bit delta as
! two 32-bit halves (%o1 = upper, %o2 = lower) and returns the 64-bit
! result split across %o0 (upper) / %o1 (lower); the sllx/add below
! reassemble the delta and the srlx at the end splits the result.
! NOTE(review): the ENTRY line, the ldx, the casx retry loop and the
! retl are missing from this extract -- confirm against the full
! atomic.s before modifying.
306 ALTENTRY(atomic_add_64_nv)
307 sllx %o1, 32, %o1 ! upper 32 in %o1, lower in %o2
309 add %o1, %o2, %o1 ! convert 2 32-bit args into 1 64-bit
318 add %o2, %o1, %o1 ! return lower 32-bits in %o1
320 srlx %o1, 32, %o0 ! return upper 32-bits in %o0
321 SET_SIZE(atomic_add_64_nv)
322 SET_SIZE(atomic_add_64)
325 * NOTE: If atomic_or_8 and atomic_or_8_nv are ever
326 * separated, you need to also edit the libc sparc platform
327 * specific mapfile and remove the NODYNSORT attribute
328 * from atomic_or_8_nv.
! atomic_or_8 / atomic_or_uchar (+_nv): atomically OR the 8-bit mask
! in %o1 into the byte at %o0, via a word-sized cas retry loop on the
! enclosing aligned 32-bit word.
! NOTE(review): the ENTRY line, the loop label, the cas/cmp/bne
! sequence and the retl are missing from this extract -- confirm
! against the full atomic.s before modifying.
331 ALTENTRY(atomic_or_8_nv)
332 ALTENTRY(atomic_or_uchar)
333 ALTENTRY(atomic_or_uchar_nv)
334 and %o0, 0x3, %o4 ! %o4 = byte offset, left-to-right
335 xor %o4, 0x3, %g1 ! %g1 = byte offset, right-to-left
336 sll %g1, 3, %g1 ! %g1 = bit offset, right-to-left
337 set 0xff, %o3 ! %o3 = mask
338 sll %o3, %g1, %o3 ! %o3 = shifted to bit offset
339 sll %o1, %g1, %o1 ! %o1 = shifted to bit offset
340 and %o1, %o3, %o1 ! %o1 = single byte value
341 andn %o0, 0x3, %o0 ! %o0 = word address
342 ld [%o0], %o2 ! read old value
344 or %o2, %o1, %o5 ! or in the new value
348 mov %o5, %o2 ! %o2 = old value
352 srl %o5, %g1, %o0 ! %o0 = new value
353 SET_SIZE(atomic_or_uchar_nv)
354 SET_SIZE(atomic_or_uchar)
355 SET_SIZE(atomic_or_8_nv)
356 SET_SIZE(atomic_or_8)
359 * NOTE: If atomic_or_16 and atomic_or_16_nv are ever
360 * separated, you need to also edit the libc sparc platform
361 * specific mapfile and remove the NODYNSORT attribute
362 * from atomic_or_16_nv.
! atomic_or_16 / atomic_or_ushort (+_nv): atomically OR the 16-bit
! mask in %o1 into the halfword at %o0, via a word-sized cas retry
! loop.  The second "or" after the loop recomputes the new value for
! the _nv return (cas success leaves the old value in %o5's source).
! NOTE(review): the ENTRY line, the loop label, the cas/cmp/bne
! sequence and the retl are missing from this extract -- confirm
! against the full atomic.s before modifying.
365 ALTENTRY(atomic_or_16_nv)
366 ALTENTRY(atomic_or_ushort)
367 ALTENTRY(atomic_or_ushort_nv)
368 and %o0, 0x2, %o4 ! %o4 = byte offset, left-to-right
369 xor %o4, 0x2, %g1 ! %g1 = byte offset, right-to-left
370 sll %o4, 3, %o4 ! %o4 = bit offset, left-to-right
371 sll %g1, 3, %g1 ! %g1 = bit offset, right-to-left
372 sethi %hi(0xffff0000), %o3 ! %o3 = mask
373 srl %o3, %o4, %o3 ! %o3 = shifted to bit offset
374 sll %o1, %g1, %o1 ! %o1 = shifted to bit offset
375 and %o1, %o3, %o1 ! %o1 = single short value
376 andn %o0, 0x2, %o0 ! %o0 = word address
377 ! if low-order bit is 1, we will properly get an alignment fault here
378 ld [%o0], %o2 ! read old value
380 or %o2, %o1, %o5 ! or in the new value
384 mov %o5, %o2 ! %o2 = old value
385 or %o2, %o1, %o5 ! or in the new value
388 srl %o5, %g1, %o0 ! %o0 = new value
389 SET_SIZE(atomic_or_ushort_nv)
390 SET_SIZE(atomic_or_ushort)
391 SET_SIZE(atomic_or_16_nv)
392 SET_SIZE(atomic_or_16)
395 * NOTE: If atomic_or_32 and atomic_or_32_nv are ever
396 * separated, you need to also edit the libc sparc platform
397 * specific mapfile and remove the NODYNSORT attribute
398 * from atomic_or_32_nv.
! atomic_or_32 family (also uint/ulong on 32-bit sparc): atomically
! OR %o1 into the 32-bit value at %o0; _nv variants return the new
! value (the trailing "or" below).
! NOTE(review): the ENTRY line, the ld, the cas/cmp/bne retry loop
! and the retl are missing from this extract -- confirm against the
! full atomic.s before modifying.
401 ALTENTRY(atomic_or_32_nv)
402 ALTENTRY(atomic_or_uint)
403 ALTENTRY(atomic_or_uint_nv)
404 ALTENTRY(atomic_or_ulong)
405 ALTENTRY(atomic_or_ulong_nv)
414 or %o2, %o1, %o0 ! return new value
415 SET_SIZE(atomic_or_ulong_nv)
416 SET_SIZE(atomic_or_ulong)
417 SET_SIZE(atomic_or_uint_nv)
418 SET_SIZE(atomic_or_uint)
419 SET_SIZE(atomic_or_32_nv)
420 SET_SIZE(atomic_or_32)
423 * NOTE: If atomic_or_64 and atomic_or_64_nv are ever
424 * separated, you need to also edit the libc sparc platform
425 * specific mapfile and remove the NODYNSORT attribute
426 * from atomic_or_64_nv.
! atomic_or_64 (+_nv): 64-bit mask passed as two 32-bit halves
! (%o1 upper, %o2 lower), reassembled with sllx/add; 64-bit result
! returned split across %o0 (upper) / %o1 (lower).
! NOTE(review): the ENTRY line, the ldx, the casx retry loop and the
! retl are missing from this extract -- confirm against the full
! atomic.s before modifying.
429 ALTENTRY(atomic_or_64_nv)
430 sllx %o1, 32, %o1 ! upper 32 in %o1, lower in %o2
432 add %o1, %o2, %o1 ! convert 2 32-bit args into 1 64-bit
440 or %o2, %o1, %o1 ! return lower 32-bits in %o1
442 srlx %o1, 32, %o0 ! return upper 32-bits in %o0
443 SET_SIZE(atomic_or_64_nv)
444 SET_SIZE(atomic_or_64)
447 * NOTE: If atomic_and_8 and atomic_and_8_nv are ever
448 * separated, you need to also edit the libc sparc platform
449 * specific mapfile and remove the NODYNSORT attribute
450 * from atomic_and_8_nv.
! atomic_and_8 / atomic_and_uchar (+_nv): atomically AND the 8-bit
! mask in %o1 into the byte at %o0.  Unlike the or/add variants, the
! "orn" below fills the other three bytes of the word with ones so a
! full-word AND leaves them unchanged.
! NOTE(review): the ENTRY line, the loop label, the cas/cmp/bne
! sequence and the retl are missing from this extract -- confirm
! against the full atomic.s before modifying.
453 ALTENTRY(atomic_and_8_nv)
454 ALTENTRY(atomic_and_uchar)
455 ALTENTRY(atomic_and_uchar_nv)
456 and %o0, 0x3, %o4 ! %o4 = byte offset, left-to-right
457 xor %o4, 0x3, %g1 ! %g1 = byte offset, right-to-left
458 sll %g1, 3, %g1 ! %g1 = bit offset, right-to-left
459 set 0xff, %o3 ! %o3 = mask
460 sll %o3, %g1, %o3 ! %o3 = shifted to bit offset
461 sll %o1, %g1, %o1 ! %o1 = shifted to bit offset
462 orn %o1, %o3, %o1 ! all ones in other bytes
463 andn %o0, 0x3, %o0 ! %o0 = word address
464 ld [%o0], %o2 ! read old value
466 and %o2, %o1, %o5 ! and in the new value
470 mov %o5, %o2 ! %o2 = old value
474 srl %o5, %g1, %o0 ! %o0 = new value
475 SET_SIZE(atomic_and_uchar_nv)
476 SET_SIZE(atomic_and_uchar)
477 SET_SIZE(atomic_and_8_nv)
478 SET_SIZE(atomic_and_8)
481 * NOTE: If atomic_and_16 and atomic_and_16_nv are ever
482 * separated, you need to also edit the libc sparc platform
483 * specific mapfile and remove the NODYNSORT attribute
484 * from atomic_and_16_nv.
! atomic_and_16 / atomic_and_ushort (+_nv): atomically AND the 16-bit
! mask in %o1 into the halfword at %o0; "orn" fills the other half of
! the word with ones so a full-word AND leaves it unchanged.
! NOTE(review): the ENTRY line, the loop label, the cas/cmp/bne
! sequence and the retl are missing from this extract -- confirm
! against the full atomic.s before modifying.
487 ALTENTRY(atomic_and_16_nv)
488 ALTENTRY(atomic_and_ushort)
489 ALTENTRY(atomic_and_ushort_nv)
490 and %o0, 0x2, %o4 ! %o4 = byte offset, left-to-right
491 xor %o4, 0x2, %g1 ! %g1 = byte offset, right-to-left
492 sll %o4, 3, %o4 ! %o4 = bit offset, left-to-right
493 sll %g1, 3, %g1 ! %g1 = bit offset, right-to-left
494 sethi %hi(0xffff0000), %o3 ! %o3 = mask
495 srl %o3, %o4, %o3 ! %o3 = shifted to bit offset
496 sll %o1, %g1, %o1 ! %o1 = shifted to bit offset
497 orn %o1, %o3, %o1 ! all ones in the other half
498 andn %o0, 0x2, %o0 ! %o0 = word address
499 ! if low-order bit is 1, we will properly get an alignment fault here
500 ld [%o0], %o2 ! read old value
502 and %o2, %o1, %o5 ! and in the new value
506 mov %o5, %o2 ! %o2 = old value
510 srl %o5, %g1, %o0 ! %o0 = new value
511 SET_SIZE(atomic_and_ushort_nv)
512 SET_SIZE(atomic_and_ushort)
513 SET_SIZE(atomic_and_16_nv)
514 SET_SIZE(atomic_and_16)
517 * NOTE: If atomic_and_32 and atomic_and_32_nv are ever
518 * separated, you need to also edit the libc sparc platform
519 * specific mapfile and remove the NODYNSORT attribute
520 * from atomic_and_32_nv.
! atomic_and_32 family (also uint/ulong on 32-bit sparc): atomically
! AND %o1 into the 32-bit value at %o0; _nv variants return the new
! value (the trailing "and" below).
! NOTE(review): the ENTRY line, the ld, the cas/cmp/bne retry loop
! and the retl are missing from this extract -- confirm against the
! full atomic.s before modifying.
523 ALTENTRY(atomic_and_32_nv)
524 ALTENTRY(atomic_and_uint)
525 ALTENTRY(atomic_and_uint_nv)
526 ALTENTRY(atomic_and_ulong)
527 ALTENTRY(atomic_and_ulong_nv)
536 and %o2, %o1, %o0 ! return new value
537 SET_SIZE(atomic_and_ulong_nv)
538 SET_SIZE(atomic_and_ulong)
539 SET_SIZE(atomic_and_uint_nv)
540 SET_SIZE(atomic_and_uint)
541 SET_SIZE(atomic_and_32_nv)
542 SET_SIZE(atomic_and_32)
545 * NOTE: If atomic_and_64 and atomic_and_64_nv are ever
546 * separated, you need to also edit the libc sparc platform
547 * specific mapfile and remove the NODYNSORT attribute
548 * from atomic_and_64_nv.
! atomic_and_64 (+_nv): 64-bit mask passed as two 32-bit halves
! (%o1 upper, %o2 lower), reassembled with sllx/add; 64-bit result
! returned split across %o0 (upper) / %o1 (lower).
! NOTE(review): the ENTRY line, the ldx, the casx retry loop and the
! retl are missing from this extract -- confirm against the full
! atomic.s before modifying.
551 ALTENTRY(atomic_and_64_nv)
552 sllx %o1, 32, %o1 ! upper 32 in %o1, lower in %o2
554 add %o1, %o2, %o1 ! convert 2 32-bit args into 1 64-bit
562 and %o2, %o1, %o1 ! return lower 32-bits in %o1
564 srlx %o1, 32, %o0 ! return upper 32-bits in %o0
565 SET_SIZE(atomic_and_64_nv)
566 SET_SIZE(atomic_and_64)
! atomic_cas_8 / atomic_cas_uchar: compare-and-swap one byte.
! %o0 = target address, %o1 = compare value, %o2 = new value;
! returns the old byte value in %o0.  The byte is positioned inside
! its aligned 32-bit word and a word-sized cas is attempted; if the
! word changed only outside the target byte, the loop retries.
! NOTE(review): the ENTRY(atomic_cas_8) line, the labels "1:"/"2:",
! the cas instruction, the success branch and the retl are missing
! from this extract -- confirm against the full atomic.s.
569 ALTENTRY(atomic_cas_uchar)
570 and %o0, 0x3, %o4 ! %o4 = byte offset, left-to-right
571 xor %o4, 0x3, %g1 ! %g1 = byte offset, right-to-left
572 sll %g1, 3, %g1 ! %g1 = bit offset, right-to-left
573 set 0xff, %o3 ! %o3 = mask
574 sll %o3, %g1, %o3 ! %o3 = shifted to bit offset
575 sll %o1, %g1, %o1 ! %o1 = shifted to bit offset
576 and %o1, %o3, %o1 ! %o1 = single byte value
577 sll %o2, %g1, %o2 ! %o2 = shifted to bit offset
578 and %o2, %o3, %o2 ! %o2 = single byte value
579 andn %o0, 0x3, %o0 ! %o0 = word address
580 ld [%o0], %o4 ! read old value
582 andn %o4, %o3, %o4 ! clear target bits
583 or %o4, %o2, %o5 ! insert the new value
584 or %o4, %o1, %o4 ! insert the comparison value
586 cmp %o4, %o5 ! did we succeed?
588 and %o5, %o3, %o4 ! isolate the old value
589 cmp %o1, %o4 ! should we have succeeded?
590 be,a,pt %icc, 1b ! yes, try again
591 mov %o5, %o4 ! %o4 = old value
594 srl %o4, %g1, %o0 ! %o0 = old value
595 SET_SIZE(atomic_cas_uchar)
596 SET_SIZE(atomic_cas_8)
! atomic_cas_16 / atomic_cas_ushort: compare-and-swap one halfword.
! %o0 = target address, %o1 = compare value, %o2 = new value;
! returns the old halfword value in %o0.  Same word-embedded cas
! retry technique as atomic_cas_8, with halfword masks.
! NOTE(review): the ENTRY(atomic_cas_16) line, the labels "1:"/"2:",
! the cas instruction, the success branch and the retl are missing
! from this extract -- confirm against the full atomic.s.
599 ALTENTRY(atomic_cas_ushort)
600 and %o0, 0x2, %o4 ! %o4 = byte offset, left-to-right
601 xor %o4, 0x2, %g1 ! %g1 = byte offset, right-to-left
602 sll %o4, 3, %o4 ! %o4 = bit offset, left-to-right
603 sll %g1, 3, %g1 ! %g1 = bit offset, right-to-left
604 sethi %hi(0xffff0000), %o3 ! %o3 = mask
605 srl %o3, %o4, %o3 ! %o3 = shifted to bit offset
606 sll %o1, %g1, %o1 ! %o1 = shifted to bit offset
607 and %o1, %o3, %o1 ! %o1 = single short value
608 sll %o2, %g1, %o2 ! %o2 = shifted to bit offset
609 and %o2, %o3, %o2 ! %o2 = single short value
610 andn %o0, 0x2, %o0 ! %o0 = word address
611 ! if low-order bit is 1, we will properly get an alignment fault here
612 ld [%o0], %o4 ! read old value
614 andn %o4, %o3, %o4 ! clear target bits
615 or %o4, %o2, %o5 ! insert the new value
616 or %o4, %o1, %o4 ! insert the comparison value
618 cmp %o4, %o5 ! did we succeed?
620 and %o5, %o3, %o4 ! isolate the old value
621 cmp %o1, %o4 ! should we have succeeded?
622 be,a,pt %icc, 1b ! yes, try again
623 mov %o5, %o4 ! %o4 = old value
626 srl %o4, %g1, %o0 ! %o0 = old value
627 SET_SIZE(atomic_cas_ushort)
628 SET_SIZE(atomic_cas_16)
! atomic_cas_32 family (also uint/ulong/ptr on 32-bit sparc):
! compare-and-swap the 32-bit value at %o0 (%o1 = compare value,
! %o2 = new value); returns the old value in %o0.
! NOTE(review): the ENTRY(atomic_cas_32) line and the instruction
! body (a single cas plus retl) are missing from this extract --
! confirm against the full atomic.s.
631 ALTENTRY(atomic_cas_uint)
632 ALTENTRY(atomic_cas_ptr)
633 ALTENTRY(atomic_cas_ulong)
637 SET_SIZE(atomic_cas_ulong)
638 SET_SIZE(atomic_cas_ptr)
639 SET_SIZE(atomic_cas_uint)
640 SET_SIZE(atomic_cas_32)
! atomic_cas_64: 64-bit compare-and-swap on a 32-bit SPARC ABI.
! The 64-bit compare value arrives in %o1/%o2 (upper/lower) and the
! new value in %o3/%o4; both are packed into single 64-bit registers
! ("srl reg, 0, reg" zero-extends the low halves), and the old value
! is returned split across %o0 (upper) / %o1 (lower).
! NOTE(review): the ENTRY(atomic_cas_64) line, an intervening "add"
! to merge %o4 into %o2, the casx instruction and the retl are
! missing from this extract -- confirm against the full atomic.s.
643 sllx %o1, 32, %o1 ! cmp's upper 32 in %o1, lower in %o2
644 srl %o2, 0, %o2 ! convert 2 32-bit args into 1 64-bit
646 sllx %o3, 32, %o2 ! newval upper 32 in %o3, lower in %o4
647 srl %o4, 0, %o4 ! setup %o2 to have newval
650 srl %o2, 0, %o1 ! return lower 32-bits in %o1
652 srlx %o2, 32, %o0 ! return upper 32-bits in %o0
653 SET_SIZE(atomic_cas_64)
! atomic_swap_8 / atomic_swap_uchar: atomically store the byte in
! %o1 at address %o0 and return the previous byte value in %o0,
! via a word-sized cas retry loop on the enclosing word.
! NOTE(review): the ENTRY(atomic_swap_8) line, the loop label, the
! cas/cmp/bne sequence and the retl are missing from this extract --
! confirm against the full atomic.s.
656 ALTENTRY(atomic_swap_uchar)
657 and %o0, 0x3, %o4 ! %o4 = byte offset, left-to-right
658 xor %o4, 0x3, %g1 ! %g1 = byte offset, right-to-left
659 sll %g1, 3, %g1 ! %g1 = bit offset, right-to-left
660 set 0xff, %o3 ! %o3 = mask
661 sll %o3, %g1, %o3 ! %o3 = shifted to bit offset
662 sll %o1, %g1, %o1 ! %o1 = shifted to bit offset
663 and %o1, %o3, %o1 ! %o1 = single byte value
664 andn %o0, 0x3, %o0 ! %o0 = word address
665 ld [%o0], %o2 ! read old value
667 andn %o2, %o3, %o5 ! clear target bits
668 or %o5, %o1, %o5 ! insert the new value
672 mov %o5, %o2 ! %o2 = old value
675 srl %o5, %g1, %o0 ! %o0 = old value
676 SET_SIZE(atomic_swap_uchar)
677 SET_SIZE(atomic_swap_8)
! atomic_swap_16 / atomic_swap_ushort: atomically store the halfword
! in %o1 at address %o0 and return the previous halfword in %o0,
! via a word-sized cas retry loop.
! NOTE(review): the loop label, the cas/cmp/bne sequence and the
! retl are missing from this extract -- confirm against the full
! atomic.s.
679 ENTRY(atomic_swap_16)
680 ALTENTRY(atomic_swap_ushort)
681 and %o0, 0x2, %o4 ! %o4 = byte offset, left-to-right
682 xor %o4, 0x2, %g1 ! %g1 = byte offset, right-to-left
683 sll %o4, 3, %o4 ! %o4 = bit offset, left-to-right
684 sll %g1, 3, %g1 ! %g1 = bit offset, right-to-left
685 sethi %hi(0xffff0000), %o3 ! %o3 = mask
686 srl %o3, %o4, %o3 ! %o3 = shifted to bit offset
687 sll %o1, %g1, %o1 ! %o1 = shifted to bit offset
688 and %o1, %o3, %o1 ! %o1 = single short value
689 andn %o0, 0x2, %o0 ! %o0 = word address
690 ! if low-order bit is 1, we will properly get an alignment fault here
691 ld [%o0], %o2 ! read old value
693 andn %o2, %o3, %o5 ! clear target bits
694 or %o5, %o1, %o5 ! insert the new value
698 mov %o5, %o2 ! %o2 = old value
701 srl %o5, %g1, %o0 ! %o0 = old value
702 SET_SIZE(atomic_swap_ushort)
703 SET_SIZE(atomic_swap_16)
! atomic_swap_32 family (also uint/ulong/ptr on 32-bit sparc):
! atomically store %o1 at the 32-bit location %o0 and return the
! previous value in %o0.
! NOTE(review): the instruction body (ld + cas retry loop + retl)
! is missing from this extract -- confirm against the full atomic.s.
705 ENTRY(atomic_swap_32)
706 ALTENTRY(atomic_swap_uint)
707 ALTENTRY(atomic_swap_ptr)
708 ALTENTRY(atomic_swap_ulong)
718 SET_SIZE(atomic_swap_ulong)
719 SET_SIZE(atomic_swap_ptr)
720 SET_SIZE(atomic_swap_uint)
721 SET_SIZE(atomic_swap_32)
! atomic_swap_64: atomically store a 64-bit value (passed as two
! 32-bit halves, %o1 upper / %o2 lower, packed with sllx/add) at %o0
! and return the previous value split across %o0 (upper) / %o1
! (lower).
! NOTE(review): the ldx, the casx retry loop and the retl are
! missing from this extract -- confirm against the full atomic.s.
723 ENTRY(atomic_swap_64)
724 sllx %o1, 32, %o1 ! upper 32 in %o1, lower in %o2
726 add %o1, %o2, %o1 ! convert 2 32-bit args into 1 64-bit
734 srl %o3, 0, %o1 ! return lower 32-bits in %o1
736 srlx %o3, 32, %o0 ! return upper 32-bits in %o0
737 SET_SIZE(atomic_swap_64)
739 /* these are not used by ZFS
  NOTE(review): everything from here to the closing of this block
  comment is deliberately compiled out ("not used by ZFS").  These
  are the exclusive bit set/clear primitives: they test a bit in the
  long at %o0 (bit index in %o1), fail if it is already in the
  requested state, otherwise commit the change with a cas retry loop.
  The ENTRY-adjacent setup lines, cas instructions, return-value
  lines and the closing comment delimiter are not visible in this
  extract -- confirm against the full atomic.s.
740 ENTRY(atomic_set_long_excl)
745 andcc %o2, %o3, %g0 ! test if the bit is set
746 bnz,a,pn %ncc, 2f ! if so, then fail out
748 or %o2, %o3, %o4 ! set the bit, and try to commit it
751 bne,a,pn %ncc, 1b ! failed to commit, try again
757 SET_SIZE(atomic_set_long_excl)
759 ENTRY(atomic_clear_long_excl)
764 andncc %o3, %o2, %g0 ! test if the bit is clear
765 bnz,a,pn %ncc, 2f ! if so, then fail out
767 andn %o2, %o3, %o4 ! clear the bit, and try to commit it
770 bne,a,pn %ncc, 1b ! failed to commit, try again
776 SET_SIZE(atomic_clear_long_excl)
778 #if !defined(_KERNEL)
! Userland-only memory barrier functions (the kernel provides its
! own).  membar_enter orders prior stores/loads against the critical
! section; membar_exit orders critical-section accesses before the
! releasing store; producer/consumer order stores resp. loads.
! NOTE(review): the ENTRY(membar_enter)/ENTRY(membar_exit) lines,
! the retl instructions and the producer/consumer membar bodies are
! missing from this extract -- confirm against the full atomic.s.
781 * Spitfires and Blackbirds have a problem with membars in the
782 * delay slot (SF_ERRATA_51). For safety's sake, we assume
783 * that the whole world needs the workaround.
786 membar #StoreLoad|#StoreStore
789 SET_SIZE(membar_enter)
792 membar #LoadStore|#StoreStore
795 SET_SIZE(membar_exit)
797 ENTRY(membar_producer)
801 SET_SIZE(membar_producer)
803 ENTRY(membar_consumer)
807 SET_SIZE(membar_consumer)
809 #endif /* !_KERNEL */
! Mark the stack non-executable for GNU toolchains.
812 .section .note.GNU-stack,"",%progbits