/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2007 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

	.ident	"%Z%%M% %I% %E% SMI"

	.file	"%M%"

#define	_ASM
#include <sys/asm_linkage.h>

#if defined(_KERNEL)
	/*
	 * Legacy kernel interfaces; they will go away (eventually).
	 */
	ANSI_PRAGMA_WEAK2(cas8,atomic_cas_8,function)
	ANSI_PRAGMA_WEAK2(cas32,atomic_cas_32,function)
	ANSI_PRAGMA_WEAK2(cas64,atomic_cas_64,function)
	ANSI_PRAGMA_WEAK2(caslong,atomic_cas_ulong,function)
	ANSI_PRAGMA_WEAK2(casptr,atomic_cas_ptr,function)
	ANSI_PRAGMA_WEAK2(atomic_and_long,atomic_and_ulong,function)
	ANSI_PRAGMA_WEAK2(atomic_or_long,atomic_or_ulong,function)
	ANSI_PRAGMA_WEAK2(swapl,atomic_swap_32,function)
#else
	/*
	 * Include the definitions for the libc weak aliases.
	 */
#include "../atomic_asm_weak.h"
#endif
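
/*
 * As used here, ANSI_PRAGMA_WEAK2(old, new, function) is expected to
 * emit `old' as a weak alias of the strong symbol `new' (the asm
 * equivalent of #pragma weak old = new), so legacy kernel callers of
 * cas8(), cas32(), swapl(), etc. bind directly to the atomic_*
 * implementations below without separate stubs.
 */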

/*
 * NOTE: If atomic_inc_8 and atomic_inc_8_nv are ever
 * separated, you need to also edit the libc sparc platform
 * specific mapfile and remove the NODYNSORT attribute
 * from atomic_inc_8_nv.
 */
	ENTRY(atomic_inc_8)
	ALTENTRY(atomic_inc_8_nv)
	ALTENTRY(atomic_inc_uchar)
	ALTENTRY(atomic_inc_uchar_nv)
	ba	add_8
	add	%g0, 1, %o1
	SET_SIZE(atomic_inc_uchar_nv)
	SET_SIZE(atomic_inc_uchar)
	SET_SIZE(atomic_inc_8_nv)
	SET_SIZE(atomic_inc_8)

/*
 * NOTE: If atomic_dec_8 and atomic_dec_8_nv are ever
 * separated, you need to also edit the libc sparc platform
 * specific mapfile and remove the NODYNSORT attribute
 * from atomic_dec_8_nv.
 */
	ENTRY(atomic_dec_8)
	ALTENTRY(atomic_dec_8_nv)
	ALTENTRY(atomic_dec_uchar)
	ALTENTRY(atomic_dec_uchar_nv)
	ba	add_8
	sub	%g0, 1, %o1
	SET_SIZE(atomic_dec_uchar_nv)
	SET_SIZE(atomic_dec_uchar)
	SET_SIZE(atomic_dec_8_nv)
	SET_SIZE(atomic_dec_8)

/*
 * NOTE: If atomic_add_8 and atomic_add_8_nv are ever
 * separated, you need to also edit the libc sparc platform
 * specific mapfile and remove the NODYNSORT attribute
 * from atomic_add_8_nv.
 */
	ENTRY(atomic_add_8)
	ALTENTRY(atomic_add_8_nv)
	ALTENTRY(atomic_add_char)
	ALTENTRY(atomic_add_char_nv)
add_8:
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single byte value
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o2		! read old value
1:
	add	%o2, %o1, %o5		! add value to the old value
	and	%o5, %o3, %o5		! clear other bits
	andn	%o2, %o3, %o4		! clear target bits
	or	%o4, %o5, %o5		! insert the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	mov	%o5, %o2		! %o2 = old value
	add	%o2, %o1, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_add_char_nv)
	SET_SIZE(atomic_add_char)
	SET_SIZE(atomic_add_8_nv)
	SET_SIZE(atomic_add_8)
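
/*
 * A rough C equivalent of add_8 above -- an illustrative sketch only,
 * assuming <stdint.h> and GCC's __sync_val_compare_and_swap builtin,
 * with SPARC's big-endian byte numbering. The byte is updated by
 * rewriting its containing aligned 32-bit word with cas, retrying until
 * no other CPU has modified the word in the meantime:
 *
 *	uint8_t atomic_add_8_nv(volatile uint8_t *p, int8_t delta)
 *	{
 *		volatile uint32_t *wp =
 *		    (volatile uint32_t *)((uintptr_t)p & ~(uintptr_t)3);
 *		int shift = (3 - ((uintptr_t)p & 3)) << 3;  // big-endian lane
 *		uint32_t mask = (uint32_t)0xff << shift;
 *		uint32_t arg = ((uint32_t)(uint8_t)delta << shift) & mask;
 *		uint32_t old = *wp, seen, new;
 *		for (;;) {
 *			new = (old & ~mask) | ((old + arg) & mask);
 *			seen = __sync_val_compare_and_swap(wp, old, new);
 *			if (seen == old)
 *				return ((new & mask) >> shift);
 *			old = seen;  // word changed under us; retry
 *		}
 *	}
 */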

/*
 * NOTE: If atomic_inc_16 and atomic_inc_16_nv are ever
 * separated, you need to also edit the libc sparc platform
 * specific mapfile and remove the NODYNSORT attribute
 * from atomic_inc_16_nv.
 */
	ENTRY(atomic_inc_16)
	ALTENTRY(atomic_inc_16_nv)
	ALTENTRY(atomic_inc_ushort)
	ALTENTRY(atomic_inc_ushort_nv)
	ba	add_16
	add	%g0, 1, %o1
	SET_SIZE(atomic_inc_ushort_nv)
	SET_SIZE(atomic_inc_ushort)
	SET_SIZE(atomic_inc_16_nv)
	SET_SIZE(atomic_inc_16)

/*
 * NOTE: If atomic_dec_16 and atomic_dec_16_nv are ever
 * separated, you need to also edit the libc sparc platform
 * specific mapfile and remove the NODYNSORT attribute
 * from atomic_dec_16_nv.
 */
	ENTRY(atomic_dec_16)
	ALTENTRY(atomic_dec_16_nv)
	ALTENTRY(atomic_dec_ushort)
	ALTENTRY(atomic_dec_ushort_nv)
	ba	add_16
	sub	%g0, 1, %o1
	SET_SIZE(atomic_dec_ushort_nv)
	SET_SIZE(atomic_dec_ushort)
	SET_SIZE(atomic_dec_16_nv)
	SET_SIZE(atomic_dec_16)

/*
 * NOTE: If atomic_add_16 and atomic_add_16_nv are ever
 * separated, you need to also edit the libc sparc platform
 * specific mapfile and remove the NODYNSORT attribute
 * from atomic_add_16_nv.
 */
	ENTRY(atomic_add_16)
	ALTENTRY(atomic_add_16_nv)
	ALTENTRY(atomic_add_short)
	ALTENTRY(atomic_add_short_nv)
add_16:
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single short value
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o2		! read old value
1:
	add	%o1, %o2, %o5		! add value to the old value
	and	%o5, %o3, %o5		! clear other bits
	andn	%o2, %o3, %o4		! clear target bits
	or	%o4, %o5, %o5		! insert the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	mov	%o5, %o2		! %o2 = old value
	add	%o1, %o2, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_add_short_nv)
	SET_SIZE(atomic_add_short)
	SET_SIZE(atomic_add_16_nv)
	SET_SIZE(atomic_add_16)
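
/*
 * A plausible reason the halfword mask is derived from 0xffff0000
 * rather than from 0xffff: sethi loads the upper 22 bits in a single
 * instruction, so sethi+srl builds the mask for either halfword in two
 * instructions, whereas `set 0xffff' (too large for a 13-bit immediate)
 * would itself expand to two instructions before any shift.
 */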

/*
 * NOTE: If atomic_inc_32 and atomic_inc_32_nv are ever
 * separated, you need to also edit the libc sparc platform
 * specific mapfile and remove the NODYNSORT attribute
 * from atomic_inc_32_nv.
 */
	ENTRY(atomic_inc_32)
	ALTENTRY(atomic_inc_32_nv)
	ALTENTRY(atomic_inc_uint)
	ALTENTRY(atomic_inc_uint_nv)
	ALTENTRY(atomic_inc_ulong)
	ALTENTRY(atomic_inc_ulong_nv)
	ba	add_32
	add	%g0, 1, %o1
	SET_SIZE(atomic_inc_ulong_nv)
	SET_SIZE(atomic_inc_ulong)
	SET_SIZE(atomic_inc_uint_nv)
	SET_SIZE(atomic_inc_uint)
	SET_SIZE(atomic_inc_32_nv)
	SET_SIZE(atomic_inc_32)

/*
 * NOTE: If atomic_dec_32 and atomic_dec_32_nv are ever
 * separated, you need to also edit the libc sparc platform
 * specific mapfile and remove the NODYNSORT attribute
 * from atomic_dec_32_nv.
 */
	ENTRY(atomic_dec_32)
	ALTENTRY(atomic_dec_32_nv)
	ALTENTRY(atomic_dec_uint)
	ALTENTRY(atomic_dec_uint_nv)
	ALTENTRY(atomic_dec_ulong)
	ALTENTRY(atomic_dec_ulong_nv)
	ba	add_32
	sub	%g0, 1, %o1
	SET_SIZE(atomic_dec_ulong_nv)
	SET_SIZE(atomic_dec_ulong)
	SET_SIZE(atomic_dec_uint_nv)
	SET_SIZE(atomic_dec_uint)
	SET_SIZE(atomic_dec_32_nv)
	SET_SIZE(atomic_dec_32)

/*
 * NOTE: If atomic_add_32 and atomic_add_32_nv are ever
 * separated, you need to also edit the libc sparc platform
 * specific mapfile and remove the NODYNSORT attribute
 * from atomic_add_32_nv.
 */
	ENTRY(atomic_add_32)
	ALTENTRY(atomic_add_32_nv)
	ALTENTRY(atomic_add_int)
	ALTENTRY(atomic_add_int_nv)
	ALTENTRY(atomic_add_ptr)
	ALTENTRY(atomic_add_ptr_nv)
	ALTENTRY(atomic_add_long)
	ALTENTRY(atomic_add_long_nv)
add_32:
	ld	[%o0], %o2
1:
	add	%o2, %o1, %o3
	cas	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %icc, 1b
	mov	%o3, %o2
	retl
	add	%o2, %o1, %o0		! return new value
	SET_SIZE(atomic_add_long_nv)
	SET_SIZE(atomic_add_long)
	SET_SIZE(atomic_add_ptr_nv)
	SET_SIZE(atomic_add_ptr)
	SET_SIZE(atomic_add_int_nv)
	SET_SIZE(atomic_add_int)
	SET_SIZE(atomic_add_32_nv)
	SET_SIZE(atomic_add_32)
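
/*
 * add_32 is the plain cas retry loop with no sub-word masking. The
 * `bne,a,pn' branches throughout this file use the annul bit: the `mov'
 * in the delay slot executes only when the branch is taken, so a
 * successful cas falls through without clobbering %o2, and `,pn' hints
 * the retry as the unlikely path. A sketch of the loop in C (same
 * assumptions as the earlier sketch):
 *
 *	uint32_t atomic_add_32_nv(volatile uint32_t *p, int32_t delta)
 *	{
 *		uint32_t old = *p, seen;
 *		for (;;) {
 *			seen = __sync_val_compare_and_swap(p, old,
 *			    old + delta);
 *			if (seen == old)
 *				return (old + delta);
 *			old = seen;  // lost the race; retry with fresh value
 *		}
 *	}
 */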

/*
 * NOTE: If atomic_inc_64 and atomic_inc_64_nv are ever
 * separated, you need to also edit the libc sparc platform
 * specific mapfile and remove the NODYNSORT attribute
 * from atomic_inc_64_nv.
 */
	ENTRY(atomic_inc_64)
	ALTENTRY(atomic_inc_64_nv)
	ba	add_64
	add	%g0, 1, %o1
	SET_SIZE(atomic_inc_64_nv)
	SET_SIZE(atomic_inc_64)

/*
 * NOTE: If atomic_dec_64 and atomic_dec_64_nv are ever
 * separated, you need to also edit the libc sparc platform
 * specific mapfile and remove the NODYNSORT attribute
 * from atomic_dec_64_nv.
 */
	ENTRY(atomic_dec_64)
	ALTENTRY(atomic_dec_64_nv)
	ba	add_64
	sub	%g0, 1, %o1
	SET_SIZE(atomic_dec_64_nv)
	SET_SIZE(atomic_dec_64)

/*
 * NOTE: If atomic_add_64 and atomic_add_64_nv are ever
 * separated, you need to also edit the libc sparc platform
 * specific mapfile and remove the NODYNSORT attribute
 * from atomic_add_64_nv.
 */
	ENTRY(atomic_add_64)
	ALTENTRY(atomic_add_64_nv)
	sllx	%o1, 32, %o1		! upper 32 in %o1, lower in %o2
	srl	%o2, 0, %o2
	add	%o1, %o2, %o1		! convert 2 32-bit args into 1 64-bit
add_64:
	ldx	[%o0], %o2
1:
	add	%o2, %o1, %o3
	casx	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %xcc, 1b
	mov	%o3, %o2
	add	%o2, %o1, %o1		! return lower 32-bits in %o1
	retl
	srlx	%o1, 32, %o0		! return upper 32-bits in %o0
	SET_SIZE(atomic_add_64_nv)
	SET_SIZE(atomic_add_64)
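
/*
 * The sllx/srl/add preamble above (repeated in the other 64-bit
 * routines) is consistent with the 32-bit (v8plus) ABI: a 64-bit
 * argument arrives split across a register pair (%o1 = upper 32 bits,
 * %o2 = lower 32 bits, each significant only in its low half) and is
 * reassembled into one 64-bit register before the casx. Note that
 * `srl %o2, 0, %o2' is not a no-op; it clears the upper half of %o2.
 * The 64-bit result is likewise returned split across %o0 (upper) and
 * %o1 (lower).
 */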

/*
 * NOTE: If atomic_or_8 and atomic_or_8_nv are ever
 * separated, you need to also edit the libc sparc platform
 * specific mapfile and remove the NODYNSORT attribute
 * from atomic_or_8_nv.
 */
	ENTRY(atomic_or_8)
	ALTENTRY(atomic_or_8_nv)
	ALTENTRY(atomic_or_uchar)
	ALTENTRY(atomic_or_uchar_nv)
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single byte value
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o2		! read old value
1:
	or	%o2, %o1, %o5		! or in the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	mov	%o5, %o2		! %o2 = old value
	or	%o2, %o1, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_or_uchar_nv)
	SET_SIZE(atomic_or_uchar)
	SET_SIZE(atomic_or_8_nv)
	SET_SIZE(atomic_or_8)

/*
 * NOTE: If atomic_or_16 and atomic_or_16_nv are ever
 * separated, you need to also edit the libc sparc platform
 * specific mapfile and remove the NODYNSORT attribute
 * from atomic_or_16_nv.
 */
	ENTRY(atomic_or_16)
	ALTENTRY(atomic_or_16_nv)
	ALTENTRY(atomic_or_ushort)
	ALTENTRY(atomic_or_ushort_nv)
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single short value
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o2		! read old value
1:
	or	%o2, %o1, %o5		! or in the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	mov	%o5, %o2		! %o2 = old value
	or	%o2, %o1, %o5		! or in the new value
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_or_ushort_nv)
	SET_SIZE(atomic_or_ushort)
	SET_SIZE(atomic_or_16_nv)
	SET_SIZE(atomic_or_16)

/*
 * NOTE: If atomic_or_32 and atomic_or_32_nv are ever
 * separated, you need to also edit the libc sparc platform
 * specific mapfile and remove the NODYNSORT attribute
 * from atomic_or_32_nv.
 */
	ENTRY(atomic_or_32)
	ALTENTRY(atomic_or_32_nv)
	ALTENTRY(atomic_or_uint)
	ALTENTRY(atomic_or_uint_nv)
	ALTENTRY(atomic_or_ulong)
	ALTENTRY(atomic_or_ulong_nv)
	ld	[%o0], %o2
1:
	or	%o2, %o1, %o3
	cas	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %icc, 1b
	mov	%o3, %o2
	retl
	or	%o2, %o1, %o0		! return new value
	SET_SIZE(atomic_or_ulong_nv)
	SET_SIZE(atomic_or_ulong)
	SET_SIZE(atomic_or_uint_nv)
	SET_SIZE(atomic_or_uint)
	SET_SIZE(atomic_or_32_nv)
	SET_SIZE(atomic_or_32)

/*
 * NOTE: If atomic_or_64 and atomic_or_64_nv are ever
 * separated, you need to also edit the libc sparc platform
 * specific mapfile and remove the NODYNSORT attribute
 * from atomic_or_64_nv.
 */
	ENTRY(atomic_or_64)
	ALTENTRY(atomic_or_64_nv)
	sllx	%o1, 32, %o1		! upper 32 in %o1, lower in %o2
	srl	%o2, 0, %o2
	add	%o1, %o2, %o1		! convert 2 32-bit args into 1 64-bit
	ldx	[%o0], %o2
1:
	or	%o2, %o1, %o3
	casx	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %xcc, 1b
	mov	%o3, %o2
	or	%o2, %o1, %o1		! return lower 32-bits in %o1
	retl
	srlx	%o1, 32, %o0		! return upper 32-bits in %o0
	SET_SIZE(atomic_or_64_nv)
	SET_SIZE(atomic_or_64)

/*
 * NOTE: If atomic_and_8 and atomic_and_8_nv are ever
 * separated, you need to also edit the libc sparc platform
 * specific mapfile and remove the NODYNSORT attribute
 * from atomic_and_8_nv.
 */
	ENTRY(atomic_and_8)
	ALTENTRY(atomic_and_8_nv)
	ALTENTRY(atomic_and_uchar)
	ALTENTRY(atomic_and_uchar_nv)
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	orn	%o1, %o3, %o1		! all ones in other bytes
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o2		! read old value
1:
	and	%o2, %o1, %o5		! and in the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	mov	%o5, %o2		! %o2 = old value
	and	%o2, %o1, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_and_uchar_nv)
	SET_SIZE(atomic_and_uchar)
	SET_SIZE(atomic_and_8_nv)
	SET_SIZE(atomic_and_8)
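
/*
 * Note the asymmetry with the or_8/add_8 loops: for AND the operand is
 * padded with ones in the untouched bytes (`orn %o1, %o3, %o1' computes
 * %o1 | ~mask), so the full-word AND leaves the neighboring bytes
 * unchanged and the loop needs no separate clear-and-insert step.
 */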

/*
 * NOTE: If atomic_and_16 and atomic_and_16_nv are ever
 * separated, you need to also edit the libc sparc platform
 * specific mapfile and remove the NODYNSORT attribute
 * from atomic_and_16_nv.
 */
	ENTRY(atomic_and_16)
	ALTENTRY(atomic_and_16_nv)
	ALTENTRY(atomic_and_ushort)
	ALTENTRY(atomic_and_ushort_nv)
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	orn	%o1, %o3, %o1		! all ones in the other half
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o2		! read old value
1:
	and	%o2, %o1, %o5		! and in the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	mov	%o5, %o2		! %o2 = old value
	and	%o2, %o1, %o5
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = new value
	SET_SIZE(atomic_and_ushort_nv)
	SET_SIZE(atomic_and_ushort)
	SET_SIZE(atomic_and_16_nv)
	SET_SIZE(atomic_and_16)

/*
 * NOTE: If atomic_and_32 and atomic_and_32_nv are ever
 * separated, you need to also edit the libc sparc platform
 * specific mapfile and remove the NODYNSORT attribute
 * from atomic_and_32_nv.
 */
	ENTRY(atomic_and_32)
	ALTENTRY(atomic_and_32_nv)
	ALTENTRY(atomic_and_uint)
	ALTENTRY(atomic_and_uint_nv)
	ALTENTRY(atomic_and_ulong)
	ALTENTRY(atomic_and_ulong_nv)
	ld	[%o0], %o2
1:
	and	%o2, %o1, %o3
	cas	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %icc, 1b
	mov	%o3, %o2
	retl
	and	%o2, %o1, %o0		! return new value
	SET_SIZE(atomic_and_ulong_nv)
	SET_SIZE(atomic_and_ulong)
	SET_SIZE(atomic_and_uint_nv)
	SET_SIZE(atomic_and_uint)
	SET_SIZE(atomic_and_32_nv)
	SET_SIZE(atomic_and_32)

/*
 * NOTE: If atomic_and_64 and atomic_and_64_nv are ever
 * separated, you need to also edit the libc sparc platform
 * specific mapfile and remove the NODYNSORT attribute
 * from atomic_and_64_nv.
 */
	ENTRY(atomic_and_64)
	ALTENTRY(atomic_and_64_nv)
	sllx	%o1, 32, %o1		! upper 32 in %o1, lower in %o2
	srl	%o2, 0, %o2
	add	%o1, %o2, %o1		! convert 2 32-bit args into 1 64-bit
	ldx	[%o0], %o2
1:
	and	%o2, %o1, %o3
	casx	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %xcc, 1b
	mov	%o3, %o2
	and	%o2, %o1, %o1		! return lower 32-bits in %o1
	retl
	srlx	%o1, 32, %o0		! return upper 32-bits in %o0
	SET_SIZE(atomic_and_64_nv)
	SET_SIZE(atomic_and_64)

	ENTRY(atomic_cas_8)
	ALTENTRY(atomic_cas_uchar)
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single byte value
	sll	%o2, %g1, %o2		! %o2 = shifted to bit offset
	and	%o2, %o3, %o2		! %o2 = single byte value
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o4		! read old value
1:
	andn	%o4, %o3, %o4		! clear target bits
	or	%o4, %o2, %o5		! insert the new value
	or	%o4, %o1, %o4		! insert the comparison value
	cas	[%o0], %o4, %o5
	cmp	%o4, %o5		! did we succeed?
	be,pt	%icc, 2f
	and	%o5, %o3, %o4		! isolate the old value
	cmp	%o1, %o4		! should we have succeeded?
	be,a,pt	%icc, 1b		! yes, try again
	mov	%o5, %o4		! %o4 = old value
2:
	retl
	srl	%o4, %g1, %o0		! %o0 = old value
	SET_SIZE(atomic_cas_uchar)
	SET_SIZE(atomic_cas_8)
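
/*
 * The word-wide cas in atomic_cas_8 can fail for two distinct reasons,
 * which the code separates: if the target byte no longer matches the
 * caller's comparison value, the operation legitimately fails and the
 * observed byte is returned; if only a neighboring byte in the same
 * word changed, the cas is retried. A sketch in C (same assumptions as
 * the earlier sketches, big-endian lane numbering):
 *
 *	uint8_t atomic_cas_8(volatile uint8_t *p, uint8_t cmp, uint8_t new)
 *	{
 *		volatile uint32_t *wp =
 *		    (volatile uint32_t *)((uintptr_t)p & ~(uintptr_t)3);
 *		int shift = (3 - ((uintptr_t)p & 3)) << 3;
 *		uint32_t mask = (uint32_t)0xff << shift;
 *		uint32_t word = *wp;
 *		for (;;) {
 *			uint32_t expect = (word & ~mask) |
 *			    ((uint32_t)cmp << shift);
 *			uint32_t desired = (word & ~mask) |
 *			    ((uint32_t)new << shift);
 *			uint32_t seen = __sync_val_compare_and_swap(wp,
 *			    expect, desired);
 *			if (seen == expect)
 *				return (cmp);		// succeeded
 *			if (((seen & mask) >> shift) != cmp)
 *				return ((seen & mask) >> shift);  // failed
 *			word = seen;	// only a neighbor changed; retry
 *		}
 *	}
 */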

	ENTRY(atomic_cas_16)
	ALTENTRY(atomic_cas_ushort)
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single short value
	sll	%o2, %g1, %o2		! %o2 = shifted to bit offset
	and	%o2, %o3, %o2		! %o2 = single short value
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o4		! read old value
1:
	andn	%o4, %o3, %o4		! clear target bits
	or	%o4, %o2, %o5		! insert the new value
	or	%o4, %o1, %o4		! insert the comparison value
	cas	[%o0], %o4, %o5
	cmp	%o4, %o5		! did we succeed?
	be,pt	%icc, 2f
	and	%o5, %o3, %o4		! isolate the old value
	cmp	%o1, %o4		! should we have succeeded?
	be,a,pt	%icc, 1b		! yes, try again
	mov	%o5, %o4		! %o4 = old value
2:
	retl
	srl	%o4, %g1, %o0		! %o0 = old value
	SET_SIZE(atomic_cas_ushort)
	SET_SIZE(atomic_cas_16)

	ENTRY(atomic_cas_32)
	ALTENTRY(atomic_cas_uint)
	ALTENTRY(atomic_cas_ptr)
	ALTENTRY(atomic_cas_ulong)
	cas	[%o0], %o1, %o2
	retl
	mov	%o2, %o0
	SET_SIZE(atomic_cas_ulong)
	SET_SIZE(atomic_cas_ptr)
	SET_SIZE(atomic_cas_uint)
	SET_SIZE(atomic_cas_32)

	ENTRY(atomic_cas_64)
	sllx	%o1, 32, %o1		! cmp's upper 32 in %o1, lower in %o2
	srl	%o2, 0, %o2		! convert 2 32-bit args into 1 64-bit
	add	%o1, %o2, %o1
	sllx	%o3, 32, %o2		! newval upper 32 in %o3, lower in %o4
	srl	%o4, 0, %o4		! setup %o2 to have newval
	add	%o2, %o4, %o2
	casx	[%o0], %o1, %o2
	srl	%o2, 0, %o1		! return lower 32-bits in %o1
	retl
	srlx	%o2, 32, %o0		! return upper 32-bits in %o0
	SET_SIZE(atomic_cas_64)

	ENTRY(atomic_swap_8)
	ALTENTRY(atomic_swap_uchar)
	and	%o0, 0x3, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x3, %g1		! %g1 = byte offset, right-to-left
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	set	0xff, %o3		! %o3 = mask
	sll	%o3, %g1, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single byte value
	andn	%o0, 0x3, %o0		! %o0 = word address
	ld	[%o0], %o2		! read old value
1:
	andn	%o2, %o3, %o5		! clear target bits
	or	%o5, %o1, %o5		! insert the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	mov	%o5, %o2		! %o2 = old value
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = old value
	SET_SIZE(atomic_swap_uchar)
	SET_SIZE(atomic_swap_8)

	ENTRY(atomic_swap_16)
	ALTENTRY(atomic_swap_ushort)
	and	%o0, 0x2, %o4		! %o4 = byte offset, left-to-right
	xor	%o4, 0x2, %g1		! %g1 = byte offset, right-to-left
	sll	%o4, 3, %o4		! %o4 = bit offset, left-to-right
	sll	%g1, 3, %g1		! %g1 = bit offset, right-to-left
	sethi	%hi(0xffff0000), %o3	! %o3 = mask
	srl	%o3, %o4, %o3		! %o3 = shifted to bit offset
	sll	%o1, %g1, %o1		! %o1 = shifted to bit offset
	and	%o1, %o3, %o1		! %o1 = single short value
	andn	%o0, 0x2, %o0		! %o0 = word address
	! if low-order bit is 1, we will properly get an alignment fault here
	ld	[%o0], %o2		! read old value
1:
	andn	%o2, %o3, %o5		! clear target bits
	or	%o5, %o1, %o5		! insert the new value
	cas	[%o0], %o2, %o5
	cmp	%o2, %o5
	bne,a,pn %icc, 1b
	mov	%o5, %o2		! %o2 = old value
	and	%o5, %o3, %o5
	retl
	srl	%o5, %g1, %o0		! %o0 = old value
	SET_SIZE(atomic_swap_ushort)
	SET_SIZE(atomic_swap_16)

	ENTRY(atomic_swap_32)
	ALTENTRY(atomic_swap_uint)
	ALTENTRY(atomic_swap_ptr)
	ALTENTRY(atomic_swap_ulong)
	ld	[%o0], %o2
1:
	mov	%o1, %o3
	cas	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %icc, 1b
	mov	%o3, %o2
	retl
	mov	%o3, %o0
	SET_SIZE(atomic_swap_ulong)
	SET_SIZE(atomic_swap_ptr)
	SET_SIZE(atomic_swap_uint)
	SET_SIZE(atomic_swap_32)
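
/*
 * SPARC V9 deprecates the old swap instruction, so the swap family is
 * also built on the cas retry loop: read the current value, try to
 * replace it wholesale, and retry until no other CPU intervenes.
 * Roughly, in C (same assumptions as the earlier sketches):
 *
 *	uint32_t atomic_swap_32(volatile uint32_t *p, uint32_t new)
 *	{
 *		uint32_t old = *p, seen;
 *		while ((seen = __sync_val_compare_and_swap(p, old, new))
 *		    != old)
 *			old = seen;
 *		return (old);
 *	}
 */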

	ENTRY(atomic_swap_64)
	sllx	%o1, 32, %o1		! upper 32 in %o1, lower in %o2
	srl	%o2, 0, %o2
	add	%o1, %o2, %o1		! convert 2 32-bit args into 1 64-bit
	ldx	[%o0], %o2
1:
	mov	%o1, %o3
	casx	[%o0], %o2, %o3
	cmp	%o2, %o3
	bne,a,pn %xcc, 1b
	mov	%o3, %o2
	srl	%o3, 0, %o1		! return lower 32-bits in %o1
	retl
	srlx	%o3, 32, %o0		! return upper 32-bits in %o0
	SET_SIZE(atomic_swap_64)

/* these are not used by ZFS
	ENTRY(atomic_set_long_excl)
	mov	1, %o3
	slln	%o3, %o1, %o3
	ldn	[%o0], %o2
1:
	andcc	%o2, %o3, %g0		! test if the bit is set
	bnz,a,pn %ncc, 2f		! if so, then fail out
	mov	-1, %o0
	or	%o2, %o3, %o4		! set the bit, and try to commit it
	casn	[%o0], %o2, %o4
	cmp	%o2, %o4
	bne,a,pn %ncc, 1b		! failed to commit, try again
	mov	%o4, %o2
	mov	%g0, %o0
2:
	retl
	nop
	SET_SIZE(atomic_set_long_excl)

	ENTRY(atomic_clear_long_excl)
	mov	1, %o3
	slln	%o3, %o1, %o3
	ldn	[%o0], %o2
1:
	andncc	%o3, %o2, %g0		! test if the bit is clear
	bnz,a,pn %ncc, 2f		! if so, then fail out
	mov	-1, %o0
	andn	%o2, %o3, %o4		! clear the bit, and try to commit it
	casn	[%o0], %o2, %o4
	cmp	%o2, %o4
	bne,a,pn %ncc, 1b		! failed to commit, try again
	mov	%o4, %o2
	mov	%g0, %o0
2:
	retl
	nop
	SET_SIZE(atomic_clear_long_excl)
*/
#if !defined(_KERNEL)

/*
 * Spitfires and Blackbirds have a problem with membars in the
 * delay slot (SF_ERRATA_51). For safety's sake, we assume
 * that the whole world needs the workaround.
 */
	ENTRY(membar_enter)
	membar	#StoreLoad|#StoreStore
	retl
	nop
	SET_SIZE(membar_enter)

	ENTRY(membar_exit)
	membar	#LoadStore|#StoreStore
	retl
	nop
	SET_SIZE(membar_exit)

	ENTRY(membar_producer)
	membar	#StoreStore
	retl
	nop
	SET_SIZE(membar_producer)

	ENTRY(membar_consumer)
	membar	#LoadLoad
	retl
	nop
	SET_SIZE(membar_consumer)

#endif	/* !_KERNEL */
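
/*
 * An informal reading of the barriers above: membar_enter orders all
 * earlier stores before any later load or store (there is no single
 * C11 fence with exactly this shape; #StoreLoad ordering generally
 * requires a seq_cst fence); membar_exit orders all earlier loads and
 * stores before any later store, roughly
 * atomic_thread_fence(memory_order_release); membar_producer is
 * store/store only and membar_consumer load/load only, each weaker
 * than a full acquire or release fence.
 */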

#ifdef __ELF__
.section .note.GNU-stack,"",%progbits
#endif