]> git.proxmox.com Git - mirror_zfs-debian.git/blob - lib/libspl/asm-i386/atomic.S
Add atomic_sub_* functions to libspl.
[mirror_zfs-debian.git] / lib / libspl / asm-i386 / atomic.S
1 /*
2 * CDDL HEADER START
3 *
4 * The contents of this file are subject to the terms of the
5 * Common Development and Distribution License (the "License").
6 * You may not use this file except in compliance with the License.
7 *
8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 * or http://www.opensolaris.org/os/licensing.
10 * See the License for the specific language governing permissions
11 * and limitations under the License.
12 *
13 * When distributing Covered Code, include this CDDL HEADER in each
14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 * If applicable, add the following below this CDDL HEADER, with the
16 * fields enclosed by brackets "[]" replaced with your own identifying
17 * information: Portions Copyright [yyyy] [name of copyright owner]
18 *
19 * CDDL HEADER END
20 */
21 /*
22 * Copyright 2007 Sun Microsystems, Inc. All rights reserved.
23 * Use is subject to license terms.
24 */
25
26 .ident "%Z%%M% %I% %E% SMI"
27
28 .file "%M%"
29
30 #define _ASM
31 #include <ia32/sys/asm_linkage.h>
32
/*
 * void atomic_inc_8(volatile uint8_t *target)
 * Atomically increment the byte at *target; nothing is returned.
 */
	ENTRY(atomic_inc_8)
	ALTENTRY(atomic_inc_uchar)
	movl	4(%esp), %eax		/* %eax = target address */
	lock
	incb	(%eax)			/* atomic (*target)++ */
	ret
	SET_SIZE(atomic_inc_uchar)
	SET_SIZE(atomic_inc_8)
41
/*
 * void atomic_inc_16(volatile uint16_t *target)
 * Atomically increment the 16-bit word at *target; nothing is returned.
 */
	ENTRY(atomic_inc_16)
	ALTENTRY(atomic_inc_ushort)
	movl	4(%esp), %eax		/* %eax = target address */
	lock
	incw	(%eax)			/* atomic (*target)++ */
	ret
	SET_SIZE(atomic_inc_ushort)
	SET_SIZE(atomic_inc_16)
50
/*
 * void atomic_inc_32(volatile uint32_t *target)
 * Atomically increment the 32-bit word at *target; nothing is returned.
 * uint/ulong aliases are valid because long is 32 bits on i386.
 */
	ENTRY(atomic_inc_32)
	ALTENTRY(atomic_inc_uint)
	ALTENTRY(atomic_inc_ulong)
	movl	4(%esp), %eax		/* %eax = target address */
	lock
	incl	(%eax)			/* atomic (*target)++ */
	ret
	SET_SIZE(atomic_inc_ulong)
	SET_SIZE(atomic_inc_uint)
	SET_SIZE(atomic_inc_32)
61
/*
 * uint8_t atomic_inc_8_nv(volatile uint8_t *target)
 * Atomically increment *target and return the new value, via a
 * compare-and-swap retry loop.
 */
	ENTRY(atomic_inc_8_nv)
	ALTENTRY(atomic_inc_uchar_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movb	(%edx), %al		/* %al = old value */
1:
	leal	1(%eax), %ecx		/* %cl = old + 1 (lea: flags unchanged) */
	lock
	cmpxchgb %cl, (%edx)		/* store %cl iff *target still == %al */
	jne	1b			/* lost race: %al reloaded by cmpxchg, retry */
	movzbl	%cl, %eax		/* return new value, zero-extended */
	ret
	SET_SIZE(atomic_inc_uchar_nv)
	SET_SIZE(atomic_inc_8_nv)
75
/*
 * uint16_t atomic_inc_16_nv(volatile uint16_t *target)
 * Atomically increment *target and return the new value, via a
 * compare-and-swap retry loop.
 */
	ENTRY(atomic_inc_16_nv)
	ALTENTRY(atomic_inc_ushort_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movw	(%edx), %ax		/* %ax = old value */
1:
	leal	1(%eax), %ecx		/* %cx = old + 1 (lea: flags unchanged) */
	lock
	cmpxchgw %cx, (%edx)		/* store %cx iff *target still == %ax */
	jne	1b			/* lost race: %ax reloaded by cmpxchg, retry */
	movzwl	%cx, %eax		/* return new value, zero-extended */
	ret
	SET_SIZE(atomic_inc_ushort_nv)
	SET_SIZE(atomic_inc_16_nv)
89
/*
 * uint32_t atomic_inc_32_nv(volatile uint32_t *target)
 * Atomically increment *target and return the new value, via a
 * compare-and-swap retry loop.
 */
	ENTRY(atomic_inc_32_nv)
	ALTENTRY(atomic_inc_uint_nv)
	ALTENTRY(atomic_inc_ulong_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movl	(%edx), %eax		/* %eax = old value */
1:
	leal	1(%eax), %ecx		/* %ecx = old + 1 (lea: flags unchanged) */
	lock
	cmpxchgl %ecx, (%edx)		/* store %ecx iff *target still == %eax */
	jne	1b			/* lost race: %eax reloaded by cmpxchg, retry */
	movl	%ecx, %eax		/* return new value */
	ret
	SET_SIZE(atomic_inc_ulong_nv)
	SET_SIZE(atomic_inc_uint_nv)
	SET_SIZE(atomic_inc_32_nv)
105
106 /*
107 * NOTE: If atomic_inc_64 and atomic_inc_64_nv are ever
108 * separated, you need to also edit the libc i386 platform
109 * specific mapfile and remove the NODYNSORT attribute
110 * from atomic_inc_64_nv.
111 */
/*
 * uint64_t atomic_inc_64_nv(volatile uint64_t *target)
 * void atomic_inc_64(volatile uint64_t *target)  -- same entry point
 * Atomically increment the 64-bit value at *target using a cmpxchg8b
 * retry loop; the new value is returned in %edx:%eax.
 */
	ENTRY(atomic_inc_64)
	ALTENTRY(atomic_inc_64_nv)
	pushl	%edi
	pushl	%ebx			/* %ebx/%edi are callee-saved */
	movl	12(%esp), %edi		/* %edi = target (arg shifted by 2 pushes) */
	movl	(%edi), %eax		/* %edx:%eax = old 64-bit value */
	movl	4(%edi), %edx
1:
	xorl	%ebx, %ebx
	xorl	%ecx, %ecx
	incl	%ebx			/* %ecx:%ebx = 1 */
	addl	%eax, %ebx		/* %ecx:%ebx = old + 1 (64-bit add) */
	adcl	%edx, %ecx
	lock
	cmpxchg8b (%edi)		/* store %ecx:%ebx iff still == %edx:%eax */
	jne	1b			/* %edx:%eax reloaded on failure; retry */
	movl	%ebx, %eax		/* return new value in %edx:%eax */
	movl	%ecx, %edx
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_inc_64_nv)
	SET_SIZE(atomic_inc_64)
135
/*
 * void atomic_dec_8(volatile uint8_t *target)
 * Atomically decrement the byte at *target; nothing is returned.
 */
	ENTRY(atomic_dec_8)
	ALTENTRY(atomic_dec_uchar)
	movl	4(%esp), %eax		/* %eax = target address */
	lock
	decb	(%eax)			/* atomic (*target)-- */
	ret
	SET_SIZE(atomic_dec_uchar)
	SET_SIZE(atomic_dec_8)
144
/*
 * void atomic_dec_16(volatile uint16_t *target)
 * Atomically decrement the 16-bit word at *target; nothing is returned.
 */
	ENTRY(atomic_dec_16)
	ALTENTRY(atomic_dec_ushort)
	movl	4(%esp), %eax		/* %eax = target address */
	lock
	decw	(%eax)			/* atomic (*target)-- */
	ret
	SET_SIZE(atomic_dec_ushort)
	SET_SIZE(atomic_dec_16)
153
/*
 * void atomic_dec_32(volatile uint32_t *target)
 * Atomically decrement the 32-bit word at *target; nothing is returned.
 */
	ENTRY(atomic_dec_32)
	ALTENTRY(atomic_dec_uint)
	ALTENTRY(atomic_dec_ulong)
	movl	4(%esp), %eax		/* %eax = target address */
	lock
	decl	(%eax)			/* atomic (*target)-- */
	ret
	SET_SIZE(atomic_dec_ulong)
	SET_SIZE(atomic_dec_uint)
	SET_SIZE(atomic_dec_32)
164
/*
 * uint8_t atomic_dec_8_nv(volatile uint8_t *target)
 * Atomically decrement *target and return the new value, via a
 * compare-and-swap retry loop.
 */
	ENTRY(atomic_dec_8_nv)
	ALTENTRY(atomic_dec_uchar_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movb	(%edx), %al		/* %al = old value */
1:
	leal	-1(%eax), %ecx		/* %cl = old - 1 (lea: flags unchanged) */
	lock
	cmpxchgb %cl, (%edx)		/* store %cl iff *target still == %al */
	jne	1b			/* lost race: %al reloaded by cmpxchg, retry */
	movzbl	%cl, %eax		/* return new value, zero-extended */
	ret
	SET_SIZE(atomic_dec_uchar_nv)
	SET_SIZE(atomic_dec_8_nv)
178
/*
 * uint16_t atomic_dec_16_nv(volatile uint16_t *target)
 * Atomically decrement *target and return the new value, via a
 * compare-and-swap retry loop.
 */
	ENTRY(atomic_dec_16_nv)
	ALTENTRY(atomic_dec_ushort_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movw	(%edx), %ax		/* %ax = old value */
1:
	leal	-1(%eax), %ecx		/* %cx = old - 1 (lea: flags unchanged) */
	lock
	cmpxchgw %cx, (%edx)		/* store %cx iff *target still == %ax */
	jne	1b			/* lost race: %ax reloaded by cmpxchg, retry */
	movzwl	%cx, %eax		/* return new value, zero-extended */
	ret
	SET_SIZE(atomic_dec_ushort_nv)
	SET_SIZE(atomic_dec_16_nv)
192
/*
 * uint32_t atomic_dec_32_nv(volatile uint32_t *target)
 * Atomically decrement *target and return the new value, via a
 * compare-and-swap retry loop.
 */
	ENTRY(atomic_dec_32_nv)
	ALTENTRY(atomic_dec_uint_nv)
	ALTENTRY(atomic_dec_ulong_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movl	(%edx), %eax		/* %eax = old value */
1:
	leal	-1(%eax), %ecx		/* %ecx = old - 1 (lea: flags unchanged) */
	lock
	cmpxchgl %ecx, (%edx)		/* store %ecx iff *target still == %eax */
	jne	1b			/* lost race: %eax reloaded by cmpxchg, retry */
	movl	%ecx, %eax		/* return new value */
	ret
	SET_SIZE(atomic_dec_ulong_nv)
	SET_SIZE(atomic_dec_uint_nv)
	SET_SIZE(atomic_dec_32_nv)
208
209 /*
210 * NOTE: If atomic_dec_64 and atomic_dec_64_nv are ever
211 * separated, it is important to edit the libc i386 platform
212 * specific mapfile and remove the NODYNSORT attribute
213 * from atomic_dec_64_nv.
214 */
/*
 * uint64_t atomic_dec_64_nv(volatile uint64_t *target)
 * void atomic_dec_64(volatile uint64_t *target)  -- same entry point
 * Atomically decrement the 64-bit value at *target using a cmpxchg8b
 * retry loop; the new value is returned in %edx:%eax.
 */
	ENTRY(atomic_dec_64)
	ALTENTRY(atomic_dec_64_nv)
	pushl	%edi
	pushl	%ebx			/* %ebx/%edi are callee-saved */
	movl	12(%esp), %edi		/* %edi = target (arg shifted by 2 pushes) */
	movl	(%edi), %eax		/* %edx:%eax = old 64-bit value */
	movl	4(%edi), %edx
1:
	xorl	%ebx, %ebx
	xorl	%ecx, %ecx
	not	%ecx			/* %ecx:%ebx = 0xffffffffffffffff = -1 */
	not	%ebx
	addl	%eax, %ebx		/* %ecx:%ebx = old - 1 (64-bit add of -1) */
	adcl	%edx, %ecx
	lock
	cmpxchg8b (%edi)		/* store %ecx:%ebx iff still == %edx:%eax */
	jne	1b			/* %edx:%eax reloaded on failure; retry */
	movl	%ebx, %eax		/* return new value in %edx:%eax */
	movl	%ecx, %edx
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_dec_64_nv)
	SET_SIZE(atomic_dec_64)
239
/*
 * void atomic_add_8(volatile uint8_t *target, int8_t delta)
 * Atomically add delta to the byte at *target; nothing is returned.
 */
	ENTRY(atomic_add_8)
	ALTENTRY(atomic_add_char)
	movl	4(%esp), %eax		/* %eax = target address */
	movl	8(%esp), %ecx		/* %cl = delta */
	lock
	addb	%cl, (%eax)		/* atomic *target += delta */
	ret
	SET_SIZE(atomic_add_char)
	SET_SIZE(atomic_add_8)
249
/*
 * void atomic_add_16(volatile uint16_t *target, int16_t delta)
 * Atomically add delta to the 16-bit word at *target; nothing is returned.
 */
	ENTRY(atomic_add_16)
	ALTENTRY(atomic_add_short)
	movl	4(%esp), %eax		/* %eax = target address */
	movl	8(%esp), %ecx		/* %cx = delta */
	lock
	addw	%cx, (%eax)		/* atomic *target += delta */
	ret
	SET_SIZE(atomic_add_short)
	SET_SIZE(atomic_add_16)
259
/*
 * void atomic_add_32(volatile uint32_t *target, int32_t delta)
 * Atomically add delta to the 32-bit word at *target; nothing is
 * returned.  int/long/ptr aliases are valid because all are 32 bits
 * on i386.
 */
	ENTRY(atomic_add_32)
	ALTENTRY(atomic_add_int)
	ALTENTRY(atomic_add_ptr)
	ALTENTRY(atomic_add_long)
	movl	4(%esp), %eax		/* %eax = target address */
	movl	8(%esp), %ecx		/* %ecx = delta */
	lock
	addl	%ecx, (%eax)		/* atomic *target += delta */
	ret
	SET_SIZE(atomic_add_long)
	SET_SIZE(atomic_add_ptr)
	SET_SIZE(atomic_add_int)
	SET_SIZE(atomic_add_32)
273
/*
 * void atomic_sub_8(volatile uint8_t *target, int8_t delta)
 * Atomically subtract delta from the byte at *target; nothing is returned.
 */
	ENTRY(atomic_sub_8)
	ALTENTRY(atomic_sub_char)
	movl	4(%esp), %eax		/* %eax = target address */
	movl	8(%esp), %ecx		/* %cl = delta */
	lock
	subb	%cl, (%eax)		/* atomic *target -= delta */
	ret
	SET_SIZE(atomic_sub_char)
	SET_SIZE(atomic_sub_8)
283
/*
 * void atomic_sub_16(volatile uint16_t *target, int16_t delta)
 * Atomically subtract delta from the 16-bit word at *target;
 * nothing is returned.
 */
	ENTRY(atomic_sub_16)
	ALTENTRY(atomic_sub_short)
	movl	4(%esp), %eax		/* %eax = target address */
	movl	8(%esp), %ecx		/* %cx = delta */
	lock
	subw	%cx, (%eax)		/* atomic *target -= delta */
	ret
	SET_SIZE(atomic_sub_short)
	SET_SIZE(atomic_sub_16)
293
/*
 * void atomic_sub_32(volatile uint32_t *target, int32_t delta)
 * Atomically subtract delta from the 32-bit word at *target;
 * nothing is returned.
 */
	ENTRY(atomic_sub_32)
	ALTENTRY(atomic_sub_int)
	ALTENTRY(atomic_sub_ptr)
	ALTENTRY(atomic_sub_long)
	movl	4(%esp), %eax		/* %eax = target address */
	movl	8(%esp), %ecx		/* %ecx = delta */
	lock
	subl	%ecx, (%eax)		/* atomic *target -= delta */
	ret
	SET_SIZE(atomic_sub_long)
	SET_SIZE(atomic_sub_ptr)
	SET_SIZE(atomic_sub_int)
	SET_SIZE(atomic_sub_32)
307
/*
 * void atomic_or_8(volatile uint8_t *target, uint8_t bits)
 * Atomically OR bits into the byte at *target; nothing is returned.
 */
	ENTRY(atomic_or_8)
	ALTENTRY(atomic_or_uchar)
	movl	4(%esp), %eax		/* %eax = target address */
	movb	8(%esp), %cl		/* %cl = bits */
	lock
	orb	%cl, (%eax)		/* atomic *target |= bits */
	ret
	SET_SIZE(atomic_or_uchar)
	SET_SIZE(atomic_or_8)
317
/*
 * void atomic_or_16(volatile uint16_t *target, uint16_t bits)
 * Atomically OR bits into the 16-bit word at *target; nothing is returned.
 */
	ENTRY(atomic_or_16)
	ALTENTRY(atomic_or_ushort)
	movl	4(%esp), %eax		/* %eax = target address */
	movw	8(%esp), %cx		/* %cx = bits */
	lock
	orw	%cx, (%eax)		/* atomic *target |= bits */
	ret
	SET_SIZE(atomic_or_ushort)
	SET_SIZE(atomic_or_16)
327
/*
 * void atomic_or_32(volatile uint32_t *target, uint32_t bits)
 * Atomically OR bits into the 32-bit word at *target; nothing is returned.
 */
	ENTRY(atomic_or_32)
	ALTENTRY(atomic_or_uint)
	ALTENTRY(atomic_or_ulong)
	movl	4(%esp), %eax		/* %eax = target address */
	movl	8(%esp), %ecx		/* %ecx = bits */
	lock
	orl	%ecx, (%eax)		/* atomic *target |= bits */
	ret
	SET_SIZE(atomic_or_ulong)
	SET_SIZE(atomic_or_uint)
	SET_SIZE(atomic_or_32)
339
/*
 * void atomic_and_8(volatile uint8_t *target, uint8_t bits)
 * Atomically AND bits into the byte at *target; nothing is returned.
 */
	ENTRY(atomic_and_8)
	ALTENTRY(atomic_and_uchar)
	movl	4(%esp), %eax		/* %eax = target address */
	movb	8(%esp), %cl		/* %cl = bits */
	lock
	andb	%cl, (%eax)		/* atomic *target &= bits */
	ret
	SET_SIZE(atomic_and_uchar)
	SET_SIZE(atomic_and_8)
349
/*
 * void atomic_and_16(volatile uint16_t *target, uint16_t bits)
 * Atomically AND bits into the 16-bit word at *target; nothing is returned.
 */
	ENTRY(atomic_and_16)
	ALTENTRY(atomic_and_ushort)
	movl	4(%esp), %eax		/* %eax = target address */
	movw	8(%esp), %cx		/* %cx = bits */
	lock
	andw	%cx, (%eax)		/* atomic *target &= bits */
	ret
	SET_SIZE(atomic_and_ushort)
	SET_SIZE(atomic_and_16)
359
/*
 * void atomic_and_32(volatile uint32_t *target, uint32_t bits)
 * Atomically AND bits into the 32-bit word at *target; nothing is returned.
 */
	ENTRY(atomic_and_32)
	ALTENTRY(atomic_and_uint)
	ALTENTRY(atomic_and_ulong)
	movl	4(%esp), %eax		/* %eax = target address */
	movl	8(%esp), %ecx		/* %ecx = bits */
	lock
	andl	%ecx, (%eax)		/* atomic *target &= bits */
	ret
	SET_SIZE(atomic_and_ulong)
	SET_SIZE(atomic_and_uint)
	SET_SIZE(atomic_and_32)
371
/*
 * uint8_t atomic_add_8_nv(volatile uint8_t *target, int8_t delta)
 * Atomically add delta to *target and return the new value, via a
 * compare-and-swap retry loop.
 */
	ENTRY(atomic_add_8_nv)
	ALTENTRY(atomic_add_char_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movb	(%edx), %al		/* %al = old value */
1:
	movl	8(%esp), %ecx		/* %cl = delta */
	addb	%al, %cl		/* %cl = old + delta (add commutes) */
	lock
	cmpxchgb %cl, (%edx)		/* store %cl iff *target still == %al */
	jne	1b			/* lost race: %al reloaded by cmpxchg, retry */
	movzbl	%cl, %eax		/* return new value, zero-extended */
	ret
	SET_SIZE(atomic_add_char_nv)
	SET_SIZE(atomic_add_8_nv)
386
/*
 * uint16_t atomic_add_16_nv(volatile uint16_t *target, int16_t delta)
 * Atomically add delta to *target and return the new value, via a
 * compare-and-swap retry loop.
 */
	ENTRY(atomic_add_16_nv)
	ALTENTRY(atomic_add_short_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movw	(%edx), %ax		/* %ax = old value */
1:
	movl	8(%esp), %ecx		/* %cx = delta */
	addw	%ax, %cx		/* %cx = old + delta (add commutes) */
	lock
	cmpxchgw %cx, (%edx)		/* store %cx iff *target still == %ax */
	jne	1b			/* lost race: %ax reloaded by cmpxchg, retry */
	movzwl	%cx, %eax		/* return new value, zero-extended */
	ret
	SET_SIZE(atomic_add_short_nv)
	SET_SIZE(atomic_add_16_nv)
401
/*
 * uint32_t atomic_add_32_nv(volatile uint32_t *target, int32_t delta)
 * Atomically add delta to *target and return the new value, via a
 * compare-and-swap retry loop.
 */
	ENTRY(atomic_add_32_nv)
	ALTENTRY(atomic_add_int_nv)
	ALTENTRY(atomic_add_ptr_nv)
	ALTENTRY(atomic_add_long_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movl	(%edx), %eax		/* %eax = old value */
1:
	movl	8(%esp), %ecx		/* %ecx = delta */
	addl	%eax, %ecx		/* %ecx = old + delta (add commutes) */
	lock
	cmpxchgl %ecx, (%edx)		/* store %ecx iff *target still == %eax */
	jne	1b			/* lost race: %eax reloaded by cmpxchg, retry */
	movl	%ecx, %eax		/* return new value */
	ret
	SET_SIZE(atomic_add_long_nv)
	SET_SIZE(atomic_add_ptr_nv)
	SET_SIZE(atomic_add_int_nv)
	SET_SIZE(atomic_add_32_nv)
420
/*
 * uint8_t atomic_sub_8_nv(volatile uint8_t *target, int8_t delta)
 * Atomically subtract delta from *target and return the new value,
 * via a compare-and-swap retry loop.
 *
 * FIX: the previous code did "subb %al, %cl", which computes
 * delta - old rather than old - delta (subtraction does not commute
 * the way the add_nv code's addb does).  Compute the new value in
 * %cl from a copy of the old value so %al stays valid for the
 * cmpxchg comparison.
 */
	ENTRY(atomic_sub_8_nv)
	ALTENTRY(atomic_sub_char_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movb	(%edx), %al		/* %al = old value */
1:
	movb	%al, %cl
	subb	8(%esp), %cl		/* %cl = old - delta */
	lock
	cmpxchgb %cl, (%edx)		/* store %cl iff *target still == %al */
	jne	1b			/* lost race: %al reloaded by cmpxchg, retry */
	movzbl	%cl, %eax		/* return new value, zero-extended */
	ret
	SET_SIZE(atomic_sub_char_nv)
	SET_SIZE(atomic_sub_8_nv)
435
/*
 * uint16_t atomic_sub_16_nv(volatile uint16_t *target, int16_t delta)
 * Atomically subtract delta from *target and return the new value,
 * via a compare-and-swap retry loop.
 *
 * FIX: the previous code did "subw %ax, %cx", which computes
 * delta - old rather than old - delta.  Compute the new value in
 * %cx from a copy of the old value so %ax stays valid for the
 * cmpxchg comparison.
 */
	ENTRY(atomic_sub_16_nv)
	ALTENTRY(atomic_sub_short_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movw	(%edx), %ax		/* %ax = old value */
1:
	movw	%ax, %cx
	subw	8(%esp), %cx		/* %cx = old - delta */
	lock
	cmpxchgw %cx, (%edx)		/* store %cx iff *target still == %ax */
	jne	1b			/* lost race: %ax reloaded by cmpxchg, retry */
	movzwl	%cx, %eax		/* return new value, zero-extended */
	ret
	SET_SIZE(atomic_sub_short_nv)
	SET_SIZE(atomic_sub_16_nv)
450
/*
 * uint32_t atomic_sub_32_nv(volatile uint32_t *target, int32_t delta)
 * Atomically subtract delta from *target and return the new value,
 * via a compare-and-swap retry loop.
 *
 * FIX: the previous code did "subl %eax, %ecx", which computes
 * delta - old rather than old - delta.  Compute the new value in
 * %ecx from a copy of the old value so %eax stays valid for the
 * cmpxchg comparison.
 */
	ENTRY(atomic_sub_32_nv)
	ALTENTRY(atomic_sub_int_nv)
	ALTENTRY(atomic_sub_ptr_nv)
	ALTENTRY(atomic_sub_long_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movl	(%edx), %eax		/* %eax = old value */
1:
	movl	%eax, %ecx
	subl	8(%esp), %ecx		/* %ecx = old - delta */
	lock
	cmpxchgl %ecx, (%edx)		/* store %ecx iff *target still == %eax */
	jne	1b			/* lost race: %eax reloaded by cmpxchg, retry */
	movl	%ecx, %eax		/* return new value */
	ret
	SET_SIZE(atomic_sub_long_nv)
	SET_SIZE(atomic_sub_ptr_nv)
	SET_SIZE(atomic_sub_int_nv)
	SET_SIZE(atomic_sub_32_nv)
469
470 /*
471 * NOTE: If atomic_add_64 and atomic_add_64_nv are ever
472 * separated, it is important to edit the libc i386 platform
473 * specific mapfile and remove the NODYNSORT attribute
474 * from atomic_add_64_nv.
475 */
/*
 * uint64_t atomic_add_64_nv(volatile uint64_t *target, int64_t delta)
 * void atomic_add_64(volatile uint64_t *target, int64_t delta)  -- same code
 * Atomically add the 64-bit delta to *target using a cmpxchg8b retry
 * loop; the new value is returned in %edx:%eax.
 */
	ENTRY(atomic_add_64)
	ALTENTRY(atomic_add_64_nv)
	pushl	%edi
	pushl	%ebx			/* %ebx/%edi are callee-saved */
	movl	12(%esp), %edi		/* %edi = target (args shifted by 2 pushes) */
	movl	(%edi), %eax		/* %edx:%eax = old 64-bit value */
	movl	4(%edi), %edx
1:
	movl	16(%esp), %ebx		/* %ecx:%ebx = delta */
	movl	20(%esp), %ecx
	addl	%eax, %ebx		/* %ecx:%ebx = old + delta (add commutes) */
	adcl	%edx, %ecx
	lock
	cmpxchg8b (%edi)		/* store %ecx:%ebx iff still == %edx:%eax */
	jne	1b			/* %edx:%eax reloaded on failure; retry */
	movl	%ebx, %eax		/* return new value in %edx:%eax */
	movl	%ecx, %edx
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_add_64_nv)
	SET_SIZE(atomic_add_64)
498
/*
 * uint64_t atomic_sub_64_nv(volatile uint64_t *target, int64_t delta)
 * void atomic_sub_64(volatile uint64_t *target, int64_t delta)  -- same code
 * Atomically subtract the 64-bit delta from *target using a cmpxchg8b
 * retry loop; the new value is returned in %edx:%eax.
 *
 * FIX: the previous code did "subl %eax, %ebx" / "adcl %edx, %ecx",
 * i.e. delta_lo - old_lo in the low word and delta_hi + old_hi + CF
 * in the high word -- wrong in both direction and carry handling.
 * Compute old - delta with a subl/sbbl pair on a copy of the old
 * value, leaving %edx:%eax intact for the cmpxchg8b comparison.
 */
	ENTRY(atomic_sub_64)
	ALTENTRY(atomic_sub_64_nv)
	pushl	%edi
	pushl	%ebx			/* %ebx/%edi are callee-saved */
	movl	12(%esp), %edi		/* %edi = target (args shifted by 2 pushes) */
	movl	(%edi), %eax		/* %edx:%eax = old 64-bit value */
	movl	4(%edi), %edx
1:
	movl	%eax, %ebx		/* %ecx:%ebx = old value */
	movl	%edx, %ecx
	subl	16(%esp), %ebx		/* %ecx:%ebx = old - delta (64-bit sub */
	sbbl	20(%esp), %ecx		/*  with borrow into the high word) */
	lock
	cmpxchg8b (%edi)		/* store %ecx:%ebx iff still == %edx:%eax */
	jne	1b			/* %edx:%eax reloaded on failure; retry */
	movl	%ebx, %eax		/* return new value in %edx:%eax */
	movl	%ecx, %edx
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_sub_64_nv)
	SET_SIZE(atomic_sub_64)
521
/*
 * uint8_t atomic_or_8_nv(volatile uint8_t *target, uint8_t bits)
 * Atomically OR bits into *target and return the new value, via a
 * compare-and-swap retry loop.
 */
	ENTRY(atomic_or_8_nv)
	ALTENTRY(atomic_or_uchar_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movb	(%edx), %al		/* %al = old value */
1:
	movl	8(%esp), %ecx		/* %cl = bits */
	orb	%al, %cl		/* %cl = old | bits (or commutes) */
	lock
	cmpxchgb %cl, (%edx)		/* store %cl iff *target still == %al */
	jne	1b			/* lost race: %al reloaded by cmpxchg, retry */
	movzbl	%cl, %eax		/* return new value, zero-extended */
	ret
	SET_SIZE(atomic_or_uchar_nv)
	SET_SIZE(atomic_or_8_nv)
536
/*
 * uint16_t atomic_or_16_nv(volatile uint16_t *target, uint16_t bits)
 * Atomically OR bits into *target and return the new value, via a
 * compare-and-swap retry loop.
 */
	ENTRY(atomic_or_16_nv)
	ALTENTRY(atomic_or_ushort_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movw	(%edx), %ax		/* %ax = old value */
1:
	movl	8(%esp), %ecx		/* %cx = bits */
	orw	%ax, %cx		/* %cx = old | bits (or commutes) */
	lock
	cmpxchgw %cx, (%edx)		/* store %cx iff *target still == %ax */
	jne	1b			/* lost race: %ax reloaded by cmpxchg, retry */
	movzwl	%cx, %eax		/* return new value, zero-extended */
	ret
	SET_SIZE(atomic_or_ushort_nv)
	SET_SIZE(atomic_or_16_nv)
551
/*
 * uint32_t atomic_or_32_nv(volatile uint32_t *target, uint32_t bits)
 * Atomically OR bits into *target and return the new value, via a
 * compare-and-swap retry loop.
 */
	ENTRY(atomic_or_32_nv)
	ALTENTRY(atomic_or_uint_nv)
	ALTENTRY(atomic_or_ulong_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movl	(%edx), %eax		/* %eax = old value */
1:
	movl	8(%esp), %ecx		/* %ecx = bits */
	orl	%eax, %ecx		/* %ecx = old | bits (or commutes) */
	lock
	cmpxchgl %ecx, (%edx)		/* store %ecx iff *target still == %eax */
	jne	1b			/* lost race: %eax reloaded by cmpxchg, retry */
	movl	%ecx, %eax		/* return new value */
	ret
	SET_SIZE(atomic_or_ulong_nv)
	SET_SIZE(atomic_or_uint_nv)
	SET_SIZE(atomic_or_32_nv)
568
569 /*
570 * NOTE: If atomic_or_64 and atomic_or_64_nv are ever
571 * separated, it is important to edit the libc i386 platform
572 * specific mapfile and remove the NODYNSORT attribute
573 * from atomic_or_64_nv.
574 */
/*
 * uint64_t atomic_or_64_nv(volatile uint64_t *target, uint64_t bits)
 * void atomic_or_64(volatile uint64_t *target, uint64_t bits)  -- same code
 * Atomically OR the 64-bit bits into *target using a cmpxchg8b retry
 * loop; the new value is returned in %edx:%eax.
 */
	ENTRY(atomic_or_64)
	ALTENTRY(atomic_or_64_nv)
	pushl	%edi
	pushl	%ebx			/* %ebx/%edi are callee-saved */
	movl	12(%esp), %edi		/* %edi = target (args shifted by 2 pushes) */
	movl	(%edi), %eax		/* %edx:%eax = old 64-bit value */
	movl	4(%edi), %edx
1:
	movl	16(%esp), %ebx		/* %ecx:%ebx = bits */
	movl	20(%esp), %ecx
	orl	%eax, %ebx		/* %ecx:%ebx = old | bits, word by word */
	orl	%edx, %ecx		/*  (no carries in OR) */
	lock
	cmpxchg8b (%edi)		/* store %ecx:%ebx iff still == %edx:%eax */
	jne	1b			/* %edx:%eax reloaded on failure; retry */
	movl	%ebx, %eax		/* return new value in %edx:%eax */
	movl	%ecx, %edx
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_or_64_nv)
	SET_SIZE(atomic_or_64)
597
/*
 * uint8_t atomic_and_8_nv(volatile uint8_t *target, uint8_t bits)
 * Atomically AND bits into *target and return the new value, via a
 * compare-and-swap retry loop.
 */
	ENTRY(atomic_and_8_nv)
	ALTENTRY(atomic_and_uchar_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movb	(%edx), %al		/* %al = old value */
1:
	movl	8(%esp), %ecx		/* %cl = bits */
	andb	%al, %cl		/* %cl = old & bits (and commutes) */
	lock
	cmpxchgb %cl, (%edx)		/* store %cl iff *target still == %al */
	jne	1b			/* lost race: %al reloaded by cmpxchg, retry */
	movzbl	%cl, %eax		/* return new value, zero-extended */
	ret
	SET_SIZE(atomic_and_uchar_nv)
	SET_SIZE(atomic_and_8_nv)
612
/*
 * uint16_t atomic_and_16_nv(volatile uint16_t *target, uint16_t bits)
 * Atomically AND bits into *target and return the new value, via a
 * compare-and-swap retry loop.
 */
	ENTRY(atomic_and_16_nv)
	ALTENTRY(atomic_and_ushort_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movw	(%edx), %ax		/* %ax = old value */
1:
	movl	8(%esp), %ecx		/* %cx = bits */
	andw	%ax, %cx		/* %cx = old & bits (and commutes) */
	lock
	cmpxchgw %cx, (%edx)		/* store %cx iff *target still == %ax */
	jne	1b			/* lost race: %ax reloaded by cmpxchg, retry */
	movzwl	%cx, %eax		/* return new value, zero-extended */
	ret
	SET_SIZE(atomic_and_ushort_nv)
	SET_SIZE(atomic_and_16_nv)
627
/*
 * uint32_t atomic_and_32_nv(volatile uint32_t *target, uint32_t bits)
 * Atomically AND bits into *target and return the new value, via a
 * compare-and-swap retry loop.
 */
	ENTRY(atomic_and_32_nv)
	ALTENTRY(atomic_and_uint_nv)
	ALTENTRY(atomic_and_ulong_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movl	(%edx), %eax		/* %eax = old value */
1:
	movl	8(%esp), %ecx		/* %ecx = bits */
	andl	%eax, %ecx		/* %ecx = old & bits (and commutes) */
	lock
	cmpxchgl %ecx, (%edx)		/* store %ecx iff *target still == %eax */
	jne	1b			/* lost race: %eax reloaded by cmpxchg, retry */
	movl	%ecx, %eax		/* return new value */
	ret
	SET_SIZE(atomic_and_ulong_nv)
	SET_SIZE(atomic_and_uint_nv)
	SET_SIZE(atomic_and_32_nv)
644
645 /*
646 * NOTE: If atomic_and_64 and atomic_and_64_nv are ever
647 * separated, it is important to edit the libc i386 platform
648 * specific mapfile and remove the NODYNSORT attribute
649 * from atomic_and_64_nv.
650 */
/*
 * uint64_t atomic_and_64_nv(volatile uint64_t *target, uint64_t bits)
 * void atomic_and_64(volatile uint64_t *target, uint64_t bits)  -- same code
 * Atomically AND the 64-bit bits into *target using a cmpxchg8b retry
 * loop; the new value is returned in %edx:%eax.
 */
	ENTRY(atomic_and_64)
	ALTENTRY(atomic_and_64_nv)
	pushl	%edi
	pushl	%ebx			/* %ebx/%edi are callee-saved */
	movl	12(%esp), %edi		/* %edi = target (args shifted by 2 pushes) */
	movl	(%edi), %eax		/* %edx:%eax = old 64-bit value */
	movl	4(%edi), %edx
1:
	movl	16(%esp), %ebx		/* %ecx:%ebx = bits */
	movl	20(%esp), %ecx
	andl	%eax, %ebx		/* %ecx:%ebx = old & bits, word by word */
	andl	%edx, %ecx		/*  (no carries in AND) */
	lock
	cmpxchg8b (%edi)		/* store %ecx:%ebx iff still == %edx:%eax */
	jne	1b			/* %edx:%eax reloaded on failure; retry */
	movl	%ebx, %eax		/* return new value in %edx:%eax */
	movl	%ecx, %edx
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_and_64_nv)
	SET_SIZE(atomic_and_64)
673
/*
 * uint8_t atomic_cas_8(volatile uint8_t *target, uint8_t cmp, uint8_t new)
 * If *target == cmp, atomically store new.  Returns the previous value
 * of *target either way (cmpxchg leaves it in %al on failure; on
 * success %al already holds cmp == old).
 */
	ENTRY(atomic_cas_8)
	ALTENTRY(atomic_cas_uchar)
	movl	4(%esp), %edx		/* %edx = target address */
	movzbl	8(%esp), %eax		/* %al = cmp (upper %eax zeroed) */
	movb	12(%esp), %cl		/* %cl = new */
	lock
	cmpxchgb %cl, (%edx)
	ret
	SET_SIZE(atomic_cas_uchar)
	SET_SIZE(atomic_cas_8)
684
/*
 * uint16_t atomic_cas_16(volatile uint16_t *target, uint16_t cmp,
 *     uint16_t new)
 * If *target == cmp, atomically store new.  Returns the previous value
 * of *target either way (in %ax, with upper %eax zeroed by the movzwl).
 */
	ENTRY(atomic_cas_16)
	ALTENTRY(atomic_cas_ushort)
	movl	4(%esp), %edx		/* %edx = target address */
	movzwl	8(%esp), %eax		/* %ax = cmp (upper %eax zeroed) */
	movw	12(%esp), %cx		/* %cx = new */
	lock
	cmpxchgw %cx, (%edx)
	ret
	SET_SIZE(atomic_cas_ushort)
	SET_SIZE(atomic_cas_16)
695
/*
 * uint32_t atomic_cas_32(volatile uint32_t *target, uint32_t cmp,
 *     uint32_t new)
 * If *target == cmp, atomically store new.  Returns the previous value
 * of *target either way (in %eax).
 */
	ENTRY(atomic_cas_32)
	ALTENTRY(atomic_cas_uint)
	ALTENTRY(atomic_cas_ulong)
	ALTENTRY(atomic_cas_ptr)
	movl	4(%esp), %edx		/* %edx = target address */
	movl	8(%esp), %eax		/* %eax = cmp */
	movl	12(%esp), %ecx		/* %ecx = new */
	lock
	cmpxchgl %ecx, (%edx)
	ret
	SET_SIZE(atomic_cas_ptr)
	SET_SIZE(atomic_cas_ulong)
	SET_SIZE(atomic_cas_uint)
	SET_SIZE(atomic_cas_32)
710
/*
 * uint64_t atomic_cas_64(volatile uint64_t *target, uint64_t cmp,
 *     uint64_t new)
 * If *target == cmp, atomically store new.  Returns the previous value
 * of *target either way in %edx:%eax (cmpxchg8b loads it on failure;
 * on success it already holds cmp == old).
 */
	ENTRY(atomic_cas_64)
	pushl	%ebx			/* %ebx/%esi are callee-saved */
	pushl	%esi
	movl	12(%esp), %esi		/* %esi = target (args shifted by 2 pushes) */
	movl	16(%esp), %eax		/* %edx:%eax = cmp */
	movl	20(%esp), %edx
	movl	24(%esp), %ebx		/* %ecx:%ebx = new */
	movl	28(%esp), %ecx
	lock
	cmpxchg8b (%esi)
	popl	%esi
	popl	%ebx
	ret
	SET_SIZE(atomic_cas_64)
725
/*
 * uint8_t atomic_swap_8(volatile uint8_t *target, uint8_t new)
 * Atomically store new into *target; returns the previous value.
 * xchg with a memory operand is implicitly locked; the explicit
 * lock prefix is redundant but harmless.
 */
	ENTRY(atomic_swap_8)
	ALTENTRY(atomic_swap_uchar)
	movl	4(%esp), %edx		/* %edx = target address */
	movzbl	8(%esp), %eax		/* %al = new (upper %eax zeroed) */
	lock
	xchgb	%al, (%edx)		/* %al <-> *target */
	ret
	SET_SIZE(atomic_swap_uchar)
	SET_SIZE(atomic_swap_8)
735
/*
 * uint16_t atomic_swap_16(volatile uint16_t *target, uint16_t new)
 * Atomically store new into *target; returns the previous value.
 */
	ENTRY(atomic_swap_16)
	ALTENTRY(atomic_swap_ushort)
	movl	4(%esp), %edx		/* %edx = target address */
	movzwl	8(%esp), %eax		/* %ax = new (upper %eax zeroed) */
	lock
	xchgw	%ax, (%edx)		/* %ax <-> *target */
	ret
	SET_SIZE(atomic_swap_ushort)
	SET_SIZE(atomic_swap_16)
745
/*
 * uint32_t atomic_swap_32(volatile uint32_t *target, uint32_t new)
 * Atomically store new into *target; returns the previous value.
 */
	ENTRY(atomic_swap_32)
	ALTENTRY(atomic_swap_uint)
	ALTENTRY(atomic_swap_ptr)
	ALTENTRY(atomic_swap_ulong)
	movl	4(%esp), %edx		/* %edx = target address */
	movl	8(%esp), %eax		/* %eax = new */
	lock
	xchgl	%eax, (%edx)		/* %eax <-> *target */
	ret
	SET_SIZE(atomic_swap_ulong)
	SET_SIZE(atomic_swap_ptr)
	SET_SIZE(atomic_swap_uint)
	SET_SIZE(atomic_swap_32)
759
/*
 * uint64_t atomic_swap_64(volatile uint64_t *target, uint64_t new)
 * Atomically store new into *target; returns the previous value in
 * %edx:%eax.  Implemented as a cmpxchg8b loop since i386 has no
 * 64-bit xchg.
 */
	ENTRY(atomic_swap_64)
	pushl	%esi			/* %esi/%ebx are callee-saved */
	pushl	%ebx
	movl	12(%esp), %esi		/* %esi = target (args shifted by 2 pushes) */
	movl	16(%esp), %ebx		/* %ecx:%ebx = new value */
	movl	20(%esp), %ecx
	movl	(%esi), %eax		/* %edx:%eax = expected old value */
	movl	4(%esi), %edx
1:
	lock
	cmpxchg8b (%esi)		/* store new iff still == %edx:%eax */
	jne	1b			/* %edx:%eax reloaded on failure; retry */
	popl	%ebx
	popl	%esi
	ret
	SET_SIZE(atomic_swap_64)
776
/*
 * int atomic_set_long_excl(volatile ulong_t *target, uint_t bit)
 * Atomically set bit number `bit` in *target.  Returns 0 if the bit
 * was previously clear (we "own" it), -1 if it was already set.
 */
	ENTRY(atomic_set_long_excl)
	movl	4(%esp), %edx		/* %edx = target address */
	movl	8(%esp), %ecx		/* %ecx = bit number */
	xorl	%eax, %eax		/* assume success: return 0 */
	lock
	btsl	%ecx, (%edx)		/* CF = previous bit value */
	jnc	1f			/* bit was clear: success */
	decl	%eax			/* bit already set: return -1 */
1:
	ret
	SET_SIZE(atomic_set_long_excl)
788
/*
 * int atomic_clear_long_excl(volatile ulong_t *target, uint_t bit)
 * Atomically clear bit number `bit` in *target.  Returns 0 if the bit
 * was previously set (we cleared it), -1 if it was already clear.
 */
	ENTRY(atomic_clear_long_excl)
	movl	4(%esp), %edx		/* %edx = target address */
	movl	8(%esp), %ecx		/* %ecx = bit number */
	xorl	%eax, %eax		/* assume success: return 0 */
	lock
	btrl	%ecx, (%edx)		/* CF = previous bit value */
	jc	1f			/* bit was set: success */
	decl	%eax			/* bit already clear: return -1 */
1:
	ret
	SET_SIZE(atomic_clear_long_excl)
800
801 /*
802 * NOTE: membar_enter, membar_exit, membar_producer, and
803 * membar_consumer are all identical routines. We define them
804 * separately, instead of using ALTENTRY definitions to alias them
805 * together, so that DTrace and debuggers will see a unique address
806 * for them, allowing more accurate tracing.
807 */
808
809
/*
 * void membar_enter(void)
 * Full memory barrier: a locked read-modify-write of the top of the
 * stack serializes all prior memory accesses with all later ones
 * (works on every i386-class CPU, unlike mfence).
 */
	ENTRY(membar_enter)
	lock
	xorl	$0, (%esp)		/* locked no-op RMW = full fence */
	ret
	SET_SIZE(membar_enter)
815
/*
 * void membar_exit(void)
 * Full memory barrier; same locked-RMW implementation as membar_enter
 * (kept as a separate routine so tracing sees a distinct address).
 */
	ENTRY(membar_exit)
	lock
	xorl	$0, (%esp)		/* locked no-op RMW = full fence */
	ret
	SET_SIZE(membar_exit)
821
/*
 * void membar_producer(void)
 * Store barrier; implemented as a full locked-RMW fence like the other
 * membar routines (kept separate so tracing sees a distinct address).
 */
	ENTRY(membar_producer)
	lock
	xorl	$0, (%esp)		/* locked no-op RMW = full fence */
	ret
	SET_SIZE(membar_producer)
827
/*
 * void membar_consumer(void)
 * Load barrier; implemented as a full locked-RMW fence like the other
 * membar routines (kept separate so tracing sees a distinct address).
 */
	ENTRY(membar_consumer)
	lock
	xorl	$0, (%esp)		/* locked no-op RMW = full fence */
	ret
	SET_SIZE(membar_consumer)
833
834 #ifdef __ELF__
835 .section .note.GNU-stack,"",%progbits
836 #endif