]> git.proxmox.com Git - mirror_zfs-debian.git/blob - lib/libspl/asm-i386/atomic.S
Add linux libspl support
[mirror_zfs-debian.git] / lib / libspl / asm-i386 / atomic.S
1 /*
2 * CDDL HEADER START
3 *
4 * The contents of this file are subject to the terms of the
5 * Common Development and Distribution License (the "License").
6 * You may not use this file except in compliance with the License.
7 *
8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 * or http://www.opensolaris.org/os/licensing.
10 * See the License for the specific language governing permissions
11 * and limitations under the License.
12 *
13 * When distributing Covered Code, include this CDDL HEADER in each
14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 * If applicable, add the following below this CDDL HEADER, with the
16 * fields enclosed by brackets "[]" replaced with your own identifying
17 * information: Portions Copyright [yyyy] [name of copyright owner]
18 *
19 * CDDL HEADER END
20 */
21 /*
22 * Copyright 2007 Sun Microsystems, Inc. All rights reserved.
23 * Use is subject to license terms.
24 */
25
26 .ident "%Z%%M% %I% %E% SMI"
27
28 .file "%M%"
29
30 #define _ASM
31 #include <ia32/sys/asm_linkage.h>
32
/*
 * void atomic_inc_8(volatile uint8_t *target)
 * void atomic_inc_uchar(volatile uchar_t *target)
 *
 * Atomically increment the byte at *target (pointer in 4(%esp), i386
 * cdecl).  LOCK makes the read-modify-write atomic on SMP.  No return
 * value.
 */
33 	ENTRY(atomic_inc_8)
34 	ALTENTRY(atomic_inc_uchar)
35 	movl 4(%esp), %eax
36 	lock
37 	incb (%eax)
38 	ret
39 	SET_SIZE(atomic_inc_uchar)
40 	SET_SIZE(atomic_inc_8)
41 
/*
 * void atomic_inc_16(volatile uint16_t *target)
 * void atomic_inc_ushort(volatile ushort_t *target)
 *
 * Atomic 16-bit increment of *target; same shape as the 8-bit variant.
 */
42 	ENTRY(atomic_inc_16)
43 	ALTENTRY(atomic_inc_ushort)
44 	movl 4(%esp), %eax
45 	lock
46 	incw (%eax)
47 	ret
48 	SET_SIZE(atomic_inc_ushort)
49 	SET_SIZE(atomic_inc_16)
50 
/*
 * void atomic_inc_32(volatile uint32_t *target)
 * void atomic_inc_uint(volatile uint_t *target)
 * void atomic_inc_ulong(volatile ulong_t *target)
 *
 * Atomic 32-bit increment.  uint/ulong alias the 32-bit entry because
 * long is 32 bits on i386.
 */
51 	ENTRY(atomic_inc_32)
52 	ALTENTRY(atomic_inc_uint)
53 	ALTENTRY(atomic_inc_ulong)
54 	movl 4(%esp), %eax
55 	lock
56 	incl (%eax)
57 	ret
58 	SET_SIZE(atomic_inc_ulong)
59 	SET_SIZE(atomic_inc_uint)
60 	SET_SIZE(atomic_inc_32)
61
/*
 * uint8_t atomic_inc_8_nv(volatile uint8_t *target)
 * uchar_t atomic_inc_uchar_nv(volatile uchar_t *target)
 *
 * Atomically increment *target and return the NEW value in %eax.
 * Classic cmpxchg loop: %al holds the value we observed, %cl the
 * desired new value; on a failed cmpxchgb the CPU reloads %al with the
 * current memory contents, so the loop simply retries.
 */
62 	ENTRY(atomic_inc_8_nv)
63 	ALTENTRY(atomic_inc_uchar_nv)
64 	movl 4(%esp), %edx
65 	movb (%edx), %al
66 1:
67 	leal 1(%eax), %ecx		/* %cl = observed + 1 */
68 	lock
69 	cmpxchgb %cl, (%edx)
70 	jne 1b				/* lost the race: %al now has current value */
71 	movzbl %cl, %eax		/* zero-extend new value for the return */
72 	ret
73 	SET_SIZE(atomic_inc_uchar_nv)
74 	SET_SIZE(atomic_inc_8_nv)
75 
/*
 * uint16_t atomic_inc_16_nv(volatile uint16_t *target)
 * ushort_t atomic_inc_ushort_nv(volatile ushort_t *target)
 *
 * 16-bit increment-and-return-new; same cmpxchg retry loop as above.
 */
76 	ENTRY(atomic_inc_16_nv)
77 	ALTENTRY(atomic_inc_ushort_nv)
78 	movl 4(%esp), %edx
79 	movw (%edx), %ax
80 1:
81 	leal 1(%eax), %ecx		/* only %cx is significant */
82 	lock
83 	cmpxchgw %cx, (%edx)
84 	jne 1b
85 	movzwl %cx, %eax
86 	ret
87 	SET_SIZE(atomic_inc_ushort_nv)
88 	SET_SIZE(atomic_inc_16_nv)
89 
/*
 * uint32_t atomic_inc_32_nv(volatile uint32_t *target)
 * uint_t   atomic_inc_uint_nv(volatile uint_t *target)
 * ulong_t  atomic_inc_ulong_nv(volatile ulong_t *target)
 *
 * 32-bit increment-and-return-new via the same cmpxchg retry loop.
 */
90 	ENTRY(atomic_inc_32_nv)
91 	ALTENTRY(atomic_inc_uint_nv)
92 	ALTENTRY(atomic_inc_ulong_nv)
93 	movl 4(%esp), %edx
94 	movl (%edx), %eax
95 1:
96 	leal 1(%eax), %ecx
97 	lock
98 	cmpxchgl %ecx, (%edx)
99 	jne 1b
100 	movl %ecx, %eax
101 	ret
102 	SET_SIZE(atomic_inc_ulong_nv)
103 	SET_SIZE(atomic_inc_uint_nv)
104 	SET_SIZE(atomic_inc_32_nv)
105
106 /*
107 * NOTE: If atomic_inc_64 and atomic_inc_64_nv are ever
108 * separated, you need to also edit the libc i386 platform
109 * specific mapfile and remove the NODYNSORT attribute
110 * from atomic_inc_64_nv.
111 */
/*
 * uint64_t atomic_inc_64_nv(volatile uint64_t *target)
 * void     atomic_inc_64(volatile uint64_t *target)
 *
 * 64-bit atomic increment on i386 via cmpxchg8b.  cmpxchg8b compares
 * %edx:%eax against the 8 bytes at (%edi) and, on match, stores
 * %ecx:%ebx; on mismatch it reloads %edx:%eax with the current value,
 * so the loop retries with a fresh observation.  %ebx:%ecx is rebuilt
 * to (observed + 1) each iteration.  Returns the new value in
 * %edx:%eax.  %edi and %ebx are callee-saved, hence the push/pop.
 */
112 	ENTRY(atomic_inc_64)
113 	ALTENTRY(atomic_inc_64_nv)
114 	pushl %edi
115 	pushl %ebx
116 	movl 12(%esp), %edi		/* arg offset includes the two pushes */
117 	movl (%edi), %eax
118 	movl 4(%edi), %edx
119 1:
120 	xorl %ebx, %ebx
121 	xorl %ecx, %ecx
122 	incl %ebx			/* %ecx:%ebx = 1 */
123 	addl %eax, %ebx
124 	adcl %edx, %ecx			/* %ecx:%ebx = observed + 1, with carry */
125 	lock
126 	cmpxchg8b (%edi)
127 	jne 1b
128 	movl %ebx, %eax
129 	movl %ecx, %edx			/* return new value in %edx:%eax */
130 	popl %ebx
131 	popl %edi
132 	ret
133 	SET_SIZE(atomic_inc_64_nv)
134 	SET_SIZE(atomic_inc_64)
135
/*
 * void atomic_dec_8(volatile uint8_t *target)
 * void atomic_dec_uchar(volatile uchar_t *target)
 *
 * Atomic 8-bit decrement of *target; mirror image of atomic_inc_8.
 */
136 	ENTRY(atomic_dec_8)
137 	ALTENTRY(atomic_dec_uchar)
138 	movl 4(%esp), %eax
139 	lock
140 	decb (%eax)
141 	ret
142 	SET_SIZE(atomic_dec_uchar)
143 	SET_SIZE(atomic_dec_8)
144 
/*
 * void atomic_dec_16(volatile uint16_t *target)
 * void atomic_dec_ushort(volatile ushort_t *target)
 *
 * Atomic 16-bit decrement.
 */
145 	ENTRY(atomic_dec_16)
146 	ALTENTRY(atomic_dec_ushort)
147 	movl 4(%esp), %eax
148 	lock
149 	decw (%eax)
150 	ret
151 	SET_SIZE(atomic_dec_ushort)
152 	SET_SIZE(atomic_dec_16)
153 
/*
 * void atomic_dec_32(volatile uint32_t *target)
 * void atomic_dec_uint(volatile uint_t *target)
 * void atomic_dec_ulong(volatile ulong_t *target)
 *
 * Atomic 32-bit decrement; uint/ulong alias it on ILP32.
 */
154 	ENTRY(atomic_dec_32)
155 	ALTENTRY(atomic_dec_uint)
156 	ALTENTRY(atomic_dec_ulong)
157 	movl 4(%esp), %eax
158 	lock
159 	decl (%eax)
160 	ret
161 	SET_SIZE(atomic_dec_ulong)
162 	SET_SIZE(atomic_dec_uint)
163 	SET_SIZE(atomic_dec_32)
164
/*
 * uint8_t atomic_dec_8_nv(volatile uint8_t *target)
 * uchar_t atomic_dec_uchar_nv(volatile uchar_t *target)
 *
 * Atomically decrement *target and return the NEW value.  Same
 * cmpxchg retry loop as atomic_inc_8_nv, with -1 in the lea.
 */
165 	ENTRY(atomic_dec_8_nv)
166 	ALTENTRY(atomic_dec_uchar_nv)
167 	movl 4(%esp), %edx
168 	movb (%edx), %al
169 1:
170 	leal -1(%eax), %ecx		/* %cl = observed - 1 */
171 	lock
172 	cmpxchgb %cl, (%edx)
173 	jne 1b				/* %al refreshed by failed cmpxchg; retry */
174 	movzbl %cl, %eax
175 	ret
176 	SET_SIZE(atomic_dec_uchar_nv)
177 	SET_SIZE(atomic_dec_8_nv)
178 
/*
 * uint16_t atomic_dec_16_nv(volatile uint16_t *target)
 * ushort_t atomic_dec_ushort_nv(volatile ushort_t *target)
 *
 * 16-bit decrement-and-return-new.
 */
179 	ENTRY(atomic_dec_16_nv)
180 	ALTENTRY(atomic_dec_ushort_nv)
181 	movl 4(%esp), %edx
182 	movw (%edx), %ax
183 1:
184 	leal -1(%eax), %ecx
185 	lock
186 	cmpxchgw %cx, (%edx)
187 	jne 1b
188 	movzwl %cx, %eax
189 	ret
190 	SET_SIZE(atomic_dec_ushort_nv)
191 	SET_SIZE(atomic_dec_16_nv)
192 
/*
 * uint32_t atomic_dec_32_nv(volatile uint32_t *target)
 * uint_t   atomic_dec_uint_nv(volatile uint_t *target)
 * ulong_t  atomic_dec_ulong_nv(volatile ulong_t *target)
 *
 * 32-bit decrement-and-return-new.
 */
193 	ENTRY(atomic_dec_32_nv)
194 	ALTENTRY(atomic_dec_uint_nv)
195 	ALTENTRY(atomic_dec_ulong_nv)
196 	movl 4(%esp), %edx
197 	movl (%edx), %eax
198 1:
199 	leal -1(%eax), %ecx
200 	lock
201 	cmpxchgl %ecx, (%edx)
202 	jne 1b
203 	movl %ecx, %eax
204 	ret
205 	SET_SIZE(atomic_dec_ulong_nv)
206 	SET_SIZE(atomic_dec_uint_nv)
207 	SET_SIZE(atomic_dec_32_nv)
208
209 /*
210 * NOTE: If atomic_dec_64 and atomic_dec_64_nv are ever
211 * separated, it is important to edit the libc i386 platform
212 * specific mapfile and remove the NODYNSORT attribute
213 * from atomic_dec_64_nv.
214 */
/*
 * uint64_t atomic_dec_64_nv(volatile uint64_t *target)
 * void     atomic_dec_64(volatile uint64_t *target)
 *
 * 64-bit atomic decrement via a cmpxchg8b retry loop.  The new value
 * is built by adding -1: %ecx:%ebx is set to 0xFFFFFFFF:FFFFFFFF
 * (xor then not) and added with carry to the observed %edx:%eax.
 * Returns the new value in %edx:%eax.
 */
215 	ENTRY(atomic_dec_64)
216 	ALTENTRY(atomic_dec_64_nv)
217 	pushl %edi
218 	pushl %ebx
219 	movl 12(%esp), %edi
220 	movl (%edi), %eax
221 	movl 4(%edi), %edx
222 1:
223 	xorl %ebx, %ebx
224 	xorl %ecx, %ecx
225 	not %ecx
226 	not %ebx			/* %ecx:%ebx = -1 (64-bit) */
227 	addl %eax, %ebx
228 	adcl %edx, %ecx			/* %ecx:%ebx = observed - 1 */
229 	lock
230 	cmpxchg8b (%edi)
231 	jne 1b				/* %edx:%eax refreshed on failure */
232 	movl %ebx, %eax
233 	movl %ecx, %edx
234 	popl %ebx
235 	popl %edi
236 	ret
237 	SET_SIZE(atomic_dec_64_nv)
238 	SET_SIZE(atomic_dec_64)
239
/*
 * void atomic_add_8(volatile uint8_t *target, int8_t delta)
 * void atomic_add_char(volatile char *target, signed char delta)
 *
 * Atomically add delta (8(%esp)) to the byte at *target (4(%esp)).
 */
240 	ENTRY(atomic_add_8)
241 	ALTENTRY(atomic_add_char)
242 	movl 4(%esp), %eax
243 	movl 8(%esp), %ecx
244 	lock
245 	addb %cl, (%eax)
246 	ret
247 	SET_SIZE(atomic_add_char)
248 	SET_SIZE(atomic_add_8)
249 
/*
 * void atomic_add_16(volatile uint16_t *target, int16_t delta)
 * void atomic_add_short(volatile short *target, short delta)
 *
 * Atomic 16-bit add of delta to *target.
 */
250 	ENTRY(atomic_add_16)
251 	ALTENTRY(atomic_add_short)
252 	movl 4(%esp), %eax
253 	movl 8(%esp), %ecx
254 	lock
255 	addw %cx, (%eax)
256 	ret
257 	SET_SIZE(atomic_add_short)
258 	SET_SIZE(atomic_add_16)
259 
/*
 * void atomic_add_32(volatile uint32_t *target, int32_t delta)
 * void atomic_add_int(volatile int *target, int delta)
 * void atomic_add_ptr(volatile void *target, ssize_t delta)
 * void atomic_add_long(volatile long *target, long delta)
 *
 * Atomic 32-bit add; int/ptr/long all alias it on ILP32.
 */
260 	ENTRY(atomic_add_32)
261 	ALTENTRY(atomic_add_int)
262 	ALTENTRY(atomic_add_ptr)
263 	ALTENTRY(atomic_add_long)
264 	movl 4(%esp), %eax
265 	movl 8(%esp), %ecx
266 	lock
267 	addl %ecx, (%eax)
268 	ret
269 	SET_SIZE(atomic_add_long)
270 	SET_SIZE(atomic_add_ptr)
271 	SET_SIZE(atomic_add_int)
272 	SET_SIZE(atomic_add_32)
273
/*
 * void atomic_or_8(volatile uint8_t *target, uint8_t bits)
 * void atomic_or_uchar(volatile uchar_t *target, uchar_t bits)
 *
 * Atomically OR bits (8(%esp)) into the byte at *target (4(%esp)).
 */
274 	ENTRY(atomic_or_8)
275 	ALTENTRY(atomic_or_uchar)
276 	movl 4(%esp), %eax
277 	movb 8(%esp), %cl
278 	lock
279 	orb %cl, (%eax)
280 	ret
281 	SET_SIZE(atomic_or_uchar)
282 	SET_SIZE(atomic_or_8)
283 
/*
 * void atomic_or_16(volatile uint16_t *target, uint16_t bits)
 * void atomic_or_ushort(volatile ushort_t *target, ushort_t bits)
 *
 * Atomic 16-bit OR.
 */
284 	ENTRY(atomic_or_16)
285 	ALTENTRY(atomic_or_ushort)
286 	movl 4(%esp), %eax
287 	movw 8(%esp), %cx
288 	lock
289 	orw %cx, (%eax)
290 	ret
291 	SET_SIZE(atomic_or_ushort)
292 	SET_SIZE(atomic_or_16)
293 
/*
 * void atomic_or_32(volatile uint32_t *target, uint32_t bits)
 * void atomic_or_uint(volatile uint_t *target, uint_t bits)
 * void atomic_or_ulong(volatile ulong_t *target, ulong_t bits)
 *
 * Atomic 32-bit OR; uint/ulong alias it on ILP32.
 */
294 	ENTRY(atomic_or_32)
295 	ALTENTRY(atomic_or_uint)
296 	ALTENTRY(atomic_or_ulong)
297 	movl 4(%esp), %eax
298 	movl 8(%esp), %ecx
299 	lock
300 	orl %ecx, (%eax)
301 	ret
302 	SET_SIZE(atomic_or_ulong)
303 	SET_SIZE(atomic_or_uint)
304 	SET_SIZE(atomic_or_32)
305
/*
 * void atomic_and_8(volatile uint8_t *target, uint8_t mask)
 * void atomic_and_uchar(volatile uchar_t *target, uchar_t mask)
 *
 * Atomically AND mask (8(%esp)) into the byte at *target (4(%esp)).
 */
306 	ENTRY(atomic_and_8)
307 	ALTENTRY(atomic_and_uchar)
308 	movl 4(%esp), %eax
309 	movb 8(%esp), %cl
310 	lock
311 	andb %cl, (%eax)
312 	ret
313 	SET_SIZE(atomic_and_uchar)
314 	SET_SIZE(atomic_and_8)
315 
/*
 * void atomic_and_16(volatile uint16_t *target, uint16_t mask)
 * void atomic_and_ushort(volatile ushort_t *target, ushort_t mask)
 *
 * Atomic 16-bit AND.
 */
316 	ENTRY(atomic_and_16)
317 	ALTENTRY(atomic_and_ushort)
318 	movl 4(%esp), %eax
319 	movw 8(%esp), %cx
320 	lock
321 	andw %cx, (%eax)
322 	ret
323 	SET_SIZE(atomic_and_ushort)
324 	SET_SIZE(atomic_and_16)
325 
/*
 * void atomic_and_32(volatile uint32_t *target, uint32_t mask)
 * void atomic_and_uint(volatile uint_t *target, uint_t mask)
 * void atomic_and_ulong(volatile ulong_t *target, ulong_t mask)
 *
 * Atomic 32-bit AND; uint/ulong alias it on ILP32.
 */
326 	ENTRY(atomic_and_32)
327 	ALTENTRY(atomic_and_uint)
328 	ALTENTRY(atomic_and_ulong)
329 	movl 4(%esp), %eax
330 	movl 8(%esp), %ecx
331 	lock
332 	andl %ecx, (%eax)
333 	ret
334 	SET_SIZE(atomic_and_ulong)
335 	SET_SIZE(atomic_and_uint)
336 	SET_SIZE(atomic_and_32)
337
/*
 * uint8_t atomic_add_8_nv(volatile uint8_t *target, int8_t delta)
 * char    atomic_add_char_nv(volatile char *target, signed char delta)
 *
 * Atomically add delta to *target and return the NEW value.  The
 * delta is reloaded from 8(%esp) each pass because %ecx is consumed
 * as the candidate new value for cmpxchg; a failed cmpxchgb refreshes
 * %al with the current memory contents for the retry.
 */
338 	ENTRY(atomic_add_8_nv)
339 	ALTENTRY(atomic_add_char_nv)
340 	movl 4(%esp), %edx
341 	movb (%edx), %al
342 1:
343 	movl 8(%esp), %ecx
344 	addb %al, %cl			/* %cl = observed + delta */
345 	lock
346 	cmpxchgb %cl, (%edx)
347 	jne 1b
348 	movzbl %cl, %eax		/* return new value, zero-extended */
349 	ret
350 	SET_SIZE(atomic_add_char_nv)
351 	SET_SIZE(atomic_add_8_nv)
352 
/*
 * uint16_t atomic_add_16_nv(volatile uint16_t *target, int16_t delta)
 * short    atomic_add_short_nv(volatile short *target, short delta)
 *
 * 16-bit add-and-return-new; same retry loop as the 8-bit variant.
 */
353 	ENTRY(atomic_add_16_nv)
354 	ALTENTRY(atomic_add_short_nv)
355 	movl 4(%esp), %edx
356 	movw (%edx), %ax
357 1:
358 	movl 8(%esp), %ecx
359 	addw %ax, %cx
360 	lock
361 	cmpxchgw %cx, (%edx)
362 	jne 1b
363 	movzwl %cx, %eax
364 	ret
365 	SET_SIZE(atomic_add_short_nv)
366 	SET_SIZE(atomic_add_16_nv)
367 
/*
 * uint32_t atomic_add_32_nv(volatile uint32_t *target, int32_t delta)
 * int      atomic_add_int_nv(volatile int *target, int delta)
 * void *   atomic_add_ptr_nv(volatile void *target, ssize_t delta)
 * long     atomic_add_long_nv(volatile long *target, long delta)
 *
 * 32-bit add-and-return-new; int/ptr/long alias it on ILP32.
 */
368 	ENTRY(atomic_add_32_nv)
369 	ALTENTRY(atomic_add_int_nv)
370 	ALTENTRY(atomic_add_ptr_nv)
371 	ALTENTRY(atomic_add_long_nv)
372 	movl 4(%esp), %edx
373 	movl (%edx), %eax
374 1:
375 	movl 8(%esp), %ecx
376 	addl %eax, %ecx
377 	lock
378 	cmpxchgl %ecx, (%edx)
379 	jne 1b
380 	movl %ecx, %eax
381 	ret
382 	SET_SIZE(atomic_add_long_nv)
383 	SET_SIZE(atomic_add_ptr_nv)
384 	SET_SIZE(atomic_add_int_nv)
385 	SET_SIZE(atomic_add_32_nv)
386
387 /*
388 * NOTE: If atomic_add_64 and atomic_add_64_nv are ever
389 * separated, it is important to edit the libc i386 platform
390 * specific mapfile and remove the NODYNSORT attribute
391 * from atomic_add_64_nv.
392 */
/*
 * uint64_t atomic_add_64_nv(volatile uint64_t *target, int64_t delta)
 * void     atomic_add_64(volatile uint64_t *target, int64_t delta)
 *
 * 64-bit atomic add via a cmpxchg8b retry loop.  The 64-bit delta is
 * reloaded from the stack (16/20(%esp), past the two saved registers)
 * each pass and added with carry to the observed %edx:%eax to form the
 * candidate %ecx:%ebx.  Returns the new value in %edx:%eax.
 */
393 	ENTRY(atomic_add_64)
394 	ALTENTRY(atomic_add_64_nv)
395 	pushl %edi
396 	pushl %ebx
397 	movl 12(%esp), %edi
398 	movl (%edi), %eax
399 	movl 4(%edi), %edx
400 1:
401 	movl 16(%esp), %ebx		/* delta, low half */
402 	movl 20(%esp), %ecx		/* delta, high half */
403 	addl %eax, %ebx
404 	adcl %edx, %ecx			/* %ecx:%ebx = observed + delta */
405 	lock
406 	cmpxchg8b (%edi)
407 	jne 1b				/* %edx:%eax refreshed on failure */
408 	movl %ebx, %eax
409 	movl %ecx, %edx
410 	popl %ebx
411 	popl %edi
412 	ret
413 	SET_SIZE(atomic_add_64_nv)
414 	SET_SIZE(atomic_add_64)
415
/*
 * uint8_t atomic_or_8_nv(volatile uint8_t *target, uint8_t bits)
 * uchar_t atomic_or_uchar_nv(volatile uchar_t *target, uchar_t bits)
 *
 * Atomically OR bits into *target and return the NEW value, using the
 * standard cmpxchg retry loop (bits reloaded each pass since %ecx is
 * consumed as the candidate value).
 */
416 	ENTRY(atomic_or_8_nv)
417 	ALTENTRY(atomic_or_uchar_nv)
418 	movl 4(%esp), %edx
419 	movb (%edx), %al
420 1:
421 	movl 8(%esp), %ecx
422 	orb %al, %cl			/* %cl = observed | bits */
423 	lock
424 	cmpxchgb %cl, (%edx)
425 	jne 1b
426 	movzbl %cl, %eax
427 	ret
428 	SET_SIZE(atomic_or_uchar_nv)
429 	SET_SIZE(atomic_or_8_nv)
430 
/*
 * uint16_t atomic_or_16_nv(volatile uint16_t *target, uint16_t bits)
 * ushort_t atomic_or_ushort_nv(volatile ushort_t *target, ushort_t bits)
 *
 * 16-bit OR-and-return-new.
 */
431 	ENTRY(atomic_or_16_nv)
432 	ALTENTRY(atomic_or_ushort_nv)
433 	movl 4(%esp), %edx
434 	movw (%edx), %ax
435 1:
436 	movl 8(%esp), %ecx
437 	orw %ax, %cx
438 	lock
439 	cmpxchgw %cx, (%edx)
440 	jne 1b
441 	movzwl %cx, %eax
442 	ret
443 	SET_SIZE(atomic_or_ushort_nv)
444 	SET_SIZE(atomic_or_16_nv)
445 
/*
 * uint32_t atomic_or_32_nv(volatile uint32_t *target, uint32_t bits)
 * uint_t   atomic_or_uint_nv(volatile uint_t *target, uint_t bits)
 * ulong_t  atomic_or_ulong_nv(volatile ulong_t *target, ulong_t bits)
 *
 * 32-bit OR-and-return-new; uint/ulong alias it on ILP32.
 */
446 	ENTRY(atomic_or_32_nv)
447 	ALTENTRY(atomic_or_uint_nv)
448 	ALTENTRY(atomic_or_ulong_nv)
449 	movl 4(%esp), %edx
450 	movl (%edx), %eax
451 1:
452 	movl 8(%esp), %ecx
453 	orl %eax, %ecx
454 	lock
455 	cmpxchgl %ecx, (%edx)
456 	jne 1b
457 	movl %ecx, %eax
458 	ret
459 	SET_SIZE(atomic_or_ulong_nv)
460 	SET_SIZE(atomic_or_uint_nv)
461 	SET_SIZE(atomic_or_32_nv)
462
463 /*
464 * NOTE: If atomic_or_64 and atomic_or_64_nv are ever
465 * separated, it is important to edit the libc i386 platform
466 * specific mapfile and remove the NODYNSORT attribute
467 * from atomic_or_64_nv.
468 */
/*
 * uint64_t atomic_or_64_nv(volatile uint64_t *target, uint64_t bits)
 * void     atomic_or_64(volatile uint64_t *target, uint64_t bits)
 *
 * 64-bit atomic OR via a cmpxchg8b retry loop.  The 64-bit bit mask
 * is reloaded from 16/20(%esp) each pass and ORed halfwise into the
 * observed %edx:%eax to build the candidate %ecx:%ebx (no carry is
 * needed for OR).  Returns the new value in %edx:%eax.
 */
469 	ENTRY(atomic_or_64)
470 	ALTENTRY(atomic_or_64_nv)
471 	pushl %edi
472 	pushl %ebx
473 	movl 12(%esp), %edi
474 	movl (%edi), %eax
475 	movl 4(%edi), %edx
476 1:
477 	movl 16(%esp), %ebx		/* bits, low half */
478 	movl 20(%esp), %ecx		/* bits, high half */
479 	orl %eax, %ebx
480 	orl %edx, %ecx			/* %ecx:%ebx = observed | bits */
481 	lock
482 	cmpxchg8b (%edi)
483 	jne 1b
484 	movl %ebx, %eax
485 	movl %ecx, %edx
486 	popl %ebx
487 	popl %edi
488 	ret
489 	SET_SIZE(atomic_or_64_nv)
490 	SET_SIZE(atomic_or_64)
491
/*
 * uint8_t atomic_and_8_nv(volatile uint8_t *target, uint8_t mask)
 * uchar_t atomic_and_uchar_nv(volatile uchar_t *target, uchar_t mask)
 *
 * Atomically AND mask into *target and return the NEW value, via the
 * standard cmpxchg retry loop.
 */
492 	ENTRY(atomic_and_8_nv)
493 	ALTENTRY(atomic_and_uchar_nv)
494 	movl 4(%esp), %edx
495 	movb (%edx), %al
496 1:
497 	movl 8(%esp), %ecx
498 	andb %al, %cl			/* %cl = observed & mask */
499 	lock
500 	cmpxchgb %cl, (%edx)
501 	jne 1b
502 	movzbl %cl, %eax
503 	ret
504 	SET_SIZE(atomic_and_uchar_nv)
505 	SET_SIZE(atomic_and_8_nv)
506 
/*
 * uint16_t atomic_and_16_nv(volatile uint16_t *target, uint16_t mask)
 * ushort_t atomic_and_ushort_nv(volatile ushort_t *target, ushort_t mask)
 *
 * 16-bit AND-and-return-new.
 */
507 	ENTRY(atomic_and_16_nv)
508 	ALTENTRY(atomic_and_ushort_nv)
509 	movl 4(%esp), %edx
510 	movw (%edx), %ax
511 1:
512 	movl 8(%esp), %ecx
513 	andw %ax, %cx
514 	lock
515 	cmpxchgw %cx, (%edx)
516 	jne 1b
517 	movzwl %cx, %eax
518 	ret
519 	SET_SIZE(atomic_and_ushort_nv)
520 	SET_SIZE(atomic_and_16_nv)
521 
/*
 * uint32_t atomic_and_32_nv(volatile uint32_t *target, uint32_t mask)
 * uint_t   atomic_and_uint_nv(volatile uint_t *target, uint_t mask)
 * ulong_t  atomic_and_ulong_nv(volatile ulong_t *target, ulong_t mask)
 *
 * 32-bit AND-and-return-new; uint/ulong alias it on ILP32.
 */
522 	ENTRY(atomic_and_32_nv)
523 	ALTENTRY(atomic_and_uint_nv)
524 	ALTENTRY(atomic_and_ulong_nv)
525 	movl 4(%esp), %edx
526 	movl (%edx), %eax
527 1:
528 	movl 8(%esp), %ecx
529 	andl %eax, %ecx
530 	lock
531 	cmpxchgl %ecx, (%edx)
532 	jne 1b
533 	movl %ecx, %eax
534 	ret
535 	SET_SIZE(atomic_and_ulong_nv)
536 	SET_SIZE(atomic_and_uint_nv)
537 	SET_SIZE(atomic_and_32_nv)
538
539 /*
540 * NOTE: If atomic_and_64 and atomic_and_64_nv are ever
541 * separated, it is important to edit the libc i386 platform
542 * specific mapfile and remove the NODYNSORT attribute
543 * from atomic_and_64_nv.
544 */
/*
 * uint64_t atomic_and_64_nv(volatile uint64_t *target, uint64_t mask)
 * void     atomic_and_64(volatile uint64_t *target, uint64_t mask)
 *
 * 64-bit atomic AND via a cmpxchg8b retry loop; the 64-bit mask is
 * reloaded from 16/20(%esp) each pass and ANDed halfwise into the
 * observed %edx:%eax.  Returns the new value in %edx:%eax.
 */
545 	ENTRY(atomic_and_64)
546 	ALTENTRY(atomic_and_64_nv)
547 	pushl %edi
548 	pushl %ebx
549 	movl 12(%esp), %edi
550 	movl (%edi), %eax
551 	movl 4(%edi), %edx
552 1:
553 	movl 16(%esp), %ebx		/* mask, low half */
554 	movl 20(%esp), %ecx		/* mask, high half */
555 	andl %eax, %ebx
556 	andl %edx, %ecx			/* %ecx:%ebx = observed & mask */
557 	lock
558 	cmpxchg8b (%edi)
559 	jne 1b
560 	movl %ebx, %eax
561 	movl %ecx, %edx
562 	popl %ebx
563 	popl %edi
564 	ret
565 	SET_SIZE(atomic_and_64_nv)
566 	SET_SIZE(atomic_and_64)
567
/*
 * uint8_t atomic_cas_8(volatile uint8_t *target, uint8_t cmp, uint8_t new)
 * uchar_t atomic_cas_uchar(volatile uchar_t *, uchar_t, uchar_t)
 *
 * Single-shot compare-and-swap: if *target == cmp, store new; either
 * way return the value that was in *target (cmpxchgb leaves it in
 * %al on failure, and %al already held cmp on success).
 */
568 	ENTRY(atomic_cas_8)
569 	ALTENTRY(atomic_cas_uchar)
570 	movl 4(%esp), %edx
571 	movzbl 8(%esp), %eax		/* cmp, zero-extended for the return */
572 	movb 12(%esp), %cl		/* new */
573 	lock
574 	cmpxchgb %cl, (%edx)
575 	ret
576 	SET_SIZE(atomic_cas_uchar)
577 	SET_SIZE(atomic_cas_8)
578 
/*
 * uint16_t atomic_cas_16(volatile uint16_t *target, uint16_t cmp, uint16_t new)
 * ushort_t atomic_cas_ushort(volatile ushort_t *, ushort_t, ushort_t)
 *
 * 16-bit compare-and-swap; returns the old value of *target.
 */
579 	ENTRY(atomic_cas_16)
580 	ALTENTRY(atomic_cas_ushort)
581 	movl 4(%esp), %edx
582 	movzwl 8(%esp), %eax
583 	movw 12(%esp), %cx
584 	lock
585 	cmpxchgw %cx, (%edx)
586 	ret
587 	SET_SIZE(atomic_cas_ushort)
588 	SET_SIZE(atomic_cas_16)
589 
/*
 * uint32_t atomic_cas_32(volatile uint32_t *target, uint32_t cmp, uint32_t new)
 * uint_t   atomic_cas_uint(volatile uint_t *, uint_t, uint_t)
 * ulong_t  atomic_cas_ulong(volatile ulong_t *, ulong_t, ulong_t)
 * void *   atomic_cas_ptr(volatile void *, void *, void *)
 *
 * 32-bit compare-and-swap; uint/ulong/ptr alias it on ILP32.
 * Returns the old value of *target.
 */
590 	ENTRY(atomic_cas_32)
591 	ALTENTRY(atomic_cas_uint)
592 	ALTENTRY(atomic_cas_ulong)
593 	ALTENTRY(atomic_cas_ptr)
594 	movl 4(%esp), %edx
595 	movl 8(%esp), %eax
596 	movl 12(%esp), %ecx
597 	lock
598 	cmpxchgl %ecx, (%edx)
599 	ret
600 	SET_SIZE(atomic_cas_ptr)
601 	SET_SIZE(atomic_cas_ulong)
602 	SET_SIZE(atomic_cas_uint)
603 	SET_SIZE(atomic_cas_32)
604
/*
 * uint64_t atomic_cas_64(volatile uint64_t *target, uint64_t cmp, uint64_t new)
 *
 * 64-bit compare-and-swap on i386: cmpxchg8b compares %edx:%eax (cmp)
 * with *target and stores %ecx:%ebx (new) on match; on mismatch it
 * loads the current value into %edx:%eax — which is exactly the
 * required return value in both cases.  %esi/%ebx are callee-saved.
 */
605 	ENTRY(atomic_cas_64)
606 	pushl %ebx
607 	pushl %esi
608 	movl 12(%esp), %esi		/* target */
609 	movl 16(%esp), %eax		/* cmp, low */
610 	movl 20(%esp), %edx		/* cmp, high */
611 	movl 24(%esp), %ebx		/* new, low */
612 	movl 28(%esp), %ecx		/* new, high */
613 	lock
614 	cmpxchg8b (%esi)
615 	popl %esi
616 	popl %ebx
617 	ret
618 	SET_SIZE(atomic_cas_64)
619
/*
 * uint8_t atomic_swap_8(volatile uint8_t *target, uint8_t new)
 * uchar_t atomic_swap_uchar(volatile uchar_t *target, uchar_t new)
 *
 * Atomically store new into *target and return the previous value.
 * xchg with a memory operand is implicitly locked; the explicit lock
 * prefix is harmless and kept for clarity.
 */
620 	ENTRY(atomic_swap_8)
621 	ALTENTRY(atomic_swap_uchar)
622 	movl 4(%esp), %edx
623 	movzbl 8(%esp), %eax		/* new value, zero-extended */
624 	lock
625 	xchgb %al, (%edx)		/* %al gets the old value */
626 	ret
627 	SET_SIZE(atomic_swap_uchar)
628 	SET_SIZE(atomic_swap_8)
629 
/*
 * uint16_t atomic_swap_16(volatile uint16_t *target, uint16_t new)
 * ushort_t atomic_swap_ushort(volatile ushort_t *target, ushort_t new)
 *
 * 16-bit atomic exchange; returns the previous value of *target.
 */
630 	ENTRY(atomic_swap_16)
631 	ALTENTRY(atomic_swap_ushort)
632 	movl 4(%esp), %edx
633 	movzwl 8(%esp), %eax
634 	lock
635 	xchgw %ax, (%edx)
636 	ret
637 	SET_SIZE(atomic_swap_ushort)
638 	SET_SIZE(atomic_swap_16)
639 
/*
 * uint32_t atomic_swap_32(volatile uint32_t *target, uint32_t new)
 * uint_t   atomic_swap_uint(volatile uint_t *target, uint_t new)
 * void *   atomic_swap_ptr(volatile void *target, void *new)
 * ulong_t  atomic_swap_ulong(volatile ulong_t *target, ulong_t new)
 *
 * 32-bit atomic exchange; uint/ptr/ulong alias it on ILP32.
 */
640 	ENTRY(atomic_swap_32)
641 	ALTENTRY(atomic_swap_uint)
642 	ALTENTRY(atomic_swap_ptr)
643 	ALTENTRY(atomic_swap_ulong)
644 	movl 4(%esp), %edx
645 	movl 8(%esp), %eax
646 	lock
647 	xchgl %eax, (%edx)
648 	ret
649 	SET_SIZE(atomic_swap_ulong)
650 	SET_SIZE(atomic_swap_ptr)
651 	SET_SIZE(atomic_swap_uint)
652 	SET_SIZE(atomic_swap_32)
653
/*
 * uint64_t atomic_swap_64(volatile uint64_t *target, uint64_t new)
 *
 * 64-bit atomic exchange on i386.  There is no 64-bit xchg, so loop
 * on cmpxchg8b: %ecx:%ebx holds the (fixed) new value; %edx:%eax is
 * seeded with an observation of *target and refreshed by each failed
 * cmpxchg8b.  On success %edx:%eax still holds the old value, which
 * is the return value.
 */
654 	ENTRY(atomic_swap_64)
655 	pushl %esi
656 	pushl %ebx
657 	movl 12(%esp), %esi		/* target */
658 	movl 16(%esp), %ebx		/* new, low half */
659 	movl 20(%esp), %ecx		/* new, high half */
660 	movl (%esi), %eax
661 	movl 4(%esi), %edx
662 1:
663 	lock
664 	cmpxchg8b (%esi)
665 	jne 1b
666 	popl %ebx
667 	popl %esi
668 	ret
669 	SET_SIZE(atomic_swap_64)
670
/*
 * int atomic_set_long_excl(volatile ulong_t *target, uint_t bit)
 *
 * Atomically set bit number `bit` in *target.  Returns 0 if the bit
 * was previously clear (we "own" it now), -1 if it was already set.
 * btsl sets CF to the bit's old value; jnc skips the decl so %eax
 * stays 0 on success.
 */
671 	ENTRY(atomic_set_long_excl)
672 	movl 4(%esp), %edx
673 	movl 8(%esp), %ecx
674 	xorl %eax, %eax			/* assume success (return 0) */
675 	lock
676 	btsl %ecx, (%edx)		/* CF = old bit value */
677 	jnc 1f
678 	decl %eax			/* bit was set: return -1 */
679 1:
680 	ret
681 	SET_SIZE(atomic_set_long_excl)
682 
/*
 * int atomic_clear_long_excl(volatile ulong_t *target, uint_t bit)
 *
 * Atomically clear bit number `bit` in *target.  Returns 0 if the bit
 * was previously set (we released it), -1 if it was already clear.
 */
683 	ENTRY(atomic_clear_long_excl)
684 	movl 4(%esp), %edx
685 	movl 8(%esp), %ecx
686 	xorl %eax, %eax
687 	lock
688 	btrl %ecx, (%edx)		/* CF = old bit value */
689 	jc 1f				/* bit was set: success, return 0 */
690 	decl %eax			/* bit was already clear: return -1 */
691 1:
692 	ret
693 	SET_SIZE(atomic_clear_long_excl)
694
695 /*
696 * NOTE: membar_enter, membar_exit, membar_producer, and
697 * membar_consumer are all identical routines. We define them
698 * separately, instead of using ALTENTRY definitions to alias them
699 * together, so that DTrace and debuggers will see a unique address
700 * for them, allowing more accurate tracing.
701 */
702
703
/*
 * All four membars use a locked no-op RMW on the top of the stack
 * (xorl $0 leaves the value unchanged); on x86 a LOCKed instruction
 * is a full memory barrier, which is sufficient for every barrier
 * flavor.  They are kept as separate bodies rather than aliases (see
 * the NOTE above) so each has a unique address.
 */
704 	ENTRY(membar_enter)
705 	lock
706 	xorl $0, (%esp)			/* locked no-op = full fence */
707 	ret
708 	SET_SIZE(membar_enter)
709 
710 	ENTRY(membar_exit)
711 	lock
712 	xorl $0, (%esp)
713 	ret
714 	SET_SIZE(membar_exit)
715 
716 	ENTRY(membar_producer)
717 	lock
718 	xorl $0, (%esp)
719 	ret
720 	SET_SIZE(membar_producer)
721 
722 	ENTRY(membar_consumer)
723 	lock
724 	xorl $0, (%esp)
725 	ret
726 	SET_SIZE(membar_consumer)
727
728 #ifdef __ELF__
729 .section .note.GNU-stack,"",%progbits
730 #endif