/*
 * x86 semaphore implementation.
 *
 * (C) Copyright 1999 Linus Torvalds
 *
 * Portions Copyright 1999 Red Hat, Inc.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 *
 * rw semaphores implemented November 1999 by Benjamin LaHaise <bcrl@kvack.org>
 */

#include <linux/linkage.h>
#include <asm/alternative-asm.h>
#include <asm/frame.h>

/*
 * NOTE(review): __ASM_SEL() is defined elsewhere (<asm/asm.h>); it appears
 * to select its first operand on 32-bit builds and its second on 64-bit
 * builds -- confirm there.  Under that reading these expand to the
 * "half of native word size" register / operand-size spelling for the
 * current bitness, e.g.:
 *
 *   __ASM_HALF_REG(dx)   -> "dx"  (32-bit) or "edx" (64-bit)
 *   __ASM_HALF_SIZE(dec) -> "decw" (32-bit) or "decl" (64-bit)
 */
#define __ASM_HALF_REG(reg)	__ASM_SEL(reg, e##reg)
#define __ASM_HALF_SIZE(inst)	__ASM_SEL(inst##w, inst##l)
#ifdef CONFIG_X86_32

/*
 * The semaphore operations have a special calling sequence that
 * allow us to do a simpler in-line version of them. These routines
 * need to convert that sequence back into the C sequence when
 * there is contention on the semaphore.
 *
 * %eax contains the semaphore pointer on entry. Save the C-clobbered
 * registers (%eax, %edx and %ecx) except %eax which is either a return
 * value or just gets clobbered. Same is true for %edx so make sure GCC
 * reloads it after the slow path, by making it hold a temporary, for
 * example see ____down_write().
 */

/* 32-bit: only %ecx needs to survive the C call (see comment above). */
#define save_common_regs \
	pushl %ecx

#define restore_common_regs \
	popl %ecx

/*
 * Avoid uglifying the argument copying x86-64 needs to do.
 * This empty macro makes the "movq %rax,%rdi" lines in the shared
 * wrapper bodies below expand to nothing on 32-bit builds, where the
 * sem pointer is already in %eax.
 */
.macro movq src, dst
.endm

#else

/*
 * x86-64 rwsem wrappers
 *
 * This interfaces the inline asm code to the slow-path
 * C routines. We need to save the call-clobbered regs
 * that the asm does not mark as clobbered, and move the
 * argument from %rax to %rdi.
 *
 * NOTE! We don't need to save %rax, because the functions
 * will always return the semaphore pointer in %rax (which
 * is also the input argument to these helpers)
 *
 * The following can clobber %rdx because the asm clobbers it:
 *	call_rwsem_down_write_failed
 *	call_rwsem_wake
 * but %rdi, %rsi, %rcx, %r8-r11 always need saving.
 */

#define save_common_regs \
	pushq %rdi; \
	pushq %rsi; \
	pushq %rcx; \
	pushq %r8;  \
	pushq %r9;  \
	pushq %r10; \
	pushq %r11

/* Must mirror save_common_regs exactly, in reverse order. */
#define restore_common_regs \
	popq %r11; \
	popq %r10; \
	popq %r9; \
	popq %r8; \
	popq %rcx; \
	popq %rsi; \
	popq %rdi

#endif
/* Fix up special calling conventions */
ENTRY(call_rwsem_down_read_failed)
	/*
	 * down_read() slow path: sem pointer arrives in %eax/%rax.
	 * %edx/%rdx holds a live caller temporary that save_common_regs
	 * does not cover (see the comments above), so it is preserved
	 * explicitly with a native-word-size push/pop around the C call.
	 */
	FRAME_BEGIN
	save_common_regs
	__ASM_SIZE(push,) %__ASM_REG(dx)
	movq %rax,%rdi			/* arg0 = sem (no-op on 32-bit) */
	call rwsem_down_read_failed
	__ASM_SIZE(pop,) %__ASM_REG(dx)
	restore_common_regs
	FRAME_END
	ret
ENDPROC(call_rwsem_down_read_failed)
ENTRY(call_rwsem_down_write_failed)
	/*
	 * down_write() slow path: sem pointer arrives in %eax/%rax.
	 * Unlike the down_read wrapper, %rdx is NOT preserved here --
	 * the inline asm at the call site marks it clobbered for this
	 * helper (see the x86-64 comment block above).
	 */
	FRAME_BEGIN
	save_common_regs
	movq %rax,%rdi			/* arg0 = sem (no-op on 32-bit) */
	call rwsem_down_write_failed
	restore_common_regs
	FRAME_END
	ret
ENDPROC(call_rwsem_down_write_failed)
bafaecd1 | 110 | |
664b4e24 | 111 | ENTRY(call_rwsem_down_write_failed_killable) |
00fb16e2 | 112 | FRAME_BEGIN |
664b4e24 MH |
113 | save_common_regs |
114 | movq %rax,%rdi | |
115 | call rwsem_down_write_failed_killable | |
116 | restore_common_regs | |
00fb16e2 | 117 | FRAME_END |
664b4e24 MH |
118 | ret |
119 | ENDPROC(call_rwsem_down_write_failed_killable) | |
120 | ||
bafaecd1 | 121 | ENTRY(call_rwsem_wake) |
3387a535 | 122 | FRAME_BEGIN |
a7386694 JB |
123 | /* do nothing if still outstanding active readers */ |
124 | __ASM_HALF_SIZE(dec) %__ASM_HALF_REG(dx) | |
bafaecd1 LT |
125 | jnz 1f |
126 | save_common_regs | |
127 | movq %rax,%rdi | |
128 | call rwsem_wake | |
129 | restore_common_regs | |
3387a535 JP |
130 | 1: FRAME_END |
131 | ret | |
39f2205e | 132 | ENDPROC(call_rwsem_wake) |
bafaecd1 | 133 | |
bafaecd1 | 134 | ENTRY(call_rwsem_downgrade_wake) |
3387a535 | 135 | FRAME_BEGIN |
bafaecd1 | 136 | save_common_regs |
131484c8 | 137 | __ASM_SIZE(push,) %__ASM_REG(dx) |
bafaecd1 LT |
138 | movq %rax,%rdi |
139 | call rwsem_downgrade_wake | |
131484c8 | 140 | __ASM_SIZE(pop,) %__ASM_REG(dx) |
bafaecd1 | 141 | restore_common_regs |
3387a535 | 142 | FRAME_END |
bafaecd1 | 143 | ret |
39f2205e | 144 | ENDPROC(call_rwsem_downgrade_wake) |