]> git.proxmox.com Git - mirror_edk2.git/blame - UefiCpuPkg/CpuDxe/X64/CpuAsm.S
A complement fix for revision 11664 to update GCC assembly files : clear the directio...
[mirror_edk2.git] / UefiCpuPkg / CpuDxe / X64 / CpuAsm.S
CommitLineData
c2fd60f0 1# TITLE CpuAsm.S: \r
2\r
3#------------------------------------------------------------------------------\r
4#*\r
24f7e42c 5#* Copyright (c) 2008 - 2011, Intel Corporation. All rights reserved.<BR>\r
01a1c0fc 6#* This program and the accompanying materials\r
c2fd60f0 7#* are licensed and made available under the terms and conditions of the BSD License\r
8#* which accompanies this distribution. The full text of the license may be found at\r
9#* http://opensource.org/licenses/bsd-license.php\r
10#*\r
11#* THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
12#* WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
13#*\r
14#* CpuAsm.S\r
15#*\r
16#* Abstract:\r
17#*\r
18#------------------------------------------------------------------------------\r
19\r
20\r
21#text SEGMENT\r
22\r
23\r
24#EXTRN ASM_PFX(mErrorCodeFlag):DWORD # Error code flags for exceptions\r
25\r
26\r
27#\r
28# point to the external interrupt vector table\r
29#\r
ExternalVectorTablePtr:
    .quad   0                            # 8-byte slot holding the table pointer

#------------------------------------------------------------------------------
# VOID
# InitializeExternalVectorTablePtr (
#   VOID  *VectorTable     // pointer to the external interrupt vector table
#   );
# Records the caller-supplied table pointer (first argument in RCX per the
# UEFI / Microsoft x64 calling convention) into ExternalVectorTablePtr.
# CommonInterruptEntry later indexes this table by vector number.
#------------------------------------------------------------------------------
ASM_GLOBAL ASM_PFX(InitializeExternalVectorTablePtr)
ASM_PFX(InitializeExternalVectorTablePtr):
    movq    %rcx, ExternalVectorTablePtr(%rip)   # save the table pointer
    ret
38\r
39\r
40#------------------------------------------------------------------------------\r
41# VOID\r
42# SetCodeSelector (\r
43# UINT16 Selector\r
44# );\r
45#------------------------------------------------------------------------------\r
#------------------------------------------------------------------------------
# VOID
# SetCodeSelector (
#   UINT16 Selector        // in CX (MS x64 ABI)
#   );
# Reloads CS with the given selector.  CS cannot be written directly; instead
# a far pointer {offset, selector} is built on the stack and a far jump is
# taken through it, landing on the very next label with the new CS in force.
#------------------------------------------------------------------------------
ASM_GLOBAL ASM_PFX(SetCodeSelector)
ASM_PFX(SetCodeSelector):
    subq $0x10, %rsp                     # scratch space for the far pointer
    leaq setCodeSelectorLongJump(%rip), %rax
    movq %rax, (%rsp)                    # offset at (%rsp); the selector word
                                         # below overwrites bytes 4-5
    movw %cx, 4(%rsp)                    # selector at 4(%rsp) => m16:32 layout
    .byte 0xFF, 0x2C, 0x24 # jmp (%rsp) note:fword jmp
                                         # hand-encoded "jmp FAR [rsp]" (FF /5);
                                         # no REX.W, so it reads a m16:32 far
                                         # pointer — assumes the target address
                                         # fits in 32 bits (identity-mapped low
                                         # memory) — NOTE(review): confirm
setCodeSelectorLongJump:
    addq $0x10, %rsp                     # discard the far-pointer scratch
    ret
56\r
57#------------------------------------------------------------------------------\r
58# VOID\r
59# SetDataSelectors (\r
60# UINT16 Selector\r
61# );\r
62#------------------------------------------------------------------------------\r
#------------------------------------------------------------------------------
# VOID
# SetDataSelectors (
#   UINT16 Selector        // in CX (MS x64 ABI)
#   );
# Loads the same selector into all data segment registers.  SS is written
# first on purpose: a mov to SS inhibits interrupts/debug exceptions for one
# instruction, so do not reorder these loads.
#------------------------------------------------------------------------------
ASM_GLOBAL ASM_PFX(SetDataSelectors)
ASM_PFX(SetDataSelectors):
    movw %cx, %ss                        # SS first — see note above
    movw %cx, %ds
    movw %cx, %es
    movw %cx, %fs
    movw %cx, %gs
    ret
71\r
72#---------------------------------------;\r
73# CommonInterruptEntry ;\r
74#---------------------------------------;\r
75# The follow algorithm is used for the common interrupt routine.\r
76\r
ASM_GLOBAL ASM_PFX(CommonInterruptEntry)
ASM_PFX(CommonInterruptEntry):
    cli
    #
    # All interrupt handlers are invoked through interrupt gates, so
    # IF flag automatically cleared at the entry point
    #
    #
    # Calculate vector number.  Each per-vector stub does
    # "call CommonInterruptEntry" with its 16-bit vector number stored right
    # after the call, so the return address on the stack points at the vector
    # number.  The xchg also parks the interrupted RCX in that stack slot.
    #
    xchgq   (%rsp), %rcx                 # RCX = &vector; (%rsp) = old RCX
    movzwl  (%rcx), %ecx                 # ECX = vector number, zero-extended
    cmp     $32, %ecx                    # Intel reserved vector for exceptions?
    jae     NoErrorCode
    pushq   %rax
    leaq    ASM_PFX(mErrorCodeFlag)(%rip), %rax
    bt      %ecx, (%rax)                 # CF=1 if CPU pushed an error code
    popq    %rax
    jc      CommonInterruptEntry_al_0000

NoErrorCode:

    #
    # Push a dummy error code on the stack
    # to maintain coherent stack map
    #
    pushq   (%rsp)                       # duplicate old-RCX slot downward
    movq    $0, 8(%rsp)                  # and zero the error-code slot
CommonInterruptEntry_al_0000:
    pushq   %rbp
    movq    %rsp, %rbp

    #
    # Stack:
    # +---------------------+ <-- 16-byte aligned ensured by processor
    # + Old SS              +
    # +---------------------+
    # + Old RSP             +
    # +---------------------+
    # + RFlags              +
    # +---------------------+
    # + CS                  +
    # +---------------------+
    # + RIP                 +
    # +---------------------+
    # + Error Code          +
    # +---------------------+
    # + RCX / Vector Number +
    # +---------------------+
    # + RBP                 +
    # +---------------------+ <-- RBP, 16-byte aligned
    #

    #
    # Since here the stack pointer is 16-byte aligned, so
    # EFI_FX_SAVE_STATE_X64 of EFI_SYSTEM_CONTEXT_x64
    # is 16-byte aligned
    #

#; UINT64  Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;
#; UINT64  R8, R9, R10, R11, R12, R13, R14, R15;
    pushq   %r15
    pushq   %r14
    pushq   %r13
    pushq   %r12
    pushq   %r11
    pushq   %r10
    pushq   %r9
    pushq   %r8
    pushq   %rax
    pushq   8(%rbp)                      # interrupted RCX (saved by xchg above)
    pushq   %rdx
    pushq   %rbx
    pushq   48(%rbp)                     # interrupted RSP
    pushq   (%rbp)                       # interrupted RBP
    pushq   %rsi
    pushq   %rdi

#; UINT64  Gs, Fs, Es, Ds, Cs, Ss;  insure high 16 bits of each is zero
    movzwq  56(%rbp), %rax
    pushq   %rax                         # for ss
    movzwq  32(%rbp), %rax
    pushq   %rax                         # for cs
    movq    %ds, %rax
    pushq   %rax
    movq    %es, %rax
    pushq   %rax
    movq    %fs, %rax
    pushq   %rax
    movq    %gs, %rax
    pushq   %rax

    movq    %rcx, 8(%rbp)                # save vector number over the RCX slot

#; UINT64  Rip;
    pushq   24(%rbp)

#; UINT64  Gdtr[2], Idtr[2];
    xorq    %rax, %rax
    pushq   %rax
    pushq   %rax
    sidt    (%rsp)
    xchgq   2(%rsp), %rax                # repack the 10-byte IDTR image
    xchgq   (%rsp), %rax                 # into two zero-padded quads
    xchgq   8(%rsp), %rax

    xorq    %rax, %rax
    pushq   %rax
    pushq   %rax
    sgdt    (%rsp)
    xchgq   2(%rsp), %rax                # repack the 10-byte GDTR image
    xchgq   (%rsp), %rax                 # into two zero-padded quads
    xchgq   8(%rsp), %rax

#; UINT64  Ldtr, Tr;
    xorq    %rax, %rax
    str     %ax
    pushq   %rax                         # Tr
    sldt    %ax
    pushq   %rax                         # Ldtr

#; UINT64  RFlags;
    pushq   40(%rbp)

#; UINT64  Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;
    movq    %cr8, %rax
    pushq   %rax
    movq    %cr4, %rax
    orq     $0x208, %rax                 # set CR4.OSFXSR|CR4.DE so fxsave below
    movq    %rax, %cr4                   # is usable
    pushq   %rax
    mov     %cr3, %rax
    pushq   %rax
    mov     %cr2, %rax
    pushq   %rax
    xorq    %rax, %rax                   # CR1 does not exist; record zero
    pushq   %rax
    mov     %cr0, %rax
    pushq   %rax

#; UINT64  Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;
    movq    %dr7, %rax
    pushq   %rax
    movq    %dr6, %rax
    pushq   %rax
    movq    %dr3, %rax
    pushq   %rax
    movq    %dr2, %rax
    pushq   %rax
    movq    %dr1, %rax
    pushq   %rax
    movq    %dr0, %rax
    pushq   %rax

#; FX_SAVE_STATE_X64  FxSaveState;
    subq    $512, %rsp
    movq    %rsp, %rdi
    .byte   0x0f, 0x0ae, 0x07            # fxsave [rdi]

#; UEFI calling convention for x64 requires that Direction flag in EFLAGs is clear
    cld

#; UINT32  ExceptionData;
    pushq   16(%rbp)

#; call into exception handler
    movq    8(%rbp), %rcx                # RCX = vector number (1st argument)
    leaq    ExternalVectorTablePtr(%rip), %rax
    movq    (%rax), %rax                 # load full 64-bit table pointer
                                         # (fix: was "movl (%eax), %eax", which
                                         # truncated both the address and the
                                         # loaded pointer to 32 bits)
    movq    (%rax,%rcx,8), %rax          # RAX = table[vector]
    orq     %rax, %rax                   # NULL handler?

    je      SkipHandlerCall              # no registered handler -> skip call

#; Prepare parameter and call
    mov     %rsp, %rdx                   # RDX = SystemContext (2nd argument)
    #
    # Per X64 calling convention, allocate maximum parameter stack space
    # and make sure RSP is 16-byte aligned
    #
    subq    $40, %rsp                    # 32-byte shadow space + 8 alignment
    call    *%rax
    addq    $40, %rsp

SkipHandlerCall:
    cli
#; UINT64  ExceptionData;
    addq    $8, %rsp

#; FX_SAVE_STATE_X64  FxSaveState;

    movq    %rsp, %rsi
    .byte   0x0f, 0x0ae, 0x0E            # fxrstor [rsi]
    addq    $512, %rsp

#; UINT64  Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;
#; Skip restoration of DRx registers to support in-circuit emualators
#; or debuggers set breakpoint in interrupt/exception context
    addq    $48, %rsp

#; UINT64  Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;
    popq    %rax
    movq    %rax, %cr0
    addq    $8, %rsp                     # not for Cr1
    popq    %rax
    movq    %rax, %cr2
    popq    %rax
    movq    %rax, %cr3
    popq    %rax
    movq    %rax, %cr4
    popq    %rax
    movq    %rax, %cr8

#; UINT64  RFlags;
    popq    40(%rbp)                     # write back into the iret frame

#; UINT64  Ldtr, Tr;
#; UINT64  Gdtr[2], Idtr[2];
#; Best not let anyone mess with these particular registers...
    addq    $48, %rsp

#; UINT64  Rip;
    popq    24(%rbp)                     # write back into the iret frame

#; UINT64  Gs, Fs, Es, Ds, Cs, Ss;
    popq    %rax
    # mov   %rax, %gs ; not for gs
    popq    %rax
    # mov   %rax, %fs ; not for fs
    # (X64 will not use fs and gs, so we do not restore it)
    popq    %rax
    movq    %rax, %es
    popq    %rax
    movq    %rax, %ds
    popq    32(%rbp)                     # cs: restored by iretq below
    popq    56(%rbp)                     # ss: restored by iretq below

#; UINT64  Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;
#; UINT64  R8, R9, R10, R11, R12, R13, R14, R15;
    popq    %rdi
    popq    %rsi
    addq    $8, %rsp                     # not for rbp
    popq    48(%rbp)                     # rsp: restored by iretq below
    popq    %rbx
    popq    %rdx
    popq    %rcx
    popq    %rax
    popq    %r8
    popq    %r9
    popq    %r10
    popq    %r11
    popq    %r12
    popq    %r13
    popq    %r14
    popq    %r15

    movq    %rbp, %rsp
    popq    %rbp
    addq    $16, %rsp                    # drop vector number and error code
    iretq
339\r
340\r
341#text ENDS\r
342\r
343#END\r
344\r
345\r