#------------------------------------------------------------------------------ ;
# Copyright (c) 2012 - 2017, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution.  The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php.
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
# Module Name:
#
#   ExceptionHandlerAsm.S
#
# Abstract:
#
#   x64 CPU Exception Handler
#
# Notes:
#
#------------------------------------------------------------------------------

ASM_GLOBAL ASM_PFX(CommonExceptionHandler)

#EXTRN ASM_PFX(mErrorCodeFlag):DWORD      # Error code flags for exceptions
#EXTRN ASM_PFX(mDoFarReturnFlag):QWORD    # Do far return flag
.text

#ifdef __APPLE__
# Macro argument syntax differs between GNU as and the Xcode (cctools) as:
# Xcode as refers to the first macro argument as $0.
.macro IDT_MACRO
        push    $0
#else
.macro IDT_MACRO arg
        push    \arg
#endif
# Each stub pushes its vector number, then jumps to the common entry point.
        .byte   0xe9                    # jmp ASM_PFX(CommonInterruptEntry)
        .long   ASM_PFX(CommonInterruptEntry) - . - 4
.endm
42 | \r | |
#------------------------------------------------------------------------------
# Template IDT stubs: one fixed-size entry per exception vector 0..31.
# Each entry pushes its vector number and jumps to CommonInterruptEntry.
#------------------------------------------------------------------------------
AsmIdtVectorBegin:
        IDT_MACRO       $0
        IDT_MACRO       $1
        IDT_MACRO       $2
        IDT_MACRO       $3
        IDT_MACRO       $4
        IDT_MACRO       $5
        IDT_MACRO       $6
        IDT_MACRO       $7
        IDT_MACRO       $8
        IDT_MACRO       $9
        IDT_MACRO       $10
        IDT_MACRO       $11
        IDT_MACRO       $12
        IDT_MACRO       $13
        IDT_MACRO       $14
        IDT_MACRO       $15
        IDT_MACRO       $16
        IDT_MACRO       $17
        IDT_MACRO       $18
        IDT_MACRO       $19
        IDT_MACRO       $20
        IDT_MACRO       $21
        IDT_MACRO       $22
        IDT_MACRO       $23
        IDT_MACRO       $24
        IDT_MACRO       $25
        IDT_MACRO       $26
        IDT_MACRO       $27
        IDT_MACRO       $28
        IDT_MACRO       $29
        IDT_MACRO       $30
        IDT_MACRO       $31
AsmIdtVectorEnd:
77 | \r | |
#------------------------------------------------------------------------------
# Hook-after stub template. AsmVectorNumFixup patches the vector number and
# the jump displacement when the template is copied for a specific vector.
#------------------------------------------------------------------------------
HookAfterStubHeaderBegin:
        .byte   0x6a                    # push imm8
PatchVectorNum:
        .byte   0                       # placeholder: patched to the vector number
        .byte   0xe9                    # jmp ASM_PFX(HookAfterStubHeaderEnd)
PatchFuncAddress:
        .set    HOOK_ADDRESS, ASM_PFX(HookAfterStubHeaderEnd) - . - 4
        .long   HOOK_ADDRESS            # placeholder: patched to the copied stub's offset
ASM_GLOBAL ASM_PFX(HookAfterStubHeaderEnd)
ASM_PFX(HookAfterStubHeaderEnd):
        pushq   %rax
        movq    %rsp, %rax              # rax = old RSP ((%rax)=saved rax, 8(%rax)=vector)
        # BUGFIX: was "andl $0x0fffffff0, %esp". A 32-bit write to ESP
        # zero-extends into RSP, clearing RSP[63:32] and corrupting the stack
        # pointer whenever the stack lives above 4 GiB. Use a 64-bit AND with
        # the sign-extended immediate -16 (0xFFFFFFFFFFFFFFF0) instead; the
        # result is identical for stacks below 4 GiB.
        andq    $-16, %rsp              # make sure 16-byte aligned for exception context
        subq    $0x18, %rsp             # reserve room for filling exception data later
        pushq   %rcx
        movq    8(%rax), %rcx           # rcx = vector number pushed by the stub
        bt      %ecx, ASM_PFX(mErrorCodeFlag)(%rip)  # does this vector push an error code?
        jnc     NoErrorData
        pushq   (%rsp)                  # push additional rcx to make stack alignment
NoErrorData:
        xchgq   (%rsp), %rcx            # restore rcx, save Exception Number in stack
        movq    (%rax), %rax            # restore rax

#---------------------------------------;
# CommonInterruptEntry                  ;
#---------------------------------------;
# Common interrupt/exception entry: builds an EFI_SYSTEM_CONTEXT_X64 on the
# stack, calls CommonExceptionHandler(VectorNum, Context), then restores the
# context and returns via IRET, a far return, or a chained old IDT handler.

ASM_GLOBAL ASM_PFX(CommonInterruptEntry)
ASM_PFX(CommonInterruptEntry):
        cli
        #
        # All interrupt handlers are invoked through interrupt gates, so
        # IF flag automatically cleared at the entry point
        #
        #
        # Calculate vector number
        #
        xchgq   (%rsp), %rcx            # return address of call == address of the vector number byte
        andq    $0x0FF, %rcx
        cmp     $32, %ecx               # Intel reserved vector for exceptions?
        jae     NoErrorCode
        pushq   %rax
        movl    ASM_PFX(mErrorCodeFlag)(%rip), %eax
        bt      %ecx, %eax              # CF = "this vector pushes an error code"
        popq    %rax
        jc      CommonInterruptEntry_al_0000

NoErrorCode:

        #
        # Push a dummy error code on the stack
        # to maintain coherent stack map
        #
        pushq   (%rsp)
        movq    $0, 8(%rsp)
CommonInterruptEntry_al_0000:
        pushq   %rbp
        movq    %rsp, %rbp
        pushq   $0                      # clear EXCEPTION_HANDLER_CONTEXT.OldIdtHandler
        pushq   $0                      # clear EXCEPTION_HANDLER_CONTEXT.ExceptionDataFlag

        #
        # Stack:
        # +---------------------+ <-- 16-byte aligned ensured by processor
        # + Old SS              +
        # +---------------------+
        # + Old RSP             +
        # +---------------------+
        # + RFlags              +
        # +---------------------+
        # + CS                  +
        # +---------------------+
        # + RIP                 +
        # +---------------------+
        # + Error Code          +
        # +---------------------+
        # + RCX / Vector Number +
        # +---------------------+
        # + RBP                 +
        # +---------------------+ <-- RBP, 16-byte aligned
        #

        #
        # Since here the stack pointer is 16-byte aligned, so
        # EFI_FX_SAVE_STATE_X64 of EFI_SYSTEM_CONTEXT_x64
        # is 16-byte aligned
        #

        #; UINT64  Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;
        #; UINT64  R8, R9, R10, R11, R12, R13, R14, R15;
        pushq   %r15
        pushq   %r14
        pushq   %r13
        pushq   %r12
        pushq   %r11
        pushq   %r10
        pushq   %r9
        pushq   %r8
        pushq   %rax
        pushq   8(%rbp)                 # RCX (original rcx was swapped onto the frame)
        pushq   %rdx
        pushq   %rbx
        pushq   48(%rbp)                # RSP at time of interrupt
        pushq   (%rbp)                  # RBP at time of interrupt
        pushq   %rsi
        pushq   %rdi

        #; UINT64  Gs, Fs, Es, Ds, Cs, Ss;  insure high 16 bits of each is zero
        movzwq  56(%rbp), %rax
        pushq   %rax                    # for ss
        movzwq  32(%rbp), %rax
        pushq   %rax                    # for cs
        mov     %ds, %rax
        pushq   %rax
        mov     %es, %rax
        pushq   %rax
        mov     %fs, %rax
        pushq   %rax
        mov     %gs, %rax
        pushq   %rax

        movq    %rcx, 8(%rbp)           # save vector number into the frame

        #; UINT64  Rip;
        pushq   24(%rbp)

        #; UINT64  Gdtr[2], Idtr[2];
        xorq    %rax, %rax
        pushq   %rax
        pushq   %rax
        sidt    (%rsp)
        xchgq   2(%rsp), %rax           # rotate the 10-byte descriptor into two qwords
        xchgq   (%rsp), %rax
        xchgq   8(%rsp), %rax

        xorq    %rax, %rax
        pushq   %rax
        pushq   %rax
        sgdt    (%rsp)
        xchgq   2(%rsp), %rax
        xchgq   (%rsp), %rax
        xchgq   8(%rsp), %rax

        #; UINT64  Ldtr, Tr;
        xorq    %rax, %rax
        str     %ax
        pushq   %rax
        sldt    %ax
        pushq   %rax

        #; UINT64  RFlags;
        pushq   40(%rbp)

        #; UINT64  Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;
        movq    %cr8, %rax
        pushq   %rax
        movq    %cr4, %rax
        orq     $0x208, %rax            # set CR4.OSFXSR and CR4.DE for fxsave/debug
        movq    %rax, %cr4
        pushq   %rax
        mov     %cr3, %rax
        pushq   %rax
        mov     %cr2, %rax
        pushq   %rax
        xorq    %rax, %rax              # CR1 does not exist; store zero
        pushq   %rax
        mov     %cr0, %rax
        pushq   %rax

        #; UINT64  Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;
        movq    %dr7, %rax
        pushq   %rax
        movq    %dr6, %rax
        pushq   %rax
        movq    %dr3, %rax
        pushq   %rax
        movq    %dr2, %rax
        pushq   %rax
        movq    %dr1, %rax
        pushq   %rax
        movq    %dr0, %rax
        pushq   %rax

        #; FX_SAVE_STATE_X64 FxSaveState;
        subq    $512, %rsp
        movq    %rsp, %rdi
        .byte   0x0f, 0x0ae, 0x07       # fxsave [rdi]

        #; UEFI calling convention for x64 requires that Direction flag in EFLAGs is clear
        cld

        #; UINT32  ExceptionData;
        pushq   16(%rbp)

        #; Prepare parameter and call
        mov     8(%rbp), %rcx           # arg1: vector number
        mov     %rsp, %rdx              # arg2: pointer to system context
        #
        # Per X64 calling convention, allocate maximum parameter stack space
        # and make sure RSP is 16-byte aligned
        #
        subq    $40, %rsp               # 32-byte shadow space + 8 for alignment
        call    ASM_PFX(CommonExceptionHandler)
        addq    $40, %rsp

        cli
        #; UINT64  ExceptionData;
        addq    $8, %rsp

        #; FX_SAVE_STATE_X64 FxSaveState;
        movq    %rsp, %rsi
        .byte   0x0f, 0x0ae, 0x0E       # fxrstor [rsi]
        addq    $512, %rsp

        #; UINT64  Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;
        #; Skip restoration of DRx registers to support in-circuit emualators
        #; or debuggers set breakpoint in interrupt/exception context
        addq    $48, %rsp

        #; UINT64  Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;
        popq    %rax
        movq    %rax, %cr0
        addq    $8, %rsp                # not for Cr1
        popq    %rax
        movq    %rax, %cr2
        popq    %rax
        movq    %rax, %cr3
        popq    %rax
        movq    %rax, %cr4
        popq    %rax
        movq    %rax, %cr8

        #; UINT64  RFlags;
        popq    40(%rbp)

        #; UINT64  Ldtr, Tr;
        #; UINT64  Gdtr[2], Idtr[2];
        #; Best not let anyone mess with these particular registers...
        addq    $48, %rsp

        #; UINT64  Rip;
        popq    24(%rbp)

        #; UINT64  Gs, Fs, Es, Ds, Cs, Ss;
        popq    %rax
        # mov   %rax, %gs ; not for gs
        popq    %rax
        # mov   %rax, %fs ; not for fs
        # (X64 will not use fs and gs, so we do not restore it)
        popq    %rax
        mov     %rax, %es
        popq    %rax
        mov     %rax, %ds
        popq    32(%rbp)                # for cs
        popq    56(%rbp)                # for ss

        #; UINT64  Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;
        #; UINT64  R8, R9, R10, R11, R12, R13, R14, R15;
        popq    %rdi
        popq    %rsi
        addq    $8, %rsp                # not for rbp
        popq    48(%rbp)                # for rsp
        popq    %rbx
        popq    %rdx
        popq    %rcx
        popq    %rax
        popq    %r8
        popq    %r9
        popq    %r10
        popq    %r11
        popq    %r12
        popq    %r13
        popq    %r14
        popq    %r15

        movq    %rbp, %rsp
        popq    %rbp
        addq    $16, %rsp               # drop vector number and error code
        cmpq    $0, -32(%rsp)           # check EXCEPTION_HANDLER_CONTEXT.OldIdtHandler
        jz      DoReturn
        cmpb    $1, -40(%rsp)           # check EXCEPTION_HANDLER_CONTEXT.ExceptionDataFlag
        jz      ErrorCode
        jmp     *-32(%rsp)              # chain to the old IDT handler (no error code)
ErrorCode:
        subq    $8, %rsp                # re-expose the error code for the old handler
        jmp     *-24(%rsp)

DoReturn:
        pushq   %rax
        movq    ASM_PFX(mDoFarReturnFlag)(%rip), %rax
        cmpq    $0, %rax                # Check if need to do far return instead of IRET
        popq    %rax
        jz      DoIret
        pushq   %rax
        movq    %rsp, %rax              # save old RSP to rax
        movq    0x20(%rsp), %rsp        # switch to the interrupted stack
        pushq   0x10(%rax)              # save CS in new location
        pushq   0x8(%rax)               # save EIP in new location
        pushq   0x18(%rax)              # save EFLAGS in new location
        movq    (%rax), %rax            # restore rax
        popfq                           # restore EFLAGS
        lretq                           # far return
DoIret:
        iretq
385 | \r | |
386 | \r | |
387 | #-------------------------------------------------------------------------------------\r | |
e41aad15 | 388 | # AsmGetTemplateAddressMap (&AddressMap);\r |
8f07f895 | 389 | #-------------------------------------------------------------------------------------\r |
390 | # comments here for definition of address map\r | |
e41aad15 JF |
391 | ASM_GLOBAL ASM_PFX(AsmGetTemplateAddressMap)\r |
392 | ASM_PFX(AsmGetTemplateAddressMap):\r | |
07da1ac8 AF |
393 | pushq %rbp\r |
394 | movq %rsp, %rbp\r | |
395 | \r | |
396 | leaq AsmIdtVectorBegin(%rip), %rax\r | |
397 | movq %rax, (%rcx)\r | |
398 | .set ENTRY_SIZE, ASM_PFX(HookAfterStubHeaderEnd) - HookAfterStubHeaderBegin\r | |
399 | movq $(ENTRY_SIZE), 0x08(%rcx)\r | |
400 | leaq HookAfterStubHeaderBegin(%rip), %rax\r | |
401 | movq %rax, 0x10(%rcx)\r | |
8f07f895 | 402 | \r |
07da1ac8 AF |
403 | popq %rbp\r |
404 | ret\r | |
8f07f895 | 405 | \r |
#-------------------------------------------------------------------------------------
#  VOID
#  EFIAPI
#  AsmVectorNumFixup (
#    IN VOID    *NewVectorAddr,  // RCX
#    IN UINT8   VectorNum        // RDX
#    IN VOID    *OldVectorAddr,  // R8
#    );
#-------------------------------------------------------------------------------------
# Patches a copied hook-after stub: writes the vector number into the push
# immediate and rebases the rel32 jump displacement for the stub's new address.
ASM_GLOBAL ASM_PFX(AsmVectorNumFixup)
ASM_PFX(AsmVectorNumFixup):
        pushq   %rbp
        movq    %rsp, %rbp

        # Patch vector #
        movb    %dl, (PatchVectorNum - HookAfterStubHeaderBegin)(%rcx)

        # Patch Function address
        subq    %rcx, %r8               # offset delta = OldVectorAddr - NewVectorAddr
        movl    (PatchFuncAddress - HookAfterStubHeaderBegin)(%rcx), %eax
        addq    %r8, %rax               # rebase the rel32 displacement
        movl    %eax, (PatchFuncAddress - HookAfterStubHeaderBegin)(%rcx)

        popq    %rbp
        ret
8f07f895 | 431 | \r |
432 | #END\r | |
433 | \r | |
434 | \r |