#------------------------------------------------------------------------------
# Source: mirror_edk2 — UefiCpuPkg/Library/CpuExceptionHandlerLib/X64/ExceptionHandlerAsm.S
# (commit subject: "UefiCpuPkg/ExceptionHandlerAsm.S: Fix code length issue with GCC 5.4")
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------ ;
# Copyright (c) 2012 - 2017, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution.  The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php.
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
# Module Name:
#
#   ExceptionHandlerAsm.S
#
# Abstract:
#
#   x64 CPU Exception Handler
#
# Notes:
#
#------------------------------------------------------------------------------


ASM_GLOBAL ASM_PFX(CommonExceptionHandler)

#EXTRN ASM_PFX(mErrorCodeFlag):DWORD      # Error code flags for exceptions
#EXTRN ASM_PFX(mDoFarReturnFlag):QWORD    # Do far return flag
.text

#
# IDT_MACRO: emit one fixed-size IDT vector-entry stub:
#     push <vector-number>
#     jmp  CommonInterruptEntry
# The jmp is hand-encoded (opcode 0xe9 + explicit rel32) instead of a "jmp"
# mnemonic so that every stub has an identical byte length no matter which
# jump encoding the assembler would otherwise choose (this is the GCC 5.4
# code-length fix); the IDT setup code measures/installs these stubs and
# relies on them being uniformly sized.
#
#ifdef __APPLE__
# macros are different between GNU and Xcode as.
# NOTE: in Xcode as, "$0" below is the FIRST MACRO ARGUMENT, not an
# immediate zero — so both branches push the caller-supplied vector number.
.macro IDT_MACRO
    push    $0
#else
.macro IDT_MACRO arg
    push    \arg
#endif
    .byte   0xe9                          # jmp ASM_PFX(CommonInterruptEntry)
    .long   ASM_PFX(CommonInterruptEntry) - . - 4
.endm
42
#
# 32 vector-entry stubs, one per Intel-reserved exception vector (0-31).
# Each stub pushes its vector number and jumps to CommonInterruptEntry.
# The region between AsmIdtVectorBegin and AsmIdtVectorEnd is exposed to C
# code through AsmGetTemplateAddressMap; do not change its layout.
#
AsmIdtVectorBegin:
    IDT_MACRO $0
    IDT_MACRO $1
    IDT_MACRO $2
    IDT_MACRO $3
    IDT_MACRO $4
    IDT_MACRO $5
    IDT_MACRO $6
    IDT_MACRO $7
    IDT_MACRO $8
    IDT_MACRO $9
    IDT_MACRO $10
    IDT_MACRO $11
    IDT_MACRO $12
    IDT_MACRO $13
    IDT_MACRO $14
    IDT_MACRO $15
    IDT_MACRO $16
    IDT_MACRO $17
    IDT_MACRO $18
    IDT_MACRO $19
    IDT_MACRO $20
    IDT_MACRO $21
    IDT_MACRO $22
    IDT_MACRO $23
    IDT_MACRO $24
    IDT_MACRO $25
    IDT_MACRO $26
    IDT_MACRO $27
    IDT_MACRO $28
    IDT_MACRO $29
    IDT_MACRO $30
    IDT_MACRO $31
AsmIdtVectorEnd:
77
#
# Template for a "hook after" entry stub:
#     push <vector>                 (0x6a imm8 — imm8 patched at PatchVectorNum)
#     jmp  HookAfterStubHeaderEnd   (0xe9 rel32 — rel32 patched at PatchFuncAddress)
# AsmVectorNumFixup patches a relocated copy of this template at runtime, and
# AsmGetTemplateAddressMap reports its size (ENTRY_SIZE), so the byte layout
# of HookAfterStubHeaderBegin..HookAfterStubHeaderEnd must not change.
#
HookAfterStubHeaderBegin:
    .byte   0x6a                          # push (imm8)
PatchVectorNum:
    .byte   0                             # vector number — 0 will be fixed up at runtime
    .byte   0xe9                          # jmp ASM_PFX(HookAfterStubHeaderEnd)
PatchFuncAddress:
    .set HOOK_ADDRESS, ASM_PFX(HookAfterStubHeaderEnd) - . - 4
    .long   HOOK_ADDRESS                  # rel32 displacement — will be fixed up at runtime
ASM_GLOBAL ASM_PFX(HookAfterStubHeaderEnd)
ASM_PFX(HookAfterStubHeaderEnd):
    pushq   %rax
    movq    %rsp, %rax                    # rax = entry RSP (points at the saved rax)
    andl    $0x0fffffff0, %esp            # make sure 16-byte aligned for exception context
                                          # NOTE(review): 32-bit AND zeroes RSP[63:32];
                                          # this assumes the stack lives below 4GB — confirm.
    subq    $0x18, %rsp                   # reserve room for filling exception data later
    pushq   %rcx
    movq    8(%rax), %rcx                 # rcx = vector number pushed by the stub
    bt      %ecx, ASM_PFX(mErrorCodeFlag)(%rip)  # does this vector carry a CPU error code?
    jnc     NoErrorData
    pushq   (%rsp)                        # push additional rcx to make stack alignment
NoErrorData:
    xchgq   (%rsp), %rcx                  # restore rcx, save Exception Number in stack
    movq    (%rax), %rax                  # restore rax
100
#---------------------------------------;
# CommonInterruptEntry                  ;
#---------------------------------------;
# The following algorithm is used for the common interrupt routine.
# On entry, a vector stub has pushed the vector number (and for some
# exceptions the CPU has pushed an error code below it).  This routine
# builds a full EFI_SYSTEM_CONTEXT_X64 on the stack, calls
# CommonExceptionHandler(VectorNum, Context), restores state, and returns
# via IRET — or via a far return / the old IDT handler, as flagged in
# EXCEPTION_HANDLER_CONTEXT by the C side.

ASM_GLOBAL ASM_PFX(CommonInterruptEntry)
ASM_PFX(CommonInterruptEntry):
    cli
#
# All interrupt handlers are invoked through interrupt gates, so
# IF flag automatically cleared at the entry point
#
#
# Calculate vector number
#
    xchgq   (%rsp), %rcx              # rcx <- vector number pushed by the stub; caller's rcx saved in its slot
    andq    $0x0FF, %rcx              # only the low byte is a valid vector number
    cmp     $32, %ecx                 # Intel reserved vector for exceptions?
    jae     NoErrorCode
    pushq   %rax
    movl    ASM_PFX(mErrorCodeFlag)(%rip), %eax
    bt      %ecx, %eax                # bit set => CPU already pushed an error code
    popq    %rax
    jc      CommonInterruptEntry_al_0000

NoErrorCode:

#
# Push a dummy error code on the stack
# to maintain coherent stack map
#
    pushq   (%rsp)                    # duplicate the vector-number slot downward
    movq    $0, 8(%rsp)               # ...and zero the error-code slot
CommonInterruptEntry_al_0000:
    pushq   %rbp
    movq    %rsp, %rbp
    pushq   $0                        # clear EXCEPTION_HANDLER_CONTEXT.OldIdtHandler
    pushq   $0                        # clear EXCEPTION_HANDLER_CONTEXT.ExceptionDataFlag

#
# Stack:
# +---------------------+ <-- 16-byte aligned ensured by processor
# +    Old SS           +
# +---------------------+
# +    Old RSP          +
# +---------------------+
# +    RFlags           +
# +---------------------+
# +    CS               +
# +---------------------+
# +    RIP              +
# +---------------------+
# +    Error Code       +
# +---------------------+
# + RCX / Vector Number +
# +---------------------+
# +    RBP              +
# +---------------------+ <-- RBP, 16-byte aligned
#


#
# Since here the stack pointer is 16-byte aligned, so
# EFI_FX_SAVE_STATE_X64 of EFI_SYSTEM_CONTEXT_x64
# is 16-byte aligned
#

#; UINT64  Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;
#; UINT64  R8, R9, R10, R11, R12, R13, R14, R15;
    pushq   %r15
    pushq   %r14
    pushq   %r13
    pushq   %r12
    pushq   %r11
    pushq   %r10
    pushq   %r9
    pushq   %r8
    pushq   %rax
    pushq   8(%rbp)                   # RCX (original value, preserved in the vector slot)
    pushq   %rdx
    pushq   %rbx
    pushq   48(%rbp)                  # RSP (interrupted context's, from the iret frame)
    pushq   (%rbp)                    # RBP (original value)
    pushq   %rsi
    pushq   %rdi

#; UINT64  Gs, Fs, Es, Ds, Cs, Ss;  insure high 16 bits of each is zero
    movzwq  56(%rbp), %rax
    pushq   %rax                      # for ss
    movzwq  32(%rbp), %rax
    pushq   %rax                      # for cs
    mov     %ds, %rax
    pushq   %rax
    mov     %es, %rax
    pushq   %rax
    mov     %fs, %rax
    pushq   %rax
    mov     %gs, %rax
    pushq   %rax

    movq    %rcx, 8(%rbp)             # save vector number (rcx slot now free for reuse)

#; UINT64  Rip;
    pushq   24(%rbp)

#; UINT64  Gdtr[2], Idtr[2];
# sidt/sgdt store a 10-byte (2+8) descriptor; the xchg shuffle below splits
# it into two zero-padded qwords matching the Gdtr[2]/Idtr[2] layout.
    xorq    %rax, %rax
    pushq   %rax
    pushq   %rax
    sidt    (%rsp)
    xchgq   2(%rsp), %rax
    xchgq   (%rsp), %rax
    xchgq   8(%rsp), %rax

    xorq    %rax, %rax
    pushq   %rax
    pushq   %rax
    sgdt    (%rsp)
    xchgq   2(%rsp), %rax
    xchgq   (%rsp), %rax
    xchgq   8(%rsp), %rax

#; UINT64  Ldtr, Tr;
    xorq    %rax, %rax
    str     %ax
    pushq   %rax
    sldt    %ax
    pushq   %rax

#; UINT64  RFlags;
    pushq   40(%rbp)

#; UINT64  Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;
    movq    %cr8, %rax
    pushq   %rax
    movq    %cr4, %rax
    orq     $0x208, %rax              # set CR4.OSFXSR (bit 9) and CR4.DE (bit 3) so fxsave/fxrstor work
    movq    %rax, %cr4
    pushq   %rax
    mov     %cr3, %rax
    pushq   %rax
    mov     %cr2, %rax
    pushq   %rax
    xorq    %rax, %rax                # CR1 does not exist; store zero
    pushq   %rax
    mov     %cr0, %rax
    pushq   %rax

#; UINT64  Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;
    movq    %dr7, %rax
    pushq   %rax
    movq    %dr6, %rax
    pushq   %rax
    movq    %dr3, %rax
    pushq   %rax
    movq    %dr2, %rax
    pushq   %rax
    movq    %dr1, %rax
    pushq   %rax
    movq    %dr0, %rax
    pushq   %rax

#; FX_SAVE_STATE_X64 FxSaveState;
    subq    $512, %rsp
    movq    %rsp, %rdi
    .byte 0x0f, 0x0ae, 0x07           # fxsave [rdi] (hand-encoded for old assemblers)

#; UEFI calling convention for x64 requires that Direction flag in EFLAGs is clear
    cld

#; UINT32  ExceptionData;
    pushq   16(%rbp)

#; Prepare parameter and call: CommonExceptionHandler(VectorNum, &SystemContext)
    mov     8(%rbp), %rcx             # arg1 = vector number (MS x64 ABI: rcx)
    mov     %rsp, %rdx                # arg2 = pointer to the context just built (rdx)
#
# Per X64 calling convention, allocate maximum parameter stack space
# and make sure RSP is 16-byte aligned
#
    subq    $40, %rsp                 # 32-byte shadow space + 8 to keep 16-byte alignment
    call    ASM_PFX(CommonExceptionHandler)
    addq    $40, %rsp

    cli
#; UINT64  ExceptionData;
    addq    $8, %rsp                  # discard ExceptionData copy

#; FX_SAVE_STATE_X64 FxSaveState;

    movq    %rsp, %rsi
    .byte 0x0f, 0x0ae, 0x0E           # fxrstor [rsi] (hand-encoded)
    addq    $512, %rsp

#; UINT64  Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;
#; Skip restoration of DRx registers to support in-circuit emulators
#; or debuggers set breakpoint in interrupt/exception context
    addq    $48, %rsp

#; UINT64  Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;
    popq    %rax
    movq    %rax, %cr0
    addq    $8, %rsp                  # not for Cr1
    popq    %rax
    movq    %rax, %cr2
    popq    %rax
    movq    %rax, %cr3
    popq    %rax
    movq    %rax, %cr4
    popq    %rax
    movq    %rax, %cr8

#; UINT64  RFlags;
    popq    40(%rbp)                  # write back into the iret frame

#; UINT64  Ldtr, Tr;
#; UINT64  Gdtr[2], Idtr[2];
#; Best not let anyone mess with these particular registers...
    addq    $48, %rsp

#; UINT64  Rip;
    popq    24(%rbp)                  # write back into the iret frame

#; UINT64  Gs, Fs, Es, Ds, Cs, Ss;
    popq    %rax
    # mov   %rax, %gs ; not for gs
    popq    %rax
    # mov   %rax, %fs ; not for fs
    # (X64 will not use fs and gs, so we do not restore it)
    popq    %rax
    mov     %rax, %es
    popq    %rax
    mov     %rax, %ds
    popq    32(%rbp)                  # for cs
    popq    56(%rbp)                  # for ss

#; UINT64  Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;
#; UINT64  R8, R9, R10, R11, R12, R13, R14, R15;
    popq    %rdi
    popq    %rsi
    addq    $8, %rsp                  # not for rbp
    popq    48(%rbp)                  # for rsp
    popq    %rbx
    popq    %rdx
    popq    %rcx
    popq    %rax
    popq    %r8
    popq    %r9
    popq    %r10
    popq    %r11
    popq    %r12
    popq    %r13
    popq    %r14
    popq    %r15

    movq    %rbp, %rsp
    popq    %rbp
    addq    $16, %rsp                 # drop vector number and error code
    cmpq    $0, -32(%rsp)             # check EXCEPTION_HANDLER_CONTEXT.OldIdtHandler
    jz      DoReturn
    cmpb    $1, -40(%rsp)             # check EXCEPTION_HANDLER_CONTEXT.ExceptionDataFlag
    jz      ErrorCode
    jmp     *-32(%rsp)                # chain to the old IDT handler (no error code)
ErrorCode:
    subq    $8, %rsp                  # re-expose the error code for the old handler
    jmp     *-24(%rsp)

DoReturn:
    pushq   %rax
    movq    ASM_PFX(mDoFarReturnFlag)(%rip), %rax
    cmpq    $0, %rax                  # Check if need to do far return instead of IRET
    popq    %rax
    jz      DoIret
    pushq   %rax
    movq    %rsp, %rax                # save old RSP to rax
    movq    0x20(%rsp), %rsp          # switch to the interrupted context's stack
    pushq   0x10(%rax)                # save CS in new location
    pushq   0x8(%rax)                 # save EIP in new location
    pushq   0x18(%rax)                # save EFLAGS in new location
    movq    (%rax), %rax              # restore rax
    popfq                             # restore EFLAGS
    lretq                             # far return
DoIret:
    iretq
385
386
#-------------------------------------------------------------------------------------
#  AsmGetTemplateAddressMap (&AddressMap);
#-------------------------------------------------------------------------------------
# Fill the caller-supplied address-map structure (pointer in RCX, MS x64 ABI):
#   offset 0x00: address of the 32 vector-entry stubs (AsmIdtVectorBegin)
#   offset 0x08: byte size of one hook-stub template (ENTRY_SIZE)
#   offset 0x10: address of the hook-stub template (HookAfterStubHeaderBegin)
# No registers other than RAX are clobbered.
ASM_GLOBAL ASM_PFX(AsmGetTemplateAddressMap)
ASM_PFX(AsmGetTemplateAddressMap):
    pushq   %rbp
    movq    %rsp, %rbp

    # Template base goes in the third field.
    leaq    HookAfterStubHeaderBegin(%rip), %rax
    movq    %rax, 0x10(%rcx)

    # Vector-stub table base goes in the first field.
    leaq    AsmIdtVectorBegin(%rip), %rax
    movq    %rax, (%rcx)

    # Template size (assemble-time constant) goes in the second field.
    .set ENTRY_SIZE, ASM_PFX(HookAfterStubHeaderEnd) - HookAfterStubHeaderBegin
    movq    $(ENTRY_SIZE), 0x08(%rcx)

    popq    %rbp
    ret
405
#-------------------------------------------------------------------------------------
# VOID
# EFIAPI
# AsmVectorNumFixup (
#   IN VOID    *NewVectorAddr,  // RCX
#   IN UINT8   VectorNum        // RDX
#   IN VOID    *OldVectorAddr,  // R8
#   );
#-------------------------------------------------------------------------------------
# Patch a relocated copy (at RCX) of the HookAfterStubHeaderBegin template:
# stamp the vector number into the push-imm8 operand, and rebias the jmp's
# rel32 so the copy still lands on the same absolute target as the original.
ASM_GLOBAL ASM_PFX(AsmVectorNumFixup)
ASM_PFX(AsmVectorNumFixup):
    pushq   %rbp
    movq    %rsp, %rbp

    # Rebias the rel32 displacement first: a rel32 at address A reaching
    # target T stores T-A-4, so moving the stub from OldVectorAddr to
    # NewVectorAddr means adding (OldVectorAddr - NewVectorAddr).
    subq    %rcx, %r8                                           # r8 = old - new
    addl    %r8d, (PatchFuncAddress - HookAfterStubHeaderBegin)(%rcx)

    # Then stamp the vector number into the push's imm8 byte.
    movb    %dl, (PatchVectorNum - HookAfterStubHeaderBegin)(%rcx)

    popq    %rbp
    ret
431
432 #END
433
434