#------------------------------------------------------------------------------
# (Non-source web-page residue removed: this file is
#  UefiCpuPkg/CpuDxe/X64/CpuAsm.S from the EDK2 tree.)
#------------------------------------------------------------------------------
# TITLE CpuAsm.S:

#------------------------------------------------------------------------------
#*
#*   Copyright 2008 - 2009, Intel Corporation
#*   All rights reserved. This program and the accompanying materials
#*   are licensed and made available under the terms and conditions of the BSD License
#*   which accompanies this distribution.  The full text of the license may be found at
#*   http://opensource.org/licenses/bsd-license.php
#*
#*   THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
#*   WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#*
#*    CpuAsm.S
#*
#*   Abstract:
#*
#------------------------------------------------------------------------------

20
#text SEGMENT


#EXTRN ASM_PFX(mErrorCodeFlag):DWORD # Error code flags for exceptions


#
# Storage for the pointer to the external interrupt vector table.
# 8 zero bytes = one UINT64 slot, filled in by
# InitializeExternalVectorTablePtr below and read by CommonInterruptEntry.
#
ExternalVectorTablePtr:
    .byte      0, 0, 0, 0, 0, 0, 0, 0

#------------------------------------------------------------------------------
# VOID
# InitializeExternalVectorTablePtr (
#   EFI_CPU_INTERRUPT_HANDLER  *VectorTable   // in RCX (Microsoft x64 ABI)
#   );
# Records the caller-supplied vector table pointer in ExternalVectorTablePtr.
#------------------------------------------------------------------------------
ASM_GLOBAL ASM_PFX(InitializeExternalVectorTablePtr)
ASM_PFX(InitializeExternalVectorTablePtr):
    lea        ExternalVectorTablePtr(%rip), %rax   # rax = &ExternalVectorTablePtr (RIP-relative, PIC-safe)
    mov        %rcx, (%rax)                         # store the table pointer (1st argument, RCX)
    ret
38
39
#------------------------------------------------------------------------------
# VOID
# SetCodeSelector (
#   UINT16 Selector   // in CX (Microsoft x64 ABI)
#   );
# Reloads CS with the given selector.  CS cannot be written with a mov, so a
# far jump through a pointer built on the stack is used: the jump reloads CS
# and lands on the very next label.
#------------------------------------------------------------------------------
ASM_GLOBAL ASM_PFX(SetCodeSelector)
ASM_PFX(SetCodeSelector):
    subq       $0x10, %rsp                          # scratch space for the far-pointer operand
    leaq       setCodeSelectorLongJump(%rip), %rax  # target offset = label just below
    movq       %rax, (%rsp)                         # offset part at (%rsp)
    movw       %cx, 4(%rsp)                         # selector part at 4(%rsp)
    # Hand-encoded "ljmp *(%rsp)" (FF /5, m16:32 form: 32-bit offset + 16-bit
    # selector).  NOTE(review): the 32-bit offset form assumes this code runs
    # below 4GB — TODO confirm against platform memory map.
    .byte      0xFF, 0x2C, 0x24                     # jmp (%rsp)  note:fword jmp
setCodeSelectorLongJump:
    # Execution resumes here with the new CS in force.
    addq       $0x10, %rsp                          # release scratch space
    ret
56
#------------------------------------------------------------------------------
# VOID
# SetDataSelectors (
#   UINT16 Selector   // in CX (Microsoft x64 ABI)
#   );
# Loads the same selector into all data segment registers.  SS is loaded
# first: a mov to SS inhibits interrupts until after the next instruction,
# which keeps the SS:DS reload sequence from being interrupted half-done.
#------------------------------------------------------------------------------
ASM_GLOBAL ASM_PFX(SetDataSelectors)
ASM_PFX(SetDataSelectors):
    movw       %cx, %ss                             # SS first (interrupt shadow covers next insn)
    movw       %cx, %ds
    movw       %cx, %es
    movw       %cx, %fs
    movw       %cx, %gs
    ret
71
#---------------------------------------;
# CommonInterruptEntry                  ;
#---------------------------------------;
# Common entry reached from every vector stub.  Each stub does a "call" here
# immediately followed by a 16-bit vector number, so the return address on the
# stack points at the vector number.  This routine:
#   1. recovers the vector number,
#   2. normalizes the stack (pushes a dummy error code when the CPU did not),
#   3. saves the full EFI_SYSTEM_CONTEXT_X64,
#   4. dispatches to the registered handler from ExternalVectorTablePtr,
#   5. restores context and iretq's.
#
# FIX(review): the handler-table load used "movl (%eax), %eax", which both
# truncates the table address to 32 bits (0x67 address-size override on the
# RIP-relative pointer already in RAX) and loads only the low 32 bits of the
# 64-bit table pointer.  Corrected to "movq (%rax), %rax".

ASM_GLOBAL ASM_PFX(CommonInterruptEntry)
ASM_PFX(CommonInterruptEntry):
    cli
    #
    # All interrupt handlers are invoked through interrupt gates, so
    # IF flag automatically cleared at the entry point
    #
    #
    # Calculate vector number: the "return address" left by the stub's call
    # is really the address of the 16-bit vector number.  The xchg also parks
    # the caller's RCX in that stack slot for later.
    #
    xchgq      (%rsp), %rcx              # rcx = &vector number; slot = old rcx
    movzwl     (%rcx), %ecx              # ecx = vector number (zero-extended)
    cmp        $32, %ecx                 # Intel reserved vector for exceptions?
    jae        NoErrorCode               # external interrupts never push an error code
    pushq      %rax
    leaq       ASM_PFX(mErrorCodeFlag)(%rip), %rax
    bt         %ecx, (%rax)              # CF = 1 if this exception pushed an error code
    popq       %rax
    jc         CommonInterruptEntry_al_0000

NoErrorCode:

    #
    # Push a dummy error code on the stack
    # to maintain coherent stack map
    #
    pushq      (%rsp)                    # duplicate RCX/vector slot downward
    movq       $0, 8(%rsp)               # dummy error code in vacated slot
CommonInterruptEntry_al_0000:
    pushq      %rbp
    movq       %rsp, %rbp

    #
    # Stack:
    # +---------------------+ <-- 16-byte aligned ensured by processor
    # + Old SS              +
    # +---------------------+
    # + Old RSP             +
    # +---------------------+
    # + RFlags              +
    # +---------------------+
    # + CS                  +
    # +---------------------+
    # + RIP                 +
    # +---------------------+
    # + Error Code          +
    # +---------------------+
    # + RCX / Vector Number +
    # +---------------------+
    # + RBP                 +
    # +---------------------+ <-- RBP, 16-byte aligned
    #


    #
    # Since here the stack pointer is 16-byte aligned, so
    # EFI_FX_SAVE_STATE_X64 of EFI_SYSTEM_CONTEXT_x64
    # is 16-byte aligned
    #

#; UINT64  Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;
#; UINT64  R8, R9, R10, R11, R12, R13, R14, R15;
    pushq      %r15
    pushq      %r14
    pushq      %r13
    pushq      %r12
    pushq      %r11
    pushq      %r10
    pushq      %r9
    pushq      %r8
    pushq      %rax
    pushq      8(%rbp)                   # RCX (parked in the vector slot earlier)
    pushq      %rdx
    pushq      %rbx
    pushq      48(%rbp)                  # RSP at time of interrupt
    pushq      (%rbp)                    # RBP at time of interrupt
    pushq      %rsi
    pushq      %rdi

#; UINT64  Gs, Fs, Es, Ds, Cs, Ss;  insure high 16 bits of each is zero
    movzwq     56(%rbp), %rax
    pushq      %rax                      # for ss
    movzwq     32(%rbp), %rax
    pushq      %rax                      # for cs
    movq       %ds, %rax
    pushq      %rax
    movq       %es, %rax
    pushq      %rax
    movq       %fs, %rax
    pushq      %rax
    movq       %gs, %rax
    pushq      %rax

    movq       %rcx, 8(%rbp)             # save vector number over the parked RCX

#; UINT64  Rip;
    pushq      24(%rbp)

#; UINT64  Gdtr[2], Idtr[2];
    # sidt/sgdt store 2+8 bytes; the xchg shuffle below left-justifies the
    # 10-byte descriptor into two zeroed UINT64 slots.
    xorq       %rax, %rax
    pushq      %rax
    pushq      %rax
    sidt       (%rsp)
    xchgq      2(%rsp), %rax
    xchgq      (%rsp), %rax
    xchgq      8(%rsp), %rax

    xorq       %rax, %rax
    pushq      %rax
    pushq      %rax
    sgdt       (%rsp)
    xchgq      2(%rsp), %rax
    xchgq      (%rsp), %rax
    xchgq      8(%rsp), %rax

#; UINT64  Ldtr, Tr;
    xorq       %rax, %rax
    str        %ax                       # task register
    pushq      %rax
    sldt       %ax                       # LDT selector
    pushq      %rax

#; UINT64  RFlags;
    pushq      40(%rbp)

#; UINT64  Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;
    movq       %cr8, %rax
    pushq      %rax
    movq       %cr4, %rax
    orq        $0x208, %rax              # set OSFXSR|OSXMMEXCPT so fxsave/fxrstor are usable
    movq       %rax, %cr4
    pushq      %rax
    movq       %cr3, %rax
    pushq      %rax
    movq       %cr2, %rax
    pushq      %rax
    xorq       %rax, %rax                # CR1 does not exist; store 0
    pushq      %rax
    movq       %cr0, %rax
    pushq      %rax

#; UINT64  Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;
    movq       %dr7, %rax
    pushq      %rax
#; clear Dr7 while executing debugger itself
    xorq       %rax, %rax
    movq       %rax, %dr7

    movq       %dr6, %rax
    pushq      %rax
#; insure all status bits in dr6 are clear...
    xorq       %rax, %rax
    movq       %rax, %dr6

    movq       %dr3, %rax
    pushq      %rax
    movq       %dr2, %rax
    pushq      %rax
    movq       %dr1, %rax
    pushq      %rax
    movq       %dr0, %rax
    pushq      %rax

#; FX_SAVE_STATE_X64 FxSaveState;
    subq       $512, %rsp                # fxsave area; 16-byte aligned (see note above)
    movq       %rsp, %rdi
    .byte      0x0f, 0x0ae, 0x07         # fxsave [rdi]

#; UINT32  ExceptionData;
    pushq      16(%rbp)

#; call into exception handler
    movq       8(%rbp), %rcx                       # rcx = vector number
    leaq       ExternalVectorTablePtr(%rip), %rax  # rax = &table pointer
    movq       (%rax), %rax                        # rax = table pointer (was: movl (%eax), %eax — 32-bit truncation bug)
    movq       (%rax,%rcx,8), %rax                 # rax = handler[vector]
    orq        %rax, %rax                          # NULL?

    je         nonNullValue

#; Prepare parameter and call
#   rcx already holds the vector number (1st argument)
    mov        %rsp, %rdx                # rdx = &SystemContext (2nd argument)
    #
    # Per X64 calling convention, allocate maximum parameter stack space
    # and make sure RSP is 16-byte aligned
    #
    subq       $40, %rsp                 # 32-byte shadow space + 8 alignment
    call       *%rax
    addq       $40, %rsp

nonNullValue:
    cli                                  # handler may have re-enabled interrupts
#; UINT64  ExceptionData;
    addq       $8, %rsp                  # discard; not restored

#; FX_SAVE_STATE_X64 FxSaveState;

    movq       %rsp, %rsi
    .byte      0x0f, 0x0ae, 0x0E         # fxrstor [rsi]
    addq       $512, %rsp

#; UINT64  Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;
    popq       %rax
    movq       %rax, %dr0
    popq       %rax
    movq       %rax, %dr1
    popq       %rax
    movq       %rax, %dr2
    popq       %rax
    movq       %rax, %dr3
#; skip restore of dr6.  We cleared dr6 during the context save.
    addq       $8, %rsp
    popq       %rax
    movq       %rax, %dr7

#; UINT64  Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;
    popq       %rax
    movq       %rax, %cr0
    addq       $8, %rsp                  # not for Cr1
    popq       %rax
    movq       %rax, %cr2
    popq       %rax
    movq       %rax, %cr3
    popq       %rax
    movq       %rax, %cr4
    popq       %rax
    movq       %rax, %cr8

#; UINT64  RFlags;
    popq       40(%rbp)                  # write back into the iretq frame

#; UINT64  Ldtr, Tr;
#; UINT64  Gdtr[2], Idtr[2];
#; Best not let anyone mess with these particular registers...
    addq       $48, %rsp

#; UINT64  Rip;
    popq       24(%rbp)                  # write back into the iretq frame

#; UINT64  Gs, Fs, Es, Ds, Cs, Ss;
    popq       %rax
    # mov      %rax, %gs ; not for gs
    popq       %rax
    # mov      %rax, %fs ; not for fs
    # (X64 will not use fs and gs, so we do not restore it)
    popq       %rax
    movq       %rax, %es
    popq       %rax
    movq       %rax, %ds
    popq       32(%rbp)                  # for cs
    popq       56(%rbp)                  # for ss

#; UINT64  Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;
#; UINT64  R8, R9, R10, R11, R12, R13, R14, R15;
    popq       %rdi
    popq       %rsi
    addq       $8, %rsp                  # not for rbp (restored via frame below)
    popq       48(%rbp)                  # for rsp (restored by iretq)
    popq       %rbx
    popq       %rdx
    popq       %rcx
    popq       %rax
    popq       %r8
    popq       %r9
    popq       %r10
    popq       %r11
    popq       %r12
    popq       %r13
    popq       %r14
    popq       %r15

    movq       %rbp, %rsp                # discard remaining context
    popq       %rbp
    addq       $16, %rsp                 # skip vector number and error code
    iretq
353
354
355#text ENDS
356
357#END
358
359