# git.proxmox.com Git - mirror_edk2.git/blob - UefiCpuPkg/CpuDxe/X64/CpuAsm.S
# (web-capture header; original commit subject: "Remove svn:executable on *.c, *.h, *.asm, *.S, *.inf and *.asl*")
# [mirror_edk2.git] / UefiCpuPkg / CpuDxe / X64 / CpuAsm.S
1 # TITLE CpuAsm.S:
2
3 #------------------------------------------------------------------------------
4 #*
5 #* Copyright 2008 - 2009, Intel Corporation
6 #* All rights reserved. This program and the accompanying materials
7 #* are licensed and made available under the terms and conditions of the BSD License
8 #* which accompanies this distribution. The full text of the license may be found at
9 #* http://opensource.org/licenses/bsd-license.php
10 #*
11 #* THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
12 #* WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
13 #*
14 #* CpuAsm.S
15 #*
16 #* Abstract:
17 #*
18 #------------------------------------------------------------------------------
19
20
21 #text SEGMENT
22
23
24 #EXTRN ASM_PFX(mErrorCodeFlag):DWORD # Error code flags for exceptions
25
26
#
# Storage for the pointer to the external interrupt vector table:
# 8 zero-initialized bytes holding one 64-bit pointer, written by
# InitializeExternalVectorTablePtr and read by CommonInterruptEntry.
#
ExternalVectorTablePtr:
    .byte      0, 0, 0, 0, 0, 0, 0, 0
32
#------------------------------------------------------------------------------
# VOID
# InitializeExternalVectorTablePtr (
#   VOID  *VectorTable    # RCX (MS x64 ABI): base of the table of handler
#   );                    # pointers, one 8-byte entry per vector
#
# Records the caller-supplied table address in ExternalVectorTablePtr so that
# CommonInterruptEntry can dispatch to the installed handlers.
#------------------------------------------------------------------------------
ASM_GLOBAL ASM_PFX(InitializeExternalVectorTablePtr)
ASM_PFX(InitializeExternalVectorTablePtr):
    movq    %rcx, ExternalVectorTablePtr(%rip)   # save the vector table pointer
    ret
38
39
#------------------------------------------------------------------------------
# VOID
# SetCodeSelector (
#   UINT16 Selector       # CX (MS x64 ABI): new code-segment selector
#   );
#
# Reloads CS with the given selector.  CS cannot be written with MOV, so a
# far pointer is built on the stack and jumped through; execution resumes at
# setCodeSelectorLongJump under the new CS.
#------------------------------------------------------------------------------
ASM_GLOBAL ASM_PFX(SetCodeSelector)
ASM_PFX(SetCodeSelector):
    subq    $0x10, %rsp                         # scratch space for the far pointer
    leaq    setCodeSelectorLongJump(%rip), %rax # target offset of the far jump
    movq    %rax, (%rsp)                        # store 64-bit offset; high dword is
                                                # overwritten by the selector below
    movw    %cx, 4(%rsp)                        # selector at [rsp+4] (m16:32 layout)
    .byte   0xFF, 0x2C, 0x24   # jmp (%rsp) note:fword jmp
                               # far jmp m16:32: uses only the LOW 32 bits of the
                               # offset — NOTE(review): assumes this code executes
                               # below 4 GB; confirm for the target platform
setCodeSelectorLongJump:
    addq    $0x10, %rsp                         # discard the far-pointer scratch
    ret
56
#------------------------------------------------------------------------------
# VOID
# SetDataSelectors (
#   UINT16 Selector       # CX (MS x64 ABI): new data-segment selector
#   );
#
# Loads the same selector into every data segment register.  SS is written
# first: a MOV to SS inhibits interrupts for the following instruction, which
# keeps the stack segment consistent while the rest are reloaded.
#------------------------------------------------------------------------------
ASM_GLOBAL ASM_PFX(SetDataSelectors)
ASM_PFX(SetDataSelectors):
    movw    %cx, %ss        # stack segment first
    movw    %cx, %gs
    movw    %cx, %fs
    movw    %cx, %es
    movw    %cx, %ds
    ret
71
#---------------------------------------;
# CommonInterruptEntry                  ;
#---------------------------------------;
# Common entry point shared by every IDT vector stub.  Each per-vector stub
# CALLs here, so the "return address" on the stack is actually the address of
# that stub's vector-number word.  This routine:
#   1. recovers the vector number from the stub,
#   2. normalizes the stack (pushes a dummy error code when the CPU did not
#      push a real one),
#   3. saves a full EFI_SYSTEM_CONTEXT_X64 on the stack,
#   4. dispatches to the handler installed via InitializeExternalVectorTablePtr
#      (if non-NULL),
#   5. restores the context and returns with IRETQ.

ASM_GLOBAL ASM_PFX(CommonInterruptEntry)
ASM_PFX(CommonInterruptEntry):
    cli
    #
    # All interrupt handlers are invoked through interrupt gates, so
    # IF flag automatically cleared at the entry point
    #
    #
    # Calculate vector number
    #
    xchgq   (%rsp), %rcx    # get the return address of call, actually, it is the address of vector number.
    movzwl  (%rcx), %ecx
    cmp     $32, %ecx       # Intel reserved vector for exceptions?
    jae     NoErrorCode
    pushq   %rax
    leaq    ASM_PFX(mErrorCodeFlag)(%rip), %rax
    bt      %ecx, (%rax)    # does the CPU push an error code for this vector?
    popq    %rax
    jc      CommonInterruptEntry_al_0000

NoErrorCode:

    #
    # Push a dummy error code on the stack
    # to maintain coherent stack map
    #
    pushq   (%rsp)
    movq    $0, 8(%rsp)
CommonInterruptEntry_al_0000:
    pushq   %rbp
    movq    %rsp, %rbp

    #
    # Stack:
    # +---------------------+ <-- 16-byte aligned ensured by processor
    # + Old SS +
    # +---------------------+
    # + Old RSP +
    # +---------------------+
    # + RFlags +
    # +---------------------+
    # + CS +
    # +---------------------+
    # + RIP +
    # +---------------------+
    # + Error Code +
    # +---------------------+
    # + RCX / Vector Number +
    # +---------------------+
    # + RBP +
    # +---------------------+ <-- RBP, 16-byte aligned
    #


    #
    # Since here the stack pointer is 16-byte aligned, so
    # EFI_FX_SAVE_STATE_X64 of EFI_SYSTEM_CONTEXT_x64
    # is 16-byte aligned
    #

#; UINT64  Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;
#; UINT64  R8, R9, R10, R11, R12, R13, R14, R15;
    pushq   %r15
    pushq   %r14
    pushq   %r13
    pushq   %r12
    pushq   %r11
    pushq   %r10
    pushq   %r9
    pushq   %r8
    pushq   %rax
    pushq   8(%rbp)         # RCX
    pushq   %rdx
    pushq   %rbx
    pushq   48(%rbp)        # RSP
    pushq   (%rbp)          # RBP
    pushq   %rsi
    pushq   %rdi

#; UINT64  Gs, Fs, Es, Ds, Cs, Ss;  insure high 16 bits of each is zero
    movzwq  56(%rbp), %rax
    pushq   %rax            # for ss
    movzwq  32(%rbp), %rax
    pushq   %rax            # for cs
    movq    %ds, %rax
    pushq   %rax
    movq    %es, %rax
    pushq   %rax
    movq    %fs, %rax
    pushq   %rax
    movq    %gs, %rax
    pushq   %rax

    movq    %rcx, 8(%rbp)   # save vector number

#; UINT64  Rip;
    pushq   24(%rbp)

#; UINT64  Gdtr[2], Idtr[2];
    xorq    %rax, %rax
    pushq   %rax
    pushq   %rax
    sidt    (%rsp)
    xchgq   2(%rsp), %rax   # shuffle the 10-byte IDTR image into two
    xchgq   (%rsp), %rax    # zero-padded UINT64 slots
    xchgq   8(%rsp), %rax

    xorq    %rax, %rax
    pushq   %rax
    pushq   %rax
    sgdt    (%rsp)
    xchgq   2(%rsp), %rax   # same shuffle for the GDTR image
    xchgq   (%rsp), %rax
    xchgq   8(%rsp), %rax

#; UINT64  Ldtr, Tr;
    xorq    %rax, %rax
    str     %ax
    pushq   %rax
    sldt    %ax
    pushq   %rax

#; UINT64  RFlags;
    pushq   40(%rbp)

#; UINT64  Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;
    movq    %cr8, %rax
    pushq   %rax
    movq    %cr4, %rax
    orq     $0x208, %rax    # set CR4.OSFXSR|OSXMMEXCPT before FXSAVE below
    movq    %rax, %cr4
    pushq   %rax
    mov     %cr3, %rax
    pushq   %rax
    mov     %cr2, %rax
    pushq   %rax
    xorq    %rax, %rax      # CR1 does not exist; save a zero placeholder
    pushq   %rax
    mov     %cr0, %rax
    pushq   %rax

#; UINT64  Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;
    movq    %dr7, %rax
    pushq   %rax
#; clear Dr7 while executing debugger itself
    xorq    %rax, %rax
    movq    %rax, %dr7

    movq    %dr6, %rax
    pushq   %rax
#; insure all status bits in dr6 are clear...
    xorq    %rax, %rax
    movq    %rax, %dr6

    movq    %dr3, %rax
    pushq   %rax
    movq    %dr2, %rax
    pushq   %rax
    movq    %dr1, %rax
    pushq   %rax
    movq    %dr0, %rax
    pushq   %rax

#; FX_SAVE_STATE_X64 FxSaveState;
    subq    $512, %rsp
    movq    %rsp, %rdi
    .byte 0x0f, 0x0ae, 0x07 #fxsave [rdi]

#; UINT32  ExceptionData;
    pushq   16(%rbp)

#; call into exception handler
    movq    8(%rbp), %rcx   # rcx = vector number
    leaq    ExternalVectorTablePtr(%rip), %rax
    movq    (%rax), %rax    # rax = vector table base (BUGFIX: was
                            # "movl (%eax), %eax", which truncated the address
                            # via the 32-bit override and loaded only half of
                            # the 64-bit pointer)
    movq    (%rax,%rcx,8), %rax     # rax = handler for this vector
    orq     %rax, %rax      # NULL?

    je      nonNullValue    # no handler installed: skip the call

#; Prepare parameter and call
#  mov     rcx, [rbp + 8]  ; vector number already in rcx
    mov     %rsp, %rdx      # rdx = &EFI_SYSTEM_CONTEXT_X64
    #
    # Per X64 calling convention, allocate maximum parameter stack space
    # and make sure RSP is 16-byte aligned
    #
    subq    $40, %rsp       # 32-byte shadow space + 8 to keep alignment
    call    *%rax
    addq    $40, %rsp

nonNullValue:
    cli
#; UINT64  ExceptionData;
    addq    $8, %rsp

#; FX_SAVE_STATE_X64 FxSaveState;

    movq    %rsp, %rsi
    .byte 0x0f, 0x0ae, 0x0E # fxrstor [rsi]
    addq    $512, %rsp

#; UINT64  Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;
    popq    %rax
    movq    %rax, %dr0
    popq    %rax
    movq    %rax, %dr1
    popq    %rax
    movq    %rax, %dr2
    popq    %rax
    movq    %rax, %dr3
#; skip restore of dr6.  We cleared dr6 during the context save.
    addq    $8, %rsp
    popq    %rax
    movq    %rax, %dr7

#; UINT64  Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;
    popq    %rax
    movq    %rax, %cr0
    addq    $8, %rsp        # not for Cr1
    popq    %rax
    movq    %rax, %cr2
    popq    %rax
    movq    %rax, %cr3
    popq    %rax
    movq    %rax, %cr4
    popq    %rax
    movq    %rax, %cr8

#; UINT64  RFlags;
    popq    40(%rbp)        # write RFlags back into the IRET frame

#; UINT64  Ldtr, Tr;
#; UINT64  Gdtr[2], Idtr[2];
#; Best not let anyone mess with these particular registers...
    addq    $48, %rsp

#; UINT64  Rip;
    popq    24(%rbp)        # write RIP back into the IRET frame

#; UINT64  Gs, Fs, Es, Ds, Cs, Ss;
    popq    %rax
    # mov   %rax, %gs ; not for gs
    popq    %rax
    # mov   %rax, %fs ; not for fs
    # (X64 will not use fs and gs, so we do not restore it)
    popq    %rax
    movq    %rax, %es
    popq    %rax
    movq    %rax, %ds
    popq    32(%rbp)        # for cs
    popq    56(%rbp)        # for ss

#; UINT64  Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;
#; UINT64  R8, R9, R10, R11, R12, R13, R14, R15;
    popq    %rdi
    popq    %rsi
    addq    $8, %rsp        # not for rbp
    popq    48(%rbp)        # for rsp
    popq    %rbx
    popq    %rdx
    popq    %rcx
    popq    %rax
    popq    %r8
    popq    %r9
    popq    %r10
    popq    %r11
    popq    %r12
    popq    %r13
    popq    %r14
    popq    %r15

    movq    %rbp, %rsp      # discard the saved context
    popq    %rbp
    addq    $16, %rsp       # skip vector number and error code
    iretq
353
354
355 #text ENDS
356
357 #END
358
359