]>
Commit | Line | Data |
---|---|---|
d94e5f67 | 1 | ;------------------------------------------------------------------------------ ;\r |
2aa107c0 | 2 | ; Copyright (c) 2015 - 2022, Intel Corporation. All rights reserved.<BR>\r |
0acd8697 | 3 | ; SPDX-License-Identifier: BSD-2-Clause-Patent\r |
d94e5f67 JF |
4 | ;\r |
5 | ; Module Name:\r | |
6 | ;\r | |
7 | ; MpFuncs.nasm\r | |
8 | ;\r | |
9 | ; Abstract:\r | |
10 | ;\r | |
11 | ; This is the assembly code for MP support\r | |
12 | ;\r | |
13 | ;-------------------------------------------------------------------------------\r | |
14 | \r | |
15 | %include "MpEqu.inc"\r | |
16 | extern ASM_PFX(InitializeFloatingPointUnits)\r | |
17 | \r | |
e2289d19 BS |
;
; OneTimeCall/OneTimeCallRet implement a stackless call/return pair: APs
; arrive here with no usable stack, so a real call/ret is not possible.
; OneTimeCall jumps to the target routine and plants a return label,
; <target>OneTimeCallReturn, at the call site; the routine "returns" with
; OneTimeCallRet, which jumps back to that label.  Because the return label
; is unique per target, each routine may be invoked this way from exactly
; one call site.
; (Fix: label suffix was misspelled "OneTimerCallReturn"; renamed to
; OneTimeCallReturn to match the macro names.  Both the producer and the
; consumer of the label are these two macros, so the rename is self-contained.)
;
%macro OneTimeCall 1
    jmp     %1
%1 %+ OneTimeCallReturn:
%endmacro

%macro OneTimeCallRet 1
    jmp     %1 %+ OneTimeCallReturn
%endmacro
26 | \r | |
d94e5f67 JF |
27 | DEFAULT REL\r |
28 | \r | |
29 | SECTION .text\r | |
30 | \r | |
;-------------------------------------------------------------------------------------
;RendezvousFunnelProc procedure follows. All APs execute their procedure. This
;procedure serializes all the AP processors through an Init sequence. It must be
;noted that APs arrive here very raw...ie: real mode, no stack.
;ALSO THIS PROCEDURE IS EXECUTED BY APs ONLY ON 16 BIT MODE. HENCE THIS PROC
;IS IN MACHINE CODE.
;
; The code between RendezvousFunnelProcStart and RendezvousFunnelProcEnd is
; copied to a wakeup buffer at runtime (see AsmGetAddressMap); all control
; transfers below therefore go through label-relative offsets or values read
; from the MP_CPU_EXCHANGE_INFO structure that the BSP places after the code.
;-------------------------------------------------------------------------------------
global ASM_PFX(RendezvousFunnelProc)
ASM_PFX(RendezvousFunnelProc):
RendezvousFunnelProcStart:
; At this point CS = 0x(vv00) and ip= 0x0.
; Save BIST information to ebp firstly

BITS 16
    mov        ebp, eax                        ; Save BIST information

    mov        ax, cs                          ; DS/ES/SS = CS so the exchange-info
    mov        ds, ax                          ; fields can be read with plain [si]
    mov        es, ax
    mov        ss, ax
    xor        ax, ax
    mov        fs, ax
    mov        gs, ax

    mov        si, MP_CPU_EXCHANGE_INFO_FIELD (BufferStart)
    mov        ebx, [si]                       ; ebx = wakeup buffer start (used as
                                               ; base for exchange-info accesses below)

    mov        si, MP_CPU_EXCHANGE_INFO_FIELD (DataSegment)
    mov        edx, [si]                       ; edx = flat data segment selector

    ;
    ; Get start address of 32-bit code in low memory (<1MB)
    ;
    mov        edi, MP_CPU_EXCHANGE_INFO_FIELD (ModeTransitionMemory)

    mov        si, MP_CPU_EXCHANGE_INFO_FIELD (GdtrProfile)
    o32 lgdt   [cs:si]

    mov        si, MP_CPU_EXCHANGE_INFO_FIELD (IdtrProfile)
    o32 lidt   [cs:si]

    ;
    ; Switch to protected mode
    ;
    mov        eax, cr0                        ; Get control register 0
    or         eax, 000000003h                 ; Set PE bit (bit #0) & MP
    mov        cr0, eax

    ; Switch to 32-bit code (>1MB)
    o32 jmp far [cs:di]                        ; far pointer was stored at [ModeTransitionMemory]

;
; Following code must be copied to memory with type of EfiBootServicesCode.
; This is required if NX is enabled for EfiBootServicesCode of memory.
;
BITS 32
Flat32Start:                                   ; protected mode entry point
    mov        ds, dx                          ; load the flat data selector saved in edx
    mov        es, dx
    mov        fs, dx
    mov        gs, dx
    mov        ss, dx

    ;
    ; Enable execute disable bit
    ;
    mov        esi, MP_CPU_EXCHANGE_INFO_FIELD (EnableExecuteDisable)
    cmp        byte [ebx + esi], 0
    jz         SkipEnableExecuteDisableBit

    mov        ecx, 0c0000080h                 ; EFER MSR number
    rdmsr                                      ; Read EFER
    bts        eax, 11                         ; Enable Execute Disable Bit
    wrmsr                                      ; Write EFER

SkipEnableExecuteDisableBit:
    ;
    ; Enable PAE
    ;
    mov        eax, cr4
    bts        eax, 5                          ; CR4.PAE = 1 (required for long mode)

    mov        esi, MP_CPU_EXCHANGE_INFO_FIELD (Enable5LevelPaging)
    cmp        byte [ebx + esi], 0
    jz         SkipEnable5LevelPaging

    ;
    ; Enable 5 Level Paging
    ;
    bts        eax, 12                         ; Set LA57=1.

SkipEnable5LevelPaging:

    mov        cr4, eax                        ; commit PAE (and optionally LA57) in one write

    ;
    ; Load page table
    ;
    mov        esi, MP_CPU_EXCHANGE_INFO_FIELD (Cr3)  ; esi = offset of Cr3 field; value goes to ecx
    mov        ecx, [ebx + esi]
    mov        cr3, ecx                        ; Load CR3

    ;
    ; Enable long mode
    ;
    mov        ecx, 0c0000080h                 ; EFER MSR number
    rdmsr                                      ; Read EFER
    bts        eax, 8                          ; Set LME=1
    wrmsr                                      ; Write EFER

    ;
    ; Enable paging
    ;
    mov        eax, cr0                        ; Read CR0
    bts        eax, 31                         ; Set PG=1 (activates long mode since LME=1)
    mov        cr0, eax                        ; Write CR0

    ;
    ; Far jump to 64-bit code
    ;
    mov        edi, MP_CPU_EXCHANGE_INFO_FIELD (ModeHighMemory)
    add        edi, ebx
    jmp far    [edi]

BITS 64

;
; Required for the AMD SEV helper functions
;
%include "AmdSev.nasm"

LongModeStart:
    mov        esi, ebx                       ; esi = wakeup buffer base (32-bit is
                                              ; sufficient — NOTE(review): assumes the
                                              ; buffer sits below 4GB; confirm with caller)
    lea        edi, [esi + MP_CPU_EXCHANGE_INFO_FIELD (InitFlag)]
    cmp        qword [edi], 1       ; ApInitConfig
    jnz        GetApicId

    ; Increment the number of APs executing here as early as possible
    ; This is decremented in C code when AP is finished executing
    mov        edi, esi
    add        edi, MP_CPU_EXCHANGE_INFO_FIELD (NumApsExecuting)
    lock inc   dword [edi]

    ; AP init: atomically claim a unique index for this AP
    mov        edi, esi
    add        edi, MP_CPU_EXCHANGE_INFO_FIELD (ApIndex)
    mov        ebx, 1
    lock xadd  dword [edi], ebx               ; EBX = ApIndex++
    inc        ebx                            ; EBX is CpuNumber

    ; program stack: carve this AP's stack out of the shared stack area
    mov        edi, esi
    add        edi, MP_CPU_EXCHANGE_INFO_FIELD (StackSize)
    mov        eax, dword [edi]
    mov        ecx, ebx
    inc        ecx
    mul        ecx                            ; EAX = StackSize * (CpuNumber + 1)
    mov        edi, esi
    add        edi, MP_CPU_EXCHANGE_INFO_FIELD (StackStart)
    add        rax, qword [edi]
    mov        rsp, rax                       ; rsp = StackStart + StackSize*(CpuNumber+1)

    ;
    ; Setup the GHCB when AMD SEV-ES active.
    ;
    OneTimeCall SevEsSetupGhcb
    jmp        CProcedureInvoke

GetApicId:
    ;
    ; Use the GHCB protocol to get the ApicId when SEV-ES is active.
    ;
    OneTimeCall SevEsGetApicId

DoCpuid:
    mov        eax, 0
    cpuid
    cmp        eax, 0bh
    jb         NoX2Apic             ; CPUID level below CPUID_EXTENDED_TOPOLOGY

    mov        eax, 0bh
    xor        ecx, ecx
    cpuid
    test       ebx, 0ffffh
    jz         NoX2Apic             ; CPUID.0BH:EBX[15:0] is zero

    ; Processor is x2APIC capable; 32-bit x2APIC ID is already in EDX
    jmp        GetProcessorNumber

NoX2Apic:
    ; Processor is not x2APIC capable, so get 8-bit APIC ID
    mov        eax, 1
    cpuid
    shr        ebx, 24                        ; CPUID.01H:EBX[31:24] = initial APIC ID
    mov        edx, ebx

GetProcessorNumber:
    ;
    ; Get processor number for this AP by scanning the CpuInfo array for a
    ; matching initial APIC ID (edx).
    ; Note that BSP may become an AP due to SwitchBsp()
    ;
    xor         ebx, ebx                      ; ebx = candidate processor number
    lea         eax, [esi + MP_CPU_EXCHANGE_INFO_FIELD (CpuInfo)]
    mov         rdi, [eax]                    ; rdi = CPU_INFO_IN_HOB array

GetNextProcNumber:
    cmp         dword [rdi + CPU_INFO_IN_HOB.InitialApicId], edx ; APIC ID match?
    jz          ProgramStack
    add         rdi, CPU_INFO_IN_HOB_size
    inc         ebx
    jmp         GetNextProcNumber              ; NOTE(review): no bound check — relies on
                                               ; the HOB containing every APIC ID

ProgramStack:
    mov         rsp, qword [rdi + CPU_INFO_IN_HOB.ApTopOfStack]

CProcedureInvoke:
    push       rbp               ; Push BIST data at top of AP stack
    xor        rbp, rbp          ; Clear ebp for call stack trace
    push       rbp
    mov        rbp, rsp

    mov        rax, qword [esi + MP_CPU_EXCHANGE_INFO_FIELD (InitializeFloatingPointUnits)]
    sub        rsp, 20h          ; 32-byte shadow space (MS x64 calling convention)
    call       rax               ; Call assembly function to initialize FPU per UEFI spec
    add        rsp, 20h

    mov        edx, ebx          ; edx is ApIndex (2nd argument)
    mov        ecx, esi
    add        ecx, MP_CPU_EXCHANGE_INFO_OFFSET ; rcx is address of exchange info data buffer (1st argument)

    mov        edi, esi
    add        edi, MP_CPU_EXCHANGE_INFO_FIELD (CFunction)
    mov        rax, qword [edi]

    sub        rsp, 20h          ; shadow space for the C function
    call       rax               ; Invoke C function
    add        rsp, 20h
    jmp        $                 ; Should never reach here

RendezvousFunnelProcEnd:
271 | \r | |
7b7508ad TL |
;-------------------------------------------------------------------------------------
;SwitchToRealProc procedure follows.
;ALSO THIS PROCEDURE IS EXECUTED BY APs TRANSITIONING TO 16 BIT MODE. HENCE THIS PROC
;IS IN MACHINE CODE.
; SwitchToRealProc (UINTN BufferStart, UINT16 Code16, UINT16 Code32, UINTN StackStart)
; rcx - Buffer Start
; rdx - Code16 Selector Offset
; r8  - Code32 Selector Offset
; r9  - Stack Start
;
; Tears an AP down from 64-bit long mode all the way to real mode and IRETs to
; CS:IP = (BufferStart >> 4):0000.  Does not return.
;-------------------------------------------------------------------------------------
global ASM_PFX(SwitchToRealProc)
ASM_PFX(SwitchToRealProc):
SwitchToRealProcStart:
BITS 64
    cli

    ;
    ; Get RDX reset value before changing stacks since the
    ; new stack won't be able to accommodate a #VC exception.
    ;
    push       rax
    push       rbx
    push       rcx
    push       rdx

    mov        rax, 1                          ; CPUID.01H:EAX = family/model/stepping,
    cpuid                                      ; which is the architectural RDX reset value
    mov        rsi, rax                        ; Save off the reset value for RDX

    pop        rdx
    pop        rcx
    pop        rbx
    pop        rax

    ;
    ; Establish stack below 1MB
    ;
    mov        rsp, r9

    ;
    ; Push ultimate Reset Vector onto the stack (consumed by the final iret)
    ;
    mov        rax, rcx
    shr        rax, 4                          ; real-mode segment = BufferStart >> 4
    push       word 0x0002                     ; RFLAGS (only reserved bit 1 set, IF=0)
    push       ax                              ; CS
    push       word 0x0000                     ; RIP
    push       word 0x0000                     ; For alignment, will be discarded

    ;
    ; Get address of "16-bit operand size" label
    ;
    lea        rbx, [PM16Mode]

    ;
    ; Push addresses used to change to compatibility mode
    ;
    lea        rax, [CompatMode]
    push       r8                              ; Code32 selector
    push       rax                             ; 32-bit entry point for retfq below

    ;
    ; Clear R8 - R15, for reset, before going into 32-bit mode
    ;
    xor        r8, r8
    xor        r9, r9
    xor        r10, r10
    xor        r11, r11
    xor        r12, r12
    xor        r13, r13
    xor        r14, r14
    xor        r15, r15

    ;
    ; Far return into 32-bit mode
    ;
    retfq

BITS 32
CompatMode:
    ;
    ; Set up stack to prepare for exiting protected mode
    ;
    push       edx                             ; Code16 CS
    push       ebx                             ; PM16Mode label address

    ;
    ; Disable paging
    ;
    mov        eax, cr0                        ; Read CR0
    btr        eax, 31                         ; Set PG=0
    mov        cr0, eax                        ; Write CR0

    ;
    ; Disable long mode
    ;
    mov        ecx, 0c0000080h                 ; EFER MSR number
    rdmsr                                      ; Read EFER
    btr        eax, 8                          ; Set LME=0
    wrmsr                                      ; Write EFER

    ;
    ; Disable PAE
    ;
    mov        eax, cr4                        ; Read CR4
    btr        eax, 5                          ; Set PAE=0
    mov        cr4, eax                        ; Write CR4

    mov        edx, esi                        ; Restore RDX reset value

    ;
    ; Switch to 16-bit operand size
    ;
    retf                                       ; jumps to PM16Mode with the Code16 selector

BITS 16
;
; At entry to this label
; - RDX will have its reset value
; - On the top of the stack
; - Alignment data (two bytes) to be discarded
; - IP for Real Mode (two bytes)
; - CS for Real Mode (two bytes)
;
; This label is also used with AsmRelocateApLoop. During MP finalization,
; the code from PM16Mode to SwitchToRealProcEnd is copied to the start of
; the WakeupBuffer, allowing a parked AP to be booted by an OS.
;
PM16Mode:
    mov        eax, cr0                        ; Read CR0
    btr        eax, 0                          ; Set PE=0
    mov        cr0, eax                        ; Write CR0

    pop        ax                              ; Discard alignment data

    ;
    ; Clear registers (except RDX and RSP) before going into 16-bit mode
    ;
    xor        eax, eax
    xor        ebx, ebx
    xor        ecx, ecx
    xor        esi, esi
    xor        edi, edi
    xor        ebp, ebp

    iret                                       ; pops IP, CS, FLAGS pushed earlier

SwitchToRealProcEnd:
420 | \r | |
76157021 | 421 | ;-------------------------------------------------------------------------------------\r |
20da7ca4 | 422 | ; AsmRelocateApLoop (MwaitSupport, ApTargetCState, PmCodeSegment, TopOfApStack, CountTofinish, Pm16CodeSegment, SevEsAPJumpTable, WakeupBuffer);\r |
76157021 JF |
423 | ;-------------------------------------------------------------------------------------\r |
424 | global ASM_PFX(AsmRelocateApLoop)\r | |
425 | ASM_PFX(AsmRelocateApLoop):\r | |
426 | AsmRelocateApLoopStart:\r | |
7b7508ad | 427 | BITS 64\r |
20da7ca4 TL |
428 | cmp qword [rsp + 56], 0 ; SevEsAPJumpTable\r |
429 | je NoSevEs\r | |
430 | \r | |
431 | ;\r | |
432 | ; Perform some SEV-ES related setup before leaving 64-bit mode\r | |
433 | ;\r | |
434 | push rcx\r | |
435 | push rdx\r | |
436 | \r | |
437 | ;\r | |
438 | ; Get the RDX reset value using CPUID\r | |
439 | ;\r | |
440 | mov rax, 1\r | |
441 | cpuid\r | |
442 | mov rsi, rax ; Save off the reset value for RDX\r | |
443 | \r | |
444 | ;\r | |
445 | ; Prepare the GHCB for the AP_HLT_LOOP VMGEXIT call\r | |
446 | ; - Must be done while in 64-bit long mode so that writes to\r | |
447 | ; the GHCB memory will be unencrypted.\r | |
448 | ; - No NAE events can be generated once this is set otherwise\r | |
449 | ; the AP_RESET_HOLD SW_EXITCODE will be overwritten.\r | |
450 | ;\r | |
451 | mov rcx, 0xc0010130\r | |
452 | rdmsr ; Retrieve current GHCB address\r | |
453 | shl rdx, 32\r | |
454 | or rdx, rax\r | |
455 | \r | |
456 | mov rdi, rdx\r | |
457 | xor rax, rax\r | |
458 | mov rcx, 0x800\r | |
459 | shr rcx, 3\r | |
460 | rep stosq ; Clear the GHCB\r | |
461 | \r | |
462 | mov rax, 0x80000004 ; VMGEXIT AP_RESET_HOLD\r | |
463 | mov [rdx + 0x390], rax\r | |
fb2a1a36 TL |
464 | mov rax, 114 ; Set SwExitCode valid bit\r |
465 | bts [rdx + 0x3f0], rax\r | |
466 | inc rax ; Set SwExitInfo1 valid bit\r | |
467 | bts [rdx + 0x3f0], rax\r | |
468 | inc rax ; Set SwExitInfo2 valid bit\r | |
469 | bts [rdx + 0x3f0], rax\r | |
20da7ca4 TL |
470 | \r |
471 | pop rdx\r | |
472 | pop rcx\r | |
473 | \r | |
474 | NoSevEs:\r | |
a7bbe9d2 | 475 | cli ; Disable interrupt before switching to 32-bit mode\r |
9f91cb01 JF |
476 | mov rax, [rsp + 40] ; CountTofinish\r |
477 | lock dec dword [rax] ; (*CountTofinish)--\r | |
76157021 | 478 | \r |
20da7ca4 TL |
479 | mov r10, [rsp + 48] ; Pm16CodeSegment\r |
480 | mov rax, [rsp + 56] ; SevEsAPJumpTable\r | |
481 | mov rbx, [rsp + 64] ; WakeupBuffer\r | |
482 | mov rsp, r9 ; TopOfApStack\r | |
483 | \r | |
484 | push rax ; Save SevEsAPJumpTable\r | |
485 | push rbx ; Save WakeupBuffer\r | |
486 | push r10 ; Save Pm16CodeSegment\r | |
487 | push rcx ; Save MwaitSupport\r | |
488 | push rdx ; Save ApTargetCState\r | |
489 | \r | |
490 | lea rax, [PmEntry] ; rax <- The start address of transition code\r | |
76157021 JF |
491 | \r |
492 | push r8\r | |
20da7ca4 TL |
493 | push rax\r |
494 | \r | |
495 | ;\r | |
496 | ; Clear R8 - R15, for reset, before going into 32-bit mode\r | |
497 | ;\r | |
498 | xor r8, r8\r | |
499 | xor r9, r9\r | |
500 | xor r10, r10\r | |
501 | xor r11, r11\r | |
502 | xor r12, r12\r | |
503 | xor r13, r13\r | |
504 | xor r14, r14\r | |
505 | xor r15, r15\r | |
506 | \r | |
507 | ;\r | |
508 | ; Far return into 32-bit mode\r | |
509 | ;\r | |
2aa107c0 | 510 | retfq\r |
20da7ca4 | 511 | \r |
76157021 JF |
512 | BITS 32\r |
513 | PmEntry:\r | |
514 | mov eax, cr0\r | |
515 | btr eax, 31 ; Clear CR0.PG\r | |
516 | mov cr0, eax ; Disable paging and caches\r | |
517 | \r | |
76157021 JF |
518 | mov ecx, 0xc0000080\r |
519 | rdmsr\r | |
520 | and ah, ~ 1 ; Clear LME\r | |
521 | wrmsr\r | |
522 | mov eax, cr4\r | |
523 | and al, ~ (1 << 5) ; Clear PAE\r | |
524 | mov cr4, eax\r | |
525 | \r | |
526 | pop edx\r | |
527 | add esp, 4\r | |
528 | pop ecx,\r | |
529 | add esp, 4\r | |
20da7ca4 TL |
530 | \r |
531 | MwaitCheck:\r | |
76157021 JF |
532 | cmp cl, 1 ; Check mwait-monitor support\r |
533 | jnz HltLoop\r | |
534 | mov ebx, edx ; Save C-State to ebx\r | |
535 | MwaitLoop:\r | |
a7bbe9d2 | 536 | cli\r |
76157021 JF |
537 | mov eax, esp ; Set Monitor Address\r |
538 | xor ecx, ecx ; ecx = 0\r | |
539 | xor edx, edx ; edx = 0\r | |
540 | monitor\r | |
76157021 | 541 | mov eax, ebx ; Mwait Cx, Target C-State per eax[7:4]\r |
f56379f3 | 542 | shl eax, 4\r |
76157021 JF |
543 | mwait\r |
544 | jmp MwaitLoop\r | |
20da7ca4 | 545 | \r |
76157021 | 546 | HltLoop:\r |
20da7ca4 TL |
547 | pop edx ; PM16CodeSegment\r |
548 | add esp, 4\r | |
549 | pop ebx ; WakeupBuffer\r | |
550 | add esp, 4\r | |
551 | pop eax ; SevEsAPJumpTable\r | |
552 | add esp, 4\r | |
553 | cmp eax, 0 ; Check for SEV-ES\r | |
554 | je DoHlt\r | |
555 | \r | |
556 | cli\r | |
557 | ;\r | |
558 | ; SEV-ES is enabled, use VMGEXIT (GHCB information already\r | |
559 | ; set by caller)\r | |
560 | ;\r | |
561 | BITS 64\r | |
562 | rep vmmcall\r | |
563 | BITS 32\r | |
564 | \r | |
565 | ;\r | |
566 | ; Back from VMGEXIT AP_HLT_LOOP\r | |
567 | ; Push the FLAGS/CS/IP values to use\r | |
568 | ;\r | |
569 | push word 0x0002 ; EFLAGS\r | |
570 | xor ecx, ecx\r | |
571 | mov cx, [eax + 2] ; CS\r | |
572 | push cx\r | |
573 | mov cx, [eax] ; IP\r | |
574 | push cx\r | |
575 | push word 0x0000 ; For alignment, will be discarded\r | |
576 | \r | |
577 | push edx\r | |
578 | push ebx\r | |
579 | \r | |
580 | mov edx, esi ; Restore RDX reset value\r | |
581 | \r | |
582 | retf\r | |
583 | \r | |
584 | DoHlt:\r | |
76157021 JF |
585 | cli\r |
586 | hlt\r | |
20da7ca4 TL |
587 | jmp DoHlt\r |
588 | \r | |
76157021 JF |
589 | BITS 64\r |
590 | AsmRelocateApLoopEnd:\r | |
591 | \r | |
d94e5f67 JF |
;-------------------------------------------------------------------------------------
; AsmGetAddressMap (&AddressMap);
;
; Fills the caller-provided MP_ASSEMBLY_ADDRESS_MAP (rcx, MS x64 convention)
; with the runtime addresses and link-time sizes/offsets of the AP startup
; code fragments defined in this file.
;-------------------------------------------------------------------------------------
global ASM_PFX(AsmGetAddressMap)
ASM_PFX(AsmGetAddressMap):
    ; Link-time constants first: sizes of, and offsets into, the funnel code.
    mov        qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.RendezvousFunnelSize], RendezvousFunnelProcEnd - RendezvousFunnelProcStart
    mov        qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.ModeEntryOffset], LongModeStart - RendezvousFunnelProcStart
    mov        qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.ModeTransitionOffset], Flat32Start - RendezvousFunnelProcStart
    mov        qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.SwitchToRealSize], SwitchToRealProcEnd - SwitchToRealProcStart
    mov        qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.SwitchToRealOffset], SwitchToRealProcStart - RendezvousFunnelProcStart
    mov        qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.SwitchToRealNoNxOffset], SwitchToRealProcStart - Flat32Start
    mov        qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.SwitchToRealPM16ModeOffset], PM16Mode - RendezvousFunnelProcStart
    mov        qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.SwitchToRealPM16ModeSize], SwitchToRealProcEnd - PM16Mode
    mov        qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.RelocateApLoopFuncSize], AsmRelocateApLoopEnd - AsmRelocateApLoopStart
    ; Runtime (RIP-relative) addresses last; rax is scratch.
    lea        rax, [ASM_PFX(RendezvousFunnelProc)]
    mov        qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.RendezvousFunnelAddress], rax
    lea        rax, [ASM_PFX(AsmRelocateApLoop)]
    mov        qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.RelocateApLoopFuncAddress], rax
    ret
611 | \r | |
;-------------------------------------------------------------------------------------
;AsmExchangeRole procedure follows. This procedure executed by current BSP, that is
;about to become an AP. It switches its stack with the current AP.
;AsmExchangeRole (IN CPU_EXCHANGE_INFO *MyInfo, IN CPU_EXCHANGE_INFO *OthersInfo);
;
; Two CPUs run this concurrently: each stores its own context (registers on its
; stack, GDTR/IDTR/stack pointer in MyInfo), then spin-waits on State fields to
; hand its context to the other CPU and adopt the other's in return.
;-------------------------------------------------------------------------------------
global ASM_PFX(AsmExchangeRole)
ASM_PFX(AsmExchangeRole):
    ; DO NOT call other functions in this function, since 2 CPU may use 1 stack
    ; at the same time. If 1 CPU try to call a function, stack will be corrupted.

    push       rax
    push       rbx
    push       rcx
    push       rdx
    push       rsi
    push       rdi
    push       rbp
    push       r8
    push       r9
    push       r10
    push       r11
    push       r12
    push       r13
    push       r14
    push       r15

    mov        rax, cr0
    push       rax

    mov        rax, cr4
    push       rax

    ; rsi contains MyInfo pointer
    mov        rsi, rcx

    ; rdi contains OthersInfo pointer
    mov        rdi, rdx

    ; Store EFLAGS to stack; GDTR and IDTR registers to MyInfo
    pushfq
    sgdt       [rsi + CPU_EXCHANGE_ROLE_INFO.Gdtr]
    sidt       [rsi + CPU_EXCHANGE_ROLE_INFO.Idtr]

    ; Store its StackPointer (everything above was pushed onto this stack,
    ; so the other CPU restores the full context by adopting it)
    mov        [rsi + CPU_EXCHANGE_ROLE_INFO.StackPointer], rsp

    ; update its switch state to STORED
    mov        byte [rsi + CPU_EXCHANGE_ROLE_INFO.State], CPU_SWITCH_STATE_STORED

WaitForOtherStored:
    ; wait until the other CPU finish storing its state
    cmp        byte [rdi + CPU_EXCHANGE_ROLE_INFO.State], CPU_SWITCH_STATE_STORED
    jz         OtherStored
    pause
    jmp        WaitForOtherStored

OtherStored:
    ; Since another CPU already stored its state, load them
    ; load GDTR value
    lgdt       [rdi + CPU_EXCHANGE_ROLE_INFO.Gdtr]

    ; load IDTR value
    lidt       [rdi + CPU_EXCHANGE_ROLE_INFO.Idtr]

    ; load its future StackPointer
    mov        rsp, [rdi + CPU_EXCHANGE_ROLE_INFO.StackPointer]

    ; update the other CPU's switch state to LOADED
    mov        byte [rdi + CPU_EXCHANGE_ROLE_INFO.State], CPU_SWITCH_STATE_LOADED

WaitForOtherLoaded:
    ; wait until the other CPU finish loading new state,
    ; otherwise the data in stack may corrupt
    cmp        byte [rsi + CPU_EXCHANGE_ROLE_INFO.State], CPU_SWITCH_STATE_LOADED
    jz         OtherLoaded
    pause
    jmp        WaitForOtherLoaded

OtherLoaded:
    ; since the other CPU already get the data it want, leave this procedure
    ; (from here on we unwind the context the OTHER CPU pushed onto this stack)
    popfq

    pop        rax
    mov        cr4, rax

    pop        rax
    mov        cr0, rax

    pop        r15
    pop        r14
    pop        r13
    pop        r12
    pop        r11
    pop        r10
    pop        r9
    pop        r8
    pop        rbp
    pop        rdi
    pop        rsi
    pop        rdx
    pop        rcx
    pop        rbx
    pop        rax

    ret