iret\r
\r
SwitchToRealProcEnd:\r
+\r
+;-------------------------------------------------------------------------------------\r
+; AsmRelocateApLoopAmdSev (MwaitSupport, ApTargetCState, PmCodeSegment, TopOfApStack, CountTofinish, Pm16CodeSegment, SevEsAPJumpTable, WakeupBuffer);\r
+;-------------------------------------------------------------------------------------\r
+\r
+AsmRelocateApLoopAmdSevStart:\r
+BITS 64\r
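+ ;\r
+ ; Argument mapping (Microsoft x64 calling convention used by this code):\r
+ ; rcx = MwaitSupport, rdx = ApTargetCState, r8 = PmCodeSegment,\r
+ ; r9 = TopOfApStack, and the remaining parameters are read from the\r
+ ; caller's stack past the return address and 32-byte shadow space:\r
+ ; [rsp + 40] = CountTofinish\r
+ ; [rsp + 48] = Pm16CodeSegment\r
+ ; [rsp + 56] = SevEsAPJumpTable\r
+ ; [rsp + 64] = WakeupBuffer\r
+ ;\r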
+ cmp qword [rsp + 56], 0 ; SevEsAPJumpTable\r
+ je NoSevEsAmdSev\r
+\r
+ ;\r
+ ; Perform some SEV-ES related setup before leaving 64-bit mode\r
+ ;\r
+ push rcx\r
+ push rdx\r
+\r
+ ;\r
+ ; Get the RDX reset value using CPUID\r
+ ;\r
+ mov rax, 1\r
+ cpuid\r
+ mov rsi, rax ; Save off the reset value for RDX\r
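+ ;\r
+ ; Note: CPUID function 1 reports the family/model/stepping signature in\r
+ ; EAX, which is the value an AMD processor presents in EDX after reset;\r
+ ; it is parked in RSI here and restored into EDX ("Restore RDX reset\r
+ ; value") just before the AP is handed back to 16-bit code.\r
+ ;\r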
+\r
+ ;\r
+ ; Prepare the GHCB for the AP_HLT_LOOP VMGEXIT call\r
+ ; - Must be done while in 64-bit long mode so that writes to\r
+ ; the GHCB memory will be unencrypted.\r
+ ; - No NAE events can be generated once this is set otherwise\r
+ ; the AP_RESET_HOLD SW_EXITCODE will be overwritten.\r
+ ;\r
+ mov rcx, 0xc0010130\r
+ rdmsr ; Retrieve current GHCB address\r
+ shl rdx, 32\r
+ or rdx, rax\r
+\r
+ mov rdi, rdx\r
+ xor rax, rax\r
+ mov rcx, 0x800\r
+ shr rcx, 3\r
+ rep stosq ; Clear the GHCB\r
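+ ;\r
+ ; MSR 0xC0010130 is the SEV-ES GHCB MSR: RDMSR returns the low dword in\r
+ ; EAX and the high dword in EDX, which are recombined above into the\r
+ ; 64-bit GHCB address before zeroing the first 0x800 bytes of the page.\r
+ ;\r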
+\r
+ mov rax, 0x80000004 ; VMGEXIT AP_RESET_HOLD\r
+ mov [rdx + 0x390], rax\r
+ mov rax, 114 ; Set SwExitCode valid bit\r
+ bts [rdx + 0x3f0], rax\r
+ inc rax ; Set SwExitInfo1 valid bit\r
+ bts [rdx + 0x3f0], rax\r
+ inc rax ; Set SwExitInfo2 valid bit\r
+ bts [rdx + 0x3f0], rax\r
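+ ;\r
+ ; The GHCB valid bitmap lives at offset 0x3f0; bit N marks the qword at\r
+ ; offset N * 8 as valid, so bits 114/115/116 cover SW_EXITCODE (0x390),\r
+ ; SW_EXITINFO1 (0x398) and SW_EXITINFO2 (0x3a0) respectively.\r
+ ;\r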
+\r
+ pop rdx\r
+ pop rcx\r
+\r
+NoSevEsAmdSev:\r
+ cli ; Disable interrupt before switching to 32-bit mode\r
+ mov rax, [rsp + 40] ; CountTofinish\r
+ lock dec dword [rax] ; (*CountTofinish)--\r
+\r
+ mov r10, [rsp + 48] ; Pm16CodeSegment\r
+ mov rax, [rsp + 56] ; SevEsAPJumpTable\r
+ mov rbx, [rsp + 64] ; WakeupBuffer\r
+ mov rsp, r9 ; TopOfApStack\r
+\r
+ push rax ; Save SevEsAPJumpTable\r
+ push rbx ; Save WakeupBuffer\r
+ push r10 ; Save Pm16CodeSegment\r
+ push rcx ; Save MwaitSupport\r
+ push rdx ; Save ApTargetCState\r
+\r
+ lea rax, [PmEntryAmdSev] ; rax <- The start address of transition code\r
+\r
+ push r8\r
+ push rax\r
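+ ;\r
+ ; r8 (PmCodeSegment) and rax (PmEntryAmdSev) form the CS:RIP pair that\r
+ ; the far return below consumes to enter the 32-bit transition code.\r
+ ;\r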
+\r
+ ;\r
+ ; Clear R8 - R15, for reset, before going into 32-bit mode\r
+ ;\r
+ xor r8, r8\r
+ xor r9, r9\r
+ xor r10, r10\r
+ xor r11, r11\r
+ xor r12, r12\r
+ xor r13, r13\r
+ xor r14, r14\r
+ xor r15, r15\r
+\r
+ ;\r
+ ; Far return into 32-bit mode\r
+ ;\r
+o64 retf\r
+\r
+BITS 32\r
+PmEntryAmdSev:\r
+ mov eax, cr0\r
+ btr eax, 31 ; Clear CR0.PG\r
+ mov cr0, eax ; Disable paging and caches\r
+\r
+ mov ecx, 0xc0000080\r
+ rdmsr\r
+ and ah, ~ 1 ; Clear LME\r
+ wrmsr\r
+ mov eax, cr4\r
+ and al, ~ (1 << 5) ; Clear PAE\r
+ mov cr4, eax\r
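+ ;\r
+ ; Leaving long mode follows the architectural order: clear CR0.PG first,\r
+ ; then EFER.LME, then CR4.PAE, after which the AP runs as a plain\r
+ ; 32-bit protected-mode processor.\r
+ ;\r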
+\r
+ pop edx\r
+ add esp, 4\r
+ pop ecx\r
+ add esp, 4\r
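+ ;\r
+ ; ApTargetCState (edx) and MwaitSupport (ecx) were pushed as 8-byte\r
+ ; values while in 64-bit mode, so each 32-bit pop takes the low dword\r
+ ; and the following add esp, 4 discards the upper dword.\r
+ ;\r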
+\r
+MwaitCheckAmdSev:\r
+ cmp cl, 1 ; Check mwait-monitor support\r
+ jnz HltLoopAmdSev\r
+ mov ebx, edx ; Save C-State to ebx\r
+MwaitLoopAmdSev:\r
+ cli\r
+ mov eax, esp ; Set Monitor Address\r
+ xor ecx, ecx ; ecx = 0\r
+ xor edx, edx ; edx = 0\r
+ monitor\r
+ mov eax, ebx ; Mwait Cx, Target C-State per eax[7:4]\r
+ shl eax, 4\r
+ mwait\r
+ jmp MwaitLoopAmdSev\r
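+ ;\r
+ ; MONITOR arms the address in EAX (the top of this AP's stack) with\r
+ ; ECX/EDX as extensions/hints, and MWAIT parks the AP in the requested\r
+ ; C-State (EAX[7:4]); a write to the monitored line wakes the AP, which\r
+ ; simply re-arms and waits again.\r
+ ;\r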
+\r
+HltLoopAmdSev:\r
+ pop edx ; PM16CodeSegment\r
+ add esp, 4\r
+ pop ebx ; WakeupBuffer\r
+ add esp, 4\r
+ pop eax ; SevEsAPJumpTable\r
+ add esp, 4\r
+ cmp eax, 0 ; Check for SEV-ES\r
+ je DoHltAmdSev\r
+\r
+ cli\r
+ ;\r
+ ; SEV-ES is enabled, use VMGEXIT (GHCB information already\r
+ ; set by caller)\r
+ ;\r
+BITS 64\r
+ rep vmmcall\r
+BITS 32\r
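+ ;\r
+ ; Note: VMGEXIT is encoded as a REP prefix applied to VMMCALL, which is\r
+ ; why the instruction above is written as "rep vmmcall" (temporarily\r
+ ; assembled under BITS 64).\r
+ ;\r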
+\r
+ ;\r
+ ; Back from VMGEXIT AP_HLT_LOOP\r
+ ; Push the FLAGS/CS/IP values to use\r
+ ;\r
+ push word 0x0002 ; EFLAGS\r
+ xor ecx, ecx\r
+ mov cx, [eax + 2] ; CS\r
+ push cx\r
+ mov cx, [eax] ; IP\r
+ push cx\r
+ push word 0x0000 ; For alignment, will be discarded\r
+\r
+ push edx\r
+ push ebx\r
+\r
+ mov edx, esi ; Restore RDX reset value\r
+\r
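+ ;\r
+ ; The far return below pops EIP (ebx = WakeupBuffer) and CS\r
+ ; (edx = Pm16CodeSegment), transferring to the 16-bit wakeup code; the\r
+ ; FLAGS/CS/IP words pushed above are left on the stack for that code to\r
+ ; use when it resumes the AP at the address from the SevEsAPJumpTable.\r
+ ;\r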
+ retf\r
+\r
+DoHltAmdSev:\r
+ cli\r
+ hlt\r
+ jmp DoHltAmdSev\r
+\r
+BITS 64\r
+AsmRelocateApLoopAmdSevEnd:\r
%include "AmdSev.nasm"\r
\r
RendezvousFunnelProcEnd:\r
-;-------------------------------------------------------------------------------------\r
-; AsmRelocateApLoopAmdSev (MwaitSupport, ApTargetCState, PmCodeSegment, TopOfApStack, CountTofinish, Pm16CodeSegment, SevEsAPJumpTable, WakeupBuffer);\r
-;-------------------------------------------------------------------------------------\r
-\r
-AsmRelocateApLoopAmdSevStart:\r
-BITS 64\r
- cmp qword [rsp + 56], 0 ; SevEsAPJumpTable\r
- je NoSevEsAmdSev\r
-\r
- ;\r
- ; Perform some SEV-ES related setup before leaving 64-bit mode\r
- ;\r
- push rcx\r
- push rdx\r
-\r
- ;\r
- ; Get the RDX reset value using CPUID\r
- ;\r
- mov rax, 1\r
- cpuid\r
- mov rsi, rax ; Save off the reset value for RDX\r
-\r
- ;\r
- ; Prepare the GHCB for the AP_HLT_LOOP VMGEXIT call\r
- ; - Must be done while in 64-bit long mode so that writes to\r
- ; the GHCB memory will be unencrypted.\r
- ; - No NAE events can be generated once this is set otherwise\r
- ; the AP_RESET_HOLD SW_EXITCODE will be overwritten.\r
- ;\r
- mov rcx, 0xc0010130\r
- rdmsr ; Retrieve current GHCB address\r
- shl rdx, 32\r
- or rdx, rax\r
-\r
- mov rdi, rdx\r
- xor rax, rax\r
- mov rcx, 0x800\r
- shr rcx, 3\r
- rep stosq ; Clear the GHCB\r
-\r
- mov rax, 0x80000004 ; VMGEXIT AP_RESET_HOLD\r
- mov [rdx + 0x390], rax\r
- mov rax, 114 ; Set SwExitCode valid bit\r
- bts [rdx + 0x3f0], rax\r
- inc rax ; Set SwExitInfo1 valid bit\r
- bts [rdx + 0x3f0], rax\r
- inc rax ; Set SwExitInfo2 valid bit\r
- bts [rdx + 0x3f0], rax\r
-\r
- pop rdx\r
- pop rcx\r
-\r
-NoSevEsAmdSev:\r
- cli ; Disable interrupt before switching to 32-bit mode\r
- mov rax, [rsp + 40] ; CountTofinish\r
- lock dec dword [rax] ; (*CountTofinish)--\r
-\r
- mov r10, [rsp + 48] ; Pm16CodeSegment\r
- mov rax, [rsp + 56] ; SevEsAPJumpTable\r
- mov rbx, [rsp + 64] ; WakeupBuffer\r
- mov rsp, r9 ; TopOfApStack\r
-\r
- push rax ; Save SevEsAPJumpTable\r
- push rbx ; Save WakeupBuffer\r
- push r10 ; Save Pm16CodeSegment\r
- push rcx ; Save MwaitSupport\r
- push rdx ; Save ApTargetCState\r
-\r
- lea rax, [PmEntryAmdSev] ; rax <- The start address of transition code\r
-\r
- push r8\r
- push rax\r
-\r
- ;\r
- ; Clear R8 - R15, for reset, before going into 32-bit mode\r
- ;\r
- xor r8, r8\r
- xor r9, r9\r
- xor r10, r10\r
- xor r11, r11\r
- xor r12, r12\r
- xor r13, r13\r
- xor r14, r14\r
- xor r15, r15\r
-\r
- ;\r
- ; Far return into 32-bit mode\r
- ;\r
-o64 retf\r
-\r
-BITS 32\r
-PmEntryAmdSev:\r
- mov eax, cr0\r
- btr eax, 31 ; Clear CR0.PG\r
- mov cr0, eax ; Disable paging and caches\r
-\r
- mov ecx, 0xc0000080\r
- rdmsr\r
- and ah, ~ 1 ; Clear LME\r
- wrmsr\r
- mov eax, cr4\r
- and al, ~ (1 << 5) ; Clear PAE\r
- mov cr4, eax\r
-\r
- pop edx\r
- add esp, 4\r
- pop ecx\r
- add esp, 4\r
-\r
-MwaitCheckAmdSev:\r
- cmp cl, 1 ; Check mwait-monitor support\r
- jnz HltLoopAmdSev\r
- mov ebx, edx ; Save C-State to ebx\r
-MwaitLoopAmdSev:\r
- cli\r
- mov eax, esp ; Set Monitor Address\r
- xor ecx, ecx ; ecx = 0\r
- xor edx, edx ; edx = 0\r
- monitor\r
- mov eax, ebx ; Mwait Cx, Target C-State per eax[7:4]\r
- shl eax, 4\r
- mwait\r
- jmp MwaitLoopAmdSev\r
-\r
-HltLoopAmdSev:\r
- pop edx ; PM16CodeSegment\r
- add esp, 4\r
- pop ebx ; WakeupBuffer\r
- add esp, 4\r
- pop eax ; SevEsAPJumpTable\r
- add esp, 4\r
- cmp eax, 0 ; Check for SEV-ES\r
- je DoHltAmdSev\r
-\r
- cli\r
- ;\r
- ; SEV-ES is enabled, use VMGEXIT (GHCB information already\r
- ; set by caller)\r
- ;\r
-BITS 64\r
- rep vmmcall\r
-BITS 32\r
-\r
- ;\r
- ; Back from VMGEXIT AP_HLT_LOOP\r
- ; Push the FLAGS/CS/IP values to use\r
- ;\r
- push word 0x0002 ; EFLAGS\r
- xor ecx, ecx\r
- mov cx, [eax + 2] ; CS\r
- push cx\r
- mov cx, [eax] ; IP\r
- push cx\r
- push word 0x0000 ; For alignment, will be discarded\r
-\r
- push edx\r
- push ebx\r
-\r
- mov edx, esi ; Restore RDX reset value\r
-\r
- retf\r
-\r
-DoHltAmdSev:\r
- cli\r
- hlt\r
- jmp DoHltAmdSev\r
-\r
-BITS 64\r
-AsmRelocateApLoopAmdSevEnd:\r
\r
;-------------------------------------------------------------------------------------\r
; AsmRelocateApLoop (MwaitSupport, ApTargetCState, TopOfApStack, CountTofinish, Cr3);\r