/** @file\r
MP initialize support functions for DXE phase.\r
\r
- Copyright (c) 2016 - 2020, Intel Corporation. All rights reserved.<BR>\r
+ Copyright (c) 2016 - 2022, Intel Corporation. All rights reserved.<BR>\r
SPDX-License-Identifier: BSD-2-Clause-Patent\r
\r
**/\r
IN OUT VOID *Buffer\r
)\r
{\r
- CPU_MP_DATA *CpuMpData;\r
- BOOLEAN MwaitSupport;\r
- ASM_RELOCATE_AP_LOOP AsmRelocateApLoopFunc;\r
- UINTN ProcessorNumber;\r
- UINTN StackStart;\r
+ CPU_MP_DATA *CpuMpData;\r
+ BOOLEAN MwaitSupport;\r
+ ASM_RELOCATE_AP_LOOP AsmRelocateApLoopFunc;\r
+ ASM_RELOCATE_AP_LOOP_AMD AsmRelocateApLoopFuncAmd;\r
+ UINTN ProcessorNumber;\r
+ UINTN StackStart;\r
\r
MpInitLibWhoAmI (&ProcessorNumber);\r
CpuMpData = GetCpuMpData ();\r
MwaitSupport = IsMwaitSupport ();\r
- if (CpuMpData->UseSevEsAPMethod) {\r
- StackStart = CpuMpData->SevEsAPResetStackStart;\r
+ if (StandardSignatureIsAuthenticAMD ()) {\r
+ StackStart = CpuMpData->UseSevEsAPMethod ? CpuMpData->SevEsAPResetStackStart : mReservedTopOfApStack;\r
+ AsmRelocateApLoopFuncAmd = (ASM_RELOCATE_AP_LOOP_AMD)(UINTN)mReservedApLoopFunc;\r
+ AsmRelocateApLoopFuncAmd (\r
+ MwaitSupport,\r
+ CpuMpData->ApTargetCState,\r
+ CpuMpData->PmCodeSegment,\r
+ StackStart - ProcessorNumber * AP_SAFE_STACK_SIZE,\r
+ (UINTN)&mNumberToFinish,\r
+ CpuMpData->Pm16CodeSegment,\r
+ CpuMpData->SevEsAPBuffer,\r
+ CpuMpData->WakeupBuffer\r
+ );\r
} else {\r
- StackStart = mReservedTopOfApStack;\r
+ StackStart = mReservedTopOfApStack;\r
+ AsmRelocateApLoopFunc = (ASM_RELOCATE_AP_LOOP)(UINTN)mReservedApLoopFunc;\r
+ AsmRelocateApLoopFunc (\r
+ MwaitSupport,\r
+ CpuMpData->ApTargetCState,\r
+ CpuMpData->PmCodeSegment,\r
+ StackStart - ProcessorNumber * AP_SAFE_STACK_SIZE,\r
+ (UINTN)&mNumberToFinish,\r
+ CpuMpData->Pm16CodeSegment,\r
+ CpuMpData->SevEsAPBuffer,\r
+ CpuMpData->WakeupBuffer\r
+ );\r
}\r
\r
- AsmRelocateApLoopFunc = (ASM_RELOCATE_AP_LOOP)(UINTN)mReservedApLoopFunc;\r
- AsmRelocateApLoopFunc (\r
- MwaitSupport,\r
- CpuMpData->ApTargetCState,\r
- CpuMpData->PmCodeSegment,\r
- StackStart - ProcessorNumber * AP_SAFE_STACK_SIZE,\r
- (UINTN)&mNumberToFinish,\r
- CpuMpData->Pm16CodeSegment,\r
- CpuMpData->SevEsAPBuffer,\r
- CpuMpData->WakeupBuffer\r
- );\r
//\r
// It should never reach here\r
//\r
.RendezvousFunnelSize CTYPE_UINTN 1\r
.RelocateApLoopFuncAddress CTYPE_UINTN 1\r
.RelocateApLoopFuncSize CTYPE_UINTN 1\r
+ .RelocateApLoopFuncAddressAmd CTYPE_UINTN 1\r
+ .RelocateApLoopFuncSizeAmd CTYPE_UINTN 1\r
.ModeTransitionOffset CTYPE_UINTN 1\r
.SwitchToRealNoNxOffset CTYPE_UINTN 1\r
.SwitchToRealPM16ModeOffset CTYPE_UINTN 1\r
UINTN RendezvousFunnelSize;\r
UINT8 *RelocateApLoopFuncAddress;\r
UINTN RelocateApLoopFuncSize;\r
+ UINT8 *RelocateApLoopFuncAddressAmd;\r
+ UINTN RelocateApLoopFuncSizeAmd;\r
UINTN ModeTransitionOffset;\r
UINTN SwitchToRealNoNxOffset;\r
UINTN SwitchToRealPM16ModeOffset;\r
\r
extern EFI_GUID mCpuInitMpLibHobGuid;\r
\r
+/**\r
+ Assembly code to place AP into safe loop mode for AMD processors.\r
+ Place AP into targeted C-State if MONITOR is supported, otherwise\r
+ place AP into hlt state.\r
+ Place AP in protected mode if the current is long mode. Due to AP maybe\r
+ wakeup by some hardware event. It could avoid accessing page table that\r
+ may not available during booting to OS.\r
+ @param[in] MwaitSupport TRUE indicates MONITOR is supported.\r
+ FALSE indicates MONITOR is not supported.\r
+ @param[in] ApTargetCState Target C-State value.\r
+ @param[in] PmCodeSegment Protected mode code segment value.\r
+ @param[in] TopOfApStack Top of the stack the AP switches to for the loop.\r
+ @param[in] NumberToFinish Address of the counter the AP decrements on entry.\r
+ @param[in] Pm16CodeSegment 16-bit protected mode code segment value.\r
+ @param[in] SevEsAPJumpTable SEV-ES AP jump table buffer address; 0 when\r
+ SEV-ES is not active.\r
+ @param[in] WakeupBuffer Wakeup buffer address.\r
+**/\r
+typedef\r
+ VOID\r
+(EFIAPI *ASM_RELOCATE_AP_LOOP_AMD)(\r
+ IN BOOLEAN MwaitSupport,\r
+ IN UINTN ApTargetCState,\r
+ IN UINTN PmCodeSegment,\r
+ IN UINTN TopOfApStack,\r
+ IN UINTN NumberToFinish,\r
+ IN UINTN Pm16CodeSegment,\r
+ IN UINTN SevEsAPJumpTable,\r
+ IN UINTN WakeupBuffer\r
+ );\r
+\r
/**\r
Assembly code to place AP into safe loop mode.\r
\r
iret\r
\r
SwitchToRealProcEnd:\r
+;-------------------------------------------------------------------------------------\r
+; AsmRelocateApLoopAmd (MwaitSupport, ApTargetCState, PmCodeSegment, TopOfApStack, CountTofinish, Pm16CodeSegment, SevEsAPJumpTable, WakeupBuffer);\r
+;-------------------------------------------------------------------------------------\r
+\r
+AsmRelocateApLoopStartAmd:\r
+BITS 64\r
+ cmp qword [rsp + 56], 0 ; SevEsAPJumpTable\r
+ je NoSevEsAmd\r
+\r
+ ;\r
+ ; Perform some SEV-ES related setup before leaving 64-bit mode\r
+ ;\r
+ push rcx\r
+ push rdx\r
+\r
+ ;\r
+ ; Get the RDX reset value using CPUID\r
+ ;\r
+ mov rax, 1\r
+ cpuid\r
+ mov rsi, rax ; Save off the reset value for RDX\r
+\r
+ ;\r
+ ; Prepare the GHCB for the AP_HLT_LOOP VMGEXIT call\r
+ ; - Must be done while in 64-bit long mode so that writes to\r
+ ; the GHCB memory will be unencrypted.\r
+ ; - No NAE events can be generated once this is set otherwise\r
+ ; the AP_RESET_HOLD SW_EXITCODE will be overwritten.\r
+ ;\r
+ mov rcx, 0xc0010130 ; GHCB MSR (MSR_SEV_ES_GHCB)\r
+ rdmsr ; Retrieve current GHCB address\r
+ shl rdx, 32\r
+ or rdx, rax\r
+\r
+ mov rdi, rdx\r
+ xor rax, rax\r
+ mov rcx, 0x800 ; GHCB size in bytes\r
+ shr rcx, 3 ; Convert byte count to qword count\r
+ rep stosq ; Clear the GHCB\r
+\r
+ mov rax, 0x80000004 ; VMGEXIT AP_RESET_HOLD\r
+ mov [rdx + 0x390], rax\r
+ mov rax, 114 ; Set SwExitCode valid bit\r
+ bts [rdx + 0x3f0], rax\r
+ inc rax ; Set SwExitInfo1 valid bit\r
+ bts [rdx + 0x3f0], rax\r
+ inc rax ; Set SwExitInfo2 valid bit\r
+ bts [rdx + 0x3f0], rax\r
+\r
+ pop rdx\r
+ pop rcx\r
+\r
+NoSevEsAmd:\r
+ cli ; Disable interrupt before switching to 32-bit mode\r
+ mov rax, [rsp + 40] ; CountTofinish\r
+ lock dec dword [rax] ; (*CountTofinish)--\r
+\r
+ mov r10, [rsp + 48] ; Pm16CodeSegment\r
+ mov rax, [rsp + 56] ; SevEsAPJumpTable\r
+ mov rbx, [rsp + 64] ; WakeupBuffer\r
+ mov rsp, r9 ; TopOfApStack\r
+\r
+ push rax ; Save SevEsAPJumpTable\r
+ push rbx ; Save WakeupBuffer\r
+ push r10 ; Save Pm16CodeSegment\r
+ push rcx ; Save MwaitSupport\r
+ push rdx ; Save ApTargetCState\r
+\r
+ lea rax, [PmEntryAmd] ; rax <- The start address of transition code\r
+\r
+ push r8\r
+ push rax\r
+\r
+ ;\r
+ ; Clear R8 - R15, for reset, before going into 32-bit mode\r
+ ;\r
+ xor r8, r8\r
+ xor r9, r9\r
+ xor r10, r10\r
+ xor r11, r11\r
+ xor r12, r12\r
+ xor r13, r13\r
+ xor r14, r14\r
+ xor r15, r15\r
+\r
+ ;\r
+ ; Far return into 32-bit mode\r
+ ;\r
+o64 retf\r
+\r
+BITS 32\r
+PmEntryAmd:\r
+ mov eax, cr0\r
+ btr eax, 31 ; Clear CR0.PG\r
+ mov cr0, eax ; Disable paging and caches\r
+\r
+ mov ecx, 0xc0000080 ; EFER MSR\r
+ rdmsr\r
+ and ah, ~ 1 ; Clear LME\r
+ wrmsr\r
+ mov eax, cr4\r
+ and al, ~ (1 << 5) ; Clear PAE\r
+ mov cr4, eax\r
+\r
+ pop edx ; ApTargetCState (low dword of 64-bit push)\r
+ add esp, 4 ; Discard high dword\r
+ pop ecx ; MwaitSupport (low dword of 64-bit push)\r
+ add esp, 4 ; Discard high dword\r
+\r
+MwaitCheckAmd:\r
+ cmp cl, 1 ; Check mwait-monitor support\r
+ jnz HltLoopAmd\r
+ mov ebx, edx ; Save C-State to ebx\r
+MwaitLoopAmd:\r
+ cli\r
+ mov eax, esp ; Set Monitor Address\r
+ xor ecx, ecx ; ecx = 0\r
+ xor edx, edx ; edx = 0\r
+ monitor\r
+ mov eax, ebx ; Mwait Cx, Target C-State per eax[7:4]\r
+ shl eax, 4\r
+ mwait\r
+ jmp MwaitLoopAmd\r
+\r
+HltLoopAmd:\r
+ pop edx ; PM16CodeSegment\r
+ add esp, 4\r
+ pop ebx ; WakeupBuffer\r
+ add esp, 4\r
+ pop eax ; SevEsAPJumpTable\r
+ add esp, 4\r
+ cmp eax, 0 ; Check for SEV-ES\r
+ je DoHltAmd\r
+\r
+ cli\r
+ ;\r
+ ; SEV-ES is enabled, use VMGEXIT (GHCB information already\r
+ ; set by caller)\r
+ ;\r
+BITS 64\r
+ rep vmmcall\r
+BITS 32\r
+\r
+ ;\r
+ ; Back from VMGEXIT AP_HLT_LOOP\r
+ ; Push the FLAGS/CS/IP values to use\r
+ ;\r
+ push word 0x0002 ; EFLAGS\r
+ xor ecx, ecx\r
+ mov cx, [eax + 2] ; CS\r
+ push cx\r
+ mov cx, [eax] ; IP\r
+ push cx\r
+ push word 0x0000 ; For alignment, will be discarded\r
+\r
+ push edx\r
+ push ebx\r
+\r
+ mov edx, esi ; Restore RDX reset value\r
+\r
+ retf\r
+\r
+DoHltAmd:\r
+ cli\r
+ hlt\r
+ jmp DoHltAmd\r
+\r
+BITS 64\r
+AsmRelocateApLoopEndAmd:\r
mov qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.RelocateApLoopFuncAddress], rax\r
mov qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.RelocateApLoopFuncSize], AsmRelocateApLoopEnd - AsmRelocateApLoopStart\r
mov qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.ModeTransitionOffset], Flat32Start - RendezvousFunnelProcStart\r
+\r
+ lea rax, [AsmRelocateApLoopStartAmd]\r
+ mov qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.RelocateApLoopFuncAddressAmd], rax\r
+ mov qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.RelocateApLoopFuncSizeAmd], AsmRelocateApLoopEndAmd - AsmRelocateApLoopStartAmd\r
+\r
mov qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.SwitchToRealNoNxOffset], SwitchToRealProcStart - Flat32Start\r
mov qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.SwitchToRealPM16ModeOffset], PM16Mode - RendezvousFunnelProcStart\r
mov qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.SwitchToRealPM16ModeSize], SwitchToRealProcEnd - PM16Mode\r