;------------------------------------------------------------------------------ ;\r
-; Copyright (c) 2015 - 2016, Intel Corporation. All rights reserved.<BR>\r
-; This program and the accompanying materials\r
-; are licensed and made available under the terms and conditions of the BSD License\r
-; which accompanies this distribution. The full text of the license may be found at\r
-; http://opensource.org/licenses/bsd-license.php.\r
-;\r
-; THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
-; WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
+; Copyright (c) 2015 - 2022, Intel Corporation. All rights reserved.<BR>\r
+; SPDX-License-Identifier: BSD-2-Clause-Patent\r
;\r
; Module Name:\r
;\r
%include "MpEqu.inc"\r
extern ASM_PFX(InitializeFloatingPointUnits)\r
\r
+%macro OneTimeCall 1\r
+ jmp %1\r
+%1 %+ OneTimeCallReturn:\r
+%endmacro\r
+\r
+%macro OneTimeCallRet 1\r
+ jmp %1 %+ OneTimeCallReturn\r
+%endmacro\r
+\r
DEFAULT REL\r
\r
SECTION .text\r
;ALSO THIS PROCEDURE IS EXECUTED BY APs ONLY ON 16 BIT MODE. HENCE THIS PROC\r
;IS IN MACHINE CODE.\r
;-------------------------------------------------------------------------------------\r
-global ASM_PFX(RendezvousFunnelProc)\r
-ASM_PFX(RendezvousFunnelProc):\r
RendezvousFunnelProcStart:\r
; At this point CS = 0x(vv00) and ip= 0x0.\r
; Save BIST information to ebp firstly\r
mov fs, ax\r
mov gs, ax\r
\r
- mov si, BufferStartLocation\r
+ mov si, MP_CPU_EXCHANGE_INFO_FIELD (BufferStart)\r
mov ebx, [si]\r
\r
- mov di, ModeOffsetLocation\r
- mov eax, [di]\r
- mov di, CodeSegmentLocation\r
- mov edx, [di]\r
- mov di, ax\r
- sub di, 02h\r
- mov [di],dx ; Patch long mode CS\r
- sub di, 04h\r
- add eax, ebx\r
- mov [di],eax ; Patch address\r
-\r
- mov si, GdtrLocation\r
+ mov si, MP_CPU_EXCHANGE_INFO_FIELD (DataSegment)\r
+ mov edx, [si]\r
+\r
+ ;\r
+ ; Get start address of 32-bit code in low memory (<1MB)\r
+ ;\r
+ mov edi, MP_CPU_EXCHANGE_INFO_FIELD (ModeTransitionMemory)\r
+\r
+ mov si, MP_CPU_EXCHANGE_INFO_FIELD (GdtrProfile)\r
o32 lgdt [cs:si]\r
\r
- mov si, IdtrLocation\r
+ mov si, MP_CPU_EXCHANGE_INFO_FIELD (IdtrProfile)\r
o32 lidt [cs:si]\r
\r
- mov si, EnableExecuteDisableLocation\r
- cmp byte [si], 0\r
- jz SkipEnableExecuteDisableBit\r
+ ;\r
+ ; Switch to protected mode\r
+ ;\r
+ mov eax, cr0 ; Get control register 0\r
+ or eax, 000000003h ; Set PE bit (bit #0) & MP\r
+ mov cr0, eax\r
+\r
+ ; Switch to 32-bit code (>1MB)\r
+o32 jmp far [cs:di]\r
+\r
+;\r
+; Following code must be copied to memory with type of EfiBootServicesCode.\r
+; This is required if NX is enabled for EfiBootServicesCode of memory.\r
+;\r
+BITS 32\r
+Flat32Start: ; protected mode entry point\r
+ mov ds, dx\r
+ mov es, dx\r
+ mov fs, dx\r
+ mov gs, dx\r
+ mov ss, dx\r
\r
;\r
; Enable execute disable bit\r
;\r
+ mov esi, MP_CPU_EXCHANGE_INFO_FIELD (EnableExecuteDisable)\r
+ cmp byte [ebx + esi], 0\r
+ jz SkipEnableExecuteDisableBit\r
+\r
mov ecx, 0c0000080h ; EFER MSR number\r
rdmsr ; Read EFER\r
bts eax, 11 ; Enable Execute Disable Bit\r
wrmsr ; Write EFER\r
\r
SkipEnableExecuteDisableBit:\r
+ ;\r
+ ; Enable PAE\r
+ ;\r
+ mov eax, cr4\r
+ bts eax, 5\r
\r
- mov di, DataSegmentLocation\r
- mov edi, [di] ; Save long mode DS in edi\r
-\r
- mov si, Cr3Location ; Save CR3 in ecx\r
- mov ecx, [si]\r
+ mov esi, MP_CPU_EXCHANGE_INFO_FIELD (Enable5LevelPaging)\r
+ cmp byte [ebx + esi], 0\r
+ jz SkipEnable5LevelPaging\r
\r
- xor ax, ax\r
- mov ds, ax ; Clear data segment\r
+ ;\r
+ ; Enable 5 Level Paging\r
+ ;\r
+ bts eax, 12 ; Set LA57=1.\r
\r
- mov eax, cr0 ; Get control register 0\r
- or eax, 000000003h ; Set PE bit (bit #0) & MP\r
- mov cr0, eax\r
+SkipEnable5LevelPaging:\r
\r
- mov eax, cr4\r
- bts eax, 5\r
mov cr4, eax\r
\r
+ ;\r
+ ; Load page table\r
+ ;\r
+ mov esi, MP_CPU_EXCHANGE_INFO_FIELD (Cr3) ; Save CR3 in ecx\r
+ mov ecx, [ebx + esi]\r
mov cr3, ecx ; Load CR3\r
\r
+ ;\r
+ ; Enable long mode\r
+ ;\r
mov ecx, 0c0000080h ; EFER MSR number\r
rdmsr ; Read EFER\r
bts eax, 8 ; Set LME=1\r
wrmsr ; Write EFER\r
\r
+ ;\r
+ ; Enable paging\r
+ ;\r
mov eax, cr0 ; Read CR0\r
bts eax, 31 ; Set PG=1\r
mov cr0, eax ; Write CR0\r
\r
- jmp 0:strict dword 0 ; far jump to long mode\r
+ ;\r
+ ; Far jump to 64-bit code\r
+ ;\r
+ mov edi, MP_CPU_EXCHANGE_INFO_FIELD (ModeHighMemory)\r
+ add edi, ebx\r
+ jmp far [edi]\r
+\r
BITS 64\r
-LongModeStart:\r
- mov eax, edi\r
- mov ds, ax\r
- mov es, ax\r
- mov ss, ax\r
\r
+;\r
+; Required for the AMD SEV helper functions\r
+;\r
+%include "AmdSev.nasm"\r
+\r
+LongModeStart:\r
mov esi, ebx\r
- lea edi, [esi + InitFlagLocation]\r
+ lea edi, [esi + MP_CPU_EXCHANGE_INFO_FIELD (InitFlag)]\r
cmp qword [edi], 1 ; ApInitConfig\r
jnz GetApicId\r
\r
- ; AP init\r
- mov esi, ebx\r
+ ; Increment the number of APs executing here as early as possible\r
+ ; This is decremented in C code when AP is finished executing\r
mov edi, esi\r
- add edi, LockLocation\r
- mov rax, NotVacantFlag\r
-\r
-TestLock:\r
- xchg qword [edi], rax\r
- cmp rax, NotVacantFlag\r
- jz TestLock\r
+ add edi, MP_CPU_EXCHANGE_INFO_FIELD (NumApsExecuting)\r
+ lock inc dword [edi]\r
\r
- lea ecx, [esi + InitFlagLocation]\r
- inc dword [ecx]\r
- mov ebx, [ecx]\r
+ ; AP init\r
+ mov edi, esi\r
+ add edi, MP_CPU_EXCHANGE_INFO_FIELD (ApIndex)\r
+ mov ebx, 1\r
+ lock xadd dword [edi], ebx ; EBX = ApIndex++\r
+ inc ebx ; EBX is CpuNumber\r
\r
-Releaselock:\r
- mov rax, VacantFlag\r
- xchg qword [edi], rax\r
; program stack\r
mov edi, esi\r
- add edi, StackSizeLocation\r
+ add edi, MP_CPU_EXCHANGE_INFO_FIELD (StackSize)\r
mov eax, dword [edi]\r
mov ecx, ebx\r
inc ecx\r
mul ecx ; EAX = StackSize * (CpuNumber + 1)\r
mov edi, esi\r
- add edi, StackStartAddressLocation\r
+ add edi, MP_CPU_EXCHANGE_INFO_FIELD (StackStart)\r
add rax, qword [edi]\r
mov rsp, rax\r
+\r
+ ;\r
+ ; Setup the GHCB when AMD SEV-ES active.\r
+ ;\r
+ OneTimeCall SevEsSetupGhcb\r
jmp CProcedureInvoke\r
\r
GetApicId:\r
+ ;\r
+ ; Use the GHCB protocol to get the ApicId when SEV-ES is active.\r
+ ;\r
+ OneTimeCall SevEsGetApicId\r
+\r
+DoCpuid:\r
mov eax, 0\r
cpuid\r
cmp eax, 0bh\r
- jnb X2Apic\r
+ jb NoX2Apic ; CPUID level below CPUID_EXTENDED_TOPOLOGY\r
+\r
+ mov eax, 0bh\r
+ xor ecx, ecx\r
+ cpuid\r
+ test ebx, 0ffffh\r
+ jz NoX2Apic ; CPUID.0BH:EBX[15:0] is zero\r
+\r
+ ; Processor is x2APIC capable; 32-bit x2APIC ID is already in EDX\r
+ jmp GetProcessorNumber\r
+\r
+NoX2Apic:\r
; Processor is not x2APIC capable, so get 8-bit APIC ID\r
mov eax, 1\r
cpuid\r
shr ebx, 24\r
mov edx, ebx\r
- jmp GetProcessorNumber\r
\r
-X2Apic:\r
- ; Processor is x2APIC capable, so get 32-bit x2APIC ID\r
- mov eax, 0bh\r
- xor ecx, ecx\r
- cpuid \r
- ; edx save x2APIC ID\r
- \r
GetProcessorNumber:\r
;\r
; Get processor number for this AP\r
; Note that BSP may become an AP due to SwitchBsp()\r
;\r
xor ebx, ebx\r
- lea eax, [esi + CpuInfoLocation]\r
- mov edi, [eax]\r
+ lea eax, [esi + MP_CPU_EXCHANGE_INFO_FIELD (CpuInfo)]\r
+ mov rdi, [eax]\r
\r
GetNextProcNumber:\r
- cmp dword [edi], edx ; APIC ID match?\r
+ cmp dword [rdi + CPU_INFO_IN_HOB.InitialApicId], edx ; APIC ID match?\r
jz ProgramStack\r
- add edi, 16\r
+ add rdi, CPU_INFO_IN_HOB_size\r
inc ebx\r
- jmp GetNextProcNumber \r
+ jmp GetNextProcNumber\r
\r
ProgramStack:\r
- xor rsp, rsp\r
- mov esp, dword [edi + 12]\r
+ mov rsp, qword [rdi + CPU_INFO_IN_HOB.ApTopOfStack]\r
\r
CProcedureInvoke:\r
push rbp ; Push BIST data at top of AP stack\r
push rbp\r
mov rbp, rsp\r
\r
- mov rax, ASM_PFX(InitializeFloatingPointUnits)\r
+ mov rax, qword [esi + MP_CPU_EXCHANGE_INFO_FIELD (InitializeFloatingPointUnits)]\r
sub rsp, 20h\r
call rax ; Call assembly function to initialize FPU per UEFI spec\r
add rsp, 20h\r
\r
- mov edx, ebx ; edx is NumApsExecuting\r
+ mov edx, ebx ; edx is ApIndex\r
mov ecx, esi\r
- add ecx, LockLocation ; rcx is address of exchange info data buffer\r
+ add ecx, MP_CPU_EXCHANGE_INFO_OFFSET ; rcx is address of exchange info data buffer\r
\r
mov edi, esi\r
- add edi, ApProcedureLocation\r
+ add edi, MP_CPU_EXCHANGE_INFO_FIELD (CFunction)\r
mov rax, qword [edi]\r
\r
sub rsp, 20h\r
RendezvousFunnelProcEnd:\r
\r
;-------------------------------------------------------------------------------------\r
-; AsmRelocateApLoop (MwaitSupport, ApTargetCState, PmCodeSegment);\r
+;SwitchToRealProc procedure follows.\r
+;ALSO THIS PROCEDURE IS EXECUTED BY APs TRANSITIONING TO 16 BIT MODE. HENCE THIS PROC\r
+;IS IN MACHINE CODE.\r
+; SwitchToRealProc (UINTN BufferStart, UINT16 Code16, UINT16 Code32, UINTN StackStart)\r
+; rcx - Buffer Start\r
+; rdx - Code16 Selector Offset\r
+; r8 - Code32 Selector Offset\r
+; r9 - Stack Start\r
;-------------------------------------------------------------------------------------\r
-global ASM_PFX(AsmRelocateApLoop)\r
-ASM_PFX(AsmRelocateApLoop):\r
-AsmRelocateApLoopStart:\r
+SwitchToRealProcStart:\r
+BITS 64\r
+ cli\r
+\r
+ ;\r
+ ; Get RDX reset value before changing stacks since the\r
+ ; new stack won't be able to accommodate a #VC exception.\r
+ ;\r
+ push rax\r
+ push rbx\r
push rcx\r
push rdx\r
\r
- lea rsi, [PmEntry] ; rsi <- The start address of transition code\r
+ mov rax, 1\r
+ cpuid\r
+ mov rsi, rax ; Save off the reset value for RDX\r
+\r
+ pop rdx\r
+ pop rcx\r
+ pop rbx\r
+ pop rax\r
+\r
+ ;\r
+ ; Establish stack below 1MB\r
+ ;\r
+ mov rsp, r9\r
+\r
+ ;\r
+ ; Push ultimate Reset Vector onto the stack\r
+ ;\r
+ mov rax, rcx\r
+ shr rax, 4\r
+ push word 0x0002 ; RFLAGS\r
+ push ax ; CS\r
+ push word 0x0000 ; RIP\r
+ push word 0x0000 ; For alignment, will be discarded\r
+\r
+ ;\r
+ ; Get address of "16-bit operand size" label\r
+ ;\r
+ lea rbx, [PM16Mode]\r
\r
+ ;\r
+ ; Push addresses used to change to compatibility mode\r
+ ;\r
+ lea rax, [CompatMode]\r
push r8\r
- push rsi\r
- DB 0x48\r
+ push rax\r
+\r
+ ;\r
+ ; Clear R8 - R15, for reset, before going into 32-bit mode\r
+ ;\r
+ xor r8, r8\r
+ xor r9, r9\r
+ xor r10, r10\r
+ xor r11, r11\r
+ xor r12, r12\r
+ xor r13, r13\r
+ xor r14, r14\r
+ xor r15, r15\r
+\r
+ ;\r
+ ; Far return into 32-bit mode\r
+ ;\r
+ retfq\r
+\r
+BITS 32\r
+CompatMode:\r
+ ;\r
+ ; Set up stack to prepare for exiting protected mode\r
+ ;\r
+ push edx ; Code16 CS\r
+ push ebx ; PM16Mode label address\r
+\r
+ ;\r
+ ; Disable paging\r
+ ;\r
+ mov eax, cr0 ; Read CR0\r
+ btr eax, 31 ; Set PG=0\r
+ mov cr0, eax ; Write CR0\r
+\r
+ ;\r
+ ; Disable long mode\r
+ ;\r
+ mov ecx, 0c0000080h ; EFER MSR number\r
+ rdmsr ; Read EFER\r
+ btr eax, 8 ; Set LME=0\r
+ wrmsr ; Write EFER\r
+\r
+ ;\r
+ ; Disable PAE\r
+ ;\r
+ mov eax, cr4 ; Read CR4\r
+ btr eax, 5 ; Set PAE=0\r
+ mov cr4, eax ; Write CR4\r
+\r
+ mov edx, esi ; Restore RDX reset value\r
+\r
+ ;\r
+ ; Switch to 16-bit operand size\r
+ ;\r
retf\r
+\r
+BITS 16\r
+ ;\r
+ ; At entry to this label\r
+ ; - RDX will have its reset value\r
+ ; - On the top of the stack\r
+ ; - Alignment data (two bytes) to be discarded\r
+ ; - IP for Real Mode (two bytes)\r
+ ; - CS for Real Mode (two bytes)\r
+ ;\r
+ ; This label is also used with AsmRelocateApLoop. During MP finalization,\r
+ ; the code from PM16Mode to SwitchToRealProcEnd is copied to the start of\r
+ ; the WakeupBuffer, allowing a parked AP to be booted by an OS.\r
+ ;\r
+PM16Mode:\r
+ mov eax, cr0 ; Read CR0\r
+ btr eax, 0 ; Set PE=0\r
+ mov cr0, eax ; Write CR0\r
+\r
+ pop ax ; Discard alignment data\r
+\r
+ ;\r
+ ; Clear registers (except RDX and RSP) before going into 16-bit mode\r
+ ;\r
+ xor eax, eax\r
+ xor ebx, ebx\r
+ xor ecx, ecx\r
+ xor esi, esi\r
+ xor edi, edi\r
+ xor ebp, ebp\r
+\r
+ iret\r
+\r
+SwitchToRealProcEnd:\r
+\r
+;-------------------------------------------------------------------------------------\r
+; AsmRelocateApLoop (MwaitSupport, ApTargetCState, PmCodeSegment, TopOfApStack, CountTofinish, Pm16CodeSegment, SevEsAPJumpTable, WakeupBuffer);\r
+;-------------------------------------------------------------------------------------\r
+AsmRelocateApLoopStart:\r
+BITS 64\r
+ cmp qword [rsp + 56], 0 ; SevEsAPJumpTable\r
+ je NoSevEs\r
+\r
+ ;\r
+ ; Perform some SEV-ES related setup before leaving 64-bit mode\r
+ ;\r
+ push rcx\r
+ push rdx\r
+\r
+ ;\r
+ ; Get the RDX reset value using CPUID\r
+ ;\r
+ mov rax, 1\r
+ cpuid\r
+ mov rsi, rax ; Save off the reset value for RDX\r
+\r
+ ;\r
+ ; Prepare the GHCB for the AP_HLT_LOOP VMGEXIT call\r
+ ; - Must be done while in 64-bit long mode so that writes to\r
+ ; the GHCB memory will be unencrypted.\r
+ ; - No NAE events can be generated once this is set otherwise\r
+ ; the AP_RESET_HOLD SW_EXITCODE will be overwritten.\r
+ ;\r
+ mov rcx, 0xc0010130\r
+ rdmsr ; Retrieve current GHCB address\r
+ shl rdx, 32\r
+ or rdx, rax\r
+\r
+ mov rdi, rdx\r
+ xor rax, rax\r
+ mov rcx, 0x800\r
+ shr rcx, 3\r
+ rep stosq ; Clear the GHCB\r
+\r
+ mov rax, 0x80000004 ; VMGEXIT AP_RESET_HOLD\r
+ mov [rdx + 0x390], rax\r
+ mov rax, 114 ; Set SwExitCode valid bit\r
+ bts [rdx + 0x3f0], rax\r
+ inc rax ; Set SwExitInfo1 valid bit\r
+ bts [rdx + 0x3f0], rax\r
+ inc rax ; Set SwExitInfo2 valid bit\r
+ bts [rdx + 0x3f0], rax\r
+\r
+ pop rdx\r
+ pop rcx\r
+\r
+NoSevEs:\r
+ cli ; Disable interrupt before switching to 32-bit mode\r
+ mov rax, [rsp + 40] ; CountTofinish\r
+ lock dec dword [rax] ; (*CountTofinish)--\r
+\r
+ mov r10, [rsp + 48] ; Pm16CodeSegment\r
+ mov rax, [rsp + 56] ; SevEsAPJumpTable\r
+ mov rbx, [rsp + 64] ; WakeupBuffer\r
+ mov rsp, r9 ; TopOfApStack\r
+\r
+ push rax ; Save SevEsAPJumpTable\r
+ push rbx ; Save WakeupBuffer\r
+ push r10 ; Save Pm16CodeSegment\r
+ push rcx ; Save MwaitSupport\r
+ push rdx ; Save ApTargetCState\r
+\r
+ lea rax, [PmEntry] ; rax <- The start address of transition code\r
+\r
+ push r8\r
+ push rax\r
+\r
+ ;\r
+ ; Clear R8 - R15, for reset, before going into 32-bit mode\r
+ ;\r
+ xor r8, r8\r
+ xor r9, r9\r
+ xor r10, r10\r
+ xor r11, r11\r
+ xor r12, r12\r
+ xor r13, r13\r
+ xor r14, r14\r
+ xor r15, r15\r
+\r
+ ;\r
+ ; Far return into 32-bit mode\r
+ ;\r
+ retfq\r
+\r
BITS 32\r
PmEntry:\r
mov eax, cr0\r
btr eax, 31 ; Clear CR0.PG\r
mov cr0, eax ; Disable paging and caches\r
\r
- mov ebx, edx ; Save EntryPoint to rbx, for rdmsr will overwrite rdx\r
mov ecx, 0xc0000080\r
rdmsr\r
and ah, ~ 1 ; Clear LME\r
add esp, 4\r
pop ecx,\r
add esp, 4\r
+\r
+MwaitCheck:\r
cmp cl, 1 ; Check mwait-monitor support\r
jnz HltLoop\r
mov ebx, edx ; Save C-State to ebx\r
MwaitLoop:\r
+ cli\r
mov eax, esp ; Set Monitor Address\r
xor ecx, ecx ; ecx = 0\r
xor edx, edx ; edx = 0\r
monitor\r
- shl ebx, 4\r
mov eax, ebx ; Mwait Cx, Target C-State per eax[7:4]\r
+ shl eax, 4\r
mwait\r
jmp MwaitLoop\r
+\r
HltLoop:\r
+ pop edx ; PM16CodeSegment\r
+ add esp, 4\r
+ pop ebx ; WakeupBuffer\r
+ add esp, 4\r
+ pop eax ; SevEsAPJumpTable\r
+ add esp, 4\r
+ cmp eax, 0 ; Check for SEV-ES\r
+ je DoHlt\r
+\r
+ cli\r
+ ;\r
+ ; SEV-ES is enabled, use VMGEXIT (GHCB information already\r
+ ; set by caller)\r
+ ;\r
+BITS 64\r
+ rep vmmcall\r
+BITS 32\r
+\r
+ ;\r
+ ; Back from VMGEXIT AP_HLT_LOOP\r
+ ; Push the FLAGS/CS/IP values to use\r
+ ;\r
+ push word 0x0002 ; EFLAGS\r
+ xor ecx, ecx\r
+ mov cx, [eax + 2] ; CS\r
+ push cx\r
+ mov cx, [eax] ; IP\r
+ push cx\r
+ push word 0x0000 ; For alignment, will be discarded\r
+\r
+ push edx\r
+ push ebx\r
+\r
+ mov edx, esi ; Restore RDX reset value\r
+\r
+ retf\r
+\r
+DoHlt:\r
cli\r
hlt\r
- jmp HltLoop\r
- ret\r
+ jmp DoHlt\r
+\r
BITS 64\r
AsmRelocateApLoopEnd:\r
\r
;-------------------------------------------------------------------------------------\r
global ASM_PFX(AsmGetAddressMap)\r
ASM_PFX(AsmGetAddressMap):\r
- mov rax, ASM_PFX(RendezvousFunnelProc)\r
- mov qword [rcx], rax\r
- mov qword [rcx + 8h], LongModeStart - RendezvousFunnelProcStart\r
- mov qword [rcx + 10h], RendezvousFunnelProcEnd - RendezvousFunnelProcStart\r
- mov rax, ASM_PFX(AsmRelocateApLoop)\r
- mov qword [rcx + 18h], rax\r
- mov qword [rcx + 20h], AsmRelocateApLoopEnd - AsmRelocateApLoopStart\r
+ lea rax, [RendezvousFunnelProcStart]\r
+ mov qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.RendezvousFunnelAddress], rax\r
+ mov qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.ModeEntryOffset], LongModeStart - RendezvousFunnelProcStart\r
+ mov qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.RendezvousFunnelSize], RendezvousFunnelProcEnd - RendezvousFunnelProcStart\r
+ lea rax, [AsmRelocateApLoopStart]\r
+ mov qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.RelocateApLoopFuncAddress], rax\r
+ mov qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.RelocateApLoopFuncSize], AsmRelocateApLoopEnd - AsmRelocateApLoopStart\r
+ mov qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.ModeTransitionOffset], Flat32Start - RendezvousFunnelProcStart\r
+ mov qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.SwitchToRealSize], SwitchToRealProcEnd - SwitchToRealProcStart\r
+ mov qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.SwitchToRealOffset], SwitchToRealProcStart - RendezvousFunnelProcStart\r
+ mov qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.SwitchToRealNoNxOffset], SwitchToRealProcStart - Flat32Start\r
+ mov qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.SwitchToRealPM16ModeOffset], PM16Mode - RendezvousFunnelProcStart\r
+ mov qword [rcx + MP_ASSEMBLY_ADDRESS_MAP.SwitchToRealPM16ModeSize], SwitchToRealProcEnd - PM16Mode\r
ret\r
\r
;-------------------------------------------------------------------------------------\r
\r
;Store EFLAGS, GDTR and IDTR regiter to stack\r
pushfq\r
- sgdt [rsi + 16]\r
- sidt [rsi + 26]\r
+ sgdt [rsi + CPU_EXCHANGE_ROLE_INFO.Gdtr]\r
+ sidt [rsi + CPU_EXCHANGE_ROLE_INFO.Idtr]\r
\r
; Store the its StackPointer\r
- mov [rsi + 8], rsp\r
+ mov [rsi + CPU_EXCHANGE_ROLE_INFO.StackPointer], rsp\r
\r
; update its switch state to STORED\r
- mov byte [rsi], CPU_SWITCH_STATE_STORED\r
+ mov byte [rsi + CPU_EXCHANGE_ROLE_INFO.State], CPU_SWITCH_STATE_STORED\r
\r
WaitForOtherStored:\r
; wait until the other CPU finish storing its state\r
- cmp byte [rdi], CPU_SWITCH_STATE_STORED\r
+ cmp byte [rdi + CPU_EXCHANGE_ROLE_INFO.State], CPU_SWITCH_STATE_STORED\r
jz OtherStored\r
pause\r
jmp WaitForOtherStored\r
OtherStored:\r
; Since another CPU already stored its state, load them\r
; load GDTR value\r
- lgdt [rdi + 16]\r
+ lgdt [rdi + CPU_EXCHANGE_ROLE_INFO.Gdtr]\r
\r
; load IDTR value\r
- lidt [rdi + 26]\r
+ lidt [rdi + CPU_EXCHANGE_ROLE_INFO.Idtr]\r
\r
; load its future StackPointer\r
- mov rsp, [rdi + 8]\r
+ mov rsp, [rdi + CPU_EXCHANGE_ROLE_INFO.StackPointer]\r
\r
; update the other CPU's switch state to LOADED\r
- mov byte [rdi], CPU_SWITCH_STATE_LOADED\r
+ mov byte [rdi + CPU_EXCHANGE_ROLE_INFO.State], CPU_SWITCH_STATE_LOADED\r
\r
WaitForOtherLoaded:\r
; wait until the other CPU finish loading new state,\r
; otherwise the data in stack may corrupt\r
- cmp byte [rsi], CPU_SWITCH_STATE_LOADED\r
+ cmp byte [rsi + CPU_EXCHANGE_ROLE_INFO.State], CPU_SWITCH_STATE_LOADED\r
jz OtherLoaded\r
pause\r
jmp WaitForOtherLoaded\r