/** @file
  Ia32-specific functionality for DxeLoad.

Copyright (c) 2006 - 2018, Intel Corporation. All rights reserved.<BR>
Copyright (c) 2017, AMD Incorporated. All rights reserved.<BR>

SPDX-License-Identifier: BSD-2-Clause-Patent

**/

#include "DxeIpl.h"
#include "VirtualMemory.h"

#define IDT_ENTRY_COUNT  32
16 typedef struct _X64_IDT_TABLE
{
18 // Reserved 4 bytes preceding PeiService and IdtTable,
19 // since IDT base address should be 8-byte alignment.
22 CONST EFI_PEI_SERVICES
**PeiService
;
23 X64_IDT_GATE_DESCRIPTOR IdtTable
[IDT_ENTRY_COUNT
];
27 // Global Descriptor Table (GDT)
29 GLOBAL_REMOVE_IF_UNREFERENCED IA32_GDT gGdtEntries
[] = {
30 /* selector { Global Segment Descriptor } */
31 /* 0x00 */ {{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}}, //null descriptor
32 /* 0x08 */ {{0xffff, 0, 0, 0x2, 1, 0, 1, 0xf, 0, 0, 1, 1, 0}}, //linear data segment descriptor
33 /* 0x10 */ {{0xffff, 0, 0, 0xf, 1, 0, 1, 0xf, 0, 0, 1, 1, 0}}, //linear code segment descriptor
34 /* 0x18 */ {{0xffff, 0, 0, 0x3, 1, 0, 1, 0xf, 0, 0, 1, 1, 0}}, //system data segment descriptor
35 /* 0x20 */ {{0xffff, 0, 0, 0xa, 1, 0, 1, 0xf, 0, 0, 1, 1, 0}}, //system code segment descriptor
36 /* 0x28 */ {{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}}, //spare segment descriptor
37 /* 0x30 */ {{0xffff, 0, 0, 0x2, 1, 0, 1, 0xf, 0, 0, 1, 1, 0}}, //system data segment descriptor
38 /* 0x38 */ {{0xffff, 0, 0, 0xa, 1, 0, 1, 0xf, 0, 1, 0, 1, 0}}, //system code segment descriptor
39 /* 0x40 */ {{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}}, //spare segment descriptor
45 GLOBAL_REMOVE_IF_UNREFERENCED CONST IA32_DESCRIPTOR gGdt
= {
46 sizeof (gGdtEntries
) - 1,
50 GLOBAL_REMOVE_IF_UNREFERENCED IA32_DESCRIPTOR gLidtDescriptor
= {
51 sizeof (X64_IDT_GATE_DESCRIPTOR
) * IDT_ENTRY_COUNT
- 1,
56 Allocates and fills in the Page Directory and Page Table Entries to
57 establish a 4G page table.
59 @param[in] StackBase Stack base address.
60 @param[in] StackSize Stack size.
62 @return The address of page table.
66 Create4GPageTablesIa32Pae (
67 IN EFI_PHYSICAL_ADDRESS StackBase
,
71 UINT8 PhysicalAddressBits
;
72 EFI_PHYSICAL_ADDRESS PhysicalAddress
;
73 UINTN IndexOfPdpEntries
;
74 UINTN IndexOfPageDirectoryEntries
;
75 UINT32 NumberOfPdpEntriesNeeded
;
76 PAGE_MAP_AND_DIRECTORY_POINTER
*PageMap
;
77 PAGE_MAP_AND_DIRECTORY_POINTER
*PageDirectoryPointerEntry
;
78 PAGE_TABLE_ENTRY
*PageDirectoryEntry
;
81 UINT64 AddressEncMask
;
84 // Make sure AddressEncMask is contained to smallest supported address field
86 AddressEncMask
= PcdGet64 (PcdPteMemoryEncryptionAddressOrMask
) & PAGING_1G_ADDRESS_MASK_64
;
88 PhysicalAddressBits
= 32;
91 // Calculate the table entries needed.
93 NumberOfPdpEntriesNeeded
= (UINT32
) LShiftU64 (1, (PhysicalAddressBits
- 30));
95 TotalPagesNum
= NumberOfPdpEntriesNeeded
+ 1;
96 PageAddress
= (UINTN
) AllocatePageTableMemory (TotalPagesNum
);
97 ASSERT (PageAddress
!= 0);
99 PageMap
= (VOID
*) PageAddress
;
100 PageAddress
+= SIZE_4KB
;
102 PageDirectoryPointerEntry
= PageMap
;
105 for (IndexOfPdpEntries
= 0; IndexOfPdpEntries
< NumberOfPdpEntriesNeeded
; IndexOfPdpEntries
++, PageDirectoryPointerEntry
++) {
107 // Each Directory Pointer entries points to a page of Page Directory entires.
108 // So allocate space for them and fill them in in the IndexOfPageDirectoryEntries loop.
110 PageDirectoryEntry
= (VOID
*) PageAddress
;
111 PageAddress
+= SIZE_4KB
;
114 // Fill in a Page Directory Pointer Entries
116 PageDirectoryPointerEntry
->Uint64
= (UINT64
) (UINTN
) PageDirectoryEntry
| AddressEncMask
;
117 PageDirectoryPointerEntry
->Bits
.Present
= 1;
119 for (IndexOfPageDirectoryEntries
= 0; IndexOfPageDirectoryEntries
< 512; IndexOfPageDirectoryEntries
++, PageDirectoryEntry
++, PhysicalAddress
+= SIZE_2MB
) {
120 if ((IsNullDetectionEnabled () && PhysicalAddress
== 0)
121 || ((PhysicalAddress
< StackBase
+ StackSize
)
122 && ((PhysicalAddress
+ SIZE_2MB
) > StackBase
))) {
124 // Need to split this 2M page that covers stack range.
126 Split2MPageTo4K (PhysicalAddress
, (UINT64
*) PageDirectoryEntry
, StackBase
, StackSize
);
129 // Fill in the Page Directory entries
131 PageDirectoryEntry
->Uint64
= (UINT64
) PhysicalAddress
| AddressEncMask
;
132 PageDirectoryEntry
->Bits
.ReadWrite
= 1;
133 PageDirectoryEntry
->Bits
.Present
= 1;
134 PageDirectoryEntry
->Bits
.MustBe1
= 1;
139 for (; IndexOfPdpEntries
< 512; IndexOfPdpEntries
++, PageDirectoryPointerEntry
++) {
141 PageDirectoryPointerEntry
,
142 sizeof (PAGE_MAP_AND_DIRECTORY_POINTER
)
147 // Protect the page table by marking the memory used for page table to be
150 EnablePageTableProtection ((UINTN
)PageMap
, FALSE
);
152 return (UINTN
) PageMap
;
156 The function will check if IA32 PAE is supported.
158 @retval TRUE IA32 PAE is supported.
159 @retval FALSE IA32 PAE is not supported.
169 BOOLEAN Ia32PaeSupport
;
171 Ia32PaeSupport
= FALSE
;
172 AsmCpuid (0x0, &RegEax
, NULL
, NULL
, NULL
);
174 AsmCpuid (0x1, NULL
, NULL
, NULL
, &RegEdx
);
175 if ((RegEdx
& BIT6
) != 0) {
176 Ia32PaeSupport
= TRUE
;
180 return Ia32PaeSupport
;
184 The function will check if page table should be setup or not.
186 @retval TRUE Page table should be created.
187 @retval FALSE Page table should not be created.
195 if (!IsIa32PaeSupport ()) {
199 if (IsNullDetectionEnabled ()) {
203 if (PcdGet8 (PcdHeapGuardPropertyMask
) != 0) {
207 if (PcdGetBool (PcdCpuStackGuard
)) {
211 if (IsEnableNonExecNeeded ()) {
/**
   Transfers control to DxeCore.

   This function performs a CPU architecture specific operations to execute
   the entry point of DxeCore with the parameters of HobList.
   It also installs EFI_END_OF_PEI_PPI to signal the end of PEI phase.

   @param DxeCoreEntryPoint         The entry point of DxeCore.
   @param HobList                   The start of HobList passed to DxeCore.

**/
231 IN EFI_PHYSICAL_ADDRESS DxeCoreEntryPoint
,
232 IN EFI_PEI_HOB_POINTERS HobList
236 EFI_PHYSICAL_ADDRESS BaseOfStack
;
237 EFI_PHYSICAL_ADDRESS TopOfStack
;
239 X64_IDT_GATE_DESCRIPTOR
*IdtTable
;
240 UINTN SizeOfTemplate
;
242 EFI_PHYSICAL_ADDRESS VectorAddress
;
244 X64_IDT_TABLE
*IdtTableForX64
;
245 EFI_VECTOR_HANDOFF_INFO
*VectorInfo
;
246 EFI_PEI_VECTOR_HANDOFF_INFO_PPI
*VectorHandoffInfoPpi
;
247 BOOLEAN BuildPageTablesIa32Pae
;
249 if (IsNullDetectionEnabled ()) {
250 ClearFirst4KPage (HobList
.Raw
);
253 Status
= PeiServicesAllocatePages (EfiBootServicesData
, EFI_SIZE_TO_PAGES (STACK_SIZE
), &BaseOfStack
);
254 ASSERT_EFI_ERROR (Status
);
256 if (FeaturePcdGet(PcdDxeIplSwitchToLongMode
)) {
258 // Compute the top of the stack we were allocated, which is used to load X64 dxe core.
259 // Pre-allocate a 32 bytes which confroms to x64 calling convention.
261 // The first four parameters to a function are passed in rcx, rdx, r8 and r9.
262 // Any further parameters are pushed on the stack. Furthermore, space (4 * 8bytes) for the
263 // register parameters is reserved on the stack, in case the called function
264 // wants to spill them; this is important if the function is variadic.
266 TopOfStack
= BaseOfStack
+ EFI_SIZE_TO_PAGES (STACK_SIZE
) * EFI_PAGE_SIZE
- 32;
269 // x64 Calling Conventions requires that the stack must be aligned to 16 bytes
271 TopOfStack
= (EFI_PHYSICAL_ADDRESS
) (UINTN
) ALIGN_POINTER (TopOfStack
, 16);
274 // Load the GDT of Go64. Since the GDT of 32-bit Tiano locates in the BS_DATA
275 // memory, it may be corrupted when copying FV to high-end memory
277 AsmWriteGdtr (&gGdt
);
279 // Create page table and save PageMapLevel4 to CR3
281 PageTables
= CreateIdentityMappingPageTables (BaseOfStack
, STACK_SIZE
);
284 // End of PEI phase signal
286 PERF_EVENT_SIGNAL_BEGIN (gEndOfPeiSignalPpi
.Guid
);
287 Status
= PeiServicesInstallPpi (&gEndOfPeiSignalPpi
);
288 PERF_EVENT_SIGNAL_END (gEndOfPeiSignalPpi
.Guid
);
289 ASSERT_EFI_ERROR (Status
);
292 // Paging might be already enabled. To avoid conflict configuration,
293 // disable paging first anyway.
295 AsmWriteCr0 (AsmReadCr0 () & (~BIT31
));
296 AsmWriteCr3 (PageTables
);
299 // Update the contents of BSP stack HOB to reflect the real stack info passed to DxeCore.
301 UpdateStackHob (BaseOfStack
, STACK_SIZE
);
303 SizeOfTemplate
= AsmGetVectorTemplatInfo (&TemplateBase
);
305 Status
= PeiServicesAllocatePages (
307 EFI_SIZE_TO_PAGES(sizeof (X64_IDT_TABLE
) + SizeOfTemplate
* IDT_ENTRY_COUNT
),
310 ASSERT_EFI_ERROR (Status
);
313 // Store EFI_PEI_SERVICES** in the 4 bytes immediately preceding IDT to avoid that
314 // it may not be gotten correctly after IDT register is re-written.
316 IdtTableForX64
= (X64_IDT_TABLE
*) (UINTN
) VectorAddress
;
317 IdtTableForX64
->PeiService
= GetPeiServicesTablePointer ();
319 VectorAddress
= (EFI_PHYSICAL_ADDRESS
) (UINTN
) (IdtTableForX64
+ 1);
320 IdtTable
= IdtTableForX64
->IdtTable
;
321 for (Index
= 0; Index
< IDT_ENTRY_COUNT
; Index
++) {
322 IdtTable
[Index
].Ia32IdtEntry
.Bits
.GateType
= 0x8e;
323 IdtTable
[Index
].Ia32IdtEntry
.Bits
.Reserved_0
= 0;
324 IdtTable
[Index
].Ia32IdtEntry
.Bits
.Selector
= SYS_CODE64_SEL
;
326 IdtTable
[Index
].Ia32IdtEntry
.Bits
.OffsetLow
= (UINT16
) VectorAddress
;
327 IdtTable
[Index
].Ia32IdtEntry
.Bits
.OffsetHigh
= (UINT16
) (RShiftU64 (VectorAddress
, 16));
328 IdtTable
[Index
].Offset32To63
= (UINT32
) (RShiftU64 (VectorAddress
, 32));
329 IdtTable
[Index
].Reserved
= 0;
331 CopyMem ((VOID
*) (UINTN
) VectorAddress
, TemplateBase
, SizeOfTemplate
);
332 AsmVectorFixup ((VOID
*) (UINTN
) VectorAddress
, (UINT8
) Index
);
334 VectorAddress
+= SizeOfTemplate
;
337 gLidtDescriptor
.Base
= (UINTN
) IdtTable
;
340 // Disable interrupt of Debug timer, since new IDT table cannot handle it.
342 SaveAndSetDebugTimerInterrupt (FALSE
);
344 AsmWriteIdtr (&gLidtDescriptor
);
348 "%a() Stack Base: 0x%lx, Stack Size: 0x%x\n",
355 // Go to Long Mode and transfer control to DxeCore.
356 // Interrupts will not get turned on until the CPU AP is loaded.
357 // Call x64 drivers passing in single argument, a pointer to the HOBs.
362 (EFI_PHYSICAL_ADDRESS
)(UINTN
)(HobList
.Raw
),
368 // Get Vector Hand-off Info PPI and build Guided HOB
370 Status
= PeiServicesLocatePpi (
371 &gEfiVectorHandoffInfoPpiGuid
,
374 (VOID
**)&VectorHandoffInfoPpi
376 if (Status
== EFI_SUCCESS
) {
377 DEBUG ((EFI_D_INFO
, "Vector Hand-off Info PPI is gotten, GUIDed HOB is created!\n"));
378 VectorInfo
= VectorHandoffInfoPpi
->Info
;
380 while (VectorInfo
->Attribute
!= EFI_VECTOR_HANDOFF_LAST_ENTRY
) {
385 &gEfiVectorHandoffInfoPpiGuid
,
386 VectorHandoffInfoPpi
->Info
,
387 sizeof (EFI_VECTOR_HANDOFF_INFO
) * Index
392 // Compute the top of the stack we were allocated. Pre-allocate a UINTN
395 TopOfStack
= BaseOfStack
+ EFI_SIZE_TO_PAGES (STACK_SIZE
) * EFI_PAGE_SIZE
- CPU_STACK_ALIGNMENT
;
396 TopOfStack
= (EFI_PHYSICAL_ADDRESS
) (UINTN
) ALIGN_POINTER (TopOfStack
, CPU_STACK_ALIGNMENT
);
399 BuildPageTablesIa32Pae
= ToBuildPageTable ();
400 if (BuildPageTablesIa32Pae
) {
401 PageTables
= Create4GPageTablesIa32Pae (BaseOfStack
, STACK_SIZE
);
402 if (IsEnableNonExecNeeded ()) {
403 EnableExecuteDisableBit();
408 // End of PEI phase signal
410 PERF_EVENT_SIGNAL_BEGIN (gEndOfPeiSignalPpi
.Guid
);
411 Status
= PeiServicesInstallPpi (&gEndOfPeiSignalPpi
);
412 PERF_EVENT_SIGNAL_END (gEndOfPeiSignalPpi
.Guid
);
413 ASSERT_EFI_ERROR (Status
);
415 if (BuildPageTablesIa32Pae
) {
417 // Paging might be already enabled. To avoid conflict configuration,
418 // disable paging first anyway.
420 AsmWriteCr0 (AsmReadCr0 () & (~BIT31
));
421 AsmWriteCr3 (PageTables
);
423 // Set Physical Address Extension (bit 5 of CR4).
425 AsmWriteCr4 (AsmReadCr4 () | BIT5
);
429 // Update the contents of BSP stack HOB to reflect the real stack info passed to DxeCore.
431 UpdateStackHob (BaseOfStack
, STACK_SIZE
);
435 "%a() Stack Base: 0x%lx, Stack Size: 0x%x\n",
442 // Transfer the control to the entry point of DxeCore.
444 if (BuildPageTablesIa32Pae
) {
446 (SWITCH_STACK_ENTRY_POINT
)(UINTN
)DxeCoreEntryPoint
,
449 (VOID
*) (UINTN
) TopOfStack
453 (SWITCH_STACK_ENTRY_POINT
)(UINTN
)DxeCoreEntryPoint
,
456 (VOID
*) (UINTN
) TopOfStack