MdeModulePkg/Core/DxeIplPeim/Ia32/DxeLoadFunc.c
/** @file
  Ia32-specific functionality for DxeLoad.

Copyright (c) 2006 - 2018, Intel Corporation. All rights reserved.<BR>
Copyright (c) 2017, AMD Incorporated. All rights reserved.<BR>

This program and the accompanying materials
are licensed and made available under the terms and conditions of the BSD License
which accompanies this distribution.  The full text of the license may be found at
http://opensource.org/licenses/bsd-license.php

THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.

**/

#include "DxeIpl.h"
#include "VirtualMemory.h"

#define IDT_ENTRY_COUNT       32

typedef struct _X64_IDT_TABLE {
  //
  // Reserved 4 bytes preceding PeiService and IdtTable,
  // since the IDT base address should be 8-byte aligned.
  //
  UINT32                   Reserved;
  CONST EFI_PEI_SERVICES   **PeiService;
  X64_IDT_GATE_DESCRIPTOR  IdtTable[IDT_ENTRY_COUNT];
} X64_IDT_TABLE;
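//
// Note: in the 32-bit DxeIpl build, Reserved (4 bytes) and PeiService (a 4-byte
// pointer) together occupy 8 bytes, so IdtTable starts at offset 8 and keeps the
// 8-byte alignment described above.
//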

//
// Global Descriptor Table (GDT)
//
GLOBAL_REMOVE_IF_UNREFERENCED IA32_GDT gGdtEntries[] = {
/* selector { Global Segment Descriptor                              } */
/* 0x00 */  {{0,      0,  0,  0,    0,  0,  0,  0,    0,  0, 0,  0,  0}}, //null descriptor
/* 0x08 */  {{0xffff, 0,  0,  0x2,  1,  0,  1,  0xf,  0,  0, 1,  1,  0}}, //linear data segment descriptor
/* 0x10 */  {{0xffff, 0,  0,  0xf,  1,  0,  1,  0xf,  0,  0, 1,  1,  0}}, //linear code segment descriptor
/* 0x18 */  {{0xffff, 0,  0,  0x3,  1,  0,  1,  0xf,  0,  0, 1,  1,  0}}, //system data segment descriptor
/* 0x20 */  {{0xffff, 0,  0,  0xa,  1,  0,  1,  0xf,  0,  0, 1,  1,  0}}, //system code segment descriptor
/* 0x28 */  {{0,      0,  0,  0,    0,  0,  0,  0,    0,  0, 0,  0,  0}}, //spare segment descriptor
/* 0x30 */  {{0xffff, 0,  0,  0x2,  1,  0,  1,  0xf,  0,  0, 1,  1,  0}}, //system data segment descriptor
/* 0x38 */  {{0xffff, 0,  0,  0xa,  1,  0,  1,  0xf,  0,  1, 0,  1,  0}}, //system code segment descriptor
/* 0x40 */  {{0,      0,  0,  0,    0,  0,  0,  0,    0,  0, 0,  0,  0}}, //spare segment descriptor
};
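//
// The 0x38 entry is the only descriptor with the L (long mode) bit set and D/B
// clear, i.e. a 64-bit code segment; it is the segment loaded by
// AsmEnablePaging64 () below via SYS_CODE64_SEL (defined in DxeIpl.h, expected
// to be 0x38).
//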

//
// IA32 Gdt register
//
GLOBAL_REMOVE_IF_UNREFERENCED CONST IA32_DESCRIPTOR gGdt = {
  sizeof (gGdtEntries) - 1,
  (UINTN) gGdtEntries
  };

GLOBAL_REMOVE_IF_UNREFERENCED IA32_DESCRIPTOR gLidtDescriptor = {
  sizeof (X64_IDT_GATE_DESCRIPTOR) * IDT_ENTRY_COUNT - 1,
  0
};
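//
// The Limit fields above follow the GDTR/IDTR convention of "size in bytes - 1".
// gLidtDescriptor.Base is left as 0 here and is filled in by HandOffToDxeCore ()
// before AsmWriteIdtr () is called.
//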

/**
  Allocates and fills in the Page Directory and Page Table Entries to
  establish a 4G page table.

  @param[in] StackBase  Stack base address.
  @param[in] StackSize  Stack size.

  @return The address of page table.

**/
UINTN
Create4GPageTablesIa32Pae (
  IN EFI_PHYSICAL_ADDRESS   StackBase,
  IN UINTN                  StackSize
  )
{
  UINT8                           PhysicalAddressBits;
  EFI_PHYSICAL_ADDRESS            PhysicalAddress;
  UINTN                           IndexOfPdpEntries;
  UINTN                           IndexOfPageDirectoryEntries;
  UINT32                          NumberOfPdpEntriesNeeded;
  PAGE_MAP_AND_DIRECTORY_POINTER  *PageMap;
  PAGE_MAP_AND_DIRECTORY_POINTER  *PageDirectoryPointerEntry;
  PAGE_TABLE_ENTRY                *PageDirectoryEntry;
  UINTN                           TotalPagesNum;
  UINTN                           PageAddress;
  UINT64                          AddressEncMask;

  //
  // Make sure AddressEncMask is contained within the smallest supported address field.
  //
  AddressEncMask = PcdGet64 (PcdPteMemoryEncryptionAddressOrMask) & PAGING_1G_ADDRESS_MASK_64;

  PhysicalAddressBits = 32;

  //
  // Calculate the table entries needed.
  //
  NumberOfPdpEntriesNeeded = (UINT32) LShiftU64 (1, (PhysicalAddressBits - 30));

  TotalPagesNum = NumberOfPdpEntriesNeeded + 1;
  PageAddress   = (UINTN) AllocatePageTableMemory (TotalPagesNum);
  ASSERT (PageAddress != 0);
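  //
  // With PhysicalAddressBits fixed at 32, NumberOfPdpEntriesNeeded is
  // 1 << (32 - 30) = 4 and TotalPagesNum is 5: one page for the Page Directory
  // Pointer Table plus four Page Directory pages. Each Page Directory maps
  // 512 * 2 MB = 1 GB, so the four directories together cover the full 4 GB.
  //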

  PageMap = (VOID *) PageAddress;
  PageAddress += SIZE_4KB;

  PageDirectoryPointerEntry = PageMap;
  PhysicalAddress = 0;

  for (IndexOfPdpEntries = 0; IndexOfPdpEntries < NumberOfPdpEntriesNeeded; IndexOfPdpEntries++, PageDirectoryPointerEntry++) {
    //
    // Each Directory Pointer entry points to a page of Page Directory entries.
    // Allocate space for them here and fill them in below, in the
    // IndexOfPageDirectoryEntries loop.
    //
    PageDirectoryEntry = (VOID *) PageAddress;
    PageAddress += SIZE_4KB;

    //
    // Fill in a Page Directory Pointer Entry
    //
    PageDirectoryPointerEntry->Uint64 = (UINT64) (UINTN) PageDirectoryEntry | AddressEncMask;
    PageDirectoryPointerEntry->Bits.Present = 1;

    for (IndexOfPageDirectoryEntries = 0; IndexOfPageDirectoryEntries < 512; IndexOfPageDirectoryEntries++, PageDirectoryEntry++, PhysicalAddress += SIZE_2MB) {
      if ((IsNullDetectionEnabled () && PhysicalAddress == 0)
          || ((PhysicalAddress < StackBase + StackSize)
              && ((PhysicalAddress + SIZE_2MB) > StackBase))) {
        //
        // Need to split this 2M page that covers the NULL page or the stack range.
        //
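        // Splitting to 4 KB pages lets NULL-pointer detection and the stack
        // guard/NX attributes be applied to individual 4 KB pages.
        //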
        Split2MPageTo4K (PhysicalAddress, (UINT64 *) PageDirectoryEntry, StackBase, StackSize);
      } else {
        //
        // Fill in the Page Directory entries
        //
        PageDirectoryEntry->Uint64 = (UINT64) PhysicalAddress | AddressEncMask;
        PageDirectoryEntry->Bits.ReadWrite = 1;
        PageDirectoryEntry->Bits.Present = 1;
        PageDirectoryEntry->Bits.MustBe1 = 1;
      }
    }
  }

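  //
  // IA32 PAE uses only the first 4 entries of the Page Directory Pointer Table;
  // clear the rest of the 4 KB page allocated for it so it holds no stale data.
  //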
  for (; IndexOfPdpEntries < 512; IndexOfPdpEntries++, PageDirectoryPointerEntry++) {
    ZeroMem (
      PageDirectoryPointerEntry,
      sizeof (PAGE_MAP_AND_DIRECTORY_POINTER)
      );
  }

  //
  // Protect the page table by marking the memory used for page table to be
  // read-only.
  //
  EnablePageTableProtection ((UINTN)PageMap, FALSE);

  return (UINTN) PageMap;
}

/**
  The function will check if IA32 PAE is supported.

  @retval TRUE   IA32 PAE is supported.
  @retval FALSE  IA32 PAE is not supported.

**/
BOOLEAN
IsIa32PaeSupport (
  VOID
  )
{
  UINT32   RegEax;
  UINT32   RegEdx;
  BOOLEAN  Ia32PaeSupport;

  Ia32PaeSupport = FALSE;
  AsmCpuid (0x0, &RegEax, NULL, NULL, NULL);
  if (RegEax >= 0x1) {
    AsmCpuid (0x1, NULL, NULL, NULL, &RegEdx);
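    //
    // CPUID.01H:EDX[6] (PAE) reports Physical Address Extension support.
    //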
    if ((RegEdx & BIT6) != 0) {
      Ia32PaeSupport = TRUE;
    }
  }

  return Ia32PaeSupport;
}

/**
  The function will check if Execute Disable Bit is available.

  @retval TRUE   Execute Disable Bit is available.
  @retval FALSE  Execute Disable Bit is not available.

**/
BOOLEAN
IsExecuteDisableBitAvailable (
  VOID
  )
{
  UINT32   RegEax;
  UINT32   RegEdx;
  BOOLEAN  Available;

  Available = FALSE;
  AsmCpuid (0x80000000, &RegEax, NULL, NULL, NULL);
  if (RegEax >= 0x80000001) {
    AsmCpuid (0x80000001, NULL, NULL, NULL, &RegEdx);
    if ((RegEdx & BIT20) != 0) {
      //
      // Bit 20: Execute Disable Bit available.
      //
      Available = TRUE;
    }
  }

  return Available;
}

/**
  The function will check if the page table should be set up or not.

  @retval TRUE   Page table should be created.
  @retval FALSE  Page table should not be created.

**/
BOOLEAN
ToBuildPageTable (
  VOID
  )
{
  if (!IsIa32PaeSupport ()) {
    return FALSE;
  }

  if (IsNullDetectionEnabled ()) {
    return TRUE;
  }

  if (PcdGet8 (PcdHeapGuardPropertyMask) != 0) {
    return TRUE;
  }

  if (PcdGetBool (PcdCpuStackGuard)) {
    return TRUE;
  }

  if (PcdGetBool (PcdSetNxForStack) && IsExecuteDisableBitAvailable ()) {
    return TRUE;
  }

  return FALSE;
}

/**
  Transfers control to DxeCore.

  This function performs CPU architecture specific operations to execute
  the entry point of DxeCore with the parameters of HobList.
  It also installs EFI_END_OF_PEI_PPI to signal the end of the PEI phase.

  @param DxeCoreEntryPoint         The entry point of DxeCore.
  @param HobList                   The start of HobList passed to DxeCore.

**/
VOID
HandOffToDxeCore (
  IN EFI_PHYSICAL_ADDRESS   DxeCoreEntryPoint,
  IN EFI_PEI_HOB_POINTERS   HobList
  )
{
  EFI_STATUS                       Status;
  EFI_PHYSICAL_ADDRESS             BaseOfStack;
  EFI_PHYSICAL_ADDRESS             TopOfStack;
  UINTN                            PageTables;
  X64_IDT_GATE_DESCRIPTOR          *IdtTable;
  UINTN                            SizeOfTemplate;
  VOID                             *TemplateBase;
  EFI_PHYSICAL_ADDRESS             VectorAddress;
  UINT32                           Index;
  X64_IDT_TABLE                    *IdtTableForX64;
  EFI_VECTOR_HANDOFF_INFO          *VectorInfo;
  EFI_PEI_VECTOR_HANDOFF_INFO_PPI  *VectorHandoffInfoPpi;
  BOOLEAN                          BuildPageTablesIa32Pae;

  if (IsNullDetectionEnabled ()) {
    ClearFirst4KPage (HobList.Raw);
  }

  Status = PeiServicesAllocatePages (EfiBootServicesData, EFI_SIZE_TO_PAGES (STACK_SIZE), &BaseOfStack);
  ASSERT_EFI_ERROR (Status);

  if (FeaturePcdGet(PcdDxeIplSwitchToLongMode)) {
    //
    // Compute the top of the stack we were allocated, which is used to load the X64 DxeCore.
    // Pre-allocate 32 bytes, which conforms to the x64 calling convention.
    //
    // The first four parameters to a function are passed in rcx, rdx, r8 and r9.
    // Any further parameters are pushed on the stack. Furthermore, space (4 * 8 bytes) for the
    // register parameters is reserved on the stack, in case the called function
    // wants to spill them; this is important if the function is variadic.
    //
    TopOfStack = BaseOfStack + EFI_SIZE_TO_PAGES (STACK_SIZE) * EFI_PAGE_SIZE - 32;

    //
    // The x64 calling convention requires that the stack be aligned to 16 bytes.
    //
    TopOfStack = (EFI_PHYSICAL_ADDRESS) (UINTN) ALIGN_POINTER (TopOfStack, 16);
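    //
    // Because BaseOfStack is page-aligned, the subtraction of 32 above already
    // leaves a 16-byte aligned address; the ALIGN_POINTER here is defensive.
    //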

    //
    // Load the GDT of Go64. Since the GDT of the 32-bit Tiano image is located in BS_DATA
    // memory, it may be corrupted when copying the FV to high-end memory.
    //
    AsmWriteGdtr (&gGdt);
    //
    // Create the page table and save the PageMapLevel4 address to CR3.
    //
    PageTables = CreateIdentityMappingPageTables (BaseOfStack, STACK_SIZE);

    //
    // End of PEI phase signal
    //
    PERF_EVENT_SIGNAL_BEGIN (gEndOfPeiSignalPpi.Guid);
    Status = PeiServicesInstallPpi (&gEndOfPeiSignalPpi);
    PERF_EVENT_SIGNAL_END (gEndOfPeiSignalPpi.Guid);
    ASSERT_EFI_ERROR (Status);

    AsmWriteCr3 (PageTables);

    //
    // Update the contents of BSP stack HOB to reflect the real stack info passed to DxeCore.
    //
    UpdateStackHob (BaseOfStack, STACK_SIZE);

    SizeOfTemplate = AsmGetVectorTemplatInfo (&TemplateBase);

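    //
    // Allocate one X64_IDT_TABLE plus IDT_ENTRY_COUNT copies of the vector entry
    // stub template; each copy is patched with its vector number by
    // AsmVectorFixup () in the loop below.
    //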
    Status = PeiServicesAllocatePages (
               EfiBootServicesData,
               EFI_SIZE_TO_PAGES (sizeof (X64_IDT_TABLE) + SizeOfTemplate * IDT_ENTRY_COUNT),
               &VectorAddress
               );
    ASSERT_EFI_ERROR (Status);

    //
    // Store EFI_PEI_SERVICES** in the 4 bytes immediately preceding the IDT (the
    // PI-defined location for IA32) so that it can still be retrieved correctly
    // after the IDT register is re-written.
    //
    IdtTableForX64 = (X64_IDT_TABLE *) (UINTN) VectorAddress;
    IdtTableForX64->PeiService = GetPeiServicesTablePointer ();

    VectorAddress = (EFI_PHYSICAL_ADDRESS) (UINTN) (IdtTableForX64 + 1);
    IdtTable      = IdtTableForX64->IdtTable;
    for (Index = 0; Index < IDT_ENTRY_COUNT; Index++) {
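      //
      // GateType 0x8E = Present, DPL 0, 64-bit interrupt gate.
      //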
      IdtTable[Index].Ia32IdtEntry.Bits.GateType    = 0x8e;
      IdtTable[Index].Ia32IdtEntry.Bits.Reserved_0  = 0;
      IdtTable[Index].Ia32IdtEntry.Bits.Selector    = SYS_CODE64_SEL;

      IdtTable[Index].Ia32IdtEntry.Bits.OffsetLow   = (UINT16) VectorAddress;
      IdtTable[Index].Ia32IdtEntry.Bits.OffsetHigh  = (UINT16) (RShiftU64 (VectorAddress, 16));
      IdtTable[Index].Offset32To63                  = (UINT32) (RShiftU64 (VectorAddress, 32));
      IdtTable[Index].Reserved                      = 0;

      CopyMem ((VOID *) (UINTN) VectorAddress, TemplateBase, SizeOfTemplate);
      AsmVectorFixup ((VOID *) (UINTN) VectorAddress, (UINT8) Index);

      VectorAddress += SizeOfTemplate;
    }

    gLidtDescriptor.Base = (UINTN) IdtTable;

    //
    // Disable the Debug Timer interrupt, since the new IDT table cannot handle it.
    //
    SaveAndSetDebugTimerInterrupt (FALSE);

    AsmWriteIdtr (&gLidtDescriptor);

    DEBUG ((
      DEBUG_INFO,
      "%a() Stack Base: 0x%lx, Stack Size: 0x%x\n",
      __FUNCTION__,
      BaseOfStack,
      STACK_SIZE
      ));

    //
    // Go to Long Mode and transfer control to DxeCore.
    // Interrupts will not get turned on until the CPU AP is loaded.
    // Call x64 drivers passing in single argument, a pointer to the HOBs.
    //
    AsmEnablePaging64 (
      SYS_CODE64_SEL,
      DxeCoreEntryPoint,
      (EFI_PHYSICAL_ADDRESS)(UINTN)(HobList.Raw),
      0,
      TopOfStack
      );
  } else {
    //
    // Get the Vector Hand-off Info PPI and build a GUIDed HOB from it.
    //
    Status = PeiServicesLocatePpi (
               &gEfiVectorHandoffInfoPpiGuid,
               0,
               NULL,
               (VOID **)&VectorHandoffInfoPpi
               );
    if (Status == EFI_SUCCESS) {
      DEBUG ((EFI_D_INFO, "Vector Hand-off Info PPI is gotten, GUIDed HOB is created!\n"));
      VectorInfo = VectorHandoffInfoPpi->Info;
      Index = 1;
      while (VectorInfo->Attribute != EFI_VECTOR_HANDOFF_LAST_ENTRY) {
        VectorInfo ++;
        Index ++;
      }
      BuildGuidDataHob (
        &gEfiVectorHandoffInfoPpiGuid,
        VectorHandoffInfoPpi->Info,
        sizeof (EFI_VECTOR_HANDOFF_INFO) * Index
        );
    }

    //
    // Compute the top of the stack we were allocated. Pre-allocate a UINTN
    // for safety.
    //
    TopOfStack = BaseOfStack + EFI_SIZE_TO_PAGES (STACK_SIZE) * EFI_PAGE_SIZE - CPU_STACK_ALIGNMENT;
    TopOfStack = (EFI_PHYSICAL_ADDRESS) (UINTN) ALIGN_POINTER (TopOfStack, CPU_STACK_ALIGNMENT);

    PageTables = 0;
    BuildPageTablesIa32Pae = ToBuildPageTable ();
    if (BuildPageTablesIa32Pae) {
      PageTables = Create4GPageTablesIa32Pae (BaseOfStack, STACK_SIZE);
      if (IsExecuteDisableBitAvailable ()) {
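        //
        // EnableExecuteDisableBit () is expected to set IA32_EFER.NXE so that any
        // XD bits set in the PAE page-table entries (e.g. for the stack when
        // PcdSetNxForStack is TRUE) take effect.
        //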
        EnableExecuteDisableBit ();
      }
    }

    //
    // End of PEI phase signal
    //
    PERF_EVENT_SIGNAL_BEGIN (gEndOfPeiSignalPpi.Guid);
    Status = PeiServicesInstallPpi (&gEndOfPeiSignalPpi);
    PERF_EVENT_SIGNAL_END (gEndOfPeiSignalPpi.Guid);
    ASSERT_EFI_ERROR (Status);

    if (BuildPageTablesIa32Pae) {
      AsmWriteCr3 (PageTables);
      //
      // Set Physical Address Extension (bit 5 of CR4).
      //
      AsmWriteCr4 (AsmReadCr4 () | BIT5);
    }

    //
    // Update the contents of BSP stack HOB to reflect the real stack info passed to DxeCore.
    //
    UpdateStackHob (BaseOfStack, STACK_SIZE);

    DEBUG ((
      DEBUG_INFO,
      "%a() Stack Base: 0x%lx, Stack Size: 0x%x\n",
      __FUNCTION__,
      BaseOfStack,
      STACK_SIZE
      ));

    //
    // Transfer the control to the entry point of DxeCore.
    //
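    // If an IA32 PAE page table was built, AsmEnablePaging32 () enables paging
    // (CR3 and CR4.PAE were programmed above) before calling the entry point on
    // the new stack; otherwise a plain SwitchStack () is sufficient.
    //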
    if (BuildPageTablesIa32Pae) {
      AsmEnablePaging32 (
        (SWITCH_STACK_ENTRY_POINT)(UINTN)DxeCoreEntryPoint,
        HobList.Raw,
        NULL,
        (VOID *) (UINTN) TopOfStack
        );
    } else {
      SwitchStack (
        (SWITCH_STACK_ENTRY_POINT)(UINTN)DxeCoreEntryPoint,
        HobList.Raw,
        NULL,
        (VOID *) (UINTN) TopOfStack
        );
    }
  }
}