MdeModulePkg/DxeIpl: Create 5-level page table for long mode
author     Ni, Ray <ray.ni@intel.com>
           Thu, 1 Aug 2019 09:58:29 +0000 (17:58 +0800)
committer  Eric Dong <eric.dong@intel.com>
           Fri, 9 Aug 2019 00:52:09 +0000 (08:52 +0800)
REF: https://bugzilla.tianocore.org/show_bug.cgi?id=2008

DxeIpl is responsible for creating the page table for the DXE phase,
which runs either in long mode or in 32-bit mode with certain
protection mechanisms enabled (refer to ToBuildPageTable()).

The patch updates DxeIpl to create a 5-level page table for the DXE
phase running in long mode when PcdUse5LevelPageTable is TRUE and the
CPU supports 5-level paging.

Signed-off-by: Ray Ni <ray.ni@intel.com>
Reviewed-by: Eric Dong <eric.dong@intel.com>
Regression-tested-by: Laszlo Ersek <lersek@redhat.com>
Reviewed-by: Hao A Wu <hao.a.wu@intel.com>
Signed-off-by: Eric Dong <eric.dong@intel.com>
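
A minimal stand-alone sketch (plain C, not edk2 code) of the feature check the
patch performs with AsmCpuidEx: 5-level paging support is reported in
CPUID.(EAX=07H,ECX=0):ECX bit 16 (LA57). The sketch assumes a GCC/Clang
toolchain providing <cpuid.h>; inside DxeIpl the equivalent test is the
EcxFlags.Bits.FiveLevelPage check in the diff below.

/* Hedged illustration only -- not part of the patch. */
#include <cpuid.h>
#include <stdio.h>

int main (void)
{
  unsigned int Eax = 0, Ebx = 0, Ecx = 0, Edx = 0;

  /* Leaf 07H, sub-leaf 0: structured extended feature flags. */
  if (__get_cpuid_count (7, 0, &Eax, &Ebx, &Ecx, &Edx) == 0) {
    printf ("CPUID leaf 07H not supported\n");
    return 1;
  }

  /* ECX bit 16 reports 5-level paging (57-bit linear addresses). */
  printf ("5-level paging %s\n",
          (Ecx & (1u << 16)) != 0 ? "supported" : "not supported");
  return 0;
}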
MdeModulePkg/Core/DxeIplPeim/DxeIpl.inf
MdeModulePkg/Core/DxeIplPeim/X64/VirtualMemory.c

diff --git a/MdeModulePkg/Core/DxeIplPeim/DxeIpl.inf b/MdeModulePkg/Core/DxeIplPeim/DxeIpl.inf
index abc3217b0179b7bf43808659c968ecfdd654f560..98bc17fc9d1f6117c6963fb5dccebafe95b34482 100644
   gEfiMdeModulePkgTokenSpaceGuid.PcdNullPointerDetectionPropertyMask    ## CONSUMES\r
   gEfiMdeModulePkgTokenSpaceGuid.PcdHeapGuardPropertyMask               ## CONSUMES\r
   gEfiMdeModulePkgTokenSpaceGuid.PcdCpuStackGuard                       ## CONSUMES\r
+  gEfiMdeModulePkgTokenSpaceGuid.PcdUse5LevelPageTable                  ## SOMETIMES_CONSUMES\r
 \r
 [Pcd.IA32,Pcd.X64,Pcd.ARM,Pcd.AARCH64]\r
   gEfiMdeModulePkgTokenSpaceGuid.PcdSetNxForStack               ## SOMETIMES_CONSUMES\r
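
As a rough, hedged illustration of the table-entry arithmetic implemented in
the VirtualMemory.c hunk below: each paging level resolves 9 linear-address
bits (PDPTE covers bits 30..38, PML4 bits 39..47, PML5 bits 48 and above),
and the pre-allocation reserves one 4 KB page per PDPT/PML4/PML5 page plus
the top-level map. The inputs below (52 address bits, no 1 GiB pages, LA57
available) are example values only, not part of the patch.

/* Stand-alone sketch (not edk2 code) of the page-count math used by
 * CreateIdentityMappingPageTables() after this patch.               */
#include <stdint.h>
#include <stdio.h>

int main (void)
{
  unsigned PhysicalAddressBits = 52;  /* example input */
  int      Page1GSupport       = 0;   /* example: no 1 GiB pages */
  int      Page5LevelSupport   = 1;   /* example: LA57 available, PCD TRUE */

  /* The patch caps PhysicalAddressBits at 48 up front when 5-level
   * paging is not used, so only one PML5 entry is ever needed then.  */
  if (!Page5LevelSupport && PhysicalAddressBits > 48) {
    PhysicalAddressBits = 48;
  }

  uint32_t Pml5 = 1, Pml4 = 1, Pdp;
  if (PhysicalAddressBits > 48) {
    Pml5 = 1u << (PhysicalAddressBits - 48);
    PhysicalAddressBits = 48;
  }
  if (PhysicalAddressBits > 39) {
    Pml4 = 1u << (PhysicalAddressBits - 39);
    PhysicalAddressBits = 39;
  }
  Pdp = 1u << (PhysicalAddressBits - 30);

  uint64_t TotalPagesNum;
  if (!Page1GSupport) {
    TotalPagesNum = ((uint64_t) (Pdp + 1) * Pml4 + 1) * Pml5 + 1;
  } else {
    TotalPagesNum = (uint64_t) (Pml4 + 1) * Pml5 + 1;
  }
  if (!Page5LevelSupport) {
    TotalPagesNum--;                  /* no page consumed by PML5 entries */
  }

  printf ("Pml5=%u Pml4=%u Pdp=%u TotalPages=%llu\n",
          Pml5, Pml4, Pdp, (unsigned long long) TotalPagesNum);
  return 0;
}

With these example inputs the sketch should print Pml5=16 Pml4=512 Pdp=512
TotalPages=4202513, matching the TotalPagesNum formula in the hunk.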
diff --git a/MdeModulePkg/Core/DxeIplPeim/X64/VirtualMemory.c b/MdeModulePkg/Core/DxeIplPeim/X64/VirtualMemory.c
index edc38e4525c4e875ad629454cda90b20979680d7..b40b7e0c9813b06459f7008c25919ab778a64d18 100644
     2) IA-32 Intel(R) Architecture Software Developer's Manual Volume 2:Instruction Set Reference, Intel\r
     3) IA-32 Intel(R) Architecture Software Developer's Manual Volume 3:System Programmer's Guide, Intel\r
 \r
-Copyright (c) 2006 - 2018, Intel Corporation. All rights reserved.<BR>\r
+Copyright (c) 2006 - 2019, Intel Corporation. All rights reserved.<BR>\r
 Copyright (c) 2017, AMD Incorporated. All rights reserved.<BR>\r
 \r
 SPDX-License-Identifier: BSD-2-Clause-Patent\r
 \r
 **/\r
 \r
+#include <Register/Intel/Cpuid.h>\r
 #include "DxeIpl.h"\r
 #include "VirtualMemory.h"\r
 \r
@@ -626,14 +627,18 @@ CreateIdentityMappingPageTables (
   )\r
 {\r
   UINT32                                        RegEax;\r
+  CPUID_STRUCTURED_EXTENDED_FEATURE_FLAGS_ECX   EcxFlags;\r
   UINT32                                        RegEdx;\r
   UINT8                                         PhysicalAddressBits;\r
   EFI_PHYSICAL_ADDRESS                          PageAddress;\r
+  UINTN                                         IndexOfPml5Entries;\r
   UINTN                                         IndexOfPml4Entries;\r
   UINTN                                         IndexOfPdpEntries;\r
   UINTN                                         IndexOfPageDirectoryEntries;\r
+  UINT32                                        NumberOfPml5EntriesNeeded;\r
   UINT32                                        NumberOfPml4EntriesNeeded;\r
   UINT32                                        NumberOfPdpEntriesNeeded;\r
+  PAGE_MAP_AND_DIRECTORY_POINTER                *PageMapLevel5Entry;\r
   PAGE_MAP_AND_DIRECTORY_POINTER                *PageMapLevel4Entry;\r
   PAGE_MAP_AND_DIRECTORY_POINTER                *PageMap;\r
   PAGE_MAP_AND_DIRECTORY_POINTER                *PageDirectoryPointerEntry;\r
@@ -641,9 +646,11 @@ CreateIdentityMappingPageTables (
   UINTN                                         TotalPagesNum;\r
   UINTN                                         BigPageAddress;\r
   VOID                                          *Hob;\r
+  BOOLEAN                                       Page5LevelSupport;\r
   BOOLEAN                                       Page1GSupport;\r
   PAGE_TABLE_1G_ENTRY                           *PageDirectory1GEntry;\r
   UINT64                                        AddressEncMask;\r
+  IA32_CR4                                      Cr4;\r
 \r
   //\r
   // Make sure AddressEncMask is contained to smallest supported address field\r
@@ -677,33 +684,68 @@ CreateIdentityMappingPageTables (
     }\r
   }\r
 \r
+  Page5LevelSupport = FALSE;\r
+  if (PcdGetBool (PcdUse5LevelPageTable)) {\r
+    AsmCpuidEx (\r
+      CPUID_STRUCTURED_EXTENDED_FEATURE_FLAGS, CPUID_STRUCTURED_EXTENDED_FEATURE_FLAGS_SUB_LEAF_INFO, NULL,\r
+      &EcxFlags.Uint32, NULL, NULL\r
+      );\r
+    if (EcxFlags.Bits.FiveLevelPage != 0) {\r
+      Page5LevelSupport = TRUE;\r
+    }\r
+  }\r
+\r
+  DEBUG ((DEBUG_INFO, "AddressBits=%u 5LevelPaging=%u 1GPage=%u\n", PhysicalAddressBits, Page5LevelSupport, Page1GSupport));\r
+\r
   //\r
-  // IA-32e paging translates 48-bit linear addresses to 52-bit physical addresses.\r
+  // IA-32e paging translates 48-bit linear addresses to 52-bit physical addresses\r
+  //  when 5-Level Paging is disabled, either because the hardware does not\r
+  //  support it or because it is disabled via PCD.\r
   //\r
   ASSERT (PhysicalAddressBits <= 52);\r
-  if (PhysicalAddressBits > 48) {\r
+  if (!Page5LevelSupport && PhysicalAddressBits > 48) {\r
     PhysicalAddressBits = 48;\r
   }\r
 \r
   //\r
   // Calculate the table entries needed.\r
   //\r
-  if (PhysicalAddressBits <= 39 ) {\r
-    NumberOfPml4EntriesNeeded = 1;\r
-    NumberOfPdpEntriesNeeded = (UINT32)LShiftU64 (1, (PhysicalAddressBits - 30));\r
-  } else {\r
-    NumberOfPml4EntriesNeeded = (UINT32)LShiftU64 (1, (PhysicalAddressBits - 39));\r
-    NumberOfPdpEntriesNeeded = 512;\r
+  NumberOfPml5EntriesNeeded = 1;\r
+  if (PhysicalAddressBits > 48) {\r
+    NumberOfPml5EntriesNeeded = (UINT32) LShiftU64 (1, PhysicalAddressBits - 48);\r
+    PhysicalAddressBits = 48;\r
+  }\r
+\r
+  NumberOfPml4EntriesNeeded = 1;\r
+  if (PhysicalAddressBits > 39) {\r
+    NumberOfPml4EntriesNeeded = (UINT32) LShiftU64 (1, PhysicalAddressBits - 39);\r
+    PhysicalAddressBits = 39;\r
   }\r
 \r
+  NumberOfPdpEntriesNeeded = 1;\r
+  ASSERT (PhysicalAddressBits > 30);\r
+  NumberOfPdpEntriesNeeded = (UINT32) LShiftU64 (1, PhysicalAddressBits - 30);\r
+\r
   //\r
   // Pre-allocate big pages to avoid later allocations.\r
   //\r
   if (!Page1GSupport) {\r
-    TotalPagesNum = (NumberOfPdpEntriesNeeded + 1) * NumberOfPml4EntriesNeeded + 1;\r
+    TotalPagesNum = ((NumberOfPdpEntriesNeeded + 1) * NumberOfPml4EntriesNeeded + 1) * NumberOfPml5EntriesNeeded + 1;\r
   } else {\r
-    TotalPagesNum = NumberOfPml4EntriesNeeded + 1;\r
+    TotalPagesNum = (NumberOfPml4EntriesNeeded + 1) * NumberOfPml5EntriesNeeded + 1;\r
+  }\r
+\r
+  //\r
+  // Subtract the one page occupied by PML5 entries if 5-Level Paging is disabled.\r
+  //\r
+  if (!Page5LevelSupport) {\r
+    TotalPagesNum--;\r
   }\r
+\r
+  DEBUG ((DEBUG_INFO, "Pml5=%u Pml4=%u Pdp=%u TotalPage=%Lu\n",\r
+    NumberOfPml5EntriesNeeded, NumberOfPml4EntriesNeeded,\r
+    NumberOfPdpEntriesNeeded, (UINT64)TotalPagesNum));\r
+\r
   BigPageAddress = (UINTN) AllocatePageTableMemory (TotalPagesNum);\r
   ASSERT (BigPageAddress != 0);\r
 \r
@@ -711,92 +753,125 @@ CreateIdentityMappingPageTables (
   // By architecture only one PageMapLevel4 exists - so lets allocate storage for it.\r
   //\r
   PageMap         = (VOID *) BigPageAddress;\r
-  BigPageAddress += SIZE_4KB;\r
-\r
-  PageMapLevel4Entry = PageMap;\r
-  PageAddress        = 0;\r
-  for (IndexOfPml4Entries = 0; IndexOfPml4Entries < NumberOfPml4EntriesNeeded; IndexOfPml4Entries++, PageMapLevel4Entry++) {\r
+  if (Page5LevelSupport) {\r
     //\r
-    // Each PML4 entry points to a page of Page Directory Pointer entires.\r
-    // So lets allocate space for them and fill them in in the IndexOfPdpEntries loop.\r
+    // By architecture only one PageMapLevel5 exists - so lets allocate storage for it.\r
     //\r
-    PageDirectoryPointerEntry = (VOID *) BigPageAddress;\r
-    BigPageAddress += SIZE_4KB;\r
+    PageMapLevel5Entry = PageMap;\r
+    BigPageAddress    += SIZE_4KB;\r
+  }\r
+  PageAddress        = 0;\r
 \r
+  for ( IndexOfPml5Entries = 0\r
+      ; IndexOfPml5Entries < NumberOfPml5EntriesNeeded\r
+      ; IndexOfPml5Entries++, PageMapLevel5Entry++) {\r
     //\r
-    // Make a PML4 Entry\r
+    // Each PML5 entry points to a page of PML4 entries.\r
+    // So lets allocate space for them and fill them in in the IndexOfPml4Entries loop.\r
+    // When 5-Level Paging is disabled, below allocation happens only once.\r
     //\r
-    PageMapLevel4Entry->Uint64 = (UINT64)(UINTN)PageDirectoryPointerEntry | AddressEncMask;\r
-    PageMapLevel4Entry->Bits.ReadWrite = 1;\r
-    PageMapLevel4Entry->Bits.Present = 1;\r
+    PageMapLevel4Entry = (VOID *) BigPageAddress;\r
+    BigPageAddress    += SIZE_4KB;\r
 \r
-    if (Page1GSupport) {\r
-      PageDirectory1GEntry = (VOID *) PageDirectoryPointerEntry;\r
+    if (Page5LevelSupport) {\r
+      //\r
+      // Make a PML5 Entry\r
+      //\r
+      PageMapLevel5Entry->Uint64 = (UINT64) (UINTN) PageMapLevel4Entry | AddressEncMask;\r
+      PageMapLevel5Entry->Bits.ReadWrite = 1;\r
+      PageMapLevel5Entry->Bits.Present   = 1;\r
+    }\r
 \r
-      for (IndexOfPageDirectoryEntries = 0; IndexOfPageDirectoryEntries < 512; IndexOfPageDirectoryEntries++, PageDirectory1GEntry++, PageAddress += SIZE_1GB) {\r
-        if (ToSplitPageTable (PageAddress, SIZE_1GB, StackBase, StackSize)) {\r
-          Split1GPageTo2M (PageAddress, (UINT64 *) PageDirectory1GEntry, StackBase, StackSize);\r
-        } else {\r
-          //\r
-          // Fill in the Page Directory entries\r
-          //\r
-          PageDirectory1GEntry->Uint64 = (UINT64)PageAddress | AddressEncMask;\r
-          PageDirectory1GEntry->Bits.ReadWrite = 1;\r
-          PageDirectory1GEntry->Bits.Present = 1;\r
-          PageDirectory1GEntry->Bits.MustBe1 = 1;\r
-        }\r
-      }\r
-    } else {\r
-      for (IndexOfPdpEntries = 0; IndexOfPdpEntries < NumberOfPdpEntriesNeeded; IndexOfPdpEntries++, PageDirectoryPointerEntry++) {\r
-        //\r
-        // Each Directory Pointer entries points to a page of Page Directory entires.\r
-        // So allocate space for them and fill them in in the IndexOfPageDirectoryEntries loop.\r
-        //\r
-        PageDirectoryEntry = (VOID *) BigPageAddress;\r
-        BigPageAddress += SIZE_4KB;\r
+    for ( IndexOfPml4Entries = 0\r
+        ; IndexOfPml4Entries < (NumberOfPml5EntriesNeeded == 1 ? NumberOfPml4EntriesNeeded : 512)\r
+        ; IndexOfPml4Entries++, PageMapLevel4Entry++) {\r
+      //\r
+      // Each PML4 entry points to a page of Page Directory Pointer entries.\r
+      // So lets allocate space for them and fill them in in the IndexOfPdpEntries loop.\r
+      //\r
+      PageDirectoryPointerEntry = (VOID *) BigPageAddress;\r
+      BigPageAddress += SIZE_4KB;\r
 \r
-        //\r
-        // Fill in a Page Directory Pointer Entries\r
-        //\r
-        PageDirectoryPointerEntry->Uint64 = (UINT64)(UINTN)PageDirectoryEntry | AddressEncMask;\r
-        PageDirectoryPointerEntry->Bits.ReadWrite = 1;\r
-        PageDirectoryPointerEntry->Bits.Present = 1;\r
+      //\r
+      // Make a PML4 Entry\r
+      //\r
+      PageMapLevel4Entry->Uint64 = (UINT64)(UINTN)PageDirectoryPointerEntry | AddressEncMask;\r
+      PageMapLevel4Entry->Bits.ReadWrite = 1;\r
+      PageMapLevel4Entry->Bits.Present = 1;\r
 \r
-        for (IndexOfPageDirectoryEntries = 0; IndexOfPageDirectoryEntries < 512; IndexOfPageDirectoryEntries++, PageDirectoryEntry++, PageAddress += SIZE_2MB) {\r
-          if (ToSplitPageTable (PageAddress, SIZE_2MB, StackBase, StackSize)) {\r
-            //\r
-            // Need to split this 2M page that covers NULL or stack range.\r
-            //\r
-            Split2MPageTo4K (PageAddress, (UINT64 *) PageDirectoryEntry, StackBase, StackSize);\r
+      if (Page1GSupport) {\r
+        PageDirectory1GEntry = (VOID *) PageDirectoryPointerEntry;\r
+\r
+        for (IndexOfPageDirectoryEntries = 0; IndexOfPageDirectoryEntries < 512; IndexOfPageDirectoryEntries++, PageDirectory1GEntry++, PageAddress += SIZE_1GB) {\r
+          if (ToSplitPageTable (PageAddress, SIZE_1GB, StackBase, StackSize)) {\r
+            Split1GPageTo2M (PageAddress, (UINT64 *) PageDirectory1GEntry, StackBase, StackSize);\r
           } else {\r
             //\r
             // Fill in the Page Directory entries\r
             //\r
-            PageDirectoryEntry->Uint64 = (UINT64)PageAddress | AddressEncMask;\r
-            PageDirectoryEntry->Bits.ReadWrite = 1;\r
-            PageDirectoryEntry->Bits.Present = 1;\r
-            PageDirectoryEntry->Bits.MustBe1 = 1;\r
+            PageDirectory1GEntry->Uint64 = (UINT64)PageAddress | AddressEncMask;\r
+            PageDirectory1GEntry->Bits.ReadWrite = 1;\r
+            PageDirectory1GEntry->Bits.Present = 1;\r
+            PageDirectory1GEntry->Bits.MustBe1 = 1;\r
           }\r
         }\r
-      }\r
+      } else {\r
+        for ( IndexOfPdpEntries = 0\r
+            ; IndexOfPdpEntries < (NumberOfPml4EntriesNeeded == 1 ? NumberOfPdpEntriesNeeded : 512)\r
+            ; IndexOfPdpEntries++, PageDirectoryPointerEntry++) {\r
+          //\r
+          // Each Directory Pointer entry points to a page of Page Directory entries.\r
+          // So allocate space for them and fill them in in the IndexOfPageDirectoryEntries loop.\r
+          //\r
+          PageDirectoryEntry = (VOID *) BigPageAddress;\r
+          BigPageAddress += SIZE_4KB;\r
 \r
-      for (; IndexOfPdpEntries < 512; IndexOfPdpEntries++, PageDirectoryPointerEntry++) {\r
-        ZeroMem (\r
-          PageDirectoryPointerEntry,\r
-          sizeof(PAGE_MAP_AND_DIRECTORY_POINTER)\r
-          );\r
+          //\r
+          // Fill in a Page Directory Pointer Entries\r
+          //\r
+          PageDirectoryPointerEntry->Uint64 = (UINT64)(UINTN)PageDirectoryEntry | AddressEncMask;\r
+          PageDirectoryPointerEntry->Bits.ReadWrite = 1;\r
+          PageDirectoryPointerEntry->Bits.Present = 1;\r
+\r
+          for (IndexOfPageDirectoryEntries = 0; IndexOfPageDirectoryEntries < 512; IndexOfPageDirectoryEntries++, PageDirectoryEntry++, PageAddress += SIZE_2MB) {\r
+            if (ToSplitPageTable (PageAddress, SIZE_2MB, StackBase, StackSize)) {\r
+              //\r
+              // Need to split this 2M page that covers NULL or stack range.\r
+              //\r
+              Split2MPageTo4K (PageAddress, (UINT64 *) PageDirectoryEntry, StackBase, StackSize);\r
+            } else {\r
+              //\r
+              // Fill in the Page Directory entries\r
+              //\r
+              PageDirectoryEntry->Uint64 = (UINT64)PageAddress | AddressEncMask;\r
+              PageDirectoryEntry->Bits.ReadWrite = 1;\r
+              PageDirectoryEntry->Bits.Present = 1;\r
+              PageDirectoryEntry->Bits.MustBe1 = 1;\r
+            }\r
+          }\r
+        }\r
+\r
+        //\r
+        // Fill with null entry for unused PDPTE\r
+        //\r
+        ZeroMem (PageDirectoryPointerEntry, (512 - IndexOfPdpEntries) * sizeof(PAGE_MAP_AND_DIRECTORY_POINTER));\r
       }\r
     }\r
+\r
+    //\r
+    // For the PML4 entries we are not using fill in a null entry.\r
+    //\r
+    ZeroMem (PageMapLevel4Entry, (512 - IndexOfPml4Entries) * sizeof (PAGE_MAP_AND_DIRECTORY_POINTER));\r
   }\r
 \r
-  //\r
-  // For the PML4 entries we are not using fill in a null entry.\r
-  //\r
-  for (; IndexOfPml4Entries < 512; IndexOfPml4Entries++, PageMapLevel4Entry++) {\r
-    ZeroMem (\r
-      PageMapLevel4Entry,\r
-      sizeof (PAGE_MAP_AND_DIRECTORY_POINTER)\r
-      );\r
+  if (Page5LevelSupport) {\r
+    Cr4.UintN = AsmReadCr4 ();\r
+    Cr4.Bits.LA57 = 1;\r
+    AsmWriteCr4 (Cr4.UintN);\r
+    //\r
+    // For the PML5 entries we are not using fill in a null entry.\r
+    //\r
+    ZeroMem (PageMapLevel5Entry, (512 - IndexOfPml5Entries) * sizeof (PAGE_MAP_AND_DIRECTORY_POINTER));\r
   }\r
 \r
   //\r