/** @file\r
MTRR setting library\r
\r
- Copyright (c) 2008 - 2015, Intel Corporation. All rights reserved.<BR>\r
- This program and the accompanying materials\r
- are licensed and made available under the terms and conditions of the BSD License\r
- which accompanies this distribution. The full text of the license may be found at\r
- http://opensource.org/licenses/bsd-license.php\r
+ @par Note:\r
+ Most of services in this library instance are suggested to be invoked by BSP only,\r
+ except for MtrrSetAllMtrrs() which is used to sync BSP's MTRR setting to APs.\r
\r
- THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
- WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
+ Copyright (c) 2008 - 2020, Intel Corporation. All rights reserved.<BR>\r
+ SPDX-License-Identifier: BSD-2-Clause-Patent\r
\r
**/\r
\r
-#include <Base.h>\r
+#include <Uefi.h>\r
+#include <Register/Intel/Cpuid.h>\r
+#include <Register/Intel/Msr.h>\r
\r
#include <Library/MtrrLib.h>\r
#include <Library/BaseLib.h>\r
#include <Library/BaseMemoryLib.h>\r
#include <Library/DebugLib.h>\r
\r
+#define OR_SEED 0x0101010101010101ull\r
+#define CLEAR_SEED 0xFFFFFFFFFFFFFFFFull\r
+#define MAX_WEIGHT MAX_UINT8\r
+#define SCRATCH_BUFFER_SIZE (4 * SIZE_4KB)\r
+#define MTRR_LIB_ASSERT_ALIGNED(B, L) ASSERT ((B & ~(L - 1)) == B);\r
+\r
+#define M(x, y) ((x) * VertexCount + (y))\r
+#define O(x, y) ((y) * VertexCount + (x))\r
+\r
//\r
// Context to save and restore when MTRRs are programmed\r
//\r
typedef struct {\r
- UINTN Cr4;\r
- BOOLEAN InterruptState;\r
+ UINTN Cr4;\r
+ BOOLEAN InterruptState;\r
} MTRR_CONTEXT;\r
\r
+typedef struct {\r
+ UINT64 Address;\r
+ UINT64 Alignment;\r
+ UINT64 Length;\r
+ MTRR_MEMORY_CACHE_TYPE Type : 7;\r
+\r
+ //\r
+ // Temporary use for calculating the best MTRR settings.\r
+ //\r
+ BOOLEAN Visited : 1;\r
+ UINT8 Weight;\r
+ UINT16 Previous;\r
+} MTRR_LIB_ADDRESS;\r
+\r
//\r
// This table defines the offset, base and length of the fixed MTRRs\r
//\r
CONST FIXED_MTRR mMtrrLibFixedMtrrTable[] = {\r
{\r
- MTRR_LIB_IA32_MTRR_FIX64K_00000,\r
+ MSR_IA32_MTRR_FIX64K_00000,\r
0,\r
SIZE_64KB\r
},\r
{\r
- MTRR_LIB_IA32_MTRR_FIX16K_80000,\r
+ MSR_IA32_MTRR_FIX16K_80000,\r
0x80000,\r
SIZE_16KB\r
},\r
{\r
- MTRR_LIB_IA32_MTRR_FIX16K_A0000,\r
+ MSR_IA32_MTRR_FIX16K_A0000,\r
0xA0000,\r
SIZE_16KB\r
},\r
{\r
- MTRR_LIB_IA32_MTRR_FIX4K_C0000,\r
+ MSR_IA32_MTRR_FIX4K_C0000,\r
0xC0000,\r
SIZE_4KB\r
},\r
{\r
- MTRR_LIB_IA32_MTRR_FIX4K_C8000,\r
+ MSR_IA32_MTRR_FIX4K_C8000,\r
0xC8000,\r
SIZE_4KB\r
},\r
{\r
- MTRR_LIB_IA32_MTRR_FIX4K_D0000,\r
+ MSR_IA32_MTRR_FIX4K_D0000,\r
0xD0000,\r
SIZE_4KB\r
},\r
{\r
- MTRR_LIB_IA32_MTRR_FIX4K_D8000,\r
+ MSR_IA32_MTRR_FIX4K_D8000,\r
0xD8000,\r
SIZE_4KB\r
},\r
{\r
- MTRR_LIB_IA32_MTRR_FIX4K_E0000,\r
+ MSR_IA32_MTRR_FIX4K_E0000,\r
0xE0000,\r
SIZE_4KB\r
},\r
{\r
- MTRR_LIB_IA32_MTRR_FIX4K_E8000,\r
+ MSR_IA32_MTRR_FIX4K_E8000,\r
0xE8000,\r
SIZE_4KB\r
},\r
{\r
- MTRR_LIB_IA32_MTRR_FIX4K_F0000,\r
+ MSR_IA32_MTRR_FIX4K_F0000,\r
0xF0000,\r
SIZE_4KB\r
},\r
{\r
- MTRR_LIB_IA32_MTRR_FIX4K_F8000,\r
+ MSR_IA32_MTRR_FIX4K_F8000,\r
0xF8000,\r
SIZE_4KB\r
}\r
//\r
// Lookup table used to print MTRRs\r
//\r
-GLOBAL_REMOVE_IF_UNREFERENCED CONST CHAR8 *mMtrrMemoryCacheTypeShortName[] = {\r
+GLOBAL_REMOVE_IF_UNREFERENCED CONST CHAR8 *mMtrrMemoryCacheTypeShortName[] = {\r
"UC", // CacheUncacheable\r
"WC", // CacheWriteCombining\r
"R*", // Invalid\r
"R*" // Invalid\r
};\r
\r
+/**\r
+ Worker function prints all MTRRs for debugging.\r
+\r
+ If MtrrSetting is not NULL, print MTRR settings from input MTRR\r
+ settings buffer.\r
+ If MtrrSetting is NULL, print MTRR settings from MTRRs.\r
+\r
+ @param MtrrSetting A buffer holding all MTRRs content.\r
+**/\r
+VOID\r
+MtrrDebugPrintAllMtrrsWorker (\r
+ IN MTRR_SETTINGS *MtrrSetting\r
+ );\r
+\r
/**\r
Worker function returns the variable MTRR count for the CPU.\r
\r
VOID\r
)\r
{\r
- UINT32 VariableMtrrCount;\r
+ MSR_IA32_MTRRCAP_REGISTER MtrrCap;\r
\r
- VariableMtrrCount = (UINT32)(AsmReadMsr64 (MTRR_LIB_IA32_MTRR_CAP) & MTRR_LIB_IA32_MTRR_CAP_VCNT_MASK);\r
- ASSERT (VariableMtrrCount <= MTRR_NUMBER_OF_VARIABLE_MTRR);\r
- return VariableMtrrCount;\r
+ MtrrCap.Uint64 = AsmReadMsr64 (MSR_IA32_MTRRCAP);\r
+ ASSERT (MtrrCap.Bits.VCNT <= ARRAY_SIZE (((MTRR_VARIABLE_SETTINGS *)0)->Mtrr));\r
+ return MtrrCap.Bits.VCNT;\r
}\r
\r
/**\r
if (!IsMtrrSupported ()) {\r
return 0;\r
}\r
+\r
return GetVariableMtrrCountWorker ();\r
}\r
\r
UINT32 VariableMtrrCount;\r
UINT32 ReservedMtrrNumber;\r
\r
- VariableMtrrCount = GetVariableMtrrCountWorker ();\r
+ VariableMtrrCount = GetVariableMtrrCountWorker ();\r
ReservedMtrrNumber = PcdGet32 (PcdCpuNumberOfReservedVariableMtrrs);\r
if (VariableMtrrCount < ReservedMtrrNumber) {\r
return 0;\r
if (!IsMtrrSupported ()) {\r
return 0;\r
}\r
+\r
return GetFirmwareVariableMtrrCountWorker ();\r
}\r
\r
/**\r
Worker function returns the default MTRR cache type for the system.\r
\r
+ If MtrrSetting is not NULL, returns the default MTRR cache type from input\r
+ MTRR settings buffer.\r
+ If MtrrSetting is NULL, returns the default MTRR cache type from MSR.\r
+\r
+ @param[in] MtrrSetting A buffer holding all MTRRs content.\r
+\r
@return The default MTRR cache type.\r
\r
**/\r
MTRR_MEMORY_CACHE_TYPE\r
MtrrGetDefaultMemoryTypeWorker (\r
- VOID\r
+ IN MTRR_SETTINGS *MtrrSetting\r
)\r
{\r
- return (MTRR_MEMORY_CACHE_TYPE) (AsmReadMsr64 (MTRR_LIB_IA32_MTRR_DEF_TYPE) & 0x7);\r
-}\r
+ MSR_IA32_MTRR_DEF_TYPE_REGISTER DefType;\r
+\r
+ if (MtrrSetting == NULL) {\r
+ DefType.Uint64 = AsmReadMsr64 (MSR_IA32_MTRR_DEF_TYPE);\r
+ } else {\r
+ DefType.Uint64 = MtrrSetting->MtrrDefType;\r
+ }\r
\r
+ return (MTRR_MEMORY_CACHE_TYPE)DefType.Bits.Type;\r
+}\r
\r
/**\r
Returns the default MTRR cache type for the system.\r
if (!IsMtrrSupported ()) {\r
return CacheUncacheable;\r
}\r
- return MtrrGetDefaultMemoryTypeWorker ();\r
+\r
+ return MtrrGetDefaultMemoryTypeWorker (NULL);\r
}\r
\r
/**\r
\r
**/\r
VOID\r
-PreMtrrChange (\r
+MtrrLibPreMtrrChange (\r
OUT MTRR_CONTEXT *MtrrContext\r
)\r
{\r
+ MSR_IA32_MTRR_DEF_TYPE_REGISTER DefType;\r
+\r
//\r
// Disable interrupts and save current interrupt state\r
//\r
- MtrrContext->InterruptState = SaveAndDisableInterrupts();\r
+ MtrrContext->InterruptState = SaveAndDisableInterrupts ();\r
\r
//\r
// Enter no fill cache mode, CD=1(Bit30), NW=0 (Bit29)\r
//\r
// Disable MTRRs\r
//\r
- AsmMsrBitFieldWrite64 (MTRR_LIB_IA32_MTRR_DEF_TYPE, 10, 11, 0);\r
+ DefType.Uint64 = AsmReadMsr64 (MSR_IA32_MTRR_DEF_TYPE);\r
+ DefType.Bits.E = 0;\r
+ AsmWriteMsr64 (MSR_IA32_MTRR_DEF_TYPE, DefType.Uint64);\r
}\r
\r
/**\r
\r
**/\r
VOID\r
-PostMtrrChangeEnableCache (\r
+MtrrLibPostMtrrChangeEnableCache (\r
IN MTRR_CONTEXT *MtrrContext\r
)\r
{\r
\r
**/\r
VOID\r
-PostMtrrChange (\r
+MtrrLibPostMtrrChange (\r
IN MTRR_CONTEXT *MtrrContext\r
)\r
{\r
+ MSR_IA32_MTRR_DEF_TYPE_REGISTER DefType;\r
+\r
//\r
// Enable Cache MTRR\r
//\r
- AsmMsrBitFieldWrite64 (MTRR_LIB_IA32_MTRR_DEF_TYPE, 10, 11, 3);\r
+ DefType.Uint64 = AsmReadMsr64 (MSR_IA32_MTRR_DEF_TYPE);\r
+ DefType.Bits.E = 1;\r
+ DefType.Bits.FE = 1;\r
+ AsmWriteMsr64 (MSR_IA32_MTRR_DEF_TYPE, DefType.Uint64);\r
\r
- PostMtrrChangeEnableCache (MtrrContext);\r
+ MtrrLibPostMtrrChangeEnableCache (MtrrContext);\r
}\r
\r
/**\r
@retval The pointer of FixedSettings\r
\r
**/\r
-MTRR_FIXED_SETTINGS*\r
+MTRR_FIXED_SETTINGS *\r
MtrrGetFixedMtrrWorker (\r
- OUT MTRR_FIXED_SETTINGS *FixedSettings\r
+ OUT MTRR_FIXED_SETTINGS *FixedSettings\r
)\r
{\r
UINT32 Index;\r
\r
for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {\r
- FixedSettings->Mtrr[Index] =\r
- AsmReadMsr64 (mMtrrLibFixedMtrrTable[Index].Msr);\r
+ FixedSettings->Mtrr[Index] =\r
+ AsmReadMsr64 (mMtrrLibFixedMtrrTable[Index].Msr);\r
}\r
\r
return FixedSettings;\r
}\r
\r
-\r
/**\r
This function gets the content in fixed MTRRs\r
\r
@retval The pointer of FixedSettings\r
\r
**/\r
-MTRR_FIXED_SETTINGS*\r
+MTRR_FIXED_SETTINGS *\r
EFIAPI\r
MtrrGetFixedMtrr (\r
- OUT MTRR_FIXED_SETTINGS *FixedSettings\r
+ OUT MTRR_FIXED_SETTINGS *FixedSettings\r
)\r
{\r
if (!IsMtrrSupported ()) {\r
return MtrrGetFixedMtrrWorker (FixedSettings);\r
}\r
\r
-\r
/**\r
Worker function will get the raw value in variable MTRRs\r
\r
+ If MtrrSetting is not NULL, gets the variable MTRRs raw value from input\r
+ MTRR settings buffer.\r
+ If MtrrSetting is NULL, gets the variable MTRRs raw value from MTRRs.\r
+\r
+ @param[in] MtrrSetting A buffer holding all MTRRs content.\r
+ @param[in] VariableMtrrCount Number of variable MTRRs.\r
@param[out] VariableSettings A buffer to hold variable MTRRs content.\r
\r
@return The VariableSettings input pointer\r
\r
**/\r
-MTRR_VARIABLE_SETTINGS*\r
+MTRR_VARIABLE_SETTINGS *\r
MtrrGetVariableMtrrWorker (\r
+ IN MTRR_SETTINGS *MtrrSetting,\r
IN UINT32 VariableMtrrCount,\r
OUT MTRR_VARIABLE_SETTINGS *VariableSettings\r
)\r
{\r
UINT32 Index;\r
\r
- ASSERT (VariableMtrrCount <= MTRR_NUMBER_OF_VARIABLE_MTRR);\r
+ ASSERT (VariableMtrrCount <= ARRAY_SIZE (VariableSettings->Mtrr));\r
\r
for (Index = 0; Index < VariableMtrrCount; Index++) {\r
- VariableSettings->Mtrr[Index].Base =\r
- AsmReadMsr64 (MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (Index << 1));\r
- VariableSettings->Mtrr[Index].Mask =\r
- AsmReadMsr64 (MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (Index << 1) + 1);\r
- }\r
-\r
- return VariableSettings;\r
-}\r
-\r
-/**\r
- This function will get the raw value in variable MTRRs\r
-\r
- @param[out] VariableSettings A buffer to hold variable MTRRs content.\r
-\r
- @return The VariableSettings input pointer\r
-\r
-**/\r
-MTRR_VARIABLE_SETTINGS*\r
-EFIAPI\r
-MtrrGetVariableMtrr (\r
- OUT MTRR_VARIABLE_SETTINGS *VariableSettings\r
- )\r
-{\r
- if (!IsMtrrSupported ()) {\r
- return VariableSettings;\r
+ if (MtrrSetting == NULL) {\r
+ VariableSettings->Mtrr[Index].Base =\r
+ AsmReadMsr64 (MSR_IA32_MTRR_PHYSBASE0 + (Index << 1));\r
+ VariableSettings->Mtrr[Index].Mask =\r
+ AsmReadMsr64 (MSR_IA32_MTRR_PHYSMASK0 + (Index << 1));\r
+ } else {\r
+ VariableSettings->Mtrr[Index].Base = MtrrSetting->Variables.Mtrr[Index].Base;\r
+ VariableSettings->Mtrr[Index].Mask = MtrrSetting->Variables.Mtrr[Index].Mask;\r
+ }\r
}\r
\r
- return MtrrGetVariableMtrrWorker (\r
- GetVariableMtrrCountWorker (),\r
- VariableSettings\r
- );\r
+ return VariableSettings;\r
}\r
\r
/**\r
Programs fixed MTRRs registers.\r
\r
- @param[in] MemoryCacheType The memory type to set.\r
+ @param[in] Type The memory type to set.\r
@param[in, out] Base The base address of memory range.\r
@param[in, out] Length The length of memory range.\r
- @param[out] ReturnMsrNum The index of the fixed MTRR MSR to program.\r
- @param[out] ReturnClearMask The bits to clear in the fixed MTRR MSR.\r
- @param[out] ReturnOrMask The bits to set in the fixed MTRR MSR.\r
+ @param[in, out] LastMsrIndex On input, the last index of the fixed MTRR MSR to program.\r
+ On return, the current index of the fixed MTRR MSR to program.\r
+ @param[out] ClearMask The bits to clear in the fixed MTRR MSR.\r
+ @param[out] OrMask The bits to set in the fixed MTRR MSR.\r
\r
@retval RETURN_SUCCESS The cache type was updated successfully\r
@retval RETURN_UNSUPPORTED The requested range or cache type was invalid\r
\r
**/\r
RETURN_STATUS\r
-ProgramFixedMtrr (\r
- IN UINT64 MemoryCacheType,\r
- IN OUT UINT64 *Base,\r
- IN OUT UINT64 *Length,\r
- OUT UINT32 *ReturnMsrNum,\r
- OUT UINT64 *ReturnClearMask,\r
- OUT UINT64 *ReturnOrMask\r
+MtrrLibProgramFixedMtrr (\r
+ IN MTRR_MEMORY_CACHE_TYPE Type,\r
+ IN OUT UINT64 *Base,\r
+ IN OUT UINT64 *Length,\r
+ IN OUT UINT32 *LastMsrIndex,\r
+ OUT UINT64 *ClearMask,\r
+ OUT UINT64 *OrMask\r
)\r
{\r
- UINT32 MsrNum;\r
- UINT32 ByteShift;\r
- UINT64 TempQword;\r
- UINT64 OrMask;\r
- UINT64 ClearMask;\r
-\r
- TempQword = 0;\r
- OrMask = 0;\r
- ClearMask = 0;\r
-\r
- for (MsrNum = 0; MsrNum < MTRR_NUMBER_OF_FIXED_MTRR; MsrNum++) {\r
- if ((*Base >= mMtrrLibFixedMtrrTable[MsrNum].BaseAddress) &&\r
+ UINT32 MsrIndex;\r
+ UINT32 LeftByteShift;\r
+ UINT32 RightByteShift;\r
+ UINT64 SubLength;\r
+\r
+ //\r
+ // Find the fixed MTRR index to be programmed\r
+ //\r
+ for (MsrIndex = *LastMsrIndex + 1; MsrIndex < ARRAY_SIZE (mMtrrLibFixedMtrrTable); MsrIndex++) {\r
+ if ((*Base >= mMtrrLibFixedMtrrTable[MsrIndex].BaseAddress) &&\r
(*Base <\r
- (\r
- mMtrrLibFixedMtrrTable[MsrNum].BaseAddress +\r
- (8 * mMtrrLibFixedMtrrTable[MsrNum].Length)\r
- )\r
- )\r
- ) {\r
+ (\r
+ mMtrrLibFixedMtrrTable[MsrIndex].BaseAddress +\r
+ (8 * mMtrrLibFixedMtrrTable[MsrIndex].Length)\r
+ )\r
+ )\r
+ )\r
+ {\r
break;\r
}\r
}\r
\r
- if (MsrNum == MTRR_NUMBER_OF_FIXED_MTRR) {\r
+ ASSERT (MsrIndex != ARRAY_SIZE (mMtrrLibFixedMtrrTable));\r
+\r
+ //\r
+ // Find the begin offset in fixed MTRR and calculate byte offset of left shift\r
+ //\r
+ if ((((UINT32)*Base - mMtrrLibFixedMtrrTable[MsrIndex].BaseAddress) % mMtrrLibFixedMtrrTable[MsrIndex].Length) != 0) {\r
+ //\r
+ // Base address should be aligned to the begin of a certain Fixed MTRR range.\r
+ //\r
return RETURN_UNSUPPORTED;\r
}\r
\r
+ LeftByteShift = ((UINT32)*Base - mMtrrLibFixedMtrrTable[MsrIndex].BaseAddress) / mMtrrLibFixedMtrrTable[MsrIndex].Length;\r
+ ASSERT (LeftByteShift < 8);\r
+\r
//\r
- // We found the fixed MTRR to be programmed\r
+ // Find the end offset in fixed MTRR and calculate byte offset of right shift\r
//\r
- for (ByteShift = 0; ByteShift < 8; ByteShift++) {\r
- if (*Base ==\r
- (\r
- mMtrrLibFixedMtrrTable[MsrNum].BaseAddress +\r
- (ByteShift * mMtrrLibFixedMtrrTable[MsrNum].Length)\r
- )\r
- ) {\r
- break;\r
+ SubLength = mMtrrLibFixedMtrrTable[MsrIndex].Length * (8 - LeftByteShift);\r
+ if (*Length >= SubLength) {\r
+ RightByteShift = 0;\r
+ } else {\r
+ if (((UINT32)(*Length) % mMtrrLibFixedMtrrTable[MsrIndex].Length) != 0) {\r
+ //\r
+ // Length should be aligned to the end of a certain Fixed MTRR range.\r
+ //\r
+ return RETURN_UNSUPPORTED;\r
}\r
- }\r
\r
- if (ByteShift == 8) {\r
- return RETURN_UNSUPPORTED;\r
+ RightByteShift = 8 - LeftByteShift - (UINT32)(*Length) / mMtrrLibFixedMtrrTable[MsrIndex].Length;\r
+ //\r
+ // Update SubLength by actual length\r
+ //\r
+ SubLength = *Length;\r
}\r
\r
- for (\r
- ;\r
- ((ByteShift < 8) && (*Length >= mMtrrLibFixedMtrrTable[MsrNum].Length));\r
- ByteShift++\r
- ) {\r
- OrMask |= LShiftU64 ((UINT64) MemoryCacheType, (UINT32) (ByteShift * 8));\r
- ClearMask |= LShiftU64 ((UINT64) 0xFF, (UINT32) (ByteShift * 8));\r
- *Length -= mMtrrLibFixedMtrrTable[MsrNum].Length;\r
- *Base += mMtrrLibFixedMtrrTable[MsrNum].Length;\r
+ *ClearMask = CLEAR_SEED;\r
+ *OrMask = MultU64x32 (OR_SEED, (UINT32)Type);\r
+\r
+ if (LeftByteShift != 0) {\r
+ //\r
+ // Clear the low bits by LeftByteShift\r
+ //\r
+ *ClearMask &= LShiftU64 (*ClearMask, LeftByteShift * 8);\r
+ *OrMask &= LShiftU64 (*OrMask, LeftByteShift * 8);\r
}\r
\r
- if (ByteShift < 8 && (*Length != 0)) {\r
- return RETURN_UNSUPPORTED;\r
+ if (RightByteShift != 0) {\r
+ //\r
+ // Clear the high bits by RightByteShift\r
+ //\r
+ *ClearMask &= RShiftU64 (*ClearMask, RightByteShift * 8);\r
+ *OrMask &= RShiftU64 (*OrMask, RightByteShift * 8);\r
}\r
\r
- *ReturnMsrNum = MsrNum;\r
- *ReturnClearMask = ClearMask;\r
- *ReturnOrMask = OrMask;\r
+ *Length -= SubLength;\r
+ *Base += SubLength;\r
+\r
+ *LastMsrIndex = MsrIndex;\r
\r
return RETURN_SUCCESS;\r
}\r
\r
-\r
/**\r
Worker function gets the attribute of variable MTRRs.\r
\r
This function shadows the content of variable MTRRs into an\r
internal array: VariableMtrr.\r
\r
- @param[in] VariableSettings The variable MTRR values to shadow\r
- @param[in] FirmwareVariableMtrrCount The number of variable MTRRs available to firmware\r
- @param[in] MtrrValidBitsMask The mask for the valid bit of the MTRR\r
- @param[in] MtrrValidAddressMask The valid address mask for MTRR\r
- @param[out] VariableMtrr The array to shadow variable MTRRs content\r
+ @param[in] VariableSettings The variable MTRR values to shadow\r
+ @param[in] VariableMtrrCount The number of variable MTRRs\r
+ @param[in] MtrrValidBitsMask The mask for the valid bit of the MTRR\r
+ @param[in] MtrrValidAddressMask The valid address mask for MTRR\r
+ @param[out] VariableMtrr The array to shadow variable MTRRs content\r
\r
- @return The return value of this parameter indicates the\r
- number of MTRRs which has been used.\r
+ @return Number of MTRRs which has been used.\r
\r
**/\r
UINT32\r
MtrrGetMemoryAttributeInVariableMtrrWorker (\r
IN MTRR_VARIABLE_SETTINGS *VariableSettings,\r
- IN UINTN FirmwareVariableMtrrCount,\r
+ IN UINTN VariableMtrrCount,\r
IN UINT64 MtrrValidBitsMask,\r
IN UINT64 MtrrValidAddressMask,\r
OUT VARIABLE_MTRR *VariableMtrr\r
UINTN Index;\r
UINT32 UsedMtrr;\r
\r
- ZeroMem (VariableMtrr, sizeof (VARIABLE_MTRR) * MTRR_NUMBER_OF_VARIABLE_MTRR);\r
- for (Index = 0, UsedMtrr = 0; Index < FirmwareVariableMtrrCount; Index++) {\r
- if ((VariableSettings->Mtrr[Index].Mask & MTRR_LIB_CACHE_MTRR_ENABLED) != 0) {\r
+ ZeroMem (VariableMtrr, sizeof (VARIABLE_MTRR) * ARRAY_SIZE (VariableSettings->Mtrr));\r
+ for (Index = 0, UsedMtrr = 0; Index < VariableMtrrCount; Index++) {\r
+ if (((MSR_IA32_MTRR_PHYSMASK_REGISTER *)&VariableSettings->Mtrr[Index].Mask)->Bits.V != 0) {\r
VariableMtrr[Index].Msr = (UINT32)Index;\r
VariableMtrr[Index].BaseAddress = (VariableSettings->Mtrr[Index].Base & MtrrValidAddressMask);\r
- VariableMtrr[Index].Length = ((~(VariableSettings->Mtrr[Index].Mask & MtrrValidAddressMask)) & MtrrValidBitsMask) + 1;\r
- VariableMtrr[Index].Type = (VariableSettings->Mtrr[Index].Base & 0x0ff);\r
- VariableMtrr[Index].Valid = TRUE;\r
- VariableMtrr[Index].Used = TRUE;\r
+ VariableMtrr[Index].Length =\r
+ ((~(VariableSettings->Mtrr[Index].Mask & MtrrValidAddressMask)) & MtrrValidBitsMask) + 1;\r
+ VariableMtrr[Index].Type = (VariableSettings->Mtrr[Index].Base & 0x0ff);\r
+ VariableMtrr[Index].Valid = TRUE;\r
+ VariableMtrr[Index].Used = TRUE;\r
UsedMtrr++;\r
}\r
}\r
+\r
return UsedMtrr;\r
}\r
\r
+/**\r
+ Convert variable MTRRs to a RAW MTRR_MEMORY_RANGE array.\r
+ One MTRR_MEMORY_RANGE element is created for each MTRR setting.\r
+ The routine doesn't remove the overlap or combine the near-by region.\r
+\r
+ @param[in] VariableSettings The variable MTRR values to shadow\r
+ @param[in] VariableMtrrCount The number of variable MTRRs\r
+ @param[in] MtrrValidBitsMask The mask for the valid bit of the MTRR\r
+ @param[in] MtrrValidAddressMask The valid address mask for MTRR\r
+ @param[out] VariableMtrr The array to shadow variable MTRRs content\r
+\r
+ @return Number of MTRRs which has been used.\r
+\r
+**/\r
+UINT32\r
+MtrrLibGetRawVariableRanges (\r
+ IN MTRR_VARIABLE_SETTINGS *VariableSettings,\r
+ IN UINTN VariableMtrrCount,\r
+ IN UINT64 MtrrValidBitsMask,\r
+ IN UINT64 MtrrValidAddressMask,\r
+ OUT MTRR_MEMORY_RANGE *VariableMtrr\r
+ )\r
+{\r
+ UINTN Index;\r
+ UINT32 UsedMtrr;\r
+\r
+ ZeroMem (VariableMtrr, sizeof (MTRR_MEMORY_RANGE) * ARRAY_SIZE (VariableSettings->Mtrr));\r
+ for (Index = 0, UsedMtrr = 0; Index < VariableMtrrCount; Index++) {\r
+ if (((MSR_IA32_MTRR_PHYSMASK_REGISTER *)&VariableSettings->Mtrr[Index].Mask)->Bits.V != 0) {\r
+ VariableMtrr[Index].BaseAddress = (VariableSettings->Mtrr[Index].Base & MtrrValidAddressMask);\r
+ VariableMtrr[Index].Length =\r
+ ((~(VariableSettings->Mtrr[Index].Mask & MtrrValidAddressMask)) & MtrrValidBitsMask) + 1;\r
+ VariableMtrr[Index].Type = (MTRR_MEMORY_CACHE_TYPE)(VariableSettings->Mtrr[Index].Base & 0x0ff);\r
+ UsedMtrr++;\r
+ }\r
+ }\r
+\r
+ return UsedMtrr;\r
+}\r
\r
/**\r
Gets the attribute of variable MTRRs.\r
@param[in] MtrrValidAddressMask The valid address mask for MTRR\r
@param[out] VariableMtrr The array to shadow variable MTRRs content\r
\r
- @return The return value of this paramter indicates the\r
+ @return The return value of this parameter indicates the\r
number of MTRRs which has been used.\r
\r
**/\r
UINT32\r
EFIAPI\r
MtrrGetMemoryAttributeInVariableMtrr (\r
- IN UINT64 MtrrValidBitsMask,\r
- IN UINT64 MtrrValidAddressMask,\r
- OUT VARIABLE_MTRR *VariableMtrr\r
+ IN UINT64 MtrrValidBitsMask,\r
+ IN UINT64 MtrrValidAddressMask,\r
+ OUT VARIABLE_MTRR *VariableMtrr\r
)\r
{\r
MTRR_VARIABLE_SETTINGS VariableSettings;\r
}\r
\r
MtrrGetVariableMtrrWorker (\r
+ NULL,\r
GetVariableMtrrCountWorker (),\r
&VariableSettings\r
);\r
);\r
}\r
\r
-\r
/**\r
- Checks overlap between given memory range and MTRRs.\r
-\r
- @param[in] FirmwareVariableMtrrCount The number of variable MTRRs available\r
- to firmware.\r
- @param[in] Start The start address of memory range.\r
- @param[in] End The end address of memory range.\r
- @param[in] VariableMtrr The array to shadow variable MTRRs content\r
+ Return the biggest alignment (lowest set bit) of address.\r
+ The function is equivalent to: 1 << LowBitSet64 (Address).\r
\r
- @retval TRUE Overlap exists.\r
- @retval FALSE No overlap.\r
+ @param Address The address to return the alignment.\r
+ @param Alignment0 The alignment to return when Address is 0.\r
\r
+ @return The least alignment of the Address.\r
**/\r
-BOOLEAN\r
-CheckMemoryAttributeOverlap (\r
- IN UINTN FirmwareVariableMtrrCount,\r
- IN PHYSICAL_ADDRESS Start,\r
- IN PHYSICAL_ADDRESS End,\r
- IN VARIABLE_MTRR *VariableMtrr\r
+UINT64\r
+MtrrLibBiggestAlignment (\r
+ UINT64 Address,\r
+ UINT64 Alignment0\r
)\r
{\r
- UINT32 Index;\r
-\r
- for (Index = 0; Index < FirmwareVariableMtrrCount; Index++) {\r
- if (\r
- VariableMtrr[Index].Valid &&\r
- !(\r
- (Start > (VariableMtrr[Index].BaseAddress +\r
- VariableMtrr[Index].Length - 1)\r
- ) ||\r
- (End < VariableMtrr[Index].BaseAddress)\r
- )\r
- ) {\r
- return TRUE;\r
- }\r
+ if (Address == 0) {\r
+ return Alignment0;\r
}\r
\r
- return FALSE;\r
+ return Address & ((~Address) + 1);\r
}\r
\r
-\r
/**\r
- Marks a variable MTRR as non-valid.\r
+ Return whether the left MTRR type precedes the right MTRR type.\r
+\r
+ The MTRR type precedence rules are:\r
+ 1. UC precedes any other type\r
+ 2. WT precedes WB\r
+ For further details, please refer the IA32 Software Developer's Manual,\r
+ Volume 3, Section "MTRR Precedences".\r
\r
- @param[in] Index The index of the array VariableMtrr to be invalidated\r
- @param[in] VariableMtrr The array to shadow variable MTRRs content\r
- @param[out] UsedMtrr The number of MTRRs which has already been used\r
+ @param Left The left MTRR type.\r
+ @param Right The right MTRR type.\r
\r
+ @retval TRUE Left precedes Right.\r
+ @retval FALSE Left doesn't precede Right.\r
**/\r
-VOID\r
-InvalidateShadowMtrr (\r
- IN UINTN Index,\r
- IN VARIABLE_MTRR *VariableMtrr,\r
- OUT UINT32 *UsedMtrr\r
+BOOLEAN\r
+MtrrLibTypeLeftPrecedeRight (\r
+ IN MTRR_MEMORY_CACHE_TYPE Left,\r
+ IN MTRR_MEMORY_CACHE_TYPE Right\r
)\r
{\r
- VariableMtrr[Index].Valid = FALSE;\r
- *UsedMtrr = *UsedMtrr - 1;\r
+ return (BOOLEAN)(Left == CacheUncacheable || (Left == CacheWriteThrough && Right == CacheWriteBack));\r
}\r
\r
-\r
/**\r
- Combines memory attributes.\r
-\r
- If overlap exists between given memory range and MTRRs, try to combine them.\r
+ Initializes the valid bits mask and valid address mask for MTRRs.\r
\r
- @param[in] FirmwareVariableMtrrCount The number of variable MTRRs\r
- available to firmware.\r
- @param[in] Attributes The memory type to set.\r
- @param[in, out] Base The base address of memory range.\r
- @param[in, out] Length The length of memory range.\r
- @param[in] VariableMtrr The array to shadow variable MTRRs content\r
- @param[in, out] UsedMtrr The number of MTRRs which has already been used\r
- @param[out] OverwriteExistingMtrr Returns whether an existing MTRR was used\r
+ This function initializes the valid bits mask and valid address mask for MTRRs.\r
\r
- @retval EFI_SUCCESS Memory region successfully combined.\r
- @retval EFI_ACCESS_DENIED Memory region cannot be combined.\r
+ @param[out] MtrrValidBitsMask The mask for the valid bit of the MTRR\r
+ @param[out] MtrrValidAddressMask The valid address mask for the MTRR\r
\r
**/\r
-RETURN_STATUS\r
-CombineMemoryAttribute (\r
- IN UINT32 FirmwareVariableMtrrCount,\r
- IN UINT64 Attributes,\r
- IN OUT UINT64 *Base,\r
- IN OUT UINT64 *Length,\r
- IN VARIABLE_MTRR *VariableMtrr,\r
- IN OUT UINT32 *UsedMtrr,\r
- OUT BOOLEAN *OverwriteExistingMtrr\r
+VOID\r
+MtrrLibInitializeMtrrMask (\r
+ OUT UINT64 *MtrrValidBitsMask,\r
+ OUT UINT64 *MtrrValidAddressMask\r
)\r
{\r
- UINT32 Index;\r
- UINT64 CombineStart;\r
- UINT64 CombineEnd;\r
- UINT64 MtrrEnd;\r
- UINT64 EndAddress;\r
- BOOLEAN CoveredByExistingMtrr;\r
-\r
- *OverwriteExistingMtrr = FALSE;\r
- CoveredByExistingMtrr = FALSE;\r
- EndAddress = *Base +*Length - 1;\r
-\r
- for (Index = 0; Index < FirmwareVariableMtrrCount; Index++) {\r
-\r
- MtrrEnd = VariableMtrr[Index].BaseAddress + VariableMtrr[Index].Length - 1;\r
- if (\r
- !VariableMtrr[Index].Valid ||\r
- (\r
- *Base > (MtrrEnd) ||\r
- (EndAddress < VariableMtrr[Index].BaseAddress)\r
- )\r
- ) {\r
- continue;\r
- }\r
-\r
- //\r
- // Combine same attribute MTRR range\r
- //\r
- if (Attributes == VariableMtrr[Index].Type) {\r
- //\r
- // if the MTRR range contain the request range, set a flag, then continue to\r
- // invalidate any MTRR of the same request range with higher priority cache type.\r
- //\r
- if (VariableMtrr[Index].BaseAddress <= *Base && MtrrEnd >= EndAddress) {\r
- CoveredByExistingMtrr = TRUE;\r
- continue;\r
- }\r
- //\r
- // invalid this MTRR, and program the combine range\r
- //\r
- CombineStart =\r
- (*Base) < VariableMtrr[Index].BaseAddress ?\r
- (*Base) :\r
- VariableMtrr[Index].BaseAddress;\r
- CombineEnd = EndAddress > MtrrEnd ? EndAddress : MtrrEnd;\r
-\r
- //\r
- // Record the MTRR usage status in VariableMtrr array.\r
- //\r
- InvalidateShadowMtrr (Index, VariableMtrr, UsedMtrr);\r
- *Base = CombineStart;\r
- *Length = CombineEnd - CombineStart + 1;\r
- EndAddress = CombineEnd;\r
- *OverwriteExistingMtrr = TRUE;\r
- continue;\r
- } else {\r
- //\r
- // The cache type is different, but the range is convered by one MTRR\r
- //\r
- if (VariableMtrr[Index].BaseAddress == *Base && MtrrEnd == EndAddress) {\r
- InvalidateShadowMtrr (Index, VariableMtrr, UsedMtrr);\r
- continue;\r
- }\r
-\r
- }\r
+ UINT32 MaxExtendedFunction;\r
+ CPUID_VIR_PHY_ADDRESS_SIZE_EAX VirPhyAddressSize;\r
\r
- if ((Attributes== MTRR_CACHE_WRITE_THROUGH &&\r
- VariableMtrr[Index].Type == MTRR_CACHE_WRITE_BACK) ||\r
- (Attributes == MTRR_CACHE_WRITE_BACK &&\r
- VariableMtrr[Index].Type == MTRR_CACHE_WRITE_THROUGH) ||\r
- (Attributes == MTRR_CACHE_UNCACHEABLE) ||\r
- (VariableMtrr[Index].Type == MTRR_CACHE_UNCACHEABLE)\r
- ) {\r
- *OverwriteExistingMtrr = TRUE;\r
- continue;\r
- }\r
- //\r
- // Other type memory overlap is invalid\r
- //\r
- return RETURN_ACCESS_DENIED;\r
- }\r
+ AsmCpuid (CPUID_EXTENDED_FUNCTION, &MaxExtendedFunction, NULL, NULL, NULL);\r
\r
- if (CoveredByExistingMtrr) {\r
- *Length = 0;\r
+ if (MaxExtendedFunction >= CPUID_VIR_PHY_ADDRESS_SIZE) {\r
+ AsmCpuid (CPUID_VIR_PHY_ADDRESS_SIZE, &VirPhyAddressSize.Uint32, NULL, NULL, NULL);\r
+ } else {\r
+ VirPhyAddressSize.Bits.PhysicalAddressBits = 36;\r
}\r
\r
- return RETURN_SUCCESS;\r
+ *MtrrValidBitsMask = LShiftU64 (1, VirPhyAddressSize.Bits.PhysicalAddressBits) - 1;\r
+ *MtrrValidAddressMask = *MtrrValidBitsMask & 0xfffffffffffff000ULL;\r
}\r
\r
-\r
/**\r
- Calculates the maximum value which is a power of 2, but less the MemoryLength.\r
+ Determines the real attribute of a memory range.\r
\r
- @param[in] MemoryLength The number to pass in.\r
+ This function is to arbitrate the real attribute of the memory when\r
+ there are 2 MTRRs covers the same memory range. For further details,\r
+ please refer the IA32 Software Developer's Manual, Volume 3,\r
+ Section "MTRR Precedences".\r
\r
- @return The maximum value which is align to power of 2 and less the MemoryLength\r
+ @param[in] MtrrType1 The first kind of Memory type\r
+ @param[in] MtrrType2 The second kind of memory type\r
\r
**/\r
-UINT64\r
-Power2MaxMemory (\r
- IN UINT64 MemoryLength\r
+MTRR_MEMORY_CACHE_TYPE\r
+MtrrLibPrecedence (\r
+ IN MTRR_MEMORY_CACHE_TYPE MtrrType1,\r
+ IN MTRR_MEMORY_CACHE_TYPE MtrrType2\r
)\r
{\r
- UINT64 Result;\r
-\r
- if (RShiftU64 (MemoryLength, 32) != 0) {\r
- Result = LShiftU64 (\r
- (UINT64) GetPowerOfTwo32 (\r
- (UINT32) RShiftU64 (MemoryLength, 32)\r
- ),\r
- 32\r
- );\r
- } else {\r
- Result = (UINT64) GetPowerOfTwo32 ((UINT32) MemoryLength);\r
+ if (MtrrType1 == MtrrType2) {\r
+ return MtrrType1;\r
}\r
\r
- return Result;\r
-}\r
+ ASSERT (\r
+ MtrrLibTypeLeftPrecedeRight (MtrrType1, MtrrType2) ||\r
+ MtrrLibTypeLeftPrecedeRight (MtrrType2, MtrrType1)\r
+ );\r
\r
+ if (MtrrLibTypeLeftPrecedeRight (MtrrType1, MtrrType2)) {\r
+ return MtrrType1;\r
+ } else {\r
+ return MtrrType2;\r
+ }\r
+}\r
\r
/**\r
- Determines the MTRR numbers used to program a memory range.\r
-\r
- This function first checks the alignment of the base address.\r
- If the alignment of the base address <= Length, cover the memory range\r
- (BaseAddress, alignment) by a MTRR, then BaseAddress += alignment and\r
- Length -= alignment. Repeat the step until alignment > Length.\r
+ Worker function will get the memory cache type of the specific address.\r
\r
- Then this function determines which direction of programming the variable\r
- MTRRs for the remaining length will use fewer MTRRs.\r
+ If MtrrSetting is not NULL, gets the memory cache type from input\r
+ MTRR settings buffer.\r
+ If MtrrSetting is NULL, gets the memory cache type from MTRRs.\r
\r
- @param[in] BaseAddress Length of Memory to program MTRR\r
- @param[in] Length Length of Memory to program MTRR\r
- @param[in] MtrrNumber Pointer to the number of necessary MTRRs\r
+ @param[in] MtrrSetting A buffer holding all MTRRs content.\r
+ @param[in] Address The specific address\r
\r
- @retval TRUE Positive direction is better.\r
- FALSE Negative direction is better.\r
+ @return Memory cache type of the specific address\r
\r
**/\r
MTRR_MEMORY_CACHE_TYPE
MtrrGetMemoryAttributeByAddressWorker (
  IN MTRR_SETTINGS     *MtrrSetting,
  IN PHYSICAL_ADDRESS  Address
  )
{
  MSR_IA32_MTRR_DEF_TYPE_REGISTER  DefType;
  UINT64                           FixedMtrr;
  UINTN                            Index;
  UINTN                            SubIndex;
  MTRR_MEMORY_CACHE_TYPE           MtrrType;
  MTRR_MEMORY_RANGE                VariableMtrr[ARRAY_SIZE (MtrrSetting->Variables.Mtrr)];
  UINT64                           MtrrValidBitsMask;
  UINT64                           MtrrValidAddressMask;
  UINT32                           VariableMtrrCount;
  MTRR_VARIABLE_SETTINGS           VariableSettings;

  //
  // Check if MTRR is enabled, if not, return UC as attribute.
  // The default-type register comes either from the caller-supplied
  // snapshot or from the live MSR.
  //
  if (MtrrSetting == NULL) {
    DefType.Uint64 = AsmReadMsr64 (MSR_IA32_MTRR_DEF_TYPE);
  } else {
    DefType.Uint64 = MtrrSetting->MtrrDefType;
  }

  if (DefType.Bits.E == 0) {
    return CacheUncacheable;
  }

  //
  // If address is less than 1M, then try to go through the fixed MTRR
  //
  if (Address < BASE_1MB) {
    if (DefType.Bits.FE != 0) {
      //
      // Go through the fixed MTRR.
      // Each fixed MTRR MSR packs 8 type fields, one byte per
      // sub-range of size mMtrrLibFixedMtrrTable[Index].Length.
      //
      for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {
        if ((Address >= mMtrrLibFixedMtrrTable[Index].BaseAddress) &&
            (Address < mMtrrLibFixedMtrrTable[Index].BaseAddress +
             (mMtrrLibFixedMtrrTable[Index].Length * 8)))
        {
          SubIndex =
            ((UINTN)Address - mMtrrLibFixedMtrrTable[Index].BaseAddress) /
            mMtrrLibFixedMtrrTable[Index].Length;
          if (MtrrSetting == NULL) {
            FixedMtrr = AsmReadMsr64 (mMtrrLibFixedMtrrTable[Index].Msr);
          } else {
            FixedMtrr = MtrrSetting->Fixed.Mtrr[Index];
          }

          //
          // Extract the one-byte type field for the matching sub-range.
          //
          return (MTRR_MEMORY_CACHE_TYPE)(RShiftU64 (FixedMtrr, SubIndex * 8) & 0xFF);
        }
      }
    }
  }

  VariableMtrrCount = GetVariableMtrrCountWorker ();
  ASSERT (VariableMtrrCount <= ARRAY_SIZE (MtrrSetting->Variables.Mtrr));
  MtrrGetVariableMtrrWorker (MtrrSetting, VariableMtrrCount, &VariableSettings);

  MtrrLibInitializeMtrrMask (&MtrrValidBitsMask, &MtrrValidAddressMask);
  MtrrLibGetRawVariableRanges (
    &VariableSettings,
    VariableMtrrCount,
    MtrrValidBitsMask,
    MtrrValidAddressMask,
    VariableMtrr
    );

  //
  // Go through the variable MTRR.
  // When several variable MTRRs overlap the address, combine their
  // types via MtrrLibPrecedence.
  //
  MtrrType = CacheInvalid;
  for (Index = 0; Index < VariableMtrrCount; Index++) {
    if (VariableMtrr[Index].Length != 0) {
      if ((Address >= VariableMtrr[Index].BaseAddress) &&
          (Address < VariableMtrr[Index].BaseAddress + VariableMtrr[Index].Length))
      {
        if (MtrrType == CacheInvalid) {
          MtrrType = (MTRR_MEMORY_CACHE_TYPE)VariableMtrr[Index].Type;
        } else {
          MtrrType = MtrrLibPrecedence (MtrrType, (MTRR_MEMORY_CACHE_TYPE)VariableMtrr[Index].Type);
        }
      }
    }
  }

  //
  // If there is no MTRR which covers the Address, use the default MTRR type.
  //
  if (MtrrType == CacheInvalid) {
    MtrrType = (MTRR_MEMORY_CACHE_TYPE)DefType.Bits.Type;
  }

  return MtrrType;
}
\r
/**\r
- Invalid variable MTRRs according to the value in the shadow array.\r
+ This function will get the memory cache type of the specific address.\r
+\r
+ This function is mainly for debug purpose.\r
\r
- This function programs MTRRs according to the values specified\r
- in the shadow array.\r
+ @param[in] Address The specific address\r
\r
- @param[in] VariableMtrrCount Number of variable MTRRs\r
- @param[in, out] VariableMtrr Shadow of variable MTRR contents\r
+ @return Memory cache type of the specific address\r
\r
**/\r
-VOID\r
-InvalidateMtrr (\r
- IN UINTN VariableMtrrCount,\r
- IN OUT VARIABLE_MTRR *VariableMtrr\r
+MTRR_MEMORY_CACHE_TYPE\r
+EFIAPI\r
+MtrrGetMemoryAttribute (\r
+ IN PHYSICAL_ADDRESS Address\r
)\r
{\r
- UINTN Index;\r
- MTRR_CONTEXT MtrrContext;\r
-\r
- PreMtrrChange (&MtrrContext);\r
- Index = 0;\r
- while (Index < VariableMtrrCount) {\r
- if (!VariableMtrr[Index].Valid && VariableMtrr[Index].Used) {\r
- AsmWriteMsr64 (VariableMtrr[Index].Msr, 0);\r
- AsmWriteMsr64 (VariableMtrr[Index].Msr + 1, 0);\r
- VariableMtrr[Index].Used = FALSE;\r
- }\r
- Index ++;\r
+ if (!IsMtrrSupported ()) {\r
+ return CacheUncacheable;\r
}\r
- PostMtrrChange (&MtrrContext);\r
-}\r
\r
+ return MtrrGetMemoryAttributeByAddressWorker (NULL, Address);\r
+}\r
\r
/**
  Update the Ranges array to change the specified range identified by
  BaseAddress and Length to Type.

  @param Ranges      Array holding memory type settings for all memory regions.
  @param Capacity    The maximum count of memory ranges the array can hold.
  @param Count       Return the new memory range count in the array.
  @param BaseAddress The base address of the memory range to change type.
  @param Length      The length of the memory range to change type.
  @param Type        The new type of the specified memory range.

  @retval RETURN_SUCCESS          The type of the specified memory range is
                                  changed successfully.
  @retval RETURN_ALREADY_STARTED  The type of the specified memory range equals
                                  to the desired type.
  @retval RETURN_OUT_OF_RESOURCES The new type set causes the count of memory
                                  range exceeds capacity.
**/
RETURN_STATUS
MtrrLibSetMemoryType (
  IN MTRR_MEMORY_RANGE       *Ranges,
  IN UINTN                   Capacity,
  IN OUT UINTN               *Count,
  IN UINT64                  BaseAddress,
  IN UINT64                  Length,
  IN MTRR_MEMORY_CACHE_TYPE  Type
  )
{
  UINTN   Index;
  UINT64  Limit;
  UINT64  LengthLeft;
  UINT64  LengthRight;
  UINTN   StartIndex;
  UINTN   EndIndex;
  UINTN   DeltaCount;

  //
  // Locate the ranges containing BaseAddress and Limit.
  // LengthLeft/LengthRight are the partial pieces of the first/last
  // overlapped range that keep their old type.
  //
  LengthRight = 0;
  LengthLeft  = 0;
  Limit       = BaseAddress + Length;
  StartIndex  = *Count;
  EndIndex    = *Count;
  for (Index = 0; Index < *Count; Index++) {
    if ((StartIndex == *Count) &&
        (Ranges[Index].BaseAddress <= BaseAddress) &&
        (BaseAddress < Ranges[Index].BaseAddress + Ranges[Index].Length))
    {
      StartIndex = Index;
      LengthLeft = BaseAddress - Ranges[Index].BaseAddress;
    }

    if ((EndIndex == *Count) &&
        (Ranges[Index].BaseAddress < Limit) &&
        (Limit <= Ranges[Index].BaseAddress + Ranges[Index].Length))
    {
      EndIndex    = Index;
      LengthRight = Ranges[Index].BaseAddress + Ranges[Index].Length - Limit;
      break;
    }
  }

  ASSERT (StartIndex != *Count && EndIndex != *Count);
  if ((StartIndex == EndIndex) && (Ranges[StartIndex].Type == Type)) {
    return RETURN_ALREADY_STARTED;
  }

  //
  // The type change may cause merging with previous range or next range.
  // Update the StartIndex, EndIndex, BaseAddress, Length so that following
  // logic doesn't need to consider merging.
  //
  if (StartIndex != 0) {
    if ((LengthLeft == 0) && (Ranges[StartIndex - 1].Type == Type)) {
      StartIndex--;
      Length      += Ranges[StartIndex].Length;
      BaseAddress -= Ranges[StartIndex].Length;
    }
  }

  if (EndIndex != (*Count) - 1) {
    if ((LengthRight == 0) && (Ranges[EndIndex + 1].Type == Type)) {
      EndIndex++;
      Length += Ranges[EndIndex].Length;
    }
  }

  //
  // |- 0 -|- 1 -|- 2 -|- 3 -| StartIndex EndIndex DeltaCount  Count (Count = 4)
  //   |++++++++++++++++++|    0          3        1=3-0-2     3
  //   |+++++++|               0          1        -1=1-0-2    5
  //   |+|                     0          0        -2=0-0-2    6
  // |+++|                     0          0        -1=0-0-2+1  5
  //
  DeltaCount = EndIndex - StartIndex - 2;
  if (LengthLeft == 0) {
    DeltaCount++;
  }

  if (LengthRight == 0) {
    DeltaCount++;
  }

  if (*Count - DeltaCount > Capacity) {
    return RETURN_OUT_OF_RESOURCES;
  }

  //
  // Reserve (-DeltaCount) space
  //
  CopyMem (&Ranges[EndIndex + 1 - DeltaCount], &Ranges[EndIndex + 1], (*Count - EndIndex - 1) * sizeof (Ranges[0]));
  *Count -= DeltaCount;

  //
  // Trim the left partial range (keeps its original type), then write the
  // right partial range and finally the new range itself.
  //
  if (LengthLeft != 0) {
    Ranges[StartIndex].Length = LengthLeft;
    StartIndex++;
  }

  if (LengthRight != 0) {
    Ranges[EndIndex - DeltaCount].BaseAddress = BaseAddress + Length;
    Ranges[EndIndex - DeltaCount].Length      = LengthRight;
    Ranges[EndIndex - DeltaCount].Type        = Ranges[EndIndex].Type;
  }

  Ranges[StartIndex].BaseAddress = BaseAddress;
  Ranges[StartIndex].Length      = Length;
  Ranges[StartIndex].Type        = Type;
  return RETURN_SUCCESS;
}
\r
/**
  Return the number of memory types in range [BaseAddress, BaseAddress + Length).

  @param Ranges      Array holding memory type settings for all memory regions.
  @param RangeCount  The count of memory ranges the array holds.
  @param BaseAddress Base address.
  @param Length      Length.
  @param Types       Return bit mask to indicate all memory types in the specified range.

  @retval Number of memory types.
**/
UINT8
MtrrLibGetNumberOfTypes (
  IN CONST MTRR_MEMORY_RANGE  *Ranges,
  IN UINTN                    RangeCount,
  IN UINT64                   BaseAddress,
  IN UINT64                   Length,
  IN OUT UINT8                *Types  OPTIONAL
  )
{
  UINTN  Index;
  UINT8  TypeCount;
  UINT8  LocalTypes;

  TypeCount  = 0;
  LocalTypes = 0;
  for (Index = 0; Index < RangeCount; Index++) {
    if ((Ranges[Index].BaseAddress <= BaseAddress) &&
        (BaseAddress < Ranges[Index].BaseAddress + Ranges[Index].Length)
        )
    {
      //
      // Record each type only once in the bit mask.
      //
      if ((LocalTypes & (1 << Ranges[Index].Type)) == 0) {
        LocalTypes |= (UINT8)(1 << Ranges[Index].Type);
        TypeCount++;
      }

      //
      // Advance BaseAddress/Length past the current range so the next
      // iteration matches the next (assumed adjacent) range; stop when
      // the query range is fully consumed.
      //
      if (BaseAddress + Length > Ranges[Index].BaseAddress + Ranges[Index].Length) {
        Length     -= Ranges[Index].BaseAddress + Ranges[Index].Length - BaseAddress;
        BaseAddress = Ranges[Index].BaseAddress + Ranges[Index].Length;
      } else {
        break;
      }
    }
  }

  if (Types != NULL) {
    *Types = LocalTypes;
  }

  return TypeCount;
}
+\r
+/**\r
+ Calculate the least MTRR number from vertex Start to Stop and update\r
+ the Previous of all vertices from Start to Stop is updated to reflect\r
+ how the memory range is covered by MTRR.\r
+\r
+ @param VertexCount The count of vertices in the graph.\r
+ @param Vertices Array holding all vertices.\r
+ @param Weight 2-dimention array holding weights between vertices.\r
+ @param Start Start vertex.\r
+ @param Stop Stop vertex.\r
+ @param IncludeOptional TRUE to count the optional weight.\r
**/\r
-MTRR_MEMORY_CACHE_TYPE\r
-GetMemoryCacheTypeFromMtrrType (\r
- IN UINT64 MtrrType\r
+VOID\r
+MtrrLibCalculateLeastMtrrs (\r
+ IN UINT16 VertexCount,\r
+ IN MTRR_LIB_ADDRESS *Vertices,\r
+ IN OUT CONST UINT8 *Weight,\r
+ IN UINT16 Start,\r
+ IN UINT16 Stop,\r
+ IN BOOLEAN IncludeOptional\r
)\r
{\r
- switch (MtrrType) {\r
- case MTRR_CACHE_UNCACHEABLE:\r
- return CacheUncacheable;\r
- case MTRR_CACHE_WRITE_COMBINING:\r
- return CacheWriteCombining;\r
- case MTRR_CACHE_WRITE_THROUGH:\r
- return CacheWriteThrough;\r
- case MTRR_CACHE_WRITE_PROTECTED:\r
- return CacheWriteProtected;\r
- case MTRR_CACHE_WRITE_BACK:\r
- return CacheWriteBack;\r
- default:\r
+ UINT16 Index;\r
+ UINT8 MinWeight;\r
+ UINT16 MinI;\r
+ UINT8 Mandatory;\r
+ UINT8 Optional;\r
+\r
+ for (Index = Start; Index <= Stop; Index++) {\r
+ Vertices[Index].Visited = FALSE;\r
+ Mandatory = Weight[M (Start, Index)];\r
+ Vertices[Index].Weight = Mandatory;\r
+ if (Mandatory != MAX_WEIGHT) {\r
+ Optional = IncludeOptional ? Weight[O (Start, Index)] : 0;\r
+ Vertices[Index].Weight += Optional;\r
+ ASSERT (Vertices[Index].Weight >= Optional);\r
+ }\r
+ }\r
+\r
+ MinI = Start;\r
+ MinWeight = 0;\r
+ while (!Vertices[Stop].Visited) {\r
+ //\r
+ // Update the weight from the shortest vertex to other unvisited vertices\r
+ //\r
+ for (Index = Start + 1; Index <= Stop; Index++) {\r
+ if (!Vertices[Index].Visited) {\r
+ Mandatory = Weight[M (MinI, Index)];\r
+ if (Mandatory != MAX_WEIGHT) {\r
+ Optional = IncludeOptional ? Weight[O (MinI, Index)] : 0;\r
+ if (MinWeight + Mandatory + Optional <= Vertices[Index].Weight) {\r
+ Vertices[Index].Weight = MinWeight + Mandatory + Optional;\r
+ Vertices[Index].Previous = MinI; // Previous is Start based.\r
+ }\r
+ }\r
+ }\r
+ }\r
+\r
+ //\r
+ // Find the shortest vertex from Start\r
+ //\r
+ MinI = VertexCount;\r
+ MinWeight = MAX_WEIGHT;\r
+ for (Index = Start + 1; Index <= Stop; Index++) {\r
+ if (!Vertices[Index].Visited && (MinWeight > Vertices[Index].Weight)) {\r
+ MinI = Index;\r
+ MinWeight = Vertices[Index].Weight;\r
+ }\r
+ }\r
+\r
//\r
- // MtrrType is MTRR_CACHE_INVALID_TYPE, that means\r
- // no MTRR covers the range\r
+ // Mark the shortest vertex from Start as visited\r
//\r
- return MtrrGetDefaultMemoryType ();\r
+ Vertices[MinI].Visited = TRUE;\r
}\r
}\r
\r
/**\r
- Initializes the valid bits mask and valid address mask for MTRRs.\r
+ Append the MTRR setting to MTRR setting array.\r
\r
- This function initializes the valid bits mask and valid address mask for MTRRs.\r
-\r
- @param[out] MtrrValidBitsMask The mask for the valid bit of the MTRR\r
- @param[out] MtrrValidAddressMask The valid address mask for the MTRR\r
+ @param Mtrrs Array holding all MTRR settings.\r
+ @param MtrrCapacity Capacity of the MTRR array.\r
+ @param MtrrCount The count of MTRR settings in array.\r
+ @param BaseAddress Base address.\r
+ @param Length Length.\r
+ @param Type Memory type.\r
\r
+ @retval RETURN_SUCCESS MTRR setting is appended to array.\r
+ @retval RETURN_OUT_OF_RESOURCES Array is full.\r
**/\r
-VOID\r
-MtrrLibInitializeMtrrMask (\r
- OUT UINT64 *MtrrValidBitsMask,\r
- OUT UINT64 *MtrrValidAddressMask\r
+RETURN_STATUS\r
+MtrrLibAppendVariableMtrr (\r
+ IN OUT MTRR_MEMORY_RANGE *Mtrrs,\r
+ IN UINT32 MtrrCapacity,\r
+ IN OUT UINT32 *MtrrCount,\r
+ IN UINT64 BaseAddress,\r
+ IN UINT64 Length,\r
+ IN MTRR_MEMORY_CACHE_TYPE Type\r
)\r
{\r
- UINT32 RegEax;\r
- UINT8 PhysicalAddressBits;\r
+ if (*MtrrCount == MtrrCapacity) {\r
+ return RETURN_OUT_OF_RESOURCES;\r
+ }\r
\r
- AsmCpuid (0x80000000, &RegEax, NULL, NULL, NULL);\r
+ Mtrrs[*MtrrCount].BaseAddress = BaseAddress;\r
+ Mtrrs[*MtrrCount].Length = Length;\r
+ Mtrrs[*MtrrCount].Type = Type;\r
+ (*MtrrCount)++;\r
+ return RETURN_SUCCESS;\r
+}\r
\r
/**
  Return the memory type that has the least precedence.

  @param TypeBits    Bit mask of memory type.

  @retval  Memory type that has the least precedence.
**/
MTRR_MEMORY_CACHE_TYPE
MtrrLibLowestType (
  IN UINT8  TypeBits
  )
{
  INT8  Type;

  ASSERT (TypeBits != 0);
  //
  // Locate the most significant set bit of TypeBits: the empty-body loop
  // shifts TypeBits left until its sign bit (bit 7) is set, decrementing
  // Type in lock-step, so Type ends as the index of the highest set bit.
  // The numerically largest type in the mask is the least-precedent one.
  //
  for (Type = 7; (INT8)TypeBits > 0; Type--, TypeBits <<= 1) {
  }

  return (MTRR_MEMORY_CACHE_TYPE)Type;
}
\r
/**\r
- Determines the real attribute of a memory range.\r
+ Return TRUE when the Operand is exactly power of 2.\r
\r
- This function is to arbitrate the real attribute of the memory when\r
- there are 2 MTRRs covers the same memory range. For further details,\r
- please refer the IA32 Software Developer's Manual, Volume 3,\r
- Section 10.11.4.1.\r
+ @retval TRUE Operand is exactly power of 2.\r
+ @retval FALSE Operand is not power of 2.\r
+**/\r
+BOOLEAN\r
+MtrrLibIsPowerOfTwo (\r
+ IN UINT64 Operand\r
+ )\r
+{\r
+ ASSERT (Operand != 0);\r
+ return (BOOLEAN)((Operand & (Operand - 1)) == 0);\r
+}\r
\r
/**
  Calculate the subtractive path from vertex Start to Stop.

  When Mtrrs is NULL the function only accumulates the cost of the
  subtractive path into Weight[M/O(Start, Stop)]; when Mtrrs is not NULL
  it emits the actual MTRR settings for that path.

  @param DefaultType  Default memory type.
  @param A0           Alignment to use when base address is 0.
  @param Ranges       Array holding memory type settings for all memory regions.
  @param RangeCount   The count of memory ranges the array holds.
  @param VertexCount  The count of vertices in the graph.
  @param Vertices     Array holding all vertices.
  @param Weight       2-dimension array holding weights between vertices.
  @param Start        Start vertex.
  @param Stop         Stop vertex.
  @param Types        Type bit mask of memory range from Start to Stop.
  @param TypeCount    Number of different memory types from Start to Stop.
  @param Mtrrs        Array holding all MTRR settings.
  @param MtrrCapacity Capacity of the MTRR array.
  @param MtrrCount    The count of MTRR settings in array.

  @retval RETURN_SUCCESS          The subtractive path is calculated successfully.
  @retval RETURN_OUT_OF_RESOURCES The MTRR setting array is full.

**/
RETURN_STATUS
MtrrLibCalculateSubtractivePath (
  IN MTRR_MEMORY_CACHE_TYPE   DefaultType,
  IN UINT64                   A0,
  IN CONST MTRR_MEMORY_RANGE  *Ranges,
  IN UINTN                    RangeCount,
  IN UINT16                   VertexCount,
  IN MTRR_LIB_ADDRESS         *Vertices,
  IN OUT UINT8                *Weight,
  IN UINT16                   Start,
  IN UINT16                   Stop,
  IN UINT8                    Types,
  IN UINT8                    TypeCount,
  IN OUT MTRR_MEMORY_RANGE    *Mtrrs        OPTIONAL,
  IN UINT32                   MtrrCapacity  OPTIONAL,
  IN OUT UINT32               *MtrrCount    OPTIONAL
  )
{
  RETURN_STATUS           Status;
  UINT64                  Base;
  UINT64                  Length;
  UINT8                   PrecedentTypes;
  UINTN                   Index;
  UINT64                  HBase;
  UINT64                  HLength;
  UINT64                  SubLength;
  UINT16                  SubStart;
  UINT16                  SubStop;
  UINT16                  Cur;
  UINT16                  Pre;
  MTRR_MEMORY_CACHE_TYPE  LowestType;
  MTRR_MEMORY_CACHE_TYPE  LowestPrecedentType;

  Base   = Vertices[Start].Address;
  Length = Vertices[Stop].Address - Base;

  LowestType = MtrrLibLowestType (Types);

  //
  // Clear the lowest type (highest bit) to get the precedent types
  //
  PrecedentTypes      = ~(1 << LowestType) & Types;
  LowestPrecedentType = MtrrLibLowestType (PrecedentTypes);

  //
  // Cost-only mode: one background MTRR is needed unless the lowest type
  // is already the default type (then it is optional).
  //
  if (Mtrrs == NULL) {
    Weight[M (Start, Stop)] = ((LowestType == DefaultType) ? 0 : 1);
    Weight[O (Start, Stop)] = ((LowestType == DefaultType) ? 1 : 0);
  }

  // Add all high level ranges
  HBase   = MAX_UINT64;
  HLength = 0;
  for (Index = 0; Index < RangeCount; Index++) {
    if (Length == 0) {
      break;
    }

    if ((Base < Ranges[Index].BaseAddress) || (Ranges[Index].BaseAddress + Ranges[Index].Length <= Base)) {
      continue;
    }

    //
    // Base is in the Range[Index]
    //
    if (Base + Length > Ranges[Index].BaseAddress + Ranges[Index].Length) {
      SubLength = Ranges[Index].BaseAddress + Ranges[Index].Length - Base;
    } else {
      SubLength = Length;
    }

    if (((1 << Ranges[Index].Type) & PrecedentTypes) != 0) {
      //
      // Meet a range whose types take precedence.
      // Update the [HBase, HBase + HLength) to include the range,
      // [HBase, HBase + HLength) may contain sub ranges with 2 different types, and both take precedence.
      //
      if (HBase == MAX_UINT64) {
        HBase = Base;
      }

      HLength += SubLength;
    }

    Base   += SubLength;
    Length -= SubLength;

    if (HLength == 0) {
      continue;
    }

    if ((Ranges[Index].Type == LowestType) || (Length == 0)) {
      // meet low type or end

      //
      // Add the MTRRs for each high priority type range
      // the range[HBase, HBase + HLength) contains only two types.
      // We might use positive or subtractive, depending on which way uses less MTRR
      //
      // Map [HBase, HBase + HLength) back to its vertex indices.
      //
      for (SubStart = Start; SubStart <= Stop; SubStart++) {
        if (Vertices[SubStart].Address == HBase) {
          break;
        }
      }

      for (SubStop = SubStart; SubStop <= Stop; SubStop++) {
        if (Vertices[SubStop].Address == HBase + HLength) {
          break;
        }
      }

      ASSERT (Vertices[SubStart].Address == HBase);
      ASSERT (Vertices[SubStop].Address == HBase + HLength);

      if ((TypeCount == 2) || (SubStart == SubStop - 1)) {
        //
        // add subtractive MTRRs for [HBase, HBase + HLength)
        // [HBase, HBase + HLength) contains only one type.
        // while - loop is to split the range to MTRR - compliant aligned range.
        //
        if (Mtrrs == NULL) {
          Weight[M (Start, Stop)] += (UINT8)(SubStop - SubStart);
        } else {
          while (SubStart != SubStop) {
            Status = MtrrLibAppendVariableMtrr (
                       Mtrrs,
                       MtrrCapacity,
                       MtrrCount,
                       Vertices[SubStart].Address,
                       Vertices[SubStart].Length,
                       Vertices[SubStart].Type
                       );
            if (RETURN_ERROR (Status)) {
              return Status;
            }

            SubStart++;
          }
        }
      } else {
        ASSERT (TypeCount == 3);
        MtrrLibCalculateLeastMtrrs (VertexCount, Vertices, Weight, SubStart, SubStop, TRUE);

        if (Mtrrs == NULL) {
          Weight[M (Start, Stop)] += Vertices[SubStop].Weight;
        } else {
          // When we need to collect the optimal path from SubStart to SubStop,
          // walk the Previous chain backwards from SubStop.
          while (SubStop != SubStart) {
            Cur     = SubStop;
            Pre     = Vertices[Cur].Previous;
            SubStop = Pre;

            if (Weight[M (Pre, Cur)] + Weight[O (Pre, Cur)] != 0) {
              Status = MtrrLibAppendVariableMtrr (
                         Mtrrs,
                         MtrrCapacity,
                         MtrrCount,
                         Vertices[Pre].Address,
                         Vertices[Cur].Address - Vertices[Pre].Address,
                         (Pre != Cur - 1) ? LowestPrecedentType : Vertices[Pre].Type
                         );
              if (RETURN_ERROR (Status)) {
                return Status;
              }
            }

            if (Pre != Cur - 1) {
              //
              // Non-adjacent hop: recurse to emit the inner subtractive MTRRs.
              //
              Status = MtrrLibCalculateSubtractivePath (
                         DefaultType,
                         A0,
                         Ranges,
                         RangeCount,
                         VertexCount,
                         Vertices,
                         Weight,
                         Pre,
                         Cur,
                         PrecedentTypes,
                         2,
                         Mtrrs,
                         MtrrCapacity,
                         MtrrCount
                         );
              if (RETURN_ERROR (Status)) {
                return Status;
              }
            }
          }
        }
      }

      //
      // Reset HBase, HLength
      //
      HBase   = MAX_UINT64;
      HLength = 0;
    }
  }

  return RETURN_SUCCESS;
}
\r
/**
  Calculate MTRR settings to cover the specified memory ranges.

  @param DefaultType  Default memory type.
  @param A0           Alignment to use when base address is 0.
  @param Ranges       Memory range array holding the memory type
                      settings for all memory address.
  @param RangeCount   Count of memory ranges.
  @param Scratch      A temporary scratch buffer that is used to perform the calculation.
                      This is an optional parameter that may be NULL.
  @param ScratchSize  Pointer to the size in bytes of the scratch buffer.
                      It may be updated to the actual required size when the calculation
                      needs more scratch buffer.
  @param Mtrrs        Array holding all MTRR settings.
  @param MtrrCapacity Capacity of the MTRR array.
  @param MtrrCount    The count of MTRR settings in array.

  @retval RETURN_SUCCESS          Variable MTRRs are allocated successfully.
  @retval RETURN_OUT_OF_RESOURCES Count of variable MTRRs exceeds capacity.
  @retval RETURN_BUFFER_TOO_SMALL The scratch buffer is too small for MTRR calculation.
**/
RETURN_STATUS
MtrrLibCalculateMtrrs (
  IN MTRR_MEMORY_CACHE_TYPE   DefaultType,
  IN UINT64                   A0,
  IN CONST MTRR_MEMORY_RANGE  *Ranges,
  IN UINTN                    RangeCount,
  IN VOID                     *Scratch,
  IN OUT UINTN                *ScratchSize,
  IN OUT MTRR_MEMORY_RANGE    *Mtrrs,
  IN UINT32                   MtrrCapacity,
  IN OUT UINT32               *MtrrCount
  )
{
  UINT64            Base0;
  UINT64            Base1;
  UINTN             Index;
  UINT64            Base;
  UINT64            Length;
  UINT64            Alignment;
  UINT64            SubLength;
  MTRR_LIB_ADDRESS  *Vertices;
  UINT8             *Weight;
  UINT32            VertexIndex;
  UINT32            VertexCount;
  UINTN             RequiredScratchSize;
  UINT8             TypeCount;
  UINT16            Start;
  UINT16            Stop;
  UINT8             Type;
  RETURN_STATUS     Status;

  Base0 = Ranges[0].BaseAddress;
  Base1 = Ranges[RangeCount - 1].BaseAddress + Ranges[RangeCount - 1].Length;
  MTRR_LIB_ASSERT_ALIGNED (Base0, Base1 - Base0);

  //
  // Count the number of vertices.
  // Each range is split into MTRR-compliant (power-of-two sized, aligned)
  // sub-ranges; writes are guarded so counting is safe even when the
  // scratch buffer is too small.
  //
  Vertices = (MTRR_LIB_ADDRESS *)Scratch;
  for (VertexIndex = 0, Index = 0; Index < RangeCount; Index++) {
    Base   = Ranges[Index].BaseAddress;
    Length = Ranges[Index].Length;
    while (Length != 0) {
      Alignment = MtrrLibBiggestAlignment (Base, A0);
      SubLength = Alignment;
      if (SubLength > Length) {
        SubLength = GetPowerOfTwo64 (Length);
      }

      if (VertexIndex < *ScratchSize / sizeof (*Vertices)) {
        Vertices[VertexIndex].Address   = Base;
        Vertices[VertexIndex].Alignment = Alignment;
        Vertices[VertexIndex].Type      = Ranges[Index].Type;
        Vertices[VertexIndex].Length    = SubLength;
      }

      Base   += SubLength;
      Length -= SubLength;
      VertexIndex++;
    }
  }

  //
  // Vertices[VertexIndex] = Base1, so whole vertex count is (VertexIndex + 1).
  //
  VertexCount = VertexIndex + 1;
  DEBUG ((
    DEBUG_CACHE,
    " Count of vertices (%016llx - %016llx) = %d\n",
    Ranges[0].BaseAddress,
    Ranges[RangeCount - 1].BaseAddress + Ranges[RangeCount - 1].Length,
    VertexCount
    ));
  ASSERT (VertexCount < MAX_UINT16);

  RequiredScratchSize = VertexCount * sizeof (*Vertices) + VertexCount * VertexCount * sizeof (*Weight);
  if (*ScratchSize < RequiredScratchSize) {
    *ScratchSize = RequiredScratchSize;
    return RETURN_BUFFER_TOO_SMALL;
  }

  Vertices[VertexCount - 1].Address = Base1;

  //
  // The weight matrix lives in the scratch buffer right after the vertices.
  //
  Weight = (UINT8 *)&Vertices[VertexCount];
  for (VertexIndex = 0; VertexIndex < VertexCount; VertexIndex++) {
    //
    // Set optional weight between vertices and self->self to 0
    //
    SetMem (&Weight[M (VertexIndex, 0)], VertexIndex + 1, 0);
    //
    // Set mandatory weight between vertices to MAX_WEIGHT
    //
    SetMem (&Weight[M (VertexIndex, VertexIndex + 1)], VertexCount - VertexIndex - 1, MAX_WEIGHT);

    // Final result looks like:
    //   00 FF FF FF
    //   00 00 FF FF
    //   00 00 00 FF
    //   00 00 00 00
  }

  //
  // Set mandatory weight and optional weight for adjacent vertices
  //
  for (VertexIndex = 0; VertexIndex < VertexCount - 1; VertexIndex++) {
    if (Vertices[VertexIndex].Type != DefaultType) {
      Weight[M (VertexIndex, VertexIndex + 1)] = 1;
      Weight[O (VertexIndex, VertexIndex + 1)] = 0;
    } else {
      Weight[M (VertexIndex, VertexIndex + 1)] = 0;
      Weight[O (VertexIndex, VertexIndex + 1)] = 1;
    }
  }

  //
  // Compute subtractive-path weights for every candidate [Start, Stop)
  // that one MTRR could cover, first for 2-type then 3-type spans.
  //
  for (TypeCount = 2; TypeCount <= 3; TypeCount++) {
    for (Start = 0; Start < VertexCount; Start++) {
      for (Stop = Start + 2; Stop < VertexCount; Stop++) {
        ASSERT (Vertices[Stop].Address > Vertices[Start].Address);
        Length = Vertices[Stop].Address - Vertices[Start].Address;
        if (Length > Vertices[Start].Alignment) {
          //
          // Pickup a new Start when [Start, Stop) cannot be described by one MTRR.
          //
          break;
        }

        if ((Weight[M (Start, Stop)] == MAX_WEIGHT) && MtrrLibIsPowerOfTwo (Length)) {
          if (MtrrLibGetNumberOfTypes (
                Ranges,
                RangeCount,
                Vertices[Start].Address,
                Vertices[Stop].Address - Vertices[Start].Address,
                &Type
                ) == TypeCount)
          {
            //
            // Update the Weight[Start, Stop] using subtractive path.
            //
            MtrrLibCalculateSubtractivePath (
              DefaultType,
              A0,
              Ranges,
              RangeCount,
              (UINT16)VertexCount,
              Vertices,
              Weight,
              Start,
              Stop,
              Type,
              TypeCount,
              NULL,
              0,
              NULL
              );
          } else if (TypeCount == 2) {
            //
            // Pick up a new Start when we expect 2-type range, but 3-type range is met.
            // Because no matter how Stop is increased, we always meet 3-type range.
            //
            break;
          }
        }
      }
    }
  }

  //
  // Run the shortest-path search over the whole graph, then walk the
  // Previous chain backwards emitting the MTRRs for every hop.
  //
  Status = RETURN_SUCCESS;
  MtrrLibCalculateLeastMtrrs ((UINT16)VertexCount, Vertices, Weight, 0, (UINT16)VertexCount - 1, FALSE);
  Stop = (UINT16)VertexCount - 1;
  while (Stop != 0) {
    Start     = Vertices[Stop].Previous;
    TypeCount = MAX_UINT8;
    Type      = 0;
    if (Weight[M (Start, Stop)] != 0) {
      TypeCount = MtrrLibGetNumberOfTypes (Ranges, RangeCount, Vertices[Start].Address, Vertices[Stop].Address - Vertices[Start].Address, &Type);
      Status    = MtrrLibAppendVariableMtrr (
                    Mtrrs,
                    MtrrCapacity,
                    MtrrCount,
                    Vertices[Start].Address,
                    Vertices[Stop].Address - Vertices[Start].Address,
                    MtrrLibLowestType (Type)
                    );
      if (RETURN_ERROR (Status)) {
        break;
      }
    }

    if (Start != Stop - 1) {
      //
      // substractive path
      //
      if (TypeCount == MAX_UINT8) {
        TypeCount = MtrrLibGetNumberOfTypes (
                      Ranges,
                      RangeCount,
                      Vertices[Start].Address,
                      Vertices[Stop].Address - Vertices[Start].Address,
                      &Type
                      );
      }

      Status = MtrrLibCalculateSubtractivePath (
                 DefaultType,
                 A0,
                 Ranges,
                 RangeCount,
                 (UINT16)VertexCount,
                 Vertices,
                 Weight,
                 Start,
                 Stop,
                 Type,
                 TypeCount,
                 Mtrrs,
                 MtrrCapacity,
                 MtrrCount
                 );
      if (RETURN_ERROR (Status)) {
        break;
      }
    }

    Stop = Start;
  }

  return Status;
}
+/**\r
+ Apply the fixed MTRR settings to memory range array.\r
+\r
+ @param Fixed The fixed MTRR settings.\r
+ @param Ranges Return the memory range array holding memory type\r
+ settings for all memory address.\r
+ @param RangeCapacity The capacity of memory range array.\r
+ @param RangeCount Return the count of memory range.\r
+\r
+ @retval RETURN_SUCCESS The memory range array is returned successfully.\r
+ @retval RETURN_OUT_OF_RESOURCES The count of memory ranges exceeds capacity.\r
+**/\r
+RETURN_STATUS\r
+MtrrLibApplyFixedMtrrs (\r
+ IN MTRR_FIXED_SETTINGS *Fixed,\r
+ IN OUT MTRR_MEMORY_RANGE *Ranges,\r
+ IN UINTN RangeCapacity,\r
+ IN OUT UINTN *RangeCount\r
+ )\r
+{\r
+ RETURN_STATUS Status;\r
+ UINTN MsrIndex;\r
+ UINTN Index;\r
+ MTRR_MEMORY_CACHE_TYPE MemoryType;\r
+ UINT64 Base;\r
+\r
+ //\r
+ // Each fixed MTRR MSR holds 8 one-byte type fields; every byte covers a\r
+ // sub-range of mMtrrLibFixedMtrrTable[MsrIndex].Length bytes. Walk the MSRs\r
+ // in address order, applying each byte's type to its sub-range.\r
+ //\r
+ Base = 0;\r
+ for (MsrIndex = 0; MsrIndex < ARRAY_SIZE (mMtrrLibFixedMtrrTable); MsrIndex++) {\r
+ ASSERT (Base == mMtrrLibFixedMtrrTable[MsrIndex].BaseAddress);\r
+ for (Index = 0; Index < sizeof (UINT64); Index++) {\r
+ MemoryType = (MTRR_MEMORY_CACHE_TYPE)((UINT8 *)(&Fixed->Mtrr[MsrIndex]))[Index];\r
+ Status = MtrrLibSetMemoryType (\r
+ Ranges,\r
+ RangeCapacity,\r
+ RangeCount,\r
+ Base,\r
+ mMtrrLibFixedMtrrTable[MsrIndex].Length,\r
+ MemoryType\r
+ );\r
+ if (Status == RETURN_OUT_OF_RESOURCES) {\r
+ return Status;\r
+ }\r
+\r
+ Base += mMtrrLibFixedMtrrTable[MsrIndex].Length;\r
+ }\r
+ }\r
+\r
+ // The fixed MTRRs must exactly tile [0, 1MB).\r
+ ASSERT (Base == BASE_1MB);\r
+ return RETURN_SUCCESS;\r
+}\r
+\r
+/**\r
+ Apply the variable MTRR settings to memory range array.\r
+\r
+ @param VariableMtrr The variable MTRR array.\r
+ @param VariableMtrrCount The count of variable MTRRs.\r
+ @param Ranges Return the memory range array with new MTRR settings applied.\r
+ @param RangeCapacity The capacity of memory range array.\r
+ @param RangeCount Return the count of memory range.\r
+\r
+ @retval RETURN_SUCCESS The memory range array is returned successfully.\r
+ @retval RETURN_OUT_OF_RESOURCES The count of memory ranges exceeds capacity.\r
+**/\r
+RETURN_STATUS\r
+MtrrLibApplyVariableMtrrs (\r
+ IN CONST MTRR_MEMORY_RANGE *VariableMtrr,\r
+ IN UINT32 VariableMtrrCount,\r
+ IN OUT MTRR_MEMORY_RANGE *Ranges,\r
+ IN UINTN RangeCapacity,\r
+ IN OUT UINTN *RangeCount\r
+ )\r
+{\r
+ RETURN_STATUS Status;\r
+ UINTN Index;\r
\r
//\r
- // Go through the variable MTRR\r
+ // Precedence when variable MTRRs overlap: WT > WB, UC > *.\r
+ // Apply in three passes (WB first, then other non-UC types, then UC last)\r
+ // so a higher-precedence type overwrites a lower-precedence one.\r
//\r
- VariableMtrrCount = GetVariableMtrrCountWorker ();\r
- ASSERT (VariableMtrrCount <= MTRR_NUMBER_OF_VARIABLE_MTRR);\r
\r
+ //\r
+ // 1. Set WB\r
+ //\r
+ for (Index = 0; Index < VariableMtrrCount; Index++) {\r
+ if ((VariableMtrr[Index].Length != 0) && (VariableMtrr[Index].Type == CacheWriteBack)) {\r
+ Status = MtrrLibSetMemoryType (\r
+ Ranges,\r
+ RangeCapacity,\r
+ RangeCount,\r
+ VariableMtrr[Index].BaseAddress,\r
+ VariableMtrr[Index].Length,\r
+ VariableMtrr[Index].Type\r
+ );\r
+ if (Status == RETURN_OUT_OF_RESOURCES) {\r
+ return Status;\r
+ }\r
+ }\r
+ }\r
+\r
+ //\r
+ // 2. Set other types than WB or UC\r
+ //\r
+ for (Index = 0; Index < VariableMtrrCount; Index++) {\r
+ if ((VariableMtrr[Index].Length != 0) &&\r
+ (VariableMtrr[Index].Type != CacheWriteBack) && (VariableMtrr[Index].Type != CacheUncacheable))\r
+ {\r
+ Status = MtrrLibSetMemoryType (\r
+ Ranges,\r
+ RangeCapacity,\r
+ RangeCount,\r
+ VariableMtrr[Index].BaseAddress,\r
+ VariableMtrr[Index].Length,\r
+ VariableMtrr[Index].Type\r
+ );\r
+ if (Status == RETURN_OUT_OF_RESOURCES) {\r
+ return Status;\r
+ }\r
+ }\r
+ }\r
+\r
+ //\r
+ // 3. Set UC\r
+ //\r
for (Index = 0; Index < VariableMtrrCount; Index++) {\r
- if (VariableMtrr[Index].Valid) {\r
- if (Address >= VariableMtrr[Index].BaseAddress &&\r
- Address < VariableMtrr[Index].BaseAddress+VariableMtrr[Index].Length) {\r
- TempMtrrType = VariableMtrr[Index].Type;\r
- MtrrType = MtrrPrecedence (MtrrType, TempMtrrType);\r
+ if ((VariableMtrr[Index].Length != 0) && (VariableMtrr[Index].Type == CacheUncacheable)) {\r
+ Status = MtrrLibSetMemoryType (\r
+ Ranges,\r
+ RangeCapacity,\r
+ RangeCount,\r
+ VariableMtrr[Index].BaseAddress,\r
+ VariableMtrr[Index].Length,\r
+ VariableMtrr[Index].Type\r
+ );\r
+ if (Status == RETURN_OUT_OF_RESOURCES) {\r
+ return Status;\r
}\r
}\r
- CacheType = GetMemoryCacheTypeFromMtrrType (MtrrType);\r
\r
- return CacheType;\r
+ return RETURN_SUCCESS;\r
}\r
\r
+/**\r
+ Return the memory type bit mask that's compatible to first type in the Ranges.\r
+\r
+ @param Ranges Memory range array holding the memory type\r
+ settings for all memory address.\r
+ @param RangeCount Count of memory ranges.\r
\r
+ @return Compatible memory type bit mask.\r
+**/\r
+UINT8\r
+MtrrLibGetCompatibleTypes (\r
+ IN CONST MTRR_MEMORY_RANGE *Ranges,\r
+ IN UINTN RangeCount\r
+ )\r
+{\r
+ ASSERT (RangeCount != 0);\r
+\r
+ switch (Ranges[0].Type) {\r
+ case CacheWriteBack:\r
+ case CacheWriteThrough:\r
+ return (1 << CacheWriteBack) | (1 << CacheWriteThrough) | (1 << CacheUncacheable);\r
+ break;\r
+\r
+ case CacheWriteCombining:\r
+ case CacheWriteProtected:\r
+ return (1 << Ranges[0].Type) | (1 << CacheUncacheable);\r
+ break;\r
+\r
+ case CacheUncacheable:\r
+ if (RangeCount == 1) {\r
+ return (1 << CacheUncacheable);\r
+ }\r
+\r
+ //\r
+ // UC appears in every compatible set above, so skip leading UC ranges\r
+ // and classify based on the first non-UC range that follows.\r
+ //\r
+ return MtrrLibGetCompatibleTypes (&Ranges[1], RangeCount - 1);\r
+ break;\r
+\r
+ case CacheInvalid:\r
+ default:\r
+ ASSERT (FALSE);\r
+ break;\r
+ }\r
+\r
+ // Unreachable; keeps compilers that don't see ASSERT as noreturn happy.\r
+ return 0;\r
+}\r
\r
/**\r
- This function prints all MTRRs for debugging.\r
+ Overwrite the destination MTRR settings with the source MTRR settings.\r
+ This routine is to make sure the modification to destination MTRR settings\r
+ is as small as possible.\r
+\r
+ @param DstMtrrs Destination MTRR settings.\r
+ @param DstMtrrCount Count of destination MTRR settings.\r
+ @param SrcMtrrs Source MTRR settings.\r
+ @param SrcMtrrCount Count of source MTRR settings.\r
+ @param Modified Flag array to indicate which destination MTRR setting is modified.\r
**/\r
VOID\r
-EFIAPI\r
-MtrrDebugPrintAllMtrrs (\r
- VOID\r
+MtrrLibMergeVariableMtrr (\r
+ MTRR_MEMORY_RANGE *DstMtrrs,\r
+ UINT32 DstMtrrCount,\r
+ MTRR_MEMORY_RANGE *SrcMtrrs,\r
+ UINT32 SrcMtrrCount,\r
+ BOOLEAN *Modified\r
)\r
{\r
- DEBUG_CODE (\r
- MTRR_SETTINGS MtrrSettings;\r
- UINTN Index;\r
- UINTN Index1;\r
- UINTN VariableMtrrCount;\r
- UINT64 Base;\r
- UINT64 Limit;\r
- UINT64 MtrrBase;\r
- UINT64 MtrrLimit;\r
- UINT64 RangeBase;\r
- UINT64 RangeLimit;\r
- UINT64 NoRangeBase;\r
- UINT64 NoRangeLimit;\r
- UINT32 RegEax;\r
- UINTN MemoryType;\r
- UINTN PreviousMemoryType;\r
- BOOLEAN Found;\r
-\r
- if (!IsMtrrSupported ()) {\r
- return;\r
- }\r
+ UINT32 DstIndex;\r
+ UINT32 SrcIndex;\r
\r
- DEBUG((DEBUG_CACHE, "MTRR Settings\n"));\r
- DEBUG((DEBUG_CACHE, "=============\n"));\r
+ ASSERT (SrcMtrrCount <= DstMtrrCount);\r
\r
- MtrrGetAllMtrrs (&MtrrSettings);\r
- DEBUG((DEBUG_CACHE, "MTRR Default Type: %016lx\n", MtrrSettings.MtrrDefType));\r
- for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {\r
- DEBUG((DEBUG_CACHE, "Fixed MTRR[%02d] : %016lx\n", Index, MtrrSettings.Fixed.Mtrr[Index]));\r
- }\r
+ //\r
+ // Pass 1: empty the Dst entries that do not appear in Src (Modified=TRUE),\r
+ // and empty the Src entries that already exist in Dst (no change needed).\r
+ //\r
+ for (DstIndex = 0; DstIndex < DstMtrrCount; DstIndex++) {\r
+ Modified[DstIndex] = FALSE;\r
\r
- VariableMtrrCount = GetVariableMtrrCount ();\r
- for (Index = 0; Index < VariableMtrrCount; Index++) {\r
- DEBUG((DEBUG_CACHE, "Variable MTRR[%02d]: Base=%016lx Mask=%016lx\n",\r
- Index,\r
- MtrrSettings.Variables.Mtrr[Index].Base,\r
- MtrrSettings.Variables.Mtrr[Index].Mask\r
- ));\r
+ if (DstMtrrs[DstIndex].Length == 0) {\r
+ continue;\r
}\r
- DEBUG((DEBUG_CACHE, "\n"));\r
- DEBUG((DEBUG_CACHE, "MTRR Ranges\n"));\r
- DEBUG((DEBUG_CACHE, "====================================\n"));\r
-\r
- Base = 0;\r
- PreviousMemoryType = MTRR_CACHE_INVALID_TYPE;\r
- for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {\r
- Base = mMtrrLibFixedMtrrTable[Index].BaseAddress;\r
- for (Index1 = 0; Index1 < 8; Index1++) {\r
- MemoryType = (UINTN)(RShiftU64 (MtrrSettings.Fixed.Mtrr[Index], Index1 * 8) & 0xff);\r
- if (MemoryType > CacheWriteBack) {\r
- MemoryType = MTRR_CACHE_INVALID_TYPE;\r
- }\r
- if (MemoryType != PreviousMemoryType) {\r
- if (PreviousMemoryType != MTRR_CACHE_INVALID_TYPE) {\r
- DEBUG((DEBUG_CACHE, "%016lx\n", Base - 1));\r
- }\r
- PreviousMemoryType = MemoryType;\r
- DEBUG((DEBUG_CACHE, "%a:%016lx-", mMtrrMemoryCacheTypeShortName[MemoryType], Base));\r
- }\r
- Base += mMtrrLibFixedMtrrTable[Index].Length;\r
+\r
+ for (SrcIndex = 0; SrcIndex < SrcMtrrCount; SrcIndex++) {\r
+ if ((DstMtrrs[DstIndex].BaseAddress == SrcMtrrs[SrcIndex].BaseAddress) &&\r
+ (DstMtrrs[DstIndex].Length == SrcMtrrs[SrcIndex].Length) &&\r
+ (DstMtrrs[DstIndex].Type == SrcMtrrs[SrcIndex].Type))\r
+ {\r
+ break;\r
+ }\r
}\r
}\r
- DEBUG((DEBUG_CACHE, "%016lx\n", Base - 1));\r
-\r
- VariableMtrrCount = GetVariableMtrrCount ();\r
\r
- Limit = BIT36 - 1;\r
- AsmCpuid (0x80000000, &RegEax, NULL, NULL, NULL);\r
- if (RegEax >= 0x80000008) {\r
- AsmCpuid (0x80000008, &RegEax, NULL, NULL, NULL);\r
- Limit = LShiftU64 (1, RegEax & 0xff) - 1;\r
+ if (SrcIndex == SrcMtrrCount) {\r
+ //\r
+ // Remove the one from DstMtrrs which is not in SrcMtrrs\r
+ //\r
+ DstMtrrs[DstIndex].Length = 0;\r
+ Modified[DstIndex] = TRUE;\r
+ } else {\r
+ //\r
+ // Remove the one from SrcMtrrs which is also in DstMtrrs\r
+ //\r
+ SrcMtrrs[SrcIndex].Length = 0;\r
}\r
- Base = BASE_1MB;\r
- PreviousMemoryType = MTRR_CACHE_INVALID_TYPE;\r
- do {\r
- MemoryType = MtrrGetMemoryAttribute (Base);\r
- if (MemoryType > CacheWriteBack) {\r
- MemoryType = MTRR_CACHE_INVALID_TYPE;\r
- }\r
+ }\r
\r
- if (MemoryType != PreviousMemoryType) {\r
- if (PreviousMemoryType != MTRR_CACHE_INVALID_TYPE) {\r
- DEBUG((DEBUG_CACHE, "%016lx\n", Base - 1));\r
+ //\r
+ // Now valid MTRR only exists in either DstMtrrs or SrcMtrrs.\r
+ // Merge MTRRs from SrcMtrrs to DstMtrrs\r
+ //\r
+ DstIndex = 0;\r
+ for (SrcIndex = 0; SrcIndex < SrcMtrrCount; SrcIndex++) {\r
+ if (SrcMtrrs[SrcIndex].Length != 0) {\r
+ //\r
+ // Find the empty slot in DstMtrrs\r
+ //\r
+ while (DstIndex < DstMtrrCount) {\r
+ if (DstMtrrs[DstIndex].Length == 0) {\r
+ break;\r
}\r
- PreviousMemoryType = MemoryType;\r
- DEBUG((DEBUG_CACHE, "%a:%016lx-", mMtrrMemoryCacheTypeShortName[MemoryType], Base));\r
+\r
+ DstIndex++;\r
}\r
\r
- RangeBase = BASE_1MB;\r
- NoRangeBase = BASE_1MB;\r
- RangeLimit = Limit;\r
- NoRangeLimit = Limit;\r
+ ASSERT (DstIndex < DstMtrrCount);\r
+ CopyMem (&DstMtrrs[DstIndex], &SrcMtrrs[SrcIndex], sizeof (SrcMtrrs[0]));\r
+ Modified[DstIndex] = TRUE;\r
+ }\r
+ }\r
+}\r
\r
- for (Index = 0, Found = FALSE; Index < VariableMtrrCount; Index++) {\r
- if ((MtrrSettings.Variables.Mtrr[Index].Mask & BIT11) == 0) {\r
- //\r
- // If mask is not valid, then do not display range\r
- //\r
- continue;\r
- }\r
- MtrrBase = (MtrrSettings.Variables.Mtrr[Index].Base & (~(SIZE_4KB - 1)));\r
- MtrrLimit = MtrrBase + ((~(MtrrSettings.Variables.Mtrr[Index].Mask & (~(SIZE_4KB - 1)))) & Limit);\r
+/**\r
+ Calculate the variable MTRR settings for all memory ranges.\r
+\r
+ @param DefaultType Default memory type.\r
+ @param A0 Alignment to use when base address is 0.\r
+ @param Ranges Memory range array holding the memory type\r
+ settings for all memory address.\r
+ @param RangeCount Count of memory ranges.\r
+ @param Scratch Scratch buffer to be used in MTRR calculation.\r
+ @param ScratchSize Pointer to the size of scratch buffer.\r
+ @param VariableMtrr Array holding all MTRR settings.\r
+ @param VariableMtrrCapacity Capacity of the MTRR array.\r
+ @param VariableMtrrCount The count of MTRR settings in array.\r
+\r
+ @retval RETURN_SUCCESS Variable MTRRs are allocated successfully.\r
+ @retval RETURN_OUT_OF_RESOURCES Count of variable MTRRs exceeds capacity.\r
+ @retval RETURN_BUFFER_TOO_SMALL The scratch buffer is too small for MTRR calculation.\r
+ The required scratch buffer size is returned through ScratchSize.\r
+**/\r
+RETURN_STATUS\r
+MtrrLibSetMemoryRanges (\r
+ IN MTRR_MEMORY_CACHE_TYPE DefaultType,\r
+ IN UINT64 A0,\r
+ IN MTRR_MEMORY_RANGE *Ranges,\r
+ IN UINTN RangeCount,\r
+ IN VOID *Scratch,\r
+ IN OUT UINTN *ScratchSize,\r
+ OUT MTRR_MEMORY_RANGE *VariableMtrr,\r
+ IN UINT32 VariableMtrrCapacity,\r
+ OUT UINT32 *VariableMtrrCount\r
+ )\r
+{\r
+ RETURN_STATUS Status;\r
+ UINT32 Index;\r
+ UINT64 Base0;\r
+ UINT64 Base1;\r
+ UINT64 Alignment;\r
+ UINT8 CompatibleTypes;\r
+ UINT64 Length;\r
+ UINT32 End;\r
+ UINTN ActualScratchSize;\r
+ UINTN BiggestScratchSize;\r
\r
- if (Base >= MtrrBase && Base < MtrrLimit) {\r
- Found = TRUE;\r
- }\r
+ *VariableMtrrCount = 0;\r
\r
- if (Base >= MtrrBase && MtrrBase > RangeBase) {\r
- RangeBase = MtrrBase;\r
- }\r
- if (Base > MtrrLimit && MtrrLimit > RangeBase) {\r
- RangeBase = MtrrLimit + 1;\r
- }\r
- if (Base < MtrrBase && MtrrBase < RangeLimit) {\r
- RangeLimit = MtrrBase - 1;\r
- }\r
- if (Base < MtrrLimit && MtrrLimit <= RangeLimit) {\r
- RangeLimit = MtrrLimit;\r
- }\r
+ //\r
+ // The whole range set needs multiple calls of MtrrLibCalculateMtrrs() and\r
+ // each call may need a different scratch buffer size.\r
+ // When the provided scratch buffer is too small for some call, remember the\r
+ // largest required size (BiggestScratchSize) and keep going, so the actual\r
+ // required size can be reported to the caller at the end.\r
+ //\r
+ BiggestScratchSize = 0;\r
\r
- if (Base > MtrrLimit && NoRangeBase < MtrrLimit) {\r
- NoRangeBase = MtrrLimit + 1;\r
- }\r
- if (Base < MtrrBase && NoRangeLimit > MtrrBase) {\r
- NoRangeLimit = MtrrBase - 1;\r
+ for (Index = 0; Index < RangeCount;) {\r
+ Base0 = Ranges[Index].BaseAddress;\r
+\r
+ //\r
+ // Full step is optimal\r
+ //\r
+ while (Index < RangeCount) {\r
+ ASSERT (Ranges[Index].BaseAddress == Base0);\r
+ Alignment = MtrrLibBiggestAlignment (Base0, A0);\r
+ while (Base0 + Alignment <= Ranges[Index].BaseAddress + Ranges[Index].Length) {\r
+ if ((BiggestScratchSize <= *ScratchSize) && (Ranges[Index].Type != DefaultType)) {\r
+ Status = MtrrLibAppendVariableMtrr (\r
+ VariableMtrr,\r
+ VariableMtrrCapacity,\r
+ VariableMtrrCount,\r
+ Base0,\r
+ Alignment,\r
+ Ranges[Index].Type\r
+ );\r
+ if (RETURN_ERROR (Status)) {\r
+ return Status;\r
+ }\r
}\r
+\r
+ Base0 += Alignment;\r
+ Alignment = MtrrLibBiggestAlignment (Base0, A0);\r
+ }\r
\r
- if (Found) {\r
- Base = RangeLimit + 1;\r
+ //\r
+ // Remove the above range from Ranges[Index]\r
+ //\r
+ Ranges[Index].Length -= Base0 - Ranges[Index].BaseAddress;\r
+ Ranges[Index].BaseAddress = Base0;\r
+ if (Ranges[Index].Length != 0) {\r
+ break;\r
} else {\r
- Base = NoRangeLimit + 1;\r
+ Index++;\r
}\r
- } while (Base < Limit);\r
- DEBUG((DEBUG_CACHE, "%016lx\n\n", Base - 1));\r
- );\r
+ }\r
+\r
+ if (Index == RangeCount) {\r
+ break;\r
+ }\r
+\r
+ //\r
+ // Find continuous ranges [Base0, Base1) which could be combined by MTRR.\r
+ // Per SDM, the compatible types between [B0, B1) are:\r
+ // UC, *\r
+ // WB, WT\r
+ // UC, WB, WT\r
+ //\r
+ CompatibleTypes = MtrrLibGetCompatibleTypes (&Ranges[Index], RangeCount - Index);\r
+\r
+ End = Index; // End points to last one that matches the CompatibleTypes.\r
+ while (End + 1 < RangeCount) {\r
+ if (((1 << Ranges[End + 1].Type) & CompatibleTypes) == 0) {\r
+ break;\r
+ }\r
+\r
+ End++;\r
+ }\r
+\r
+ Alignment = MtrrLibBiggestAlignment (Base0, A0);\r
+ Length = GetPowerOfTwo64 (Ranges[End].BaseAddress + Ranges[End].Length - Base0);\r
+ Base1 = Base0 + MIN (Alignment, Length);\r
+\r
+ //\r
+ // Base1 may not be in Ranges[End]. Update End to the range Base1 belongs to.\r
+ //\r
+ End = Index;\r
+ while (End + 1 < RangeCount) {\r
+ if (Base1 <= Ranges[End + 1].BaseAddress) {\r
+ break;\r
+ }\r
+\r
+ End++;\r
+ }\r
+\r
+ // Temporarily trim Ranges[End] to [.., Base1) for this calculation pass.\r
+ Length = Ranges[End].Length;\r
+ Ranges[End].Length = Base1 - Ranges[End].BaseAddress;\r
+ ActualScratchSize = *ScratchSize;\r
+ Status = MtrrLibCalculateMtrrs (\r
+ DefaultType,\r
+ A0,\r
+ &Ranges[Index],\r
+ End + 1 - Index,\r
+ Scratch,\r
+ &ActualScratchSize,\r
+ VariableMtrr,\r
+ VariableMtrrCapacity,\r
+ VariableMtrrCount\r
+ );\r
+ if (Status == RETURN_BUFFER_TOO_SMALL) {\r
+ BiggestScratchSize = MAX (BiggestScratchSize, ActualScratchSize);\r
+ //\r
+ // Ignore this error, because we need to calculate the biggest\r
+ // scratch buffer size.\r
+ //\r
+ Status = RETURN_SUCCESS;\r
+ }\r
+\r
+ if (RETURN_ERROR (Status)) {\r
+ return Status;\r
+ }\r
+\r
+ if (Length != Ranges[End].Length) {\r
+ // Restore the untrimmed remainder of Ranges[End] and resume from it.\r
+ Ranges[End].BaseAddress = Base1;\r
+ Ranges[End].Length = Length - Ranges[End].Length;\r
+ Index = End;\r
+ } else {\r
+ Index = End + 1;\r
+ }\r
+ }\r
+\r
+ if (*ScratchSize < BiggestScratchSize) {\r
+ *ScratchSize = BiggestScratchSize;\r
+ return RETURN_BUFFER_TOO_SMALL;\r
+ }\r
+\r
+ return RETURN_SUCCESS;\r
}\r
-/**\r
- This function attempts to set the attributes for a memory range.\r
\r
- @param[in] BaseAddress The physical address that is the start\r
- address of a memory region.\r
- @param[in] Length The size in bytes of the memory region.\r
- @param[in] Attribute The bit mask of attributes to set for the\r
- memory region.\r
+/**\r
+ Set the below-1MB memory attribute to fixed MTRR buffer.\r
+ Modified flag array indicates which fixed MTRR is modified.\r
\r
- @retval RETURN_SUCCESS The attributes were set for the memory\r
- region.\r
- @retval RETURN_INVALID_PARAMETER Length is zero.\r
- @retval RETURN_UNSUPPORTED The processor does not support one or\r
- more bytes of the memory resource range\r
- specified by BaseAddress and Length.\r
- @retval RETURN_UNSUPPORTED The bit mask of attributes is not support\r
- for the memory resource range specified\r
- by BaseAddress and Length.\r
- @retval RETURN_ACCESS_DENIED The attributes for the memory resource\r
- range specified by BaseAddress and Length\r
- cannot be modified.\r
- @retval RETURN_OUT_OF_RESOURCES There are not enough system resources to\r
- modify the attributes of the memory\r
- resource range.\r
+ @param [in, out] ClearMasks The bits (when set) to clear in the fixed MTRR MSR.\r
+ @param [in, out] OrMasks The bits to set in the fixed MTRR MSR.\r
+ @param [in] BaseAddress Base address.\r
+ @param [in] Length Length.\r
+ @param [in] Type Memory type.\r
\r
+ @retval RETURN_SUCCESS The memory attribute is set successfully.\r
+ @retval RETURN_UNSUPPORTED The requested range or cache type was invalid\r
+ for the fixed MTRRs.\r
**/\r
RETURN_STATUS\r
-EFIAPI\r
-MtrrSetMemoryAttribute (\r
+MtrrLibSetBelow1MBMemoryAttribute (\r
+ IN OUT UINT64 *ClearMasks,\r
+ IN OUT UINT64 *OrMasks,\r
IN PHYSICAL_ADDRESS BaseAddress,\r
IN UINT64 Length,\r
- IN MTRR_MEMORY_CACHE_TYPE Attribute\r
+ IN MTRR_MEMORY_CACHE_TYPE Type\r
)\r
{\r
- UINT64 TempQword;\r
- RETURN_STATUS Status;\r
- UINT64 MemoryType;\r
- UINT64 Alignment;\r
- BOOLEAN OverLap;\r
- BOOLEAN Positive;\r
- UINT32 MsrNum;\r
- UINTN MtrrNumber;\r
- VARIABLE_MTRR VariableMtrr[MTRR_NUMBER_OF_VARIABLE_MTRR];\r
- UINT32 UsedMtrr;\r
- UINT64 MtrrValidBitsMask;\r
- UINT64 MtrrValidAddressMask;\r
- BOOLEAN OverwriteExistingMtrr;\r
- UINT32 FirmwareVariableMtrrCount;\r
- UINT32 VariableMtrrEnd;\r
- MTRR_CONTEXT MtrrContext;\r
- BOOLEAN MtrrContextValid;\r
- BOOLEAN FixedSettingsValid[MTRR_NUMBER_OF_FIXED_MTRR];\r
- BOOLEAN FixedSettingsModified[MTRR_NUMBER_OF_FIXED_MTRR];\r
- MTRR_FIXED_SETTINGS WorkingFixedSettings;\r
- UINT32 VariableMtrrCount;\r
- MTRR_VARIABLE_SETTINGS OriginalVariableSettings;\r
- MTRR_VARIABLE_SETTINGS WorkingVariableSettings;\r
- UINT32 Index;\r
- UINT64 ClearMask;\r
- UINT64 OrMask;\r
- UINT64 NewValue;\r
- MTRR_VARIABLE_SETTINGS *VariableSettings;\r
-\r
- DEBUG((DEBUG_CACHE, "MtrrSetMemoryAttribute() %a:%016lx-%016lx\n", mMtrrMemoryCacheTypeShortName[Attribute], BaseAddress, Length));\r
- MtrrContextValid = FALSE;\r
- for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {\r
- FixedSettingsValid[Index] = FALSE;\r
- FixedSettingsModified[Index] = FALSE;\r
- }\r
+ RETURN_STATUS Status;\r
+ UINT32 MsrIndex;\r
+ UINT64 ClearMask;\r
+ UINT64 OrMask;\r
\r
- if (!IsMtrrSupported ()) {\r
- Status = RETURN_UNSUPPORTED;\r
- goto Done;\r
+ ASSERT (BaseAddress < BASE_1MB);\r
+\r
+ //\r
+ // NOTE(review): (UINT32)-1 looks like a "no MSR located yet" sentinel that\r
+ // MtrrLibProgramFixedMtrr advances on each iteration — confirm against that\r
+ // helper's contract.\r
+ //\r
+ MsrIndex = (UINT32)-1;\r
+ while ((BaseAddress < BASE_1MB) && (Length != 0)) {\r
+ Status = MtrrLibProgramFixedMtrr (Type, &BaseAddress, &Length, &MsrIndex, &ClearMask, &OrMask);\r
+ if (RETURN_ERROR (Status)) {\r
+ return Status;\r
+ }\r
+\r
+ // Accumulate per-MSR masks; later Clear bits override earlier Or bits.\r
+ ClearMasks[MsrIndex] = ClearMasks[MsrIndex] | ClearMask;\r
+ OrMasks[MsrIndex] = (OrMasks[MsrIndex] & ~ClearMask) | OrMask;\r
}\r
\r
- FirmwareVariableMtrrCount = GetFirmwareVariableMtrrCountWorker ();\r
- VariableMtrrEnd = MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (2 * GetVariableMtrrCount ()) - 1;\r
+ return RETURN_SUCCESS;\r
+}\r
+\r
+/**\r
+ This function attempts to set the attributes into MTRR setting buffer for multiple memory ranges.\r
+\r
+ @param[in, out] MtrrSetting MTRR setting buffer to be set.\r
+ @param[in] Scratch A temporary scratch buffer that is used to perform the calculation.\r
+ @param[in, out] ScratchSize Pointer to the size in bytes of the scratch buffer.\r
+ It may be updated to the actual required size when the calculation\r
+ needs more scratch buffer.\r
+ @param[in] Ranges Pointer to an array of MTRR_MEMORY_RANGE.\r
+ When range overlap happens, the last one takes higher priority.\r
+ When the function returns, either all the attributes are set successfully,\r
+ or none of them is set.\r
+ @param[in] RangeCount Count of MTRR_MEMORY_RANGE.\r
+\r
+ @retval RETURN_SUCCESS The attributes were set for all the memory ranges.\r
+ @retval RETURN_INVALID_PARAMETER Length in any range is zero.\r
+ @retval RETURN_UNSUPPORTED The processor does not support one or more bytes of the\r
+ memory resource range specified by BaseAddress and Length in any range.\r
+ @retval RETURN_UNSUPPORTED The bit mask of attributes is not support for the memory resource\r
+ range specified by BaseAddress and Length in any range.\r
+ @retval RETURN_OUT_OF_RESOURCES There are not enough system resources to modify the attributes of\r
+ the memory resource ranges.\r
+ @retval RETURN_ACCESS_DENIED The attributes for the memory resource range specified by\r
+ BaseAddress and Length cannot be modified.\r
+ @retval RETURN_BUFFER_TOO_SMALL The scratch buffer is too small for MTRR calculation.\r
+**/\r
+RETURN_STATUS\r
+EFIAPI\r
+MtrrSetMemoryAttributesInMtrrSettings (\r
+ IN OUT MTRR_SETTINGS *MtrrSetting,\r
+ IN VOID *Scratch,\r
+ IN OUT UINTN *ScratchSize,\r
+ IN CONST MTRR_MEMORY_RANGE *Ranges,\r
+ IN UINTN RangeCount\r
+ )\r
+{\r
+ RETURN_STATUS Status;\r
+ UINT32 Index;\r
+ UINT64 BaseAddress;\r
+ UINT64 Length;\r
+ BOOLEAN Above1MbExist;\r
+\r
+ UINT64 MtrrValidBitsMask;\r
+ UINT64 MtrrValidAddressMask;\r
+ MTRR_MEMORY_CACHE_TYPE DefaultType;\r
+ MTRR_VARIABLE_SETTINGS VariableSettings;\r
+ MTRR_MEMORY_RANGE WorkingRanges[2 * ARRAY_SIZE (MtrrSetting->Variables.Mtrr) + 2];\r
+ UINTN WorkingRangeCount;\r
+ BOOLEAN Modified;\r
+ MTRR_VARIABLE_SETTING VariableSetting;\r
+ UINT32 OriginalVariableMtrrCount;\r
+ UINT32 FirmwareVariableMtrrCount;\r
+ UINT32 WorkingVariableMtrrCount;\r
+ MTRR_MEMORY_RANGE OriginalVariableMtrr[ARRAY_SIZE (MtrrSetting->Variables.Mtrr)];\r
+ MTRR_MEMORY_RANGE WorkingVariableMtrr[ARRAY_SIZE (MtrrSetting->Variables.Mtrr)];\r
+ BOOLEAN VariableSettingModified[ARRAY_SIZE (MtrrSetting->Variables.Mtrr)];\r
+\r
+ UINT64 ClearMasks[ARRAY_SIZE (mMtrrLibFixedMtrrTable)];\r
+ UINT64 OrMasks[ARRAY_SIZE (mMtrrLibFixedMtrrTable)];\r
\r
- MtrrLibInitializeMtrrMask(&MtrrValidBitsMask, &MtrrValidAddressMask);\r
+ MTRR_CONTEXT MtrrContext;\r
+ BOOLEAN MtrrContextValid;\r
\r
- TempQword = 0;\r
- MemoryType = (UINT64)Attribute;\r
- OverwriteExistingMtrr = FALSE;\r
+ Status = RETURN_SUCCESS;\r
+ MtrrLibInitializeMtrrMask (&MtrrValidBitsMask, &MtrrValidAddressMask);\r
\r
//\r
- // Check for an invalid parameter\r
+ // TRUE indicates the corresponding variable MTRR setting in OriginalVariableMtrr needs modification.\r
//\r
- if (Length == 0) {\r
- Status = RETURN_INVALID_PARAMETER;\r
- goto Done;\r
- }\r
-\r
- if (\r
- (BaseAddress & ~MtrrValidAddressMask) != 0 ||\r
- (Length & ~MtrrValidAddressMask) != 0\r
- ) {\r
- Status = RETURN_UNSUPPORTED;\r
- goto Done;\r
- }\r
+ SetMem (VariableSettingModified, ARRAY_SIZE (VariableSettingModified), FALSE);\r
\r
//\r
- // Check if Fixed MTRR\r
+ // TRUE indicating the caller requests to set variable MTRRs.\r
//\r
- Status = RETURN_SUCCESS;\r
- if (BaseAddress < BASE_1MB) {\r
- while ((BaseAddress < BASE_1MB) && (Length > 0) && Status == RETURN_SUCCESS) {\r
- Status = ProgramFixedMtrr (MemoryType, &BaseAddress, &Length, &MsrNum, &ClearMask, &OrMask);\r
- if (RETURN_ERROR (Status)) {\r
- goto Done;\r
- }\r
- if (!FixedSettingsValid[MsrNum]) {\r
- WorkingFixedSettings.Mtrr[MsrNum] = AsmReadMsr64 (mMtrrLibFixedMtrrTable[MsrNum].Msr);\r
- FixedSettingsValid[MsrNum] = TRUE;\r
- }\r
- NewValue = (WorkingFixedSettings.Mtrr[MsrNum] & ~ClearMask) | OrMask;\r
- if (WorkingFixedSettings.Mtrr[MsrNum] != NewValue) {\r
- WorkingFixedSettings.Mtrr[MsrNum] = NewValue;\r
- FixedSettingsModified[MsrNum] = TRUE;\r
- }\r
- }\r
-\r
- if (Length == 0) {\r
- //\r
- // A Length of 0 can only make sense for fixed MTTR ranges.\r
- // Since we just handled the fixed MTRRs, we can skip the\r
- // variable MTRR section.\r
- //\r
- goto Done;\r
- }\r
- }\r
+ Above1MbExist = FALSE;\r
+ OriginalVariableMtrrCount = 0;\r
\r
//\r
- // Since memory ranges below 1MB will be overridden by the fixed MTRRs,\r
- // we can set the base to 0 to save variable MTRRs.\r
+ // 0. Dump the requests.\r
//\r
- if (BaseAddress == BASE_1MB) {\r
- BaseAddress = 0;\r
- Length += SIZE_1MB;\r
+ DEBUG_CODE_BEGIN ();\r
+ DEBUG ((\r
+ DEBUG_CACHE,\r
+ "Mtrr: Set Mem Attribute to %a, ScratchSize = %x%a",\r
+ (MtrrSetting == NULL) ? "Hardware" : "Buffer",\r
+ *ScratchSize,\r
+ (RangeCount <= 1) ? "," : "\n"\r
+ ));\r
+ for (Index = 0; Index < RangeCount; Index++) {\r
+ DEBUG ((\r
+ DEBUG_CACHE,\r
+ " %a: [%016lx, %016lx)\n",\r
+ mMtrrMemoryCacheTypeShortName[MIN (Ranges[Index].Type, CacheInvalid)],\r
+ Ranges[Index].BaseAddress,\r
+ Ranges[Index].BaseAddress + Ranges[Index].Length\r
+ ));\r
}\r
\r
- //\r
- // Read all variable MTRRs\r
- //\r
- VariableMtrrCount = GetVariableMtrrCountWorker ();\r
- MtrrGetVariableMtrrWorker (VariableMtrrCount, &OriginalVariableSettings);\r
- CopyMem (&WorkingVariableSettings, &OriginalVariableSettings, sizeof (WorkingVariableSettings));\r
- VariableSettings = &WorkingVariableSettings;\r
+ DEBUG_CODE_END ();\r
\r
//\r
- // Check for overlap\r
+ // 1. Validate the parameters.\r
//\r
- UsedMtrr = MtrrGetMemoryAttributeInVariableMtrrWorker (\r
- VariableSettings,\r
- FirmwareVariableMtrrCount,\r
- MtrrValidBitsMask,\r
- MtrrValidAddressMask,\r
- VariableMtrr\r
- );\r
- OverLap = CheckMemoryAttributeOverlap (\r
- FirmwareVariableMtrrCount,\r
- BaseAddress,\r
- BaseAddress + Length - 1,\r
- VariableMtrr\r
- );\r
+ if (!IsMtrrSupported ()) {\r
+ Status = RETURN_UNSUPPORTED;\r
+ goto Exit;\r
+ }\r
\r
- if (OverLap) {\r
- Status = CombineMemoryAttribute (\r
- FirmwareVariableMtrrCount,\r
- MemoryType,\r
- &BaseAddress,\r
- &Length,\r
- VariableMtrr,\r
- &UsedMtrr,\r
- &OverwriteExistingMtrr\r
- );\r
- if (RETURN_ERROR (Status)) {\r
- goto Done;\r
+ for (Index = 0; Index < RangeCount; Index++) {\r
+ if (Ranges[Index].Length == 0) {\r
+ Status = RETURN_INVALID_PARAMETER;\r
+ goto Exit;\r
}\r
\r
- if (Length == 0) {\r
+ if (((Ranges[Index].BaseAddress & ~MtrrValidAddressMask) != 0) ||\r
+ ((((Ranges[Index].BaseAddress + Ranges[Index].Length) & ~MtrrValidAddressMask) != 0) &&\r
+ ((Ranges[Index].BaseAddress + Ranges[Index].Length) != MtrrValidBitsMask + 1))\r
+ )\r
+ {\r
//\r
- // Combined successfully, invalidate the now-unused MTRRs\r
+ // Either the BaseAddress or the Limit doesn't follow the alignment requirement.\r
+ // Note: It's still valid if Limit doesn't follow the alignment requirement but equals to MAX Address.\r
//\r
- InvalidateMtrr(VariableMtrrCount, VariableMtrr);\r
- Status = RETURN_SUCCESS;\r
- goto Done;\r
+ Status = RETURN_UNSUPPORTED;\r
+ goto Exit;\r
+ }\r
+\r
+ if ((Ranges[Index].Type != CacheUncacheable) &&\r
+ (Ranges[Index].Type != CacheWriteCombining) &&\r
+ (Ranges[Index].Type != CacheWriteThrough) &&\r
+ (Ranges[Index].Type != CacheWriteProtected) &&\r
+ (Ranges[Index].Type != CacheWriteBack))\r
+ {\r
+ Status = RETURN_INVALID_PARAMETER;\r
+ goto Exit;\r
+ }\r
+\r
+ if (Ranges[Index].BaseAddress + Ranges[Index].Length > BASE_1MB) {\r
+ Above1MbExist = TRUE;\r
}\r
}\r
\r
//\r
- // The memory type is the same with the type specified by\r
- // MTRR_LIB_IA32_MTRR_DEF_TYPE.\r
+ // 2. Apply the above-1MB memory attribute settings.\r
//\r
- if ((!OverwriteExistingMtrr) && (Attribute == MtrrGetDefaultMemoryType ())) {\r
+ if (Above1MbExist) {\r
//\r
- // Invalidate the now-unused MTRRs\r
+ // 2.1. Read all variable MTRRs and convert to Ranges.\r
//\r
- InvalidateMtrr(VariableMtrrCount, VariableMtrr);\r
- goto Done;\r
- }\r
+ OriginalVariableMtrrCount = GetVariableMtrrCountWorker ();\r
+ MtrrGetVariableMtrrWorker (MtrrSetting, OriginalVariableMtrrCount, &VariableSettings);\r
+ MtrrLibGetRawVariableRanges (\r
+ &VariableSettings,\r
+ OriginalVariableMtrrCount,\r
+ MtrrValidBitsMask,\r
+ MtrrValidAddressMask,\r
+ OriginalVariableMtrr\r
+ );\r
\r
- Positive = GetMtrrNumberAndDirection (BaseAddress, Length, &MtrrNumber);\r
+ DefaultType = MtrrGetDefaultMemoryTypeWorker (MtrrSetting);\r
+ WorkingRangeCount = 1;\r
+ WorkingRanges[0].BaseAddress = 0;\r
+ WorkingRanges[0].Length = MtrrValidBitsMask + 1;\r
+ WorkingRanges[0].Type = DefaultType;\r
+\r
+ Status = MtrrLibApplyVariableMtrrs (\r
+ OriginalVariableMtrr,\r
+ OriginalVariableMtrrCount,\r
+ WorkingRanges,\r
+ ARRAY_SIZE (WorkingRanges),\r
+ &WorkingRangeCount\r
+ );\r
+ ASSERT_RETURN_ERROR (Status);\r
\r
- if ((UsedMtrr + MtrrNumber) > FirmwareVariableMtrrCount) {\r
- Status = RETURN_OUT_OF_RESOURCES;\r
- goto Done;\r
- }\r
+ ASSERT (OriginalVariableMtrrCount >= PcdGet32 (PcdCpuNumberOfReservedVariableMtrrs));\r
+ FirmwareVariableMtrrCount = OriginalVariableMtrrCount - PcdGet32 (PcdCpuNumberOfReservedVariableMtrrs);\r
+ ASSERT (WorkingRangeCount <= 2 * FirmwareVariableMtrrCount + 1);\r
\r
- //\r
- // Invalidate the now-unused MTRRs\r
- //\r
- InvalidateMtrr(VariableMtrrCount, VariableMtrr);\r
+ //\r
+ // 2.2. Force [0, 1M) to UC, so that it doesn't impact subtraction algorithm.\r
+ //\r
+ Status = MtrrLibSetMemoryType (\r
+ WorkingRanges,\r
+ ARRAY_SIZE (WorkingRanges),\r
+ &WorkingRangeCount,\r
+ 0,\r
+ SIZE_1MB,\r
+ CacheUncacheable\r
+ );\r
+ ASSERT (Status != RETURN_OUT_OF_RESOURCES);\r
\r
- //\r
- // Find first unused MTRR\r
- //\r
- for (MsrNum = MTRR_LIB_IA32_VARIABLE_MTRR_BASE;\r
- MsrNum < VariableMtrrEnd;\r
- MsrNum += 2\r
- ) {\r
- if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {\r
- break;\r
+ //\r
+ // 2.3. Apply the new memory attribute settings to Ranges.\r
+ //\r
+ Modified = FALSE;\r
+ for (Index = 0; Index < RangeCount; Index++) {\r
+ BaseAddress = Ranges[Index].BaseAddress;\r
+ Length = Ranges[Index].Length;\r
+ if (BaseAddress < BASE_1MB) {\r
+ if (Length <= BASE_1MB - BaseAddress) {\r
+ continue;\r
+ }\r
+\r
+ Length -= BASE_1MB - BaseAddress;\r
+ BaseAddress = BASE_1MB;\r
+ }\r
+\r
+ Status = MtrrLibSetMemoryType (\r
+ WorkingRanges,\r
+ ARRAY_SIZE (WorkingRanges),\r
+ &WorkingRangeCount,\r
+ BaseAddress,\r
+ Length,\r
+ Ranges[Index].Type\r
+ );\r
+ if (Status == RETURN_ALREADY_STARTED) {\r
+ Status = RETURN_SUCCESS;\r
+ } else if (Status == RETURN_OUT_OF_RESOURCES) {\r
+ goto Exit;\r
+ } else {\r
+ ASSERT_RETURN_ERROR (Status);\r
+ Modified = TRUE;\r
+ }\r
}\r
- }\r
\r
- if (BaseAddress != 0) {\r
- do {\r
+ if (Modified) {\r
//\r
- // Calculate the alignment of the base address.\r
+ // 2.4. Calculate the Variable MTRR settings based on the Ranges.\r
+ // Buffer Too Small may be returned if the scratch buffer size is insufficient.\r
//\r
- Alignment = LShiftU64 (1, (UINTN)LowBitSet64 (BaseAddress));\r
-\r
- if (Alignment > Length) {\r
- break;\r
+ Status = MtrrLibSetMemoryRanges (\r
+ DefaultType,\r
+ LShiftU64 (1, (UINTN)HighBitSet64 (MtrrValidBitsMask)),\r
+ WorkingRanges,\r
+ WorkingRangeCount,\r
+ Scratch,\r
+ ScratchSize,\r
+ WorkingVariableMtrr,\r
+ FirmwareVariableMtrrCount + 1,\r
+ &WorkingVariableMtrrCount\r
+ );\r
+ if (RETURN_ERROR (Status)) {\r
+ goto Exit;\r
}\r
\r
//\r
- // Find unused MTRR\r
+ // 2.5. Remove the [0, 1MB) MTRR if it still exists (not merged with other range)\r
//\r
- for (; MsrNum < VariableMtrrEnd; MsrNum += 2) {\r
- if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {\r
+ for (Index = 0; Index < WorkingVariableMtrrCount; Index++) {\r
+ if ((WorkingVariableMtrr[Index].BaseAddress == 0) && (WorkingVariableMtrr[Index].Length == SIZE_1MB)) {\r
+ ASSERT (WorkingVariableMtrr[Index].Type == CacheUncacheable);\r
+ WorkingVariableMtrrCount--;\r
+ CopyMem (\r
+ &WorkingVariableMtrr[Index],\r
+ &WorkingVariableMtrr[Index + 1],\r
+ (WorkingVariableMtrrCount - Index) * sizeof (WorkingVariableMtrr[0])\r
+ );\r
break;\r
}\r
}\r
\r
- ProgramVariableMtrr (\r
- MsrNum,\r
- BaseAddress,\r
- Alignment,\r
- MemoryType,\r
- MtrrValidAddressMask\r
- );\r
- BaseAddress += Alignment;\r
- Length -= Alignment;\r
- } while (TRUE);\r
-\r
- if (Length == 0) {\r
- goto Done;\r
- }\r
- }\r
-\r
- TempQword = Length;\r
-\r
- if (!Positive) {\r
- Length = Power2MaxMemory (LShiftU64 (TempQword, 1));\r
-\r
- //\r
- // Find unused MTRR\r
- //\r
- for (; MsrNum < VariableMtrrEnd; MsrNum += 2) {\r
- if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {\r
- break;\r
+ if (WorkingVariableMtrrCount > FirmwareVariableMtrrCount) {\r
+ Status = RETURN_OUT_OF_RESOURCES;\r
+ goto Exit;\r
}\r
- }\r
\r
- ProgramVariableMtrr (\r
- MsrNum,\r
- BaseAddress,\r
- Length,\r
- MemoryType,\r
- MtrrValidAddressMask\r
- );\r
- BaseAddress += Length;\r
- TempQword = Length - TempQword;\r
- MemoryType = MTRR_CACHE_UNCACHEABLE;\r
+ //\r
+ // 2.6. Merge the WorkingVariableMtrr to OriginalVariableMtrr\r
+ // Make sure least modification is made to OriginalVariableMtrr.\r
+ //\r
+ MtrrLibMergeVariableMtrr (\r
+ OriginalVariableMtrr,\r
+ OriginalVariableMtrrCount,\r
+ WorkingVariableMtrr,\r
+ WorkingVariableMtrrCount,\r
+ VariableSettingModified\r
+ );\r
+ }\r
}\r
\r
- do {\r
- //\r
- // Find unused MTRR\r
- //\r
- for (; MsrNum < VariableMtrrEnd; MsrNum += 2) {\r
- if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {\r
- break;\r
- }\r
+ //\r
+ // 3. Apply the below-1MB memory attribute settings.\r
+ //\r
+ // (Value & ~0 | 0) still equals to (Value)\r
+ //\r
+ ZeroMem (ClearMasks, sizeof (ClearMasks));\r
+ ZeroMem (OrMasks, sizeof (OrMasks));\r
+ for (Index = 0; Index < RangeCount; Index++) {\r
+ if (Ranges[Index].BaseAddress >= BASE_1MB) {\r
+ continue;\r
}\r
\r
- Length = Power2MaxMemory (TempQword);\r
- if (!Positive) {\r
- BaseAddress -= Length;\r
+ Status = MtrrLibSetBelow1MBMemoryAttribute (\r
+ ClearMasks,\r
+ OrMasks,\r
+ Ranges[Index].BaseAddress,\r
+ Ranges[Index].Length,\r
+ Ranges[Index].Type\r
+ );\r
+ if (RETURN_ERROR (Status)) {\r
+ goto Exit;\r
}\r
+ }\r
\r
- ProgramVariableMtrr (\r
- MsrNum,\r
- BaseAddress,\r
- Length,\r
- MemoryType,\r
- MtrrValidAddressMask\r
- );\r
+ MtrrContextValid = FALSE;\r
+ //\r
+ // 4. Write fixed MTRRs that have been modified\r
+ //\r
+ for (Index = 0; Index < ARRAY_SIZE (ClearMasks); Index++) {\r
+ if (ClearMasks[Index] != 0) {\r
+ if (MtrrSetting != NULL) {\r
+ MtrrSetting->Fixed.Mtrr[Index] = (MtrrSetting->Fixed.Mtrr[Index] & ~ClearMasks[Index]) | OrMasks[Index];\r
+ } else {\r
+ if (!MtrrContextValid) {\r
+ MtrrLibPreMtrrChange (&MtrrContext);\r
+ MtrrContextValid = TRUE;\r
+ }\r
\r
- if (Positive) {\r
- BaseAddress += Length;\r
+ AsmMsrAndThenOr64 (mMtrrLibFixedMtrrTable[Index].Msr, ~ClearMasks[Index], OrMasks[Index]);\r
+ }\r
}\r
- TempQword -= Length;\r
-\r
- } while (TempQword > 0);\r
-\r
-Done:\r
+ }\r
\r
//\r
- // Write fixed MTRRs that have been modified\r
+ // 5. Write variable MTRRs that have been modified\r
//\r
- for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {\r
- if (FixedSettingsModified[Index]) {\r
- if (!MtrrContextValid) {\r
- PreMtrrChange (&MtrrContext);\r
- MtrrContextValid = TRUE;\r
+ for (Index = 0; Index < OriginalVariableMtrrCount; Index++) {\r
+ if (VariableSettingModified[Index]) {\r
+ if (OriginalVariableMtrr[Index].Length != 0) {\r
+ VariableSetting.Base = (OriginalVariableMtrr[Index].BaseAddress & MtrrValidAddressMask)\r
+ | (UINT8)OriginalVariableMtrr[Index].Type;\r
+ VariableSetting.Mask = ((~(OriginalVariableMtrr[Index].Length - 1)) & MtrrValidAddressMask) | BIT11;\r
+ } else {\r
+ VariableSetting.Base = 0;\r
+ VariableSetting.Mask = 0;\r
+ }\r
+\r
+ if (MtrrSetting != NULL) {\r
+ CopyMem (&MtrrSetting->Variables.Mtrr[Index], &VariableSetting, sizeof (VariableSetting));\r
+ } else {\r
+ if (!MtrrContextValid) {\r
+ MtrrLibPreMtrrChange (&MtrrContext);\r
+ MtrrContextValid = TRUE;\r
+ }\r
+\r
+ AsmWriteMsr64 (\r
+ MSR_IA32_MTRR_PHYSBASE0 + (Index << 1),\r
+ VariableSetting.Base\r
+ );\r
+ AsmWriteMsr64 (\r
+ MSR_IA32_MTRR_PHYSMASK0 + (Index << 1),\r
+ VariableSetting.Mask\r
+ );\r
}\r
- AsmWriteMsr64 (\r
- mMtrrLibFixedMtrrTable[Index].Msr,\r
- WorkingFixedSettings.Mtrr[Index]\r
- );\r
}\r
}\r
\r
- if (MtrrContextValid) {\r
- PostMtrrChange (&MtrrContext);\r
+ if (MtrrSetting != NULL) {\r
+ ((MSR_IA32_MTRR_DEF_TYPE_REGISTER *)&MtrrSetting->MtrrDefType)->Bits.E = 1;\r
+ ((MSR_IA32_MTRR_DEF_TYPE_REGISTER *)&MtrrSetting->MtrrDefType)->Bits.FE = 1;\r
+ } else {\r
+ if (MtrrContextValid) {\r
+ MtrrLibPostMtrrChange (&MtrrContext);\r
+ }\r
}\r
\r
- DEBUG((DEBUG_CACHE, " Status = %r\n", Status));\r
+Exit:\r
+ DEBUG ((DEBUG_CACHE, " Result = %r\n", Status));\r
if (!RETURN_ERROR (Status)) {\r
- MtrrDebugPrintAllMtrrs ();\r
+ MtrrDebugPrintAllMtrrsWorker (MtrrSetting);\r
}\r
\r
return Status;\r
}\r
+\r
+/**\r
+ This function attempts to set the attributes into MTRR setting buffer for a memory range.\r
+\r
+ @param[in, out] MtrrSetting MTRR setting buffer to be set.\r
+ @param[in] BaseAddress The physical address that is the start address\r
+ of a memory range.\r
+ @param[in] Length The size in bytes of the memory range.\r
+ @param[in] Attribute The bit mask of attributes to set for the\r
+ memory range.\r
+\r
+ @retval RETURN_SUCCESS The attributes were set for the memory range.\r
+ @retval RETURN_INVALID_PARAMETER Length is zero.\r
+ @retval RETURN_UNSUPPORTED The processor does not support one or more bytes of the\r
+ memory resource range specified by BaseAddress and Length.\r
+ @retval RETURN_UNSUPPORTED The bit mask of attributes is not support for the memory resource\r
+ range specified by BaseAddress and Length.\r
+ @retval RETURN_ACCESS_DENIED The attributes for the memory resource range specified by\r
+ BaseAddress and Length cannot be modified.\r
+ @retval RETURN_OUT_OF_RESOURCES There are not enough system resources to modify the attributes of\r
+ the memory resource range.\r
+ Multiple memory range attributes setting by calling this API multiple\r
+ times may fail with status RETURN_OUT_OF_RESOURCES. It may not mean\r
+ the number of CPU MTRRs are too small to set such memory attributes.\r
+ Pass the multiple memory range attributes to one call of\r
+ MtrrSetMemoryAttributesInMtrrSettings() may succeed.\r
+ @retval RETURN_BUFFER_TOO_SMALL The fixed internal scratch buffer is too small for MTRR calculation.\r
+ Caller should use MtrrSetMemoryAttributesInMtrrSettings() to specify\r
+ external scratch buffer.\r
+**/\r
+RETURN_STATUS\r
+EFIAPI\r
+MtrrSetMemoryAttributeInMtrrSettings (\r
+ IN OUT MTRR_SETTINGS *MtrrSetting,\r
+ IN PHYSICAL_ADDRESS BaseAddress,\r
+ IN UINT64 Length,\r
+ IN MTRR_MEMORY_CACHE_TYPE Attribute\r
+ )\r
+{\r
+ UINT8 Scratch[SCRATCH_BUFFER_SIZE];\r
+ UINTN ScratchSize;\r
+ MTRR_MEMORY_RANGE Range;\r
+\r
+ Range.BaseAddress = BaseAddress;\r
+ Range.Length = Length;\r
+ Range.Type = Attribute;\r
+ ScratchSize = sizeof (Scratch);\r
+ return MtrrSetMemoryAttributesInMtrrSettings (MtrrSetting, Scratch, &ScratchSize, &Range, 1);\r
+}\r
+\r
+/**\r
+ This function attempts to set the attributes for a memory range.\r
+\r
+ @param[in] BaseAddress The physical address that is the start\r
+ address of a memory range.\r
+ @param[in] Length The size in bytes of the memory range.\r
+ @param[in] Attributes The bit mask of attributes to set for the\r
+ memory range.\r
+\r
+ @retval RETURN_SUCCESS The attributes were set for the memory\r
+ range.\r
+ @retval RETURN_INVALID_PARAMETER Length is zero.\r
+ @retval RETURN_UNSUPPORTED The processor does not support one or\r
+ more bytes of the memory resource range\r
+ specified by BaseAddress and Length.\r
+ @retval RETURN_UNSUPPORTED The bit mask of attributes is not support\r
+ for the memory resource range specified\r
+ by BaseAddress and Length.\r
+ @retval RETURN_ACCESS_DENIED The attributes for the memory resource\r
+ range specified by BaseAddress and Length\r
+ cannot be modified.\r
+ @retval RETURN_OUT_OF_RESOURCES There are not enough system resources to\r
+ modify the attributes of the memory\r
+ resource range.\r
+ Multiple memory range attributes setting by calling this API multiple\r
+ times may fail with status RETURN_OUT_OF_RESOURCES. It may not mean\r
+ the number of CPU MTRRs are too small to set such memory attributes.\r
+ Pass the multiple memory range attributes to one call of\r
+ MtrrSetMemoryAttributesInMtrrSettings() may succeed.\r
+ @retval RETURN_BUFFER_TOO_SMALL The fixed internal scratch buffer is too small for MTRR calculation.\r
+ Caller should use MtrrSetMemoryAttributesInMtrrSettings() to specify\r
+ external scratch buffer.\r
+**/\r
+RETURN_STATUS\r
+EFIAPI\r
+MtrrSetMemoryAttribute (\r
+ IN PHYSICAL_ADDRESS BaseAddress,\r
+ IN UINT64 Length,\r
+ IN MTRR_MEMORY_CACHE_TYPE Attribute\r
+ )\r
+{\r
+ return MtrrSetMemoryAttributeInMtrrSettings (NULL, BaseAddress, Length, Attribute);\r
+}\r
+\r
/**\r
Worker function setting variable MTRRs\r
\r
**/\r
VOID\r
MtrrSetVariableMtrrWorker (\r
- IN MTRR_VARIABLE_SETTINGS *VariableSettings\r
+ IN MTRR_VARIABLE_SETTINGS *VariableSettings\r
)\r
{\r
UINT32 Index;\r
UINT32 VariableMtrrCount;\r
\r
VariableMtrrCount = GetVariableMtrrCountWorker ();\r
- ASSERT (VariableMtrrCount <= MTRR_NUMBER_OF_VARIABLE_MTRR);\r
+ ASSERT (VariableMtrrCount <= ARRAY_SIZE (VariableSettings->Mtrr));\r
\r
for (Index = 0; Index < VariableMtrrCount; Index++) {\r
AsmWriteMsr64 (\r
- MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (Index << 1),\r
+ MSR_IA32_MTRR_PHYSBASE0 + (Index << 1),\r
VariableSettings->Mtrr[Index].Base\r
);\r
AsmWriteMsr64 (\r
- MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (Index << 1) + 1,\r
+ MSR_IA32_MTRR_PHYSMASK0 + (Index << 1),\r
VariableSettings->Mtrr[Index].Mask\r
);\r
}\r
}\r
\r
-\r
-/**\r
- This function sets variable MTRRs\r
-\r
- @param[in] VariableSettings A buffer to hold variable MTRRs content.\r
-\r
- @return The pointer of VariableSettings\r
-\r
-**/\r
-MTRR_VARIABLE_SETTINGS*\r
-EFIAPI\r
-MtrrSetVariableMtrr (\r
- IN MTRR_VARIABLE_SETTINGS *VariableSettings\r
- )\r
-{\r
- MTRR_CONTEXT MtrrContext;\r
-\r
- if (!IsMtrrSupported ()) {\r
- return VariableSettings;\r
- }\r
-\r
- PreMtrrChange (&MtrrContext);\r
- MtrrSetVariableMtrrWorker (VariableSettings);\r
- PostMtrrChange (&MtrrContext);\r
- return VariableSettings;\r
-}\r
-\r
/**\r
Worker function setting fixed MTRRs\r
\r
**/\r
VOID\r
MtrrSetFixedMtrrWorker (\r
- IN MTRR_FIXED_SETTINGS *FixedSettings\r
+ IN MTRR_FIXED_SETTINGS *FixedSettings\r
)\r
{\r
UINT32 Index;\r
\r
for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {\r
- AsmWriteMsr64 (\r
- mMtrrLibFixedMtrrTable[Index].Msr,\r
- FixedSettings->Mtrr[Index]\r
- );\r
- }\r
-}\r
-\r
-\r
-/**\r
- This function sets fixed MTRRs\r
-\r
- @param[in] FixedSettings A buffer to hold fixed MTRRs content.\r
-\r
- @retval The pointer of FixedSettings\r
-\r
-**/\r
-MTRR_FIXED_SETTINGS*\r
-EFIAPI\r
-MtrrSetFixedMtrr (\r
- IN MTRR_FIXED_SETTINGS *FixedSettings\r
- )\r
-{\r
- MTRR_CONTEXT MtrrContext;\r
-\r
- if (!IsMtrrSupported ()) {\r
- return FixedSettings;\r
+ AsmWriteMsr64 (\r
+ mMtrrLibFixedMtrrTable[Index].Msr,\r
+ FixedSettings->Mtrr[Index]\r
+ );\r
}\r
-\r
- PreMtrrChange (&MtrrContext);\r
- MtrrSetFixedMtrrWorker (FixedSettings);\r
- PostMtrrChange (&MtrrContext);\r
-\r
- return FixedSettings;\r
}\r
\r
-\r
/**\r
This function gets the content in all MTRRs (variable and fixed)\r
\r
MTRR_SETTINGS *\r
EFIAPI\r
MtrrGetAllMtrrs (\r
- OUT MTRR_SETTINGS *MtrrSetting\r
+ OUT MTRR_SETTINGS *MtrrSetting\r
)\r
{\r
if (!IsMtrrSupported ()) {\r
// Get variable MTRRs\r
//\r
MtrrGetVariableMtrrWorker (\r
+ NULL,\r
GetVariableMtrrCountWorker (),\r
&MtrrSetting->Variables\r
);\r
//\r
// Get MTRR_DEF_TYPE value\r
//\r
- MtrrSetting->MtrrDefType = AsmReadMsr64 (MTRR_LIB_IA32_MTRR_DEF_TYPE);\r
+ MtrrSetting->MtrrDefType = AsmReadMsr64 (MSR_IA32_MTRR_DEF_TYPE);\r
\r
return MtrrSetting;\r
}\r
\r
-\r
/**\r
This function sets all MTRRs (variable and fixed)\r
\r
MTRR_SETTINGS *\r
EFIAPI\r
MtrrSetAllMtrrs (\r
- IN MTRR_SETTINGS *MtrrSetting\r
+ IN MTRR_SETTINGS *MtrrSetting\r
)\r
{\r
MTRR_CONTEXT MtrrContext;\r
return MtrrSetting;\r
}\r
\r
- PreMtrrChange (&MtrrContext);\r
+ MtrrLibPreMtrrChange (&MtrrContext);\r
\r
//\r
// Set fixed MTRRs\r
//\r
// Set MTRR_DEF_TYPE value\r
//\r
- AsmWriteMsr64 (MTRR_LIB_IA32_MTRR_DEF_TYPE, MtrrSetting->MtrrDefType);\r
+ AsmWriteMsr64 (MSR_IA32_MTRR_DEF_TYPE, MtrrSetting->MtrrDefType);\r
\r
- PostMtrrChangeEnableCache (&MtrrContext);\r
+ MtrrLibPostMtrrChangeEnableCache (&MtrrContext);\r
\r
return MtrrSetting;\r
}\r
VOID\r
)\r
{\r
- UINT32 RegEdx;\r
- UINT64 MtrrCap;\r
+ CPUID_VERSION_INFO_EDX Edx;\r
+ MSR_IA32_MTRRCAP_REGISTER MtrrCap;\r
\r
//\r
// Check CPUID(1).EDX[12] for MTRR capability\r
//\r
- AsmCpuid (1, NULL, NULL, NULL, &RegEdx);\r
- if (BitFieldRead32 (RegEdx, 12, 12) == 0) {\r
+ AsmCpuid (CPUID_VERSION_INFO, NULL, NULL, NULL, &Edx.Uint32);\r
+ if (Edx.Bits.MTRR == 0) {\r
return FALSE;\r
}\r
\r
//\r
- // Check IA32_MTRRCAP.[0..7] for number of variable MTRRs and IA32_MTRRCAP[8] for\r
- // fixed MTRRs existence. If number of variable MTRRs is zero, or fixed MTRRs do not\r
+ // Check number of variable MTRRs and fixed MTRRs existence.\r
+ // If number of variable MTRRs is zero, or fixed MTRRs do not\r
// exist, return false.\r
//\r
- MtrrCap = AsmReadMsr64 (MTRR_LIB_IA32_MTRR_CAP);\r
- if ((BitFieldRead64 (MtrrCap, 0, 7) == 0) || (BitFieldRead64 (MtrrCap, 8, 8) == 0)) {\r
+ MtrrCap.Uint64 = AsmReadMsr64 (MSR_IA32_MTRRCAP);\r
+ if ((MtrrCap.Bits.VCNT == 0) || (MtrrCap.Bits.FIX == 0)) {\r
return FALSE;\r
}\r
\r
return TRUE;\r
}\r
+\r
/**
  Worker function prints all MTRRs for debugging.

  If MtrrSetting is not NULL, print MTRR settings from input MTRR
  settings buffer.
  If MtrrSetting is NULL, print MTRR settings from MTRRs.

  The entire body is wrapped in DEBUG_CODE_BEGIN/END, so it compiles to
  nothing in non-debug builds.

  @param MtrrSetting    A buffer holding all MTRRs content.
**/
VOID
MtrrDebugPrintAllMtrrsWorker (
  IN MTRR_SETTINGS  *MtrrSetting
  )
{
  DEBUG_CODE_BEGIN ();
  MTRR_SETTINGS     LocalMtrrs;
  MTRR_SETTINGS     *Mtrrs;
  UINTN             Index;
  UINTN             RangeCount;
  UINT64            MtrrValidBitsMask;
  UINT64            MtrrValidAddressMask;
  UINT32            VariableMtrrCount;
  BOOLEAN           ContainVariableMtrr;
  //
  // Worst-case range count: every 8-bit field of every fixed MTRR plus
  // two ranges per variable MTRR (each can split an existing range),
  // plus the initial full-address-space default range.
  //
  MTRR_MEMORY_RANGE Ranges[
                           ARRAY_SIZE (mMtrrLibFixedMtrrTable) * sizeof (UINT64) + 2 * ARRAY_SIZE (Mtrrs->Variables.Mtrr) + 1
  ];
  MTRR_MEMORY_RANGE RawVariableRanges[ARRAY_SIZE (Mtrrs->Variables.Mtrr)];

  if (!IsMtrrSupported ()) {
    return;
  }

  VariableMtrrCount = GetVariableMtrrCountWorker ();

  //
  // When no settings buffer is given, snapshot the hardware MTRRs into a
  // local copy so the rest of the routine works on one representation.
  //
  if (MtrrSetting != NULL) {
    Mtrrs = MtrrSetting;
  } else {
    MtrrGetAllMtrrs (&LocalMtrrs);
    Mtrrs = &LocalMtrrs;
  }

  //
  // Dump RAW MTRR contents
  //
  DEBUG ((DEBUG_CACHE, "MTRR Settings:\n"));
  DEBUG ((DEBUG_CACHE, "=============\n"));
  DEBUG ((DEBUG_CACHE, "MTRR Default Type: %016lx\n", Mtrrs->MtrrDefType));
  for (Index = 0; Index < ARRAY_SIZE (mMtrrLibFixedMtrrTable); Index++) {
    DEBUG ((DEBUG_CACHE, "Fixed MTRR[%02d]   : %016lx\n", Index, Mtrrs->Fixed.Mtrr[Index]));
  }

  ContainVariableMtrr = FALSE;
  for (Index = 0; Index < VariableMtrrCount; Index++) {
    //
    // BIT11 in the mask MSR is the per-MTRR valid (enable) bit.
    //
    if ((Mtrrs->Variables.Mtrr[Index].Mask & BIT11) == 0) {
      //
      // If mask is not valid, then do not display range
      //
      continue;
    }

    ContainVariableMtrr = TRUE;
    DEBUG ((
      DEBUG_CACHE,
      "Variable MTRR[%02d]: Base=%016lx Mask=%016lx\n",
      Index,
      Mtrrs->Variables.Mtrr[Index].Base,
      Mtrrs->Variables.Mtrr[Index].Mask
      ));
  }

  if (!ContainVariableMtrr) {
    DEBUG ((DEBUG_CACHE, "Variable MTRR    : None.\n"));
  }

  DEBUG ((DEBUG_CACHE, "\n"));

  //
  // Dump MTRR setting in ranges
  //
  DEBUG ((DEBUG_CACHE, "Memory Ranges:\n"));
  DEBUG ((DEBUG_CACHE, "====================================\n"));
  MtrrLibInitializeMtrrMask (&MtrrValidBitsMask, &MtrrValidAddressMask);
  //
  // Start from a single range covering the whole physical address space
  // at the default memory type, then layer the variable and fixed MTRRs
  // on top of it.
  //
  Ranges[0].BaseAddress = 0;
  Ranges[0].Length      = MtrrValidBitsMask + 1;
  Ranges[0].Type        = MtrrGetDefaultMemoryTypeWorker (Mtrrs);
  RangeCount            = 1;

  MtrrLibGetRawVariableRanges (
    &Mtrrs->Variables,
    VariableMtrrCount,
    MtrrValidBitsMask,
    MtrrValidAddressMask,
    RawVariableRanges
    );
  MtrrLibApplyVariableMtrrs (
    RawVariableRanges,
    VariableMtrrCount,
    Ranges,
    ARRAY_SIZE (Ranges),
    &RangeCount
    );

  //
  // Fixed MTRRs applied last: they take precedence below 1MB.
  //
  MtrrLibApplyFixedMtrrs (&Mtrrs->Fixed, Ranges, ARRAY_SIZE (Ranges), &RangeCount);

  for (Index = 0; Index < RangeCount; Index++) {
    DEBUG ((
      DEBUG_CACHE,
      "%a:%016lx-%016lx\n",
      mMtrrMemoryCacheTypeShortName[Ranges[Index].Type],
      Ranges[Index].BaseAddress,
      Ranges[Index].BaseAddress + Ranges[Index].Length - 1
      ));
  }

  DEBUG_CODE_END ();
}
+\r
/**
  This function prints all MTRRs for debugging.
**/
VOID
EFIAPI
MtrrDebugPrintAllMtrrs (
  VOID
  )
{
  //
  // A NULL settings buffer makes the worker read and dump the MTRRs
  // currently programmed in hardware.
  //
  MtrrDebugPrintAllMtrrsWorker (NULL);
}