/** @file\r
MTRR setting library\r
\r
- Copyright (c) 2008 - 2010, Intel Corporation\r
- All rights reserved. This program and the accompanying materials\r
+ Copyright (c) 2008 - 2012, Intel Corporation. All rights reserved.<BR>\r
+ This program and the accompanying materials\r
are licensed and made available under the terms and conditions of the BSD License\r
which accompanies this distribution. The full text of the license may be found at\r
http://opensource.org/licenses/bsd-license.php\r
//\r
// This table defines the offset, base and length of the fixed MTRRs\r
//\r
-STATIC\r
-FIXED_MTRR MtrrLibFixedMtrrTable[] = {\r
+CONST FIXED_MTRR mMtrrLibFixedMtrrTable[] = {\r
{\r
MTRR_LIB_IA32_MTRR_FIX64K_00000,\r
0,\r
},\r
};\r
\r
+//\r
+// Lookup table used to print MTRRs\r
+//\r
+GLOBAL_REMOVE_IF_UNREFERENCED CONST CHAR8 *mMtrrMemoryCacheTypeShortName[] = {\r
+ "UC", // CacheUncacheable\r
+ "WC", // CacheWriteCombining\r
+ "R*", // Invalid\r
+ "R*", // Invalid\r
+ "WT", // CacheWriteThrough\r
+ "WP", // CacheWriteProtected\r
+ "WB", // CacheWriteBack\r
+ "R*" // Invalid\r
+};\r
+\r
/**\r
Returns the variable MTRR count for the CPU.\r
\r
\r
**/\r
UINT32\r
+EFIAPI\r
GetVariableMtrrCount (\r
VOID\r
)\r
{\r
- return (UINT32)(AsmReadMsr64 (MTRR_LIB_IA32_MTRR_CAP) & MTRR_LIB_IA32_MTRR_CAP_VCNT_MASK);\r
+ UINT32 VariableMtrrCount;\r
+\r
+ if (!IsMtrrSupported ()) {\r
+ return 0;\r
+ }\r
+\r
+ VariableMtrrCount = (UINT32)(AsmReadMsr64 (MTRR_LIB_IA32_MTRR_CAP) & MTRR_LIB_IA32_MTRR_CAP_VCNT_MASK);\r
+ ASSERT (VariableMtrrCount <= MTRR_NUMBER_OF_VARIABLE_MTRR);\r
+\r
+ return VariableMtrrCount;\r
}\r
\r
/**\r
\r
**/\r
UINT32\r
+EFIAPI\r
GetFirmwareVariableMtrrCount (\r
VOID\r
)\r
{\r
- return GetVariableMtrrCount () - RESERVED_FIRMWARE_VARIABLE_MTRR_NUMBER;\r
+ UINT32 VariableMtrrCount;\r
+\r
+ VariableMtrrCount = GetVariableMtrrCount ();\r
+ if (VariableMtrrCount < RESERVED_FIRMWARE_VARIABLE_MTRR_NUMBER) {\r
+ return 0;\r
+ }\r
+\r
+ return VariableMtrrCount - RESERVED_FIRMWARE_VARIABLE_MTRR_NUMBER;\r
}\r
\r
/**\r
Returns the default MTRR cache type for the system.\r
\r
- @return MTRR default type\r
+ @return The default MTRR cache type.\r
\r
**/\r
-UINT64\r
-GetMtrrDefaultMemoryType (\r
+MTRR_MEMORY_CACHE_TYPE\r
+EFIAPI\r
+MtrrGetDefaultMemoryType (\r
VOID\r
-)\r
+ )\r
{\r
- return (AsmReadMsr64 (MTRR_LIB_IA32_MTRR_DEF_TYPE) & 0xff);\r
-}\r
+ if (!IsMtrrSupported ()) {\r
+ return CacheUncacheable;\r
+ }\r
\r
+ return (MTRR_MEMORY_CACHE_TYPE) (AsmReadMsr64 (MTRR_LIB_IA32_MTRR_DEF_TYPE) & 0x7);\r
+}\r
\r
/**\r
Preparation before programming MTRR.\r
//\r
// Enter no fill cache mode, CD=1(Bit30), NW=0 (Bit29)\r
//\r
- Value = AsmReadCr0 ();\r
- Value = (UINTN) BitFieldWrite64 (Value, 30, 30, 1);\r
- Value = (UINTN) BitFieldWrite64 (Value, 29, 29, 0);\r
- AsmWriteCr0 (Value);\r
- //\r
- // Flush cache\r
- //\r
- AsmWbinvd ();\r
+ AsmDisableCache ();\r
+\r
//\r
- // Clear PGE flag Bit 7\r
+ // Save original CR4 value and clear PGE flag (Bit 7)\r
//\r
Value = AsmReadCr4 ();\r
- AsmWriteCr4 ((UINTN) BitFieldWrite64 (Value, 7, 7, 0));\r
+ AsmWriteCr4 (Value & (~BIT7));\r
+\r
//\r
// Flush all TLBs\r
//\r
CpuFlushTlb ();\r
+\r
//\r
// Disable Mtrrs\r
//\r
AsmMsrBitFieldWrite64 (MTRR_LIB_IA32_MTRR_DEF_TYPE, 10, 11, 0);\r
\r
+ //\r
+ // Return original CR4 value\r
+ //\r
return Value;\r
}\r
\r
-\r
/**\r
Cleaning up after programming MTRRs.\r
\r
This function will do some clean up after programming MTRRs:\r
- enable MTRR caching functionality, and enable cache\r
+ Flush all TLBs, re-enable caching, restore CR4.\r
\r
@param Cr4 CR4 value to restore\r
\r
**/\r
VOID\r
-PostMtrrChange (\r
- UINTN Cr4\r
+PostMtrrChangeEnableCache (\r
+ IN UINTN Cr4\r
)\r
{\r
- UINTN Value;\r
-\r
//\r
- // Enable Cache MTRR\r
+ // Flush all TLBs \r
//\r
- AsmMsrBitFieldWrite64 (MTRR_LIB_IA32_MTRR_DEF_TYPE, 10, 11, 3);\r
-\r
- //\r
- // Flush all TLBs and cache the second time\r
- //\r
- AsmWbinvd ();\r
CpuFlushTlb ();\r
\r
//\r
// Enable Normal Mode caching CD=NW=0, CD(Bit30), NW(Bit29)\r
//\r
- Value = AsmReadCr0 ();\r
- Value = (UINTN) BitFieldWrite64 (Value, 30, 30, 0);\r
- Value = (UINTN) BitFieldWrite64 (Value, 29, 29, 0);\r
- AsmWriteCr0 (Value);\r
+ AsmEnableCache ();\r
\r
+ //\r
+ // Restore original CR4 value\r
+ //\r
AsmWriteCr4 (Cr4);\r
+}\r
+\r
+/**\r
+ Cleaning up after programming MTRRs.\r
+\r
+ This function will do some clean up after programming MTRRs:\r
+ enable MTRR caching functionality, and enable cache\r
+\r
+ @param Cr4 CR4 value to restore\r
\r
- return ;\r
+**/\r
+VOID\r
+PostMtrrChange (\r
+ IN UINTN Cr4\r
+ )\r
+{\r
+ //\r
+ // Enable Cache MTRR\r
+ //\r
+ AsmMsrBitFieldWrite64 (MTRR_LIB_IA32_MTRR_DEF_TYPE, 10, 11, 3);\r
+\r
+ PostMtrrChangeEnableCache (Cr4);\r
}\r
\r
\r
ClearMask = 0;\r
\r
for (MsrNum = 0; MsrNum < MTRR_NUMBER_OF_FIXED_MTRR; MsrNum++) {\r
- if ((*Base >= MtrrLibFixedMtrrTable[MsrNum].BaseAddress) &&\r
+ if ((*Base >= mMtrrLibFixedMtrrTable[MsrNum].BaseAddress) &&\r
(*Base <\r
(\r
- MtrrLibFixedMtrrTable[MsrNum].BaseAddress +\r
- (8 * MtrrLibFixedMtrrTable[MsrNum].Length)\r
+ mMtrrLibFixedMtrrTable[MsrNum].BaseAddress +\r
+ (8 * mMtrrLibFixedMtrrTable[MsrNum].Length)\r
)\r
)\r
) {\r
for (ByteShift = 0; ByteShift < 8; ByteShift++) {\r
if (*Base ==\r
(\r
- MtrrLibFixedMtrrTable[MsrNum].BaseAddress +\r
- (ByteShift * MtrrLibFixedMtrrTable[MsrNum].Length)\r
+ mMtrrLibFixedMtrrTable[MsrNum].BaseAddress +\r
+ (ByteShift * mMtrrLibFixedMtrrTable[MsrNum].Length)\r
)\r
) {\r
break;\r
\r
for (\r
;\r
- ((ByteShift < 8) && (*Length >= MtrrLibFixedMtrrTable[MsrNum].Length));\r
+ ((ByteShift < 8) && (*Length >= mMtrrLibFixedMtrrTable[MsrNum].Length));\r
ByteShift++\r
) {\r
OrMask |= LShiftU64 ((UINT64) MemoryCacheType, (UINT32) (ByteShift * 8));\r
ClearMask |= LShiftU64 ((UINT64) 0xFF, (UINT32) (ByteShift * 8));\r
- *Length -= MtrrLibFixedMtrrTable[MsrNum].Length;\r
- *Base += MtrrLibFixedMtrrTable[MsrNum].Length;\r
+ *Length -= mMtrrLibFixedMtrrTable[MsrNum].Length;\r
+ *Base += mMtrrLibFixedMtrrTable[MsrNum].Length;\r
}\r
\r
if (ByteShift < 8 && (*Length != 0)) {\r
}\r
\r
TempQword =\r
- (AsmReadMsr64 (MtrrLibFixedMtrrTable[MsrNum].Msr) & ~ClearMask) | OrMask;\r
- AsmWriteMsr64 (MtrrLibFixedMtrrTable[MsrNum].Msr, TempQword);\r
+ (AsmReadMsr64 (mMtrrLibFixedMtrrTable[MsrNum].Msr) & ~ClearMask) | OrMask;\r
+ AsmWriteMsr64 (mMtrrLibFixedMtrrTable[MsrNum].Msr, TempQword);\r
return RETURN_SUCCESS;\r
}\r
\r
UINT32 FirmwareVariableMtrrCount;\r
UINT32 VariableMtrrEnd;\r
\r
+ if (!IsMtrrSupported ()) {\r
+ return 0;\r
+ }\r
+\r
FirmwareVariableMtrrCount = GetFirmwareVariableMtrrCount ();\r
VariableMtrrEnd = MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (2 * GetVariableMtrrCount ()) - 1;\r
\r
UINT64 MtrrEnd;\r
UINT64 EndAddress;\r
UINT32 FirmwareVariableMtrrCount;\r
+ BOOLEAN CoveredByExistingMtrr;\r
\r
FirmwareVariableMtrrCount = GetFirmwareVariableMtrrCount ();\r
\r
*OverwriteExistingMtrr = FALSE;\r
+ CoveredByExistingMtrr = FALSE;\r
EndAddress = *Base +*Length - 1;\r
\r
for (Index = 0; Index < FirmwareVariableMtrrCount; Index++) {\r
//\r
if (Attributes == VariableMtrr[Index].Type) {\r
//\r
- // if the Mtrr range contain the request range, return RETURN_SUCCESS\r
+    // if the MTRR range contains the request range, set a flag, then continue to
+    // invalidate any MTRR of the same request range with higher priority cache type.
//\r
if (VariableMtrr[Index].BaseAddress <= *Base && MtrrEnd >= EndAddress) {\r
- *Length = 0;\r
- return RETURN_SUCCESS;\r
+ CoveredByExistingMtrr = TRUE;\r
+ continue;\r
}\r
//\r
// invalid this MTRR, and program the combine range\r
return RETURN_ACCESS_DENIED;\r
}\r
\r
+ if (CoveredByExistingMtrr) {\r
+ *Length = 0;\r
+ }\r
+\r
return RETURN_SUCCESS;\r
}\r
\r
{\r
UINT64 Result;\r
\r
- if (RShiftU64 (MemoryLength, 32)) {\r
+ if (RShiftU64 (MemoryLength, 32) != 0) {\r
Result = LShiftU64 (\r
(UINT64) GetPowerOfTwo32 (\r
(UINT32) RShiftU64 (MemoryLength, 32)\r
\r
\r
/**\r
- Check the direction to program variable MTRRs.\r
+ Determine the MTRR numbers used to program a memory range.\r
+\r
+ This function first checks the alignment of the base address. If the alignment of the base address <= Length,\r
+ cover the memory range (BaseAddress, alignment) by a MTRR, then BaseAddress += alignment and Length -= alignment.\r
+ Repeat the step until alignment > Length.\r
\r
- This function determines which direction of programming the variable\r
- MTRRs will use fewer MTRRs.\r
+ Then this function determines which direction of programming the variable MTRRs for the remaining length\r
+ will use fewer MTRRs.\r
\r
- @param Input Length of Memory to program MTRR\r
+  @param BaseAddress     Base address of the memory region to program MTRR for
+ @param Length Length of Memory to program MTRR\r
@param MtrrNumber Pointer to the number of necessary MTRRs\r
\r
@retval TRUE Positive direction is better.\r
\r
**/\r
BOOLEAN\r
-GetDirection (\r
- IN UINT64 Input,\r
+GetMtrrNumberAndDirection (\r
+ IN UINT64 BaseAddress,\r
+ IN UINT64 Length,\r
IN UINTN *MtrrNumber\r
)\r
{\r
UINT64 TempQword;\r
+ UINT64 Alignment;\r
UINT32 Positive;\r
UINT32 Subtractive;\r
\r
- TempQword = Input;\r
+ *MtrrNumber = 0;\r
+\r
+ if (BaseAddress != 0) {\r
+ do {\r
+ //\r
+ // Calculate the alignment of the base address.\r
+ //\r
+ Alignment = LShiftU64 (1, (UINTN)LowBitSet64 (BaseAddress));\r
+\r
+ if (Alignment > Length) {\r
+ break;\r
+ }\r
+\r
+ (*MtrrNumber)++;\r
+ BaseAddress += Alignment;\r
+ Length -= Alignment;\r
+ } while (TRUE);\r
+\r
+ if (Length == 0) {\r
+ return TRUE;\r
+ }\r
+ }\r
+\r
+ TempQword = Length;\r
Positive = 0;\r
Subtractive = 0;\r
\r
Positive++;\r
} while (TempQword != 0);\r
\r
- TempQword = Power2MaxMemory (LShiftU64 (Input, 1)) - Input;\r
+ TempQword = Power2MaxMemory (LShiftU64 (Length, 1)) - Length;\r
Subtractive++;\r
do {\r
TempQword -= Power2MaxMemory (TempQword);\r
} while (TempQword != 0);\r
\r
if (Positive <= Subtractive) {\r
- *MtrrNumber = Positive;\r
+ *MtrrNumber += Positive;\r
return TRUE;\r
} else {\r
- *MtrrNumber = Subtractive;\r
+ *MtrrNumber += Subtractive;\r
return FALSE;\r
}\r
}\r
@param VariableMtrr The array to shadow variable MTRRs content\r
\r
**/\r
-STATIC\r
VOID\r
InvalidateMtrr (\r
IN VARIABLE_MTRR *VariableMtrr\r
Index = 0;\r
VariableMtrrCount = GetVariableMtrrCount ();\r
while (Index < VariableMtrrCount) {\r
- if (VariableMtrr[Index].Valid == FALSE && VariableMtrr[Index].Used == TRUE ) {\r
+ if (!VariableMtrr[Index].Valid && VariableMtrr[Index].Used) {\r
AsmWriteMsr64 (VariableMtrr[Index].Msr, 0);\r
AsmWriteMsr64 (VariableMtrr[Index].Msr + 1, 0);\r
VariableMtrr[Index].Used = FALSE;\r
@param MtrrValidAddressMask The valid address mask for MTRR\r
\r
**/\r
-STATIC\r
VOID\r
ProgramVariableMtrr (\r
IN UINTN MtrrNumber,\r
@return The enum item in MTRR_MEMORY_CACHE_TYPE\r
\r
**/\r
-STATIC\r
MTRR_MEMORY_CACHE_TYPE\r
GetMemoryCacheTypeFromMtrrType (\r
IN UINT64 MtrrType\r
@param MtrrValidAddressMask The valid address mask for the MTRR\r
\r
**/\r
-STATIC\r
VOID\r
MtrrLibInitializeMtrrMask (\r
OUT UINT64 *MtrrValidBitsMask,\r
OUT UINT64 *MtrrValidAddressMask\r
)\r
{\r
- UINT32 RegEax;\r
- UINT8 PhysicalAddressBits;\r
+ UINT32 RegEax;\r
+ UINT8 PhysicalAddressBits;\r
\r
AsmCpuid (0x80000000, &RegEax, NULL, NULL, NULL);\r
\r
UINT64 TempQword;\r
RETURN_STATUS Status;\r
UINT64 MemoryType;\r
- UINT64 Remainder;\r
+ UINT64 Alignment;\r
BOOLEAN OverLap;\r
BOOLEAN Positive;\r
UINT32 MsrNum;\r
UINT32 FirmwareVariableMtrrCount;\r
UINT32 VariableMtrrEnd;\r
\r
+ DEBUG((DEBUG_CACHE, "MtrrSetMemoryAttribute() %a:%016lx-%016lx\n", mMtrrMemoryCacheTypeShortName[Attribute], BaseAddress, Length));\r
+\r
+ if (!IsMtrrSupported ()) {\r
+ Status = RETURN_UNSUPPORTED;\r
+ goto Done;\r
+ }\r
+\r
FirmwareVariableMtrrCount = GetFirmwareVariableMtrrCount ();\r
VariableMtrrEnd = MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (2 * GetVariableMtrrCount ()) - 1;\r
\r
// Check for an invalid parameter\r
//\r
if (Length == 0) {\r
- return RETURN_INVALID_PARAMETER;\r
+ Status = RETURN_INVALID_PARAMETER;\r
+ goto Done;\r
}\r
\r
if (\r
- (BaseAddress &~MtrrValidAddressMask) != 0 ||\r
- (Length &~MtrrValidAddressMask) != 0\r
+ (BaseAddress & ~MtrrValidAddressMask) != 0 ||\r
+ (Length & ~MtrrValidAddressMask) != 0\r
) {\r
- return RETURN_UNSUPPORTED;\r
+ Status = RETURN_UNSUPPORTED;\r
+ goto Done;\r
}\r
\r
//\r
Status = ProgramFixedMtrr (MemoryType, &BaseAddress, &Length);\r
PostMtrrChange (Cr4);\r
if (RETURN_ERROR (Status)) {\r
- return Status;\r
+ goto Done;\r
}\r
}\r
\r
\r
//\r
// Since memory ranges below 1MB will be overridden by the fixed MTRRs,\r
- // we can set the bade to 0 to save variable MTRRs.\r
+ // we can set the base to 0 to save variable MTRRs.\r
//\r
if (BaseAddress == BASE_1MB) {\r
BaseAddress = 0;\r
Length += SIZE_1MB;\r
}\r
\r
- //\r
- // Check memory base address alignment\r
- //\r
- DivU64x64Remainder (BaseAddress, Power2MaxMemory (LShiftU64 (Length, 1)), &Remainder);\r
- if (Remainder != 0) {\r
- DivU64x64Remainder (BaseAddress, Power2MaxMemory (Length), &Remainder);\r
- if (Remainder != 0) {\r
- Status = RETURN_UNSUPPORTED;\r
- goto Done;\r
- }\r
- }\r
-\r
//\r
// Check for overlap\r
//\r
\r
if (Length == 0) {\r
//\r
- // Combined successfully\r
+ // Combined successfully, invalidate the now-unused MTRRs\r
//\r
+ InvalidateMtrr(VariableMtrr);\r
Status = RETURN_SUCCESS;\r
goto Done;\r
}\r
}\r
\r
- //\r
- // Program Variable MTRRs\r
- //\r
- // Avoid hardcode here and read data dynamically\r
- //\r
- if (UsedMtrr >= FirmwareVariableMtrrCount) {\r
- Status = RETURN_OUT_OF_RESOURCES;\r
- goto Done;\r
- }\r
-\r
//\r
// The memory type is the same with the type specified by\r
// MTRR_LIB_IA32_MTRR_DEF_TYPE.\r
//\r
- if ((!OverwriteExistingMtrr) && (Attribute == GetMtrrDefaultMemoryType ())) {\r
+ if ((!OverwriteExistingMtrr) && (Attribute == MtrrGetDefaultMemoryType ())) {\r
//\r
// Invalidate the now-unused MTRRs\r
//\r
goto Done;\r
}\r
\r
- TempQword = Length;\r
+ Positive = GetMtrrNumberAndDirection (BaseAddress, Length, &MtrrNumber);\r
\r
+ if ((UsedMtrr + MtrrNumber) > FirmwareVariableMtrrCount) {\r
+ Status = RETURN_OUT_OF_RESOURCES;\r
+ goto Done;\r
+ }\r
\r
- if (TempQword == Power2MaxMemory (TempQword)) {\r
- //\r
- // Invalidate the now-unused MTRRs\r
- //\r
- InvalidateMtrr(VariableMtrr);\r
+ //\r
+ // Invalidate the now-unused MTRRs\r
+ //\r
+ InvalidateMtrr(VariableMtrr);\r
+\r
+ //\r
+ // Find first unused MTRR\r
+ //\r
+ for (MsrNum = MTRR_LIB_IA32_VARIABLE_MTRR_BASE;\r
+ MsrNum < VariableMtrrEnd;\r
+ MsrNum += 2\r
+ ) {\r
+ if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {\r
+ break;\r
+ }\r
+ }\r
+\r
+ if (BaseAddress != 0) {\r
+ do {\r
+ //\r
+ // Calculate the alignment of the base address.\r
+ //\r
+ Alignment = LShiftU64 (1, (UINTN)LowBitSet64 (BaseAddress));\r
+\r
+ if (Alignment > Length) {\r
+ break;\r
+ }\r
+\r
+ //\r
+ // Find unused MTRR\r
+ //\r
+ for (; MsrNum < VariableMtrrEnd; MsrNum += 2) {\r
+ if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {\r
+ break;\r
+ }\r
+ }\r
+\r
+ ProgramVariableMtrr (\r
+ MsrNum,\r
+ BaseAddress,\r
+ Alignment,\r
+ MemoryType,\r
+ MtrrValidAddressMask\r
+ );\r
+ BaseAddress += Alignment;\r
+ Length -= Alignment;\r
+ } while (TRUE);\r
+\r
+ if (Length == 0) {\r
+ goto Done;\r
+ }\r
+ }\r
+\r
+ TempQword = Length;\r
+\r
+ if (!Positive) {\r
+ Length = Power2MaxMemory (LShiftU64 (TempQword, 1));\r
\r
//\r
- // Find first unused MTRR\r
+ // Find unused MTRR\r
//\r
- for (MsrNum = MTRR_LIB_IA32_VARIABLE_MTRR_BASE;\r
- MsrNum < VariableMtrrEnd;\r
- MsrNum += 2\r
- ) {\r
+ for (; MsrNum < VariableMtrrEnd; MsrNum += 2) {\r
if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {\r
break;\r
}\r
MemoryType,\r
MtrrValidAddressMask\r
);\r
- } else {\r
-\r
- Positive = GetDirection (TempQword, &MtrrNumber);\r
-\r
- if ((UsedMtrr + MtrrNumber) > FirmwareVariableMtrrCount) {\r
- Status = RETURN_OUT_OF_RESOURCES;\r
- goto Done;\r
- }\r
-\r
- //\r
- // Invalidate the now-unused MTRRs\r
- //\r
- InvalidateMtrr(VariableMtrr);\r
+ BaseAddress += Length;\r
+ TempQword = Length - TempQword;\r
+ MemoryType = MTRR_CACHE_UNCACHEABLE;\r
+ }\r
\r
+ do {\r
//\r
- // Find first unused MTRR\r
+ // Find unused MTRR\r
//\r
- for (MsrNum = MTRR_LIB_IA32_VARIABLE_MTRR_BASE;\r
- MsrNum < VariableMtrrEnd;\r
- MsrNum += 2\r
- ) {\r
+ for (; MsrNum < VariableMtrrEnd; MsrNum += 2) {\r
if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {\r
break;\r
}\r
}\r
\r
+ Length = Power2MaxMemory (TempQword);\r
if (!Positive) {\r
- Length = Power2MaxMemory (LShiftU64 (TempQword, 1));\r
- ProgramVariableMtrr (\r
- MsrNum,\r
- BaseAddress,\r
- Length,\r
- MemoryType,\r
- MtrrValidAddressMask\r
- );\r
- BaseAddress += Length;\r
- TempQword = Length - TempQword;\r
- MemoryType = MTRR_CACHE_UNCACHEABLE;\r
+ BaseAddress -= Length;\r
}\r
\r
- do {\r
- //\r
- // Find unused MTRR\r
- //\r
- for (; MsrNum < VariableMtrrEnd; MsrNum += 2) {\r
- if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {\r
- break;\r
- }\r
- }\r
-\r
- Length = Power2MaxMemory (TempQword);\r
- if (!Positive) {\r
- BaseAddress -= Length;\r
- }\r
+ ProgramVariableMtrr (\r
+ MsrNum,\r
+ BaseAddress,\r
+ Length,\r
+ MemoryType,\r
+ MtrrValidAddressMask\r
+ );\r
\r
- ProgramVariableMtrr (\r
- MsrNum,\r
- BaseAddress,\r
- Length,\r
- MemoryType,\r
- MtrrValidAddressMask\r
- );\r
+ if (Positive) {\r
+ BaseAddress += Length;\r
+ }\r
+ TempQword -= Length;\r
\r
- if (Positive) {\r
- BaseAddress += Length;\r
- }\r
- TempQword -= Length;\r
+ } while (TempQword > 0);\r
\r
- } while (TempQword > 0);\r
+Done:\r
+ DEBUG((DEBUG_CACHE, " Status = %r\n", Status));\r
+ if (!RETURN_ERROR (Status)) {\r
+ MtrrDebugPrintAllMtrrs ();\r
}\r
\r
-Done:\r
return Status;\r
-\r
}\r
\r
\r
UINT64 MtrrValidAddressMask;\r
UINTN VariableMtrrCount;\r
\r
+ if (!IsMtrrSupported ()) {\r
+ return CacheUncacheable;\r
+ }\r
+\r
//\r
// Check if MTRR is enabled, if not, return UC as attribute\r
//\r
// Go through the fixed MTRR\r
//\r
for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {\r
- if (Address >= MtrrLibFixedMtrrTable[Index].BaseAddress &&\r
+ if (Address >= mMtrrLibFixedMtrrTable[Index].BaseAddress &&\r
Address < (\r
- MtrrLibFixedMtrrTable[Index].BaseAddress +\r
- (MtrrLibFixedMtrrTable[Index].Length * 8)\r
+ mMtrrLibFixedMtrrTable[Index].BaseAddress +\r
+ (mMtrrLibFixedMtrrTable[Index].Length * 8)\r
)\r
) {\r
SubIndex =\r
- ((UINTN)Address - MtrrLibFixedMtrrTable[Index].BaseAddress) /\r
- MtrrLibFixedMtrrTable[Index].Length;\r
- TempQword = AsmReadMsr64 (MtrrLibFixedMtrrTable[Index].Msr);\r
+ ((UINTN)Address - mMtrrLibFixedMtrrTable[Index].BaseAddress) /\r
+ mMtrrLibFixedMtrrTable[Index].Length;\r
+ TempQword = AsmReadMsr64 (mMtrrLibFixedMtrrTable[Index].Msr);\r
MtrrType = RShiftU64 (TempQword, SubIndex * 8) & 0xFF;\r
return GetMemoryCacheTypeFromMtrrType (MtrrType);\r
}\r
// Go through the variable MTRR\r
//\r
VariableMtrrCount = GetVariableMtrrCount ();\r
+ ASSERT (VariableMtrrCount <= MTRR_NUMBER_OF_VARIABLE_MTRR);\r
+\r
for (Index = 0; Index < VariableMtrrCount; Index++) {\r
if (VariableMtrr[Index].Valid) {\r
if (Address >= VariableMtrr[Index].BaseAddress &&\r
UINT32 Index;\r
UINT32 VariableMtrrCount;\r
\r
+ if (!IsMtrrSupported ()) {\r
+ return VariableSettings;\r
+ }\r
+\r
VariableMtrrCount = GetVariableMtrrCount ();\r
+ ASSERT (VariableMtrrCount <= MTRR_NUMBER_OF_VARIABLE_MTRR);\r
+\r
for (Index = 0; Index < VariableMtrrCount; Index++) {\r
VariableSettings->Mtrr[Index].Base =\r
AsmReadMsr64 (MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (Index << 1));\r
UINT32 VariableMtrrCount;\r
\r
VariableMtrrCount = GetVariableMtrrCount ();\r
+ ASSERT (VariableMtrrCount <= MTRR_NUMBER_OF_VARIABLE_MTRR);\r
+\r
for (Index = 0; Index < VariableMtrrCount; Index++) {\r
AsmWriteMsr64 (\r
MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (Index << 1),\r
{\r
UINTN Cr4;\r
\r
+ if (!IsMtrrSupported ()) {\r
+ return VariableSettings;\r
+ }\r
+\r
Cr4 = PreMtrrChange ();\r
MtrrSetVariableMtrrWorker (VariableSettings);\r
PostMtrrChange (Cr4);\r
{\r
UINT32 Index;\r
\r
+ if (!IsMtrrSupported ()) {\r
+ return FixedSettings;\r
+ }\r
+\r
for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {\r
FixedSettings->Mtrr[Index] =\r
- AsmReadMsr64 (MtrrLibFixedMtrrTable[Index].Msr);\r
+ AsmReadMsr64 (mMtrrLibFixedMtrrTable[Index].Msr);\r
};\r
\r
return FixedSettings;\r
\r
for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {\r
AsmWriteMsr64 (\r
- MtrrLibFixedMtrrTable[Index].Msr,\r
+ mMtrrLibFixedMtrrTable[Index].Msr,\r
FixedSettings->Mtrr[Index]\r
);\r
}\r
{\r
UINTN Cr4;\r
\r
+ if (!IsMtrrSupported ()) {\r
+ return FixedSettings;\r
+ }\r
+\r
Cr4 = PreMtrrChange ();\r
MtrrSetFixedMtrrWorker (FixedSettings);\r
PostMtrrChange (Cr4);\r
OUT MTRR_SETTINGS *MtrrSetting\r
)\r
{\r
+ if (!IsMtrrSupported ()) {\r
+ return MtrrSetting;\r
+ }\r
+\r
//\r
// Get fixed MTRRs\r
//\r
{\r
UINTN Cr4;\r
\r
+ if (!IsMtrrSupported ()) {\r
+ return MtrrSetting;\r
+ }\r
+\r
Cr4 = PreMtrrChange ();\r
\r
//\r
//\r
AsmWriteMsr64 (MTRR_LIB_IA32_MTRR_DEF_TYPE, MtrrSetting->MtrrDefType);\r
\r
- PostMtrrChange (Cr4);\r
+ PostMtrrChangeEnableCache (Cr4);\r
\r
return MtrrSetting;\r
}\r
\r
-\r
/**\r
This function prints all MTRRs for debugging.\r
**/\r
VOID\r
+EFIAPI\r
MtrrDebugPrintAllMtrrs (\r
+ VOID\r
)\r
{\r
DEBUG_CODE (\r
- {\r
- MTRR_SETTINGS MtrrSettings;\r
- UINTN Index;\r
- UINTN VariableMtrrCount;\r
+ MTRR_SETTINGS MtrrSettings;\r
+ UINTN Index;\r
+ UINTN Index1;\r
+ UINTN VariableMtrrCount;\r
+ UINT64 Base;\r
+ UINT64 Limit;\r
+ UINT64 MtrrBase;\r
+ UINT64 MtrrLimit;\r
+ UINT64 RangeBase;\r
+ UINT64 RangeLimit;\r
+ UINT64 NoRangeBase;\r
+ UINT64 NoRangeLimit;\r
+ UINT32 RegEax;\r
+ UINTN MemoryType;\r
+ UINTN PreviousMemoryType;\r
+ BOOLEAN Found;\r
+\r
+ if (!IsMtrrSupported ()) {\r
+ return;\r
+ }\r
\r
- MtrrGetAllMtrrs (&MtrrSettings);\r
- DEBUG((EFI_D_ERROR, "DefaultType = %016lx\n", MtrrSettings.MtrrDefType));\r
- for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {\r
- DEBUG((\r
- EFI_D_ERROR, "Fixed[%02d] = %016lx\n",\r
- Index,\r
- MtrrSettings.Fixed.Mtrr[Index]\r
- ));\r
- }\r
+ DEBUG((DEBUG_CACHE, "MTRR Settings\n"));\r
+ DEBUG((DEBUG_CACHE, "=============\n"));\r
+ \r
+ MtrrGetAllMtrrs (&MtrrSettings);\r
+ DEBUG((DEBUG_CACHE, "MTRR Default Type: %016lx\n", MtrrSettings.MtrrDefType));\r
+ for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {\r
+ DEBUG((DEBUG_CACHE, "Fixed MTRR[%02d] : %016lx\n", Index, MtrrSettings.Fixed.Mtrr[Index]));\r
+ }\r
\r
- VariableMtrrCount = GetVariableMtrrCount ();\r
- for (Index = 0; Index < VariableMtrrCount; Index++) {\r
- DEBUG((\r
- EFI_D_ERROR, "Variable[%02d] = %016lx, %016lx\n",\r
- Index,\r
- MtrrSettings.Variables.Mtrr[Index].Base,\r
- MtrrSettings.Variables.Mtrr[Index].Mask\r
- ));\r
+ VariableMtrrCount = GetVariableMtrrCount ();\r
+ for (Index = 0; Index < VariableMtrrCount; Index++) {\r
+ DEBUG((DEBUG_CACHE, "Variable MTRR[%02d]: Base=%016lx Mask=%016lx\n",\r
+ Index,\r
+ MtrrSettings.Variables.Mtrr[Index].Base,\r
+ MtrrSettings.Variables.Mtrr[Index].Mask\r
+ ));\r
+ }\r
+ DEBUG((DEBUG_CACHE, "\n"));\r
+ DEBUG((DEBUG_CACHE, "MTRR Ranges\n"));\r
+ DEBUG((DEBUG_CACHE, "====================================\n"));\r
+\r
+ Base = 0;\r
+ PreviousMemoryType = MTRR_CACHE_INVALID_TYPE;\r
+ for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {\r
+ Base = mMtrrLibFixedMtrrTable[Index].BaseAddress;\r
+ for (Index1 = 0; Index1 < 8; Index1++) {\r
+ MemoryType = (UINTN)(RShiftU64 (MtrrSettings.Fixed.Mtrr[Index], Index1 * 8) & 0xff);\r
+ if (MemoryType > CacheWriteBack) {\r
+ MemoryType = MTRR_CACHE_INVALID_TYPE;\r
+ } \r
+ if (MemoryType != PreviousMemoryType) {\r
+ if (PreviousMemoryType != MTRR_CACHE_INVALID_TYPE) {\r
+ DEBUG((DEBUG_CACHE, "%016lx\n", Base - 1));\r
+ }\r
+ PreviousMemoryType = MemoryType;\r
+ DEBUG((DEBUG_CACHE, "%a:%016lx-", mMtrrMemoryCacheTypeShortName[MemoryType], Base));\r
+ }\r
+ Base += mMtrrLibFixedMtrrTable[Index].Length;\r
}\r
}\r
+ DEBUG((DEBUG_CACHE, "%016lx\n", Base - 1));\r
+\r
+ VariableMtrrCount = GetVariableMtrrCount ();\r
+\r
+ Base = BASE_1MB;\r
+ PreviousMemoryType = MTRR_CACHE_INVALID_TYPE;\r
+ do {\r
+ MemoryType = MtrrGetMemoryAttribute (Base);\r
+ if (MemoryType > CacheWriteBack) {\r
+ MemoryType = MTRR_CACHE_INVALID_TYPE;\r
+ }\r
+\r
+ if (MemoryType != PreviousMemoryType) {\r
+ if (PreviousMemoryType != MTRR_CACHE_INVALID_TYPE) {\r
+ DEBUG((DEBUG_CACHE, "%016lx\n", Base - 1));\r
+ }\r
+ PreviousMemoryType = MemoryType;\r
+ DEBUG((DEBUG_CACHE, "%a:%016lx-", mMtrrMemoryCacheTypeShortName[MemoryType], Base));\r
+ }\r
+ \r
+ RangeBase = BASE_1MB; \r
+ NoRangeBase = BASE_1MB;\r
+ Limit = BIT36 - 1;\r
+ AsmCpuid (0x80000000, &RegEax, NULL, NULL, NULL);\r
+ if (RegEax >= 0x80000008) {\r
+ AsmCpuid (0x80000008, &RegEax, NULL, NULL, NULL);\r
+ Limit = LShiftU64 (1, RegEax & 0xff) - 1;\r
+ }\r
+ RangeLimit = Limit;\r
+ NoRangeLimit = Limit;\r
+ \r
+ for (Index = 0, Found = FALSE; Index < VariableMtrrCount; Index++) {\r
+ if ((MtrrSettings.Variables.Mtrr[Index].Mask & BIT11) == 0) {\r
+ //\r
+ // If mask is not valid, then do not display range\r
+ //\r
+ continue;\r
+ }\r
+ MtrrBase = (MtrrSettings.Variables.Mtrr[Index].Base & (~(SIZE_4KB - 1)));\r
+ MtrrLimit = MtrrBase + ((~(MtrrSettings.Variables.Mtrr[Index].Mask & (~(SIZE_4KB - 1)))) & Limit);\r
+\r
+ if (Base >= MtrrBase && Base < MtrrLimit) {\r
+ Found = TRUE;\r
+ }\r
+ \r
+ if (Base >= MtrrBase && MtrrBase > RangeBase) {\r
+ RangeBase = MtrrBase;\r
+ }\r
+ if (Base > MtrrLimit && MtrrLimit > RangeBase) {\r
+ RangeBase = MtrrLimit + 1;\r
+ }\r
+ if (Base < MtrrBase && MtrrBase < RangeLimit) {\r
+ RangeLimit = MtrrBase - 1;\r
+ }\r
+ if (Base < MtrrLimit && MtrrLimit <= RangeLimit) {\r
+ RangeLimit = MtrrLimit;\r
+ }\r
+ \r
+ if (Base > MtrrLimit && NoRangeBase < MtrrLimit) {\r
+ NoRangeBase = MtrrLimit + 1;\r
+ }\r
+ if (Base < MtrrBase && NoRangeLimit > MtrrBase) {\r
+ NoRangeLimit = MtrrBase - 1;\r
+ }\r
+ }\r
+ \r
+ if (Found) {\r
+ Base = RangeLimit + 1;\r
+ } else {\r
+ Base = NoRangeLimit + 1;\r
+ }\r
+ } while (Found);\r
+ DEBUG((DEBUG_CACHE, "%016lx\n\n", Base - 1));\r
);\r
}\r
\r
+/**\r
+ Checks if MTRR is supported.\r
+\r
+ @retval TRUE MTRR is supported.\r
+ @retval FALSE MTRR is not supported.\r
+\r
+**/\r
+BOOLEAN\r
+EFIAPI\r
+IsMtrrSupported (\r
+ VOID\r
+ )\r
+{\r
+ UINT32 RegEdx;\r
+ UINT64 MtrrCap;\r
+\r
+ //\r
+ // Check CPUID(1).EDX[12] for MTRR capability\r
+ //\r
+ AsmCpuid (1, NULL, NULL, NULL, &RegEdx);\r
+ if (BitFieldRead32 (RegEdx, 12, 12) == 0) {\r
+ return FALSE;\r
+ }\r
+\r
+ //\r
+ // Check IA32_MTRRCAP.[0..7] for number of variable MTRRs and IA32_MTRRCAP[8] for\r
+ // fixed MTRRs existence. If number of variable MTRRs is zero, or fixed MTRRs do not\r
+ // exist, return false.\r
+ //\r
+ MtrrCap = AsmReadMsr64 (MTRR_LIB_IA32_MTRR_CAP);\r
+ if ((BitFieldRead64 (MtrrCap, 0, 7) == 0) || (BitFieldRead64 (MtrrCap, 8, 8) == 0)) {\r
+ return FALSE;\r
+ }\r
+\r
+ return TRUE;\r
+}\r