/** @file\r
MTRR setting library\r
\r
- Copyright (c) 2008 - 2010, Intel Corporation. All rights reserved.<BR>\r
+ Copyright (c) 2008 - 2015, Intel Corporation. All rights reserved.<BR>\r
This program and the accompanying materials\r
are licensed and made available under the terms and conditions of the BSD License\r
which accompanies this distribution. The full text of the license may be found at\r
#include <Library/BaseMemoryLib.h>\r
#include <Library/DebugLib.h>\r
\r
+//\r
+// Context to save and restore when MTRRs are programmed\r
+//\r
+typedef struct {\r
+ UINTN Cr4;\r
+ BOOLEAN InterruptState;\r
+} MTRR_CONTEXT;\r
+\r
//\r
// This table defines the offset, base and length of the fixed MTRRs\r
//\r
-FIXED_MTRR MtrrLibFixedMtrrTable[] = {\r
+CONST FIXED_MTRR mMtrrLibFixedMtrrTable[] = {\r
{\r
MTRR_LIB_IA32_MTRR_FIX64K_00000,\r
0,\r
},\r
};\r
\r
+//\r
+// Lookup table used to print MTRRs\r
+//\r
+GLOBAL_REMOVE_IF_UNREFERENCED CONST CHAR8 *mMtrrMemoryCacheTypeShortName[] = {\r
+ "UC", // CacheUncacheable\r
+ "WC", // CacheWriteCombining\r
+ "R*", // Invalid\r
+ "R*", // Invalid\r
+ "WT", // CacheWriteThrough\r
+ "WP", // CacheWriteProtected\r
+ "WB", // CacheWriteBack\r
+ "R*" // Invalid\r
+};\r
+\r
/**\r
Returns the variable MTRR count for the CPU.\r
\r
VOID\r
)\r
{\r
+ UINT32 VariableMtrrCount;\r
+\r
if (!IsMtrrSupported ()) {\r
return 0;\r
}\r
\r
- return (UINT32)(AsmReadMsr64 (MTRR_LIB_IA32_MTRR_CAP) & MTRR_LIB_IA32_MTRR_CAP_VCNT_MASK);\r
+ VariableMtrrCount = (UINT32)(AsmReadMsr64 (MTRR_LIB_IA32_MTRR_CAP) & MTRR_LIB_IA32_MTRR_CAP_VCNT_MASK);\r
+ ASSERT (VariableMtrrCount <= MTRR_NUMBER_OF_VARIABLE_MTRR);\r
+\r
+ return VariableMtrrCount;\r
}\r
\r
/**\r
)\r
{\r
UINT32 VariableMtrrCount;\r
+ UINT32 ReservedMtrrNumber;\r
\r
VariableMtrrCount = GetVariableMtrrCount ();\r
- if (VariableMtrrCount < RESERVED_FIRMWARE_VARIABLE_MTRR_NUMBER) {\r
+ ReservedMtrrNumber = PcdGet32 (PcdCpuNumberOfReservedVariableMtrrs);\r
+ if (VariableMtrrCount < ReservedMtrrNumber) {\r
return 0;\r
}\r
\r
- return VariableMtrrCount - RESERVED_FIRMWARE_VARIABLE_MTRR_NUMBER;\r
+ return VariableMtrrCount - ReservedMtrrNumber;\r
}\r
\r
/**\r
Returns the default MTRR cache type for the system.\r
\r
- @return MTRR default type\r
+ @return The default MTRR cache type.\r
\r
**/\r
-UINT64\r
-GetMtrrDefaultMemoryType (\r
+MTRR_MEMORY_CACHE_TYPE\r
+EFIAPI\r
+MtrrGetDefaultMemoryType (\r
VOID\r
-)\r
+ )\r
{\r
- return (AsmReadMsr64 (MTRR_LIB_IA32_MTRR_DEF_TYPE) & 0xff);\r
-}\r
+ if (!IsMtrrSupported ()) {\r
+ return CacheUncacheable;\r
+ }\r
\r
+ return (MTRR_MEMORY_CACHE_TYPE) (AsmReadMsr64 (MTRR_LIB_IA32_MTRR_DEF_TYPE) & 0x7);\r
+}\r
\r
/**\r
Preparation before programming MTRR.\r
This function will do some preparation for programming MTRRs:\r
disable cache, invalid cache and disable MTRR caching functionality\r
\r
- @return CR4 value before changing.\r
+ @param[out] MtrrContext Pointer to context to save\r
\r
**/\r
-UINTN\r
+VOID\r
PreMtrrChange (\r
- VOID\r
+ OUT MTRR_CONTEXT *MtrrContext\r
)\r
{\r
- UINTN Value;\r
-\r
+ //\r
+ // Disable interrupts and save current interrupt state\r
+ //\r
+ MtrrContext->InterruptState = SaveAndDisableInterrupts();\r
+ \r
//\r
// Enter no fill cache mode, CD=1(Bit30), NW=0 (Bit29)\r
//\r
//\r
// Save original CR4 value and clear PGE flag (Bit 7)\r
//\r
- Value = AsmReadCr4 ();\r
- AsmWriteCr4 (Value & (~BIT7));\r
+ MtrrContext->Cr4 = AsmReadCr4 ();\r
+ AsmWriteCr4 (MtrrContext->Cr4 & (~BIT7));\r
\r
//\r
// Flush all TLBs\r
// Disable Mtrrs\r
//\r
AsmMsrBitFieldWrite64 (MTRR_LIB_IA32_MTRR_DEF_TYPE, 10, 11, 0);\r
-\r
- //\r
- // Return original CR4 value\r
- //\r
- return Value;\r
}\r
\r
-\r
/**\r
Cleaning up after programming MTRRs.\r
\r
This function will do some clean up after programming MTRRs:\r
- enable MTRR caching functionality, and enable cache\r
+ Flush all TLBs, re-enable caching, restore CR4.\r
\r
- @param Cr4 CR4 value to restore\r
+ @param[in] MtrrContext Pointer to context to restore\r
\r
**/\r
VOID\r
-PostMtrrChange (\r
- UINTN Cr4\r
+PostMtrrChangeEnableCache (\r
+ IN MTRR_CONTEXT *MtrrContext\r
)\r
{\r
- //\r
- // Enable Cache MTRR\r
- //\r
- AsmMsrBitFieldWrite64 (MTRR_LIB_IA32_MTRR_DEF_TYPE, 10, 11, 3);\r
-\r
//\r
// Flush all TLBs \r
//\r
//\r
// Restore original CR4 value\r
//\r
- AsmWriteCr4 (Cr4);\r
+ AsmWriteCr4 (MtrrContext->Cr4);\r
+ \r
+ //\r
+ // Restore original interrupt state\r
+ //\r
+ SetInterruptState (MtrrContext->InterruptState);\r
+}\r
+\r
+/**\r
+ Cleaning up after programming MTRRs.\r
+\r
+ This function will do some clean up after programming MTRRs:\r
+ enable MTRR caching functionality, and enable cache\r
+\r
+ @param[in] MtrrContext Pointer to context to restore\r
+\r
+**/\r
+VOID\r
+PostMtrrChange (\r
+ IN MTRR_CONTEXT *MtrrContext\r
+ )\r
+{\r
+ //\r
+ // Enable Cache MTRR\r
+ //\r
+ AsmMsrBitFieldWrite64 (MTRR_LIB_IA32_MTRR_DEF_TYPE, 10, 11, 3);\r
+\r
+ PostMtrrChangeEnableCache (MtrrContext);\r
}\r
\r
\r
ClearMask = 0;\r
\r
for (MsrNum = 0; MsrNum < MTRR_NUMBER_OF_FIXED_MTRR; MsrNum++) {\r
- if ((*Base >= MtrrLibFixedMtrrTable[MsrNum].BaseAddress) &&\r
+ if ((*Base >= mMtrrLibFixedMtrrTable[MsrNum].BaseAddress) &&\r
(*Base <\r
(\r
- MtrrLibFixedMtrrTable[MsrNum].BaseAddress +\r
- (8 * MtrrLibFixedMtrrTable[MsrNum].Length)\r
+ mMtrrLibFixedMtrrTable[MsrNum].BaseAddress +\r
+ (8 * mMtrrLibFixedMtrrTable[MsrNum].Length)\r
)\r
)\r
) {\r
for (ByteShift = 0; ByteShift < 8; ByteShift++) {\r
if (*Base ==\r
(\r
- MtrrLibFixedMtrrTable[MsrNum].BaseAddress +\r
- (ByteShift * MtrrLibFixedMtrrTable[MsrNum].Length)\r
+ mMtrrLibFixedMtrrTable[MsrNum].BaseAddress +\r
+ (ByteShift * mMtrrLibFixedMtrrTable[MsrNum].Length)\r
)\r
) {\r
break;\r
\r
for (\r
;\r
- ((ByteShift < 8) && (*Length >= MtrrLibFixedMtrrTable[MsrNum].Length));\r
+ ((ByteShift < 8) && (*Length >= mMtrrLibFixedMtrrTable[MsrNum].Length));\r
ByteShift++\r
) {\r
OrMask |= LShiftU64 ((UINT64) MemoryCacheType, (UINT32) (ByteShift * 8));\r
ClearMask |= LShiftU64 ((UINT64) 0xFF, (UINT32) (ByteShift * 8));\r
- *Length -= MtrrLibFixedMtrrTable[MsrNum].Length;\r
- *Base += MtrrLibFixedMtrrTable[MsrNum].Length;\r
+ *Length -= mMtrrLibFixedMtrrTable[MsrNum].Length;\r
+ *Base += mMtrrLibFixedMtrrTable[MsrNum].Length;\r
}\r
\r
if (ByteShift < 8 && (*Length != 0)) {\r
}\r
\r
TempQword =\r
- (AsmReadMsr64 (MtrrLibFixedMtrrTable[MsrNum].Msr) & ~ClearMask) | OrMask;\r
- AsmWriteMsr64 (MtrrLibFixedMtrrTable[MsrNum].Msr, TempQword);\r
+ (AsmReadMsr64 (mMtrrLibFixedMtrrTable[MsrNum].Msr) & ~ClearMask) | OrMask;\r
+ AsmWriteMsr64 (mMtrrLibFixedMtrrTable[MsrNum].Msr, TempQword);\r
return RETURN_SUCCESS;\r
}\r
\r
UINT64 MtrrEnd;\r
UINT64 EndAddress;\r
UINT32 FirmwareVariableMtrrCount;\r
+ BOOLEAN CoveredByExistingMtrr;\r
\r
FirmwareVariableMtrrCount = GetFirmwareVariableMtrrCount ();\r
\r
*OverwriteExistingMtrr = FALSE;\r
+ CoveredByExistingMtrr = FALSE;\r
EndAddress = *Base +*Length - 1;\r
\r
for (Index = 0; Index < FirmwareVariableMtrrCount; Index++) {\r
//\r
if (Attributes == VariableMtrr[Index].Type) {\r
//\r
- // if the Mtrr range contain the request range, return RETURN_SUCCESS\r
+ // if the MTRR range contains the request range, set a flag, then continue to
+ // invalidate any MTRR of the same request range with higher priority cache type.\r
//\r
if (VariableMtrr[Index].BaseAddress <= *Base && MtrrEnd >= EndAddress) {\r
- *Length = 0;\r
- return RETURN_SUCCESS;\r
+ CoveredByExistingMtrr = TRUE;\r
+ continue;\r
}\r
//\r
// invalid this MTRR, and program the combine range\r
return RETURN_ACCESS_DENIED;\r
}\r
\r
+ if (CoveredByExistingMtrr) {\r
+ *Length = 0;\r
+ }\r
+\r
return RETURN_SUCCESS;\r
}\r
\r
\r
\r
/**\r
- Check the direction to program variable MTRRs.\r
+ Determine the MTRR numbers used to program a memory range.\r
\r
- This function determines which direction of programming the variable\r
- MTRRs will use fewer MTRRs.\r
+ This function first checks the alignment of the base address. If the alignment of the base address <= Length,\r
+ cover the memory range (BaseAddress, alignment) by a MTRR, then BaseAddress += alignment and Length -= alignment.\r
+ Repeat the step until alignment > Length.\r
\r
- @param Input Length of Memory to program MTRR\r
+ Then this function determines which direction of programming the variable MTRRs for the remaining length\r
+ will use fewer MTRRs.\r
+\r
+ @param BaseAddress Base address of the memory region to program MTRR
+ @param Length Length of Memory to program MTRR\r
@param MtrrNumber Pointer to the number of necessary MTRRs\r
\r
@retval TRUE Positive direction is better.\r
\r
**/\r
BOOLEAN\r
-GetDirection (\r
- IN UINT64 Input,\r
+GetMtrrNumberAndDirection (\r
+ IN UINT64 BaseAddress,\r
+ IN UINT64 Length,\r
IN UINTN *MtrrNumber\r
)\r
{\r
UINT64 TempQword;\r
+ UINT64 Alignment;\r
UINT32 Positive;\r
UINT32 Subtractive;\r
\r
- TempQword = Input;\r
+ *MtrrNumber = 0;\r
+\r
+ if (BaseAddress != 0) {\r
+ do {\r
+ //\r
+ // Calculate the alignment of the base address.\r
+ //\r
+ Alignment = LShiftU64 (1, (UINTN)LowBitSet64 (BaseAddress));\r
+\r
+ if (Alignment > Length) {\r
+ break;\r
+ }\r
+\r
+ (*MtrrNumber)++;\r
+ BaseAddress += Alignment;\r
+ Length -= Alignment;\r
+ } while (TRUE);\r
+\r
+ if (Length == 0) {\r
+ return TRUE;\r
+ }\r
+ }\r
+\r
+ TempQword = Length;\r
Positive = 0;\r
Subtractive = 0;\r
\r
Positive++;\r
} while (TempQword != 0);\r
\r
- TempQword = Power2MaxMemory (LShiftU64 (Input, 1)) - Input;\r
+ TempQword = Power2MaxMemory (LShiftU64 (Length, 1)) - Length;\r
Subtractive++;\r
do {\r
TempQword -= Power2MaxMemory (TempQword);\r
} while (TempQword != 0);\r
\r
if (Positive <= Subtractive) {\r
- *MtrrNumber = Positive;\r
+ *MtrrNumber += Positive;\r
return TRUE;\r
} else {\r
- *MtrrNumber = Subtractive;\r
+ *MtrrNumber += Subtractive;\r
return FALSE;\r
}\r
}\r
IN VARIABLE_MTRR *VariableMtrr\r
)\r
{\r
- UINTN Index;\r
- UINTN Cr4;\r
- UINTN VariableMtrrCount;\r
+ UINTN Index;\r
+ UINTN VariableMtrrCount;\r
+ MTRR_CONTEXT MtrrContext;\r
\r
- Cr4 = PreMtrrChange ();\r
+ PreMtrrChange (&MtrrContext);\r
Index = 0;\r
VariableMtrrCount = GetVariableMtrrCount ();\r
while (Index < VariableMtrrCount) {\r
}\r
Index ++;\r
}\r
- PostMtrrChange (Cr4);\r
+ PostMtrrChange (&MtrrContext);\r
}\r
\r
\r
IN UINT64 MtrrValidAddressMask\r
)\r
{\r
- UINT64 TempQword;\r
- UINTN Cr4;\r
+ UINT64 TempQword;\r
+ MTRR_CONTEXT MtrrContext;\r
\r
- Cr4 = PreMtrrChange ();\r
+ PreMtrrChange (&MtrrContext);\r
\r
//\r
// MTRR Physical Base\r
(TempQword & MtrrValidAddressMask) | MTRR_LIB_CACHE_MTRR_ENABLED\r
);\r
\r
- PostMtrrChange (Cr4);\r
+ PostMtrrChange (&MtrrContext);\r
}\r
\r
\r
// MtrrType is MTRR_CACHE_INVALID_TYPE, that means\r
// no mtrr covers the range\r
//\r
- return CacheUncacheable;\r
+ return MtrrGetDefaultMemoryType ();\r
}\r
}\r
\r
OUT UINT64 *MtrrValidAddressMask\r
)\r
{\r
- UINT32 RegEax;\r
- UINT8 PhysicalAddressBits;\r
+ UINT32 RegEax;\r
+ UINT8 PhysicalAddressBits;\r
\r
AsmCpuid (0x80000000, &RegEax, NULL, NULL, NULL);\r
\r
*MtrrValidBitsMask = LShiftU64 (1, PhysicalAddressBits) - 1;\r
*MtrrValidAddressMask = *MtrrValidBitsMask & 0xfffffffffffff000ULL;\r
} else {\r
- *MtrrValidBitsMask = MTRR_LIB_CACHE_VALID_ADDRESS;\r
- *MtrrValidAddressMask = 0xFFFFFFFF;\r
+ *MtrrValidBitsMask = MTRR_LIB_MSR_VALID_MASK;\r
+ *MtrrValidAddressMask = MTRR_LIB_CACHE_VALID_ADDRESS;\r
}\r
}\r
\r
UINT64 TempQword;\r
RETURN_STATUS Status;\r
UINT64 MemoryType;\r
- UINT64 Remainder;\r
+ UINT64 Alignment;\r
BOOLEAN OverLap;\r
BOOLEAN Positive;\r
UINT32 MsrNum;\r
UINT32 UsedMtrr;\r
UINT64 MtrrValidBitsMask;\r
UINT64 MtrrValidAddressMask;\r
- UINTN Cr4;\r
BOOLEAN OverwriteExistingMtrr;\r
UINT32 FirmwareVariableMtrrCount;\r
UINT32 VariableMtrrEnd;\r
+ MTRR_CONTEXT MtrrContext;\r
+\r
+ DEBUG((DEBUG_CACHE, "MtrrSetMemoryAttribute() %a:%016lx-%016lx\n", mMtrrMemoryCacheTypeShortName[Attribute], BaseAddress, Length));\r
\r
if (!IsMtrrSupported ()) {\r
- return RETURN_UNSUPPORTED;\r
+ Status = RETURN_UNSUPPORTED;\r
+ goto Done;\r
}\r
\r
FirmwareVariableMtrrCount = GetFirmwareVariableMtrrCount ();\r
// Check for an invalid parameter\r
//\r
if (Length == 0) {\r
- return RETURN_INVALID_PARAMETER;\r
+ Status = RETURN_INVALID_PARAMETER;\r
+ goto Done;\r
}\r
\r
if (\r
- (BaseAddress &~MtrrValidAddressMask) != 0 ||\r
- (Length &~MtrrValidAddressMask) != 0\r
+ (BaseAddress & ~MtrrValidAddressMask) != 0 ||\r
+ (Length & ~MtrrValidAddressMask) != 0\r
) {\r
- return RETURN_UNSUPPORTED;\r
+ Status = RETURN_UNSUPPORTED;\r
+ goto Done;\r
}\r
\r
//\r
//\r
Status = RETURN_SUCCESS;\r
while ((BaseAddress < BASE_1MB) && (Length > 0) && Status == RETURN_SUCCESS) {\r
- Cr4 = PreMtrrChange ();\r
+ PreMtrrChange (&MtrrContext);\r
Status = ProgramFixedMtrr (MemoryType, &BaseAddress, &Length);\r
- PostMtrrChange (Cr4);\r
+ PostMtrrChange (&MtrrContext);\r
if (RETURN_ERROR (Status)) {\r
- return Status;\r
+ goto Done;\r
}\r
}\r
\r
\r
//\r
// Since memory ranges below 1MB will be overridden by the fixed MTRRs,\r
- // we can set the bade to 0 to save variable MTRRs.\r
+ // we can set the base to 0 to save variable MTRRs.\r
//\r
if (BaseAddress == BASE_1MB) {\r
BaseAddress = 0;\r
Length += SIZE_1MB;\r
}\r
\r
- //\r
- // Check memory base address alignment\r
- //\r
- DivU64x64Remainder (BaseAddress, Power2MaxMemory (LShiftU64 (Length, 1)), &Remainder);\r
- if (Remainder != 0) {\r
- DivU64x64Remainder (BaseAddress, Power2MaxMemory (Length), &Remainder);\r
- if (Remainder != 0) {\r
- Status = RETURN_UNSUPPORTED;\r
- goto Done;\r
- }\r
- }\r
-\r
//\r
// Check for overlap\r
//\r
\r
if (Length == 0) {\r
//\r
- // Combined successfully\r
+ // Combined successfully, invalidate the now-unused MTRRs\r
//\r
+ InvalidateMtrr(VariableMtrr);\r
Status = RETURN_SUCCESS;\r
goto Done;\r
}\r
}\r
\r
- //\r
- // Program Variable MTRRs\r
- //\r
- // Avoid hardcode here and read data dynamically\r
- //\r
- if (UsedMtrr >= FirmwareVariableMtrrCount) {\r
- Status = RETURN_OUT_OF_RESOURCES;\r
- goto Done;\r
- }\r
-\r
//\r
// The memory type is the same with the type specified by\r
// MTRR_LIB_IA32_MTRR_DEF_TYPE.\r
//\r
- if ((!OverwriteExistingMtrr) && (Attribute == GetMtrrDefaultMemoryType ())) {\r
+ if ((!OverwriteExistingMtrr) && (Attribute == MtrrGetDefaultMemoryType ())) {\r
//\r
// Invalidate the now-unused MTRRs\r
//\r
goto Done;\r
}\r
\r
- TempQword = Length;\r
+ Positive = GetMtrrNumberAndDirection (BaseAddress, Length, &MtrrNumber);\r
\r
+ if ((UsedMtrr + MtrrNumber) > FirmwareVariableMtrrCount) {\r
+ Status = RETURN_OUT_OF_RESOURCES;\r
+ goto Done;\r
+ }\r
\r
- if (TempQword == Power2MaxMemory (TempQword)) {\r
- //\r
- // Invalidate the now-unused MTRRs\r
- //\r
- InvalidateMtrr(VariableMtrr);\r
+ //\r
+ // Invalidate the now-unused MTRRs\r
+ //\r
+ InvalidateMtrr(VariableMtrr);\r
+\r
+ //\r
+ // Find first unused MTRR\r
+ //\r
+ for (MsrNum = MTRR_LIB_IA32_VARIABLE_MTRR_BASE;\r
+ MsrNum < VariableMtrrEnd;\r
+ MsrNum += 2\r
+ ) {\r
+ if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {\r
+ break;\r
+ }\r
+ }\r
+\r
+ if (BaseAddress != 0) {\r
+ do {\r
+ //\r
+ // Calculate the alignment of the base address.\r
+ //\r
+ Alignment = LShiftU64 (1, (UINTN)LowBitSet64 (BaseAddress));\r
+\r
+ if (Alignment > Length) {\r
+ break;\r
+ }\r
+\r
+ //\r
+ // Find unused MTRR\r
+ //\r
+ for (; MsrNum < VariableMtrrEnd; MsrNum += 2) {\r
+ if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {\r
+ break;\r
+ }\r
+ }\r
+\r
+ ProgramVariableMtrr (\r
+ MsrNum,\r
+ BaseAddress,\r
+ Alignment,\r
+ MemoryType,\r
+ MtrrValidAddressMask\r
+ );\r
+ BaseAddress += Alignment;\r
+ Length -= Alignment;\r
+ } while (TRUE);\r
+\r
+ if (Length == 0) {\r
+ goto Done;\r
+ }\r
+ }\r
+\r
+ TempQword = Length;\r
+\r
+ if (!Positive) {\r
+ Length = Power2MaxMemory (LShiftU64 (TempQword, 1));\r
\r
//\r
- // Find first unused MTRR\r
+ // Find unused MTRR\r
//\r
- for (MsrNum = MTRR_LIB_IA32_VARIABLE_MTRR_BASE;\r
- MsrNum < VariableMtrrEnd;\r
- MsrNum += 2\r
- ) {\r
+ for (; MsrNum < VariableMtrrEnd; MsrNum += 2) {\r
if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {\r
break;\r
}\r
MemoryType,\r
MtrrValidAddressMask\r
);\r
- } else {\r
-\r
- Positive = GetDirection (TempQword, &MtrrNumber);\r
-\r
- if ((UsedMtrr + MtrrNumber) > FirmwareVariableMtrrCount) {\r
- Status = RETURN_OUT_OF_RESOURCES;\r
- goto Done;\r
- }\r
-\r
- //\r
- // Invalidate the now-unused MTRRs\r
- //\r
- InvalidateMtrr(VariableMtrr);\r
+ BaseAddress += Length;\r
+ TempQword = Length - TempQword;\r
+ MemoryType = MTRR_CACHE_UNCACHEABLE;\r
+ }\r
\r
+ do {\r
//\r
- // Find first unused MTRR\r
+ // Find unused MTRR\r
//\r
- for (MsrNum = MTRR_LIB_IA32_VARIABLE_MTRR_BASE;\r
- MsrNum < VariableMtrrEnd;\r
- MsrNum += 2\r
- ) {\r
+ for (; MsrNum < VariableMtrrEnd; MsrNum += 2) {\r
if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {\r
break;\r
}\r
}\r
\r
+ Length = Power2MaxMemory (TempQword);\r
if (!Positive) {\r
- Length = Power2MaxMemory (LShiftU64 (TempQword, 1));\r
- ProgramVariableMtrr (\r
- MsrNum,\r
- BaseAddress,\r
- Length,\r
- MemoryType,\r
- MtrrValidAddressMask\r
- );\r
- BaseAddress += Length;\r
- TempQword = Length - TempQword;\r
- MemoryType = MTRR_CACHE_UNCACHEABLE;\r
+ BaseAddress -= Length;\r
}\r
\r
- do {\r
- //\r
- // Find unused MTRR\r
- //\r
- for (; MsrNum < VariableMtrrEnd; MsrNum += 2) {\r
- if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {\r
- break;\r
- }\r
- }\r
-\r
- Length = Power2MaxMemory (TempQword);\r
- if (!Positive) {\r
- BaseAddress -= Length;\r
- }\r
+ ProgramVariableMtrr (\r
+ MsrNum,\r
+ BaseAddress,\r
+ Length,\r
+ MemoryType,\r
+ MtrrValidAddressMask\r
+ );\r
\r
- ProgramVariableMtrr (\r
- MsrNum,\r
- BaseAddress,\r
- Length,\r
- MemoryType,\r
- MtrrValidAddressMask\r
- );\r
+ if (Positive) {\r
+ BaseAddress += Length;\r
+ }\r
+ TempQword -= Length;\r
\r
- if (Positive) {\r
- BaseAddress += Length;\r
- }\r
- TempQword -= Length;\r
+ } while (TempQword > 0);\r
\r
- } while (TempQword > 0);\r
+Done:\r
+ DEBUG((DEBUG_CACHE, " Status = %r\n", Status));\r
+ if (!RETURN_ERROR (Status)) {\r
+ MtrrDebugPrintAllMtrrs ();\r
}\r
\r
-Done:\r
return Status;\r
-\r
}\r
\r
\r
// Go through the fixed MTRR\r
//\r
for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {\r
- if (Address >= MtrrLibFixedMtrrTable[Index].BaseAddress &&\r
+ if (Address >= mMtrrLibFixedMtrrTable[Index].BaseAddress &&\r
Address < (\r
- MtrrLibFixedMtrrTable[Index].BaseAddress +\r
- (MtrrLibFixedMtrrTable[Index].Length * 8)\r
+ mMtrrLibFixedMtrrTable[Index].BaseAddress +\r
+ (mMtrrLibFixedMtrrTable[Index].Length * 8)\r
)\r
) {\r
SubIndex =\r
- ((UINTN)Address - MtrrLibFixedMtrrTable[Index].BaseAddress) /\r
- MtrrLibFixedMtrrTable[Index].Length;\r
- TempQword = AsmReadMsr64 (MtrrLibFixedMtrrTable[Index].Msr);\r
+ ((UINTN)Address - mMtrrLibFixedMtrrTable[Index].BaseAddress) /\r
+ mMtrrLibFixedMtrrTable[Index].Length;\r
+ TempQword = AsmReadMsr64 (mMtrrLibFixedMtrrTable[Index].Msr);\r
MtrrType = RShiftU64 (TempQword, SubIndex * 8) & 0xFF;\r
return GetMemoryCacheTypeFromMtrrType (MtrrType);\r
}\r
IN MTRR_VARIABLE_SETTINGS *VariableSettings\r
)\r
{\r
- UINTN Cr4;\r
+ MTRR_CONTEXT MtrrContext;\r
\r
if (!IsMtrrSupported ()) {\r
return VariableSettings;\r
}\r
\r
- Cr4 = PreMtrrChange ();\r
+ PreMtrrChange (&MtrrContext);\r
MtrrSetVariableMtrrWorker (VariableSettings);\r
- PostMtrrChange (Cr4);\r
+ PostMtrrChange (&MtrrContext);\r
return VariableSettings;\r
}\r
\r
\r
for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {\r
FixedSettings->Mtrr[Index] =\r
- AsmReadMsr64 (MtrrLibFixedMtrrTable[Index].Msr);\r
+ AsmReadMsr64 (mMtrrLibFixedMtrrTable[Index].Msr);\r
};\r
\r
return FixedSettings;\r
\r
for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {\r
AsmWriteMsr64 (\r
- MtrrLibFixedMtrrTable[Index].Msr,\r
+ mMtrrLibFixedMtrrTable[Index].Msr,\r
FixedSettings->Mtrr[Index]\r
);\r
}\r
IN MTRR_FIXED_SETTINGS *FixedSettings\r
)\r
{\r
- UINTN Cr4;\r
+ MTRR_CONTEXT MtrrContext;\r
\r
if (!IsMtrrSupported ()) {\r
return FixedSettings;\r
}\r
\r
- Cr4 = PreMtrrChange ();\r
+ PreMtrrChange (&MtrrContext);\r
MtrrSetFixedMtrrWorker (FixedSettings);\r
- PostMtrrChange (Cr4);\r
+ PostMtrrChange (&MtrrContext);\r
\r
return FixedSettings;\r
}\r
IN MTRR_SETTINGS *MtrrSetting\r
)\r
{\r
- UINTN Cr4;\r
+ MTRR_CONTEXT MtrrContext;\r
\r
if (!IsMtrrSupported ()) {\r
return MtrrSetting;\r
}\r
\r
- Cr4 = PreMtrrChange ();\r
+ PreMtrrChange (&MtrrContext);\r
\r
//\r
// Set fixed MTRRs\r
//\r
AsmWriteMsr64 (MTRR_LIB_IA32_MTRR_DEF_TYPE, MtrrSetting->MtrrDefType);\r
\r
- PostMtrrChange (Cr4);\r
+ PostMtrrChangeEnableCache (&MtrrContext);\r
\r
return MtrrSetting;\r
}\r
\r
-\r
/**\r
This function prints all MTRRs for debugging.\r
**/\r
)\r
{\r
DEBUG_CODE (\r
- {\r
- MTRR_SETTINGS MtrrSettings;\r
- UINTN Index;\r
- UINTN VariableMtrrCount;\r
+ MTRR_SETTINGS MtrrSettings;\r
+ UINTN Index;\r
+ UINTN Index1;\r
+ UINTN VariableMtrrCount;\r
+ UINT64 Base;\r
+ UINT64 Limit;\r
+ UINT64 MtrrBase;\r
+ UINT64 MtrrLimit;\r
+ UINT64 RangeBase;\r
+ UINT64 RangeLimit;\r
+ UINT64 NoRangeBase;\r
+ UINT64 NoRangeLimit;\r
+ UINT32 RegEax;\r
+ UINTN MemoryType;\r
+ UINTN PreviousMemoryType;\r
+ BOOLEAN Found;\r
+\r
+ if (!IsMtrrSupported ()) {\r
+ return;\r
+ }\r
+\r
+ DEBUG((DEBUG_CACHE, "MTRR Settings\n"));\r
+ DEBUG((DEBUG_CACHE, "=============\n"));\r
+ \r
+ MtrrGetAllMtrrs (&MtrrSettings);\r
+ DEBUG((DEBUG_CACHE, "MTRR Default Type: %016lx\n", MtrrSettings.MtrrDefType));\r
+ for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {\r
+ DEBUG((DEBUG_CACHE, "Fixed MTRR[%02d] : %016lx\n", Index, MtrrSettings.Fixed.Mtrr[Index]));\r
+ }\r
\r
- if (!IsMtrrSupported ()) {\r
- return;\r
+ VariableMtrrCount = GetVariableMtrrCount ();\r
+ for (Index = 0; Index < VariableMtrrCount; Index++) {\r
+ DEBUG((DEBUG_CACHE, "Variable MTRR[%02d]: Base=%016lx Mask=%016lx\n",\r
+ Index,\r
+ MtrrSettings.Variables.Mtrr[Index].Base,\r
+ MtrrSettings.Variables.Mtrr[Index].Mask\r
+ ));\r
+ }\r
+ DEBUG((DEBUG_CACHE, "\n"));\r
+ DEBUG((DEBUG_CACHE, "MTRR Ranges\n"));\r
+ DEBUG((DEBUG_CACHE, "====================================\n"));\r
+\r
+ Base = 0;\r
+ PreviousMemoryType = MTRR_CACHE_INVALID_TYPE;\r
+ for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {\r
+ Base = mMtrrLibFixedMtrrTable[Index].BaseAddress;\r
+ for (Index1 = 0; Index1 < 8; Index1++) {\r
+ MemoryType = (UINTN)(RShiftU64 (MtrrSettings.Fixed.Mtrr[Index], Index1 * 8) & 0xff);\r
+ if (MemoryType > CacheWriteBack) {\r
+ MemoryType = MTRR_CACHE_INVALID_TYPE;\r
+ } \r
+ if (MemoryType != PreviousMemoryType) {\r
+ if (PreviousMemoryType != MTRR_CACHE_INVALID_TYPE) {\r
+ DEBUG((DEBUG_CACHE, "%016lx\n", Base - 1));\r
+ }\r
+ PreviousMemoryType = MemoryType;\r
+ DEBUG((DEBUG_CACHE, "%a:%016lx-", mMtrrMemoryCacheTypeShortName[MemoryType], Base));\r
+ }\r
+ Base += mMtrrLibFixedMtrrTable[Index].Length;\r
}\r
+ }\r
+ DEBUG((DEBUG_CACHE, "%016lx\n", Base - 1));\r
\r
- MtrrGetAllMtrrs (&MtrrSettings);\r
- DEBUG((EFI_D_ERROR, "DefaultType = %016lx\n", MtrrSettings.MtrrDefType));\r
- for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {\r
- DEBUG((\r
- EFI_D_ERROR, "Fixed[%02d] = %016lx\n",\r
- Index,\r
- MtrrSettings.Fixed.Mtrr[Index]\r
- ));\r
+ VariableMtrrCount = GetVariableMtrrCount ();\r
+\r
+ Limit = BIT36 - 1;\r
+ AsmCpuid (0x80000000, &RegEax, NULL, NULL, NULL);\r
+ if (RegEax >= 0x80000008) {\r
+ AsmCpuid (0x80000008, &RegEax, NULL, NULL, NULL);\r
+ Limit = LShiftU64 (1, RegEax & 0xff) - 1;\r
+ }\r
+ Base = BASE_1MB;\r
+ PreviousMemoryType = MTRR_CACHE_INVALID_TYPE;\r
+ do {\r
+ MemoryType = MtrrGetMemoryAttribute (Base);\r
+ if (MemoryType > CacheWriteBack) {\r
+ MemoryType = MTRR_CACHE_INVALID_TYPE;\r
}\r
\r
- VariableMtrrCount = GetVariableMtrrCount ();\r
- for (Index = 0; Index < VariableMtrrCount; Index++) {\r
- DEBUG((\r
- EFI_D_ERROR, "Variable[%02d] = %016lx, %016lx\n",\r
- Index,\r
- MtrrSettings.Variables.Mtrr[Index].Base,\r
- MtrrSettings.Variables.Mtrr[Index].Mask\r
- ));\r
+ if (MemoryType != PreviousMemoryType) {\r
+ if (PreviousMemoryType != MTRR_CACHE_INVALID_TYPE) {\r
+ DEBUG((DEBUG_CACHE, "%016lx\n", Base - 1));\r
+ }\r
+ PreviousMemoryType = MemoryType;\r
+ DEBUG((DEBUG_CACHE, "%a:%016lx-", mMtrrMemoryCacheTypeShortName[MemoryType], Base));\r
}\r
- }\r
+ \r
+ RangeBase = BASE_1MB; \r
+ NoRangeBase = BASE_1MB;\r
+ RangeLimit = Limit;\r
+ NoRangeLimit = Limit;\r
+ \r
+ for (Index = 0, Found = FALSE; Index < VariableMtrrCount; Index++) {\r
+ if ((MtrrSettings.Variables.Mtrr[Index].Mask & BIT11) == 0) {\r
+ //\r
+ // If mask is not valid, then do not display range\r
+ //\r
+ continue;\r
+ }\r
+ MtrrBase = (MtrrSettings.Variables.Mtrr[Index].Base & (~(SIZE_4KB - 1)));\r
+ MtrrLimit = MtrrBase + ((~(MtrrSettings.Variables.Mtrr[Index].Mask & (~(SIZE_4KB - 1)))) & Limit);\r
+\r
+ if (Base >= MtrrBase && Base < MtrrLimit) {\r
+ Found = TRUE;\r
+ }\r
+ \r
+ if (Base >= MtrrBase && MtrrBase > RangeBase) {\r
+ RangeBase = MtrrBase;\r
+ }\r
+ if (Base > MtrrLimit && MtrrLimit > RangeBase) {\r
+ RangeBase = MtrrLimit + 1;\r
+ }\r
+ if (Base < MtrrBase && MtrrBase < RangeLimit) {\r
+ RangeLimit = MtrrBase - 1;\r
+ }\r
+ if (Base < MtrrLimit && MtrrLimit <= RangeLimit) {\r
+ RangeLimit = MtrrLimit;\r
+ }\r
+ \r
+ if (Base > MtrrLimit && NoRangeBase < MtrrLimit) {\r
+ NoRangeBase = MtrrLimit + 1;\r
+ }\r
+ if (Base < MtrrBase && NoRangeLimit > MtrrBase) {\r
+ NoRangeLimit = MtrrBase - 1;\r
+ }\r
+ }\r
+ \r
+ if (Found) {\r
+ Base = RangeLimit + 1;\r
+ } else {\r
+ Base = NoRangeLimit + 1;\r
+ }\r
+ } while (Base < Limit);\r
+ DEBUG((DEBUG_CACHE, "%016lx\n\n", Base - 1));\r
);\r
}\r
\r