/** @file\r
MTRR setting library\r
\r
- Copyright (c) 2008 - 2009, Intel Corporation\r
- All rights reserved. This program and the accompanying materials\r
+ Copyright (c) 2008 - 2010, Intel Corporation. All rights reserved.<BR>\r
+ This program and the accompanying materials\r
are licensed and made available under the terms and conditions of the BSD License\r
which accompanies this distribution. The full text of the license may be found at\r
http://opensource.org/licenses/bsd-license.php\r
},\r
};\r
\r
+/**
+  Returns the variable MTRR count for the CPU.
+
+  Reads the VCNT field (bits 7:0) of the IA32_MTRRCAP MSR.
+
+  @return Variable MTRR count, or 0 when MTRRs are not supported.
+
+**/
+UINT32
+EFIAPI
+GetVariableMtrrCount (
+  VOID
+  )
+{
+  if (!IsMtrrSupported ()) {
+    return 0;
+  }
+
+  //
+  // IA32_MTRRCAP[7:0] (VCNT) holds the number of variable-range MTRRs.
+  //
+  return (UINT32)(AsmReadMsr64 (MTRR_LIB_IA32_MTRR_CAP) & MTRR_LIB_IA32_MTRR_CAP_VCNT_MASK);
+}
+\r
+/**
+  Returns the firmware usable variable MTRR count for the CPU.
+
+  RESERVED_FIRMWARE_VARIABLE_MTRR_NUMBER variable MTRRs are held back
+  from the hardware total reported by GetVariableMtrrCount ().
+
+  @return Firmware usable variable MTRR count. Returns 0 when the CPU
+          provides fewer variable MTRRs than the reserved number (this
+          also covers the MTRR-unsupported case, where
+          GetVariableMtrrCount () returns 0).
+
+**/
+UINT32
+EFIAPI
+GetFirmwareVariableMtrrCount (
+  VOID
+  )
+{
+  UINT32 VariableMtrrCount;
+
+  VariableMtrrCount = GetVariableMtrrCount ();
+  //
+  // Guard against unsigned underflow in the subtraction below.
+  //
+  if (VariableMtrrCount < RESERVED_FIRMWARE_VARIABLE_MTRR_NUMBER) {
+    return 0;
+  }
+
+  return VariableMtrrCount - RESERVED_FIRMWARE_VARIABLE_MTRR_NUMBER;
+}
\r
/**\r
Returns the default MTRR cache type for the system.\r
//\r
// Enter no fill cache mode, CD=1(Bit30), NW=0 (Bit29)\r
//\r
- Value = AsmReadCr0 ();\r
- Value = (UINTN) BitFieldWrite64 (Value, 30, 30, 1);\r
- Value = (UINTN) BitFieldWrite64 (Value, 29, 29, 0);\r
- AsmWriteCr0 (Value);\r
- //\r
- // Flush cache\r
- //\r
- AsmWbinvd ();\r
+ AsmDisableCache ();\r
+\r
//\r
- // Clear PGE flag Bit 7\r
+ // Save original CR4 value and clear PGE flag (Bit 7)\r
//\r
Value = AsmReadCr4 ();\r
- AsmWriteCr4 ((UINTN) BitFieldWrite64 (Value, 7, 7, 0));\r
+ AsmWriteCr4 (Value & (~BIT7));\r
+\r
//\r
// Flush all TLBs\r
//\r
CpuFlushTlb ();\r
+\r
//\r
// Disable Mtrrs\r
//\r
AsmMsrBitFieldWrite64 (MTRR_LIB_IA32_MTRR_DEF_TYPE, 10, 11, 0);\r
\r
+ //\r
+ // Return original CR4 value\r
+ //\r
return Value;\r
}\r
\r
UINTN Cr4\r
)\r
{\r
- UINTN Value;\r
-\r
//\r
// Enable Cache MTRR\r
//\r
AsmMsrBitFieldWrite64 (MTRR_LIB_IA32_MTRR_DEF_TYPE, 10, 11, 3);\r
\r
//\r
- // Flush all TLBs and cache the second time\r
+ // Flush all TLBs \r
//\r
- AsmWbinvd ();\r
CpuFlushTlb ();\r
\r
//\r
// Enable Normal Mode caching CD=NW=0, CD(Bit30), NW(Bit29)\r
//\r
- Value = AsmReadCr0 ();\r
- Value = (UINTN) BitFieldWrite64 (Value, 30, 30, 0);\r
- Value = (UINTN) BitFieldWrite64 (Value, 29, 29, 0);\r
- AsmWriteCr0 (Value);\r
+ AsmEnableCache ();\r
\r
+ //\r
+ // Restore original CR4 value\r
+ //\r
AsmWriteCr4 (Cr4);\r
-\r
- return ;\r
}\r
\r
\r
UINTN Index;\r
UINT32 MsrNum;\r
UINT32 UsedMtrr;\r
+ UINT32 FirmwareVariableMtrrCount;\r
+ UINT32 VariableMtrrEnd;\r
+\r
+ if (!IsMtrrSupported ()) {\r
+ return 0;\r
+ }\r
+\r
+ FirmwareVariableMtrrCount = GetFirmwareVariableMtrrCount ();\r
+ VariableMtrrEnd = MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (2 * GetVariableMtrrCount ()) - 1;\r
\r
ZeroMem (VariableMtrr, sizeof (VARIABLE_MTRR) * MTRR_NUMBER_OF_VARIABLE_MTRR);\r
UsedMtrr = 0;\r
\r
for (MsrNum = MTRR_LIB_IA32_VARIABLE_MTRR_BASE, Index = 0;\r
(\r
- (MsrNum < MTRR_LIB_IA32_VARIABLE_MTRR_END) &&\r
- (Index < FIRMWARE_VARIABLE_MTRR_NUMBER)\r
+ (MsrNum < VariableMtrrEnd) &&\r
+ (Index < FirmwareVariableMtrrCount)\r
);\r
MsrNum += 2\r
) {\r
UINT64 CombineEnd;\r
UINT64 MtrrEnd;\r
UINT64 EndAddress;\r
+ UINT32 FirmwareVariableMtrrCount;\r
+\r
+ FirmwareVariableMtrrCount = GetFirmwareVariableMtrrCount ();\r
\r
*OverwriteExistingMtrr = FALSE;\r
EndAddress = *Base +*Length - 1;\r
\r
- for (Index = 0; Index < FIRMWARE_VARIABLE_MTRR_NUMBER; Index++) {\r
+ for (Index = 0; Index < FirmwareVariableMtrrCount; Index++) {\r
\r
MtrrEnd = VariableMtrr[Index].BaseAddress + VariableMtrr[Index].Length - 1;\r
if (\r
{\r
UINTN Index;\r
UINTN Cr4;\r
+ UINTN VariableMtrrCount;\r
\r
Cr4 = PreMtrrChange ();\r
Index = 0;\r
- while (Index < MTRR_NUMBER_OF_VARIABLE_MTRR) {\r
+ VariableMtrrCount = GetVariableMtrrCount ();\r
+ while (Index < VariableMtrrCount) {\r
if (VariableMtrr[Index].Valid == FALSE && VariableMtrr[Index].Used == TRUE ) {\r
AsmWriteMsr64 (VariableMtrr[Index].Msr, 0);\r
AsmWriteMsr64 (VariableMtrr[Index].Msr + 1, 0);\r
UINT64 MtrrValidAddressMask;\r
UINTN Cr4;\r
BOOLEAN OverwriteExistingMtrr;\r
+ UINT32 FirmwareVariableMtrrCount;\r
+ UINT32 VariableMtrrEnd;\r
+\r
+ if (!IsMtrrSupported ()) {\r
+ return RETURN_UNSUPPORTED;\r
+ }\r
+\r
+ FirmwareVariableMtrrCount = GetFirmwareVariableMtrrCount ();\r
+ VariableMtrrEnd = MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (2 * GetVariableMtrrCount ()) - 1;\r
\r
MtrrLibInitializeMtrrMask(&MtrrValidBitsMask, &MtrrValidAddressMask);\r
\r
//\r
// Avoid hardcode here and read data dynamically\r
//\r
- if (UsedMtrr >= FIRMWARE_VARIABLE_MTRR_NUMBER) {\r
+ if (UsedMtrr >= FirmwareVariableMtrrCount) {\r
Status = RETURN_OUT_OF_RESOURCES;\r
goto Done;\r
}\r
// Find first unused MTRR\r
//\r
for (MsrNum = MTRR_LIB_IA32_VARIABLE_MTRR_BASE;\r
- MsrNum < MTRR_LIB_IA32_VARIABLE_MTRR_END;\r
+ MsrNum < VariableMtrrEnd;\r
MsrNum += 2\r
) {\r
if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {\r
\r
Positive = GetDirection (TempQword, &MtrrNumber);\r
\r
- if ((UsedMtrr + MtrrNumber) > FIRMWARE_VARIABLE_MTRR_NUMBER) {\r
+ if ((UsedMtrr + MtrrNumber) > FirmwareVariableMtrrCount) {\r
Status = RETURN_OUT_OF_RESOURCES;\r
goto Done;\r
}\r
// Find first unused MTRR\r
//\r
for (MsrNum = MTRR_LIB_IA32_VARIABLE_MTRR_BASE;\r
- MsrNum < MTRR_LIB_IA32_VARIABLE_MTRR_END;\r
+ MsrNum < VariableMtrrEnd;\r
MsrNum += 2\r
) {\r
if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {\r
//\r
// Find unused MTRR\r
//\r
- for (; MsrNum < MTRR_LIB_IA32_VARIABLE_MTRR_END; MsrNum += 2) {\r
+ for (; MsrNum < VariableMtrrEnd; MsrNum += 2) {\r
if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {\r
break;\r
}\r
VARIABLE_MTRR VariableMtrr[MTRR_NUMBER_OF_VARIABLE_MTRR];\r
UINT64 MtrrValidBitsMask;\r
UINT64 MtrrValidAddressMask;\r
+ UINTN VariableMtrrCount;\r
+\r
+ if (!IsMtrrSupported ()) {\r
+ return CacheUncacheable;\r
+ }\r
\r
//\r
// Check if MTRR is enabled, if not, return UC as attribute\r
//\r
// Go through the variable MTRR\r
//\r
- for (Index = 0; Index < MTRR_NUMBER_OF_VARIABLE_MTRR; Index++) {\r
+ VariableMtrrCount = GetVariableMtrrCount ();\r
+ ASSERT (VariableMtrrCount <= MTRR_NUMBER_OF_VARIABLE_MTRR);\r
+\r
+ for (Index = 0; Index < VariableMtrrCount; Index++) {\r
if (VariableMtrr[Index].Valid) {\r
if (Address >= VariableMtrr[Index].BaseAddress &&\r
Address < VariableMtrr[Index].BaseAddress+VariableMtrr[Index].Length) {\r
)\r
{\r
UINT32 Index;\r
+ UINT32 VariableMtrrCount;\r
\r
- for (Index = 0; Index < MTRR_NUMBER_OF_VARIABLE_MTRR; Index++) {\r
+ if (!IsMtrrSupported ()) {\r
+ return VariableSettings;\r
+ }\r
+\r
+ VariableMtrrCount = GetVariableMtrrCount ();\r
+ ASSERT (VariableMtrrCount <= MTRR_NUMBER_OF_VARIABLE_MTRR);\r
+\r
+ for (Index = 0; Index < VariableMtrrCount; Index++) {\r
VariableSettings->Mtrr[Index].Base =\r
AsmReadMsr64 (MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (Index << 1));\r
VariableSettings->Mtrr[Index].Mask =\r
)\r
{\r
UINT32 Index;\r
+ UINT32 VariableMtrrCount;\r
\r
- for (Index = 0; Index < MTRR_NUMBER_OF_VARIABLE_MTRR; Index++) {\r
+ VariableMtrrCount = GetVariableMtrrCount ();\r
+ ASSERT (VariableMtrrCount <= MTRR_NUMBER_OF_VARIABLE_MTRR);\r
+\r
+ for (Index = 0; Index < VariableMtrrCount; Index++) {\r
AsmWriteMsr64 (\r
MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (Index << 1),\r
VariableSettings->Mtrr[Index].Base\r
{\r
UINTN Cr4;\r
\r
+ if (!IsMtrrSupported ()) {\r
+ return VariableSettings;\r
+ }\r
+\r
Cr4 = PreMtrrChange ();\r
MtrrSetVariableMtrrWorker (VariableSettings);\r
PostMtrrChange (Cr4);\r
{\r
UINT32 Index;\r
\r
+ if (!IsMtrrSupported ()) {\r
+ return FixedSettings;\r
+ }\r
+\r
for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {\r
FixedSettings->Mtrr[Index] =\r
AsmReadMsr64 (MtrrLibFixedMtrrTable[Index].Msr);\r
{\r
UINTN Cr4;\r
\r
+ if (!IsMtrrSupported ()) {\r
+ return FixedSettings;\r
+ }\r
+\r
Cr4 = PreMtrrChange ();\r
MtrrSetFixedMtrrWorker (FixedSettings);\r
PostMtrrChange (Cr4);\r
OUT MTRR_SETTINGS *MtrrSetting\r
)\r
{\r
+ if (!IsMtrrSupported ()) {\r
+ return MtrrSetting;\r
+ }\r
+\r
//\r
// Get fixed MTRRs\r
//\r
{\r
UINTN Cr4;\r
\r
+ if (!IsMtrrSupported ()) {\r
+ return MtrrSetting;\r
+ }\r
+\r
Cr4 = PreMtrrChange ();\r
\r
//\r
{\r
MTRR_SETTINGS MtrrSettings;\r
UINTN Index;\r
+ UINTN VariableMtrrCount;\r
+\r
+ if (!IsMtrrSupported ()) {\r
+ return;\r
+ }\r
\r
MtrrGetAllMtrrs (&MtrrSettings);\r
DEBUG((EFI_D_ERROR, "DefaultType = %016lx\n", MtrrSettings.MtrrDefType));\r
MtrrSettings.Fixed.Mtrr[Index]\r
));\r
}\r
- for (Index = 0; Index < MTRR_NUMBER_OF_VARIABLE_MTRR; Index++) {\r
+\r
+ VariableMtrrCount = GetVariableMtrrCount ();\r
+ for (Index = 0; Index < VariableMtrrCount; Index++) {\r
DEBUG((\r
EFI_D_ERROR, "Variable[%02d] = %016lx, %016lx\n",\r
Index,\r
);\r
}\r
\r
+/**
+  Checks if MTRR is supported.
+
+  MTRR is reported as supported only when CPUID indicates the MTRR
+  feature AND IA32_MTRRCAP reports both a non-zero variable MTRR count
+  and fixed MTRR support.
+
+  @retval TRUE MTRR is supported.
+  @retval FALSE MTRR is not supported.
+
+**/
+BOOLEAN
+EFIAPI
+IsMtrrSupported (
+  VOID
+  )
+{
+  UINT32  RegEdx;
+  UINT64  MtrrCap;
+
+  //
+  // Check CPUID(1).EDX[12] for MTRR capability
+  //
+  AsmCpuid (1, NULL, NULL, NULL, &RegEdx);
+  if (BitFieldRead32 (RegEdx, 12, 12) == 0) {
+    return FALSE;
+  }
+
+  //
+  // Check IA32_MTRRCAP.[0..7] for number of variable MTRRs and IA32_MTRRCAP[8] for
+  // fixed MTRRs existence. If number of variable MTRRs is zero, or fixed MTRRs do not
+  // exist, return false.
+  //
+  MtrrCap = AsmReadMsr64 (MTRR_LIB_IA32_MTRR_CAP);
+  if ((BitFieldRead64 (MtrrCap, 0, 7) == 0) || (BitFieldRead64 (MtrrCap, 8, 8) == 0)) {
+    return FALSE;
+  }
+
+  return TRUE;
+}