/** @file\r
MTRR setting library\r
\r
- Copyright (c) 2008 - 2012, Intel Corporation. All rights reserved.<BR>\r
+ Copyright (c) 2008 - 2015, Intel Corporation. All rights reserved.<BR>\r
This program and the accompanying materials\r
are licensed and made available under the terms and conditions of the BSD License\r
which accompanies this distribution. The full text of the license may be found at\r
#include <Library/BaseMemoryLib.h>\r
#include <Library/DebugLib.h>\r
\r
+//\r
+// Context to save and restore when MTRRs are programmed\r
+//\r
+typedef struct {\r
+ UINTN Cr4;\r
+ BOOLEAN InterruptState;\r
+} MTRR_CONTEXT;\r
+\r
//\r
// This table defines the offset, base and length of the fixed MTRRs\r
//\r
)\r
{\r
UINT32 VariableMtrrCount;\r
+ UINT32 ReservedMtrrNumber;\r
\r
VariableMtrrCount = GetVariableMtrrCount ();\r
- if (VariableMtrrCount < RESERVED_FIRMWARE_VARIABLE_MTRR_NUMBER) {\r
+ ReservedMtrrNumber = PcdGet32 (PcdCpuNumberOfReservedVariableMtrrs);\r
+ if (VariableMtrrCount < ReservedMtrrNumber) {\r
return 0;\r
}\r
\r
- return VariableMtrrCount - RESERVED_FIRMWARE_VARIABLE_MTRR_NUMBER;\r
+ return VariableMtrrCount - ReservedMtrrNumber;\r
}\r
\r
/**\r
This function will do some preparation for programming MTRRs:\r
disable cache, invalid cache and disable MTRR caching functionality\r
\r
- @return CR4 value before changing.\r
+ @param[out] MtrrContext Pointer to context to save\r
\r
**/\r
-UINTN\r
+VOID\r
PreMtrrChange (\r
- VOID\r
+ OUT MTRR_CONTEXT *MtrrContext\r
)\r
{\r
- UINTN Value;\r
-\r
+ //\r
+ // Disable interrupts and save current interrupt state\r
+ //\r
+ MtrrContext->InterruptState = SaveAndDisableInterrupts();\r
+
//\r
// Enter no fill cache mode, CD=1(Bit30), NW=0 (Bit29)\r
//\r
//\r
// Save original CR4 value and clear PGE flag (Bit 7)\r
//\r
- Value = AsmReadCr4 ();\r
- AsmWriteCr4 (Value & (~BIT7));\r
+ MtrrContext->Cr4 = AsmReadCr4 ();\r
+ AsmWriteCr4 (MtrrContext->Cr4 & (~BIT7));\r
\r
//\r
// Flush all TLBs\r
// Disable Mtrrs\r
//\r
AsmMsrBitFieldWrite64 (MTRR_LIB_IA32_MTRR_DEF_TYPE, 10, 11, 0);\r
-\r
- //\r
- // Return original CR4 value\r
- //\r
- return Value;\r
}\r
\r
/**\r
This function will do some clean up after programming MTRRs:\r
Flush all TLBs, re-enable caching, restore CR4.\r
\r
- @param Cr4 CR4 value to restore\r
+ @param[in] MtrrContext Pointer to context to restore\r
\r
**/\r
VOID\r
PostMtrrChangeEnableCache (\r
- IN UINTN Cr4\r
+ IN MTRR_CONTEXT *MtrrContext\r
)\r
{\r
//\r
//\r
// Restore original CR4 value\r
//\r
- AsmWriteCr4 (Cr4);\r
+ AsmWriteCr4 (MtrrContext->Cr4);\r
+
+ //\r
+ // Restore original interrupt state\r
+ //\r
+ SetInterruptState (MtrrContext->InterruptState);\r
}\r
\r
/**\r
This function will do some clean up after programming MTRRs:\r
enable MTRR caching functionality, and enable cache\r
\r
- @param Cr4 CR4 value to restore\r
+ @param[in] MtrrContext Pointer to context to restore\r
\r
**/\r
VOID\r
PostMtrrChange (\r
- IN UINTN Cr4\r
+ IN MTRR_CONTEXT *MtrrContext\r
)\r
{\r
//\r
//\r
AsmMsrBitFieldWrite64 (MTRR_LIB_IA32_MTRR_DEF_TYPE, 10, 11, 3);\r
\r
- PostMtrrChangeEnableCache (Cr4);\r
+ PostMtrrChangeEnableCache (MtrrContext);\r
}\r
\r
\r
IN VARIABLE_MTRR *VariableMtrr\r
)\r
{\r
- UINTN Index;\r
- UINTN Cr4;\r
- UINTN VariableMtrrCount;\r
+ UINTN Index;\r
+ UINTN VariableMtrrCount;\r
+ MTRR_CONTEXT MtrrContext;\r
\r
- Cr4 = PreMtrrChange ();\r
+ PreMtrrChange (&MtrrContext);\r
Index = 0;\r
VariableMtrrCount = GetVariableMtrrCount ();\r
while (Index < VariableMtrrCount) {\r
}\r
Index ++;\r
}\r
- PostMtrrChange (Cr4);\r
+ PostMtrrChange (&MtrrContext);\r
}\r
\r
\r
IN UINT64 MtrrValidAddressMask\r
)\r
{\r
- UINT64 TempQword;\r
- UINTN Cr4;\r
+ UINT64 TempQword;\r
+ MTRR_CONTEXT MtrrContext;\r
\r
- Cr4 = PreMtrrChange ();\r
+ PreMtrrChange (&MtrrContext);\r
\r
//\r
// MTRR Physical Base\r
(TempQword & MtrrValidAddressMask) | MTRR_LIB_CACHE_MTRR_ENABLED\r
);\r
\r
- PostMtrrChange (Cr4);\r
+ PostMtrrChange (&MtrrContext);\r
}\r
\r
\r
// MtrrType is MTRR_CACHE_INVALID_TYPE, that means\r
// no mtrr covers the range\r
//\r
- return CacheUncacheable;\r
+ return MtrrGetDefaultMemoryType ();\r
}\r
}\r
\r
*MtrrValidBitsMask = LShiftU64 (1, PhysicalAddressBits) - 1;\r
*MtrrValidAddressMask = *MtrrValidBitsMask & 0xfffffffffffff000ULL;\r
} else {\r
- *MtrrValidBitsMask = MTRR_LIB_CACHE_VALID_ADDRESS;\r
- *MtrrValidAddressMask = 0xFFFFFFFF;\r
+ *MtrrValidBitsMask = MTRR_LIB_MSR_VALID_MASK;\r
+ *MtrrValidAddressMask = MTRR_LIB_CACHE_VALID_ADDRESS;\r
}\r
}\r
\r
UINT32 UsedMtrr;\r
UINT64 MtrrValidBitsMask;\r
UINT64 MtrrValidAddressMask;\r
- UINTN Cr4;\r
BOOLEAN OverwriteExistingMtrr;\r
UINT32 FirmwareVariableMtrrCount;\r
UINT32 VariableMtrrEnd;\r
+ MTRR_CONTEXT MtrrContext;\r
\r
DEBUG((DEBUG_CACHE, "MtrrSetMemoryAttribute() %a:%016lx-%016lx\n", mMtrrMemoryCacheTypeShortName[Attribute], BaseAddress, Length));\r
\r
//\r
Status = RETURN_SUCCESS;\r
while ((BaseAddress < BASE_1MB) && (Length > 0) && Status == RETURN_SUCCESS) {\r
- Cr4 = PreMtrrChange ();\r
+ PreMtrrChange (&MtrrContext);\r
Status = ProgramFixedMtrr (MemoryType, &BaseAddress, &Length);\r
- PostMtrrChange (Cr4);\r
+ PostMtrrChange (&MtrrContext);\r
if (RETURN_ERROR (Status)) {\r
goto Done;\r
}\r
IN MTRR_VARIABLE_SETTINGS *VariableSettings\r
)\r
{\r
- UINTN Cr4;\r
+ MTRR_CONTEXT MtrrContext;\r
\r
if (!IsMtrrSupported ()) {\r
return VariableSettings;\r
}\r
\r
- Cr4 = PreMtrrChange ();\r
+ PreMtrrChange (&MtrrContext);\r
MtrrSetVariableMtrrWorker (VariableSettings);\r
- PostMtrrChange (Cr4);\r
+ PostMtrrChange (&MtrrContext);\r
return VariableSettings;\r
}\r
\r
IN MTRR_FIXED_SETTINGS *FixedSettings\r
)\r
{\r
- UINTN Cr4;\r
+ MTRR_CONTEXT MtrrContext;\r
\r
if (!IsMtrrSupported ()) {\r
return FixedSettings;\r
}\r
\r
- Cr4 = PreMtrrChange ();\r
+ PreMtrrChange (&MtrrContext);\r
MtrrSetFixedMtrrWorker (FixedSettings);\r
- PostMtrrChange (Cr4);\r
+ PostMtrrChange (&MtrrContext);\r
\r
return FixedSettings;\r
}\r
IN MTRR_SETTINGS *MtrrSetting\r
)\r
{\r
- UINTN Cr4;\r
+ MTRR_CONTEXT MtrrContext;\r
\r
if (!IsMtrrSupported ()) {\r
return MtrrSetting;\r
}\r
\r
- Cr4 = PreMtrrChange ();\r
+ PreMtrrChange (&MtrrContext);\r
\r
//\r
// Set fixed MTRRs\r
//\r
AsmWriteMsr64 (MTRR_LIB_IA32_MTRR_DEF_TYPE, MtrrSetting->MtrrDefType);\r
\r
- PostMtrrChangeEnableCache (Cr4);\r
+ PostMtrrChangeEnableCache (&MtrrContext);\r
\r
return MtrrSetting;\r
}\r
\r
VariableMtrrCount = GetVariableMtrrCount ();\r
\r
+ Limit = BIT36 - 1;\r
+ AsmCpuid (0x80000000, &RegEax, NULL, NULL, NULL);\r
+ if (RegEax >= 0x80000008) {\r
+ AsmCpuid (0x80000008, &RegEax, NULL, NULL, NULL);\r
+ Limit = LShiftU64 (1, RegEax & 0xff) - 1;\r
+ }\r
Base = BASE_1MB;\r
PreviousMemoryType = MTRR_CACHE_INVALID_TYPE;\r
do {\r
\r
RangeBase = BASE_1MB; \r
NoRangeBase = BASE_1MB;\r
- Limit = BIT36 - 1;\r
- AsmCpuid (0x80000000, &RegEax, NULL, NULL, NULL);\r
- if (RegEax >= 0x80000008) {\r
- AsmCpuid (0x80000008, &RegEax, NULL, NULL, NULL);\r
- Limit = LShiftU64 (1, RegEax & 0xff) - 1;\r
- }\r
RangeLimit = Limit;\r
NoRangeLimit = Limit;\r
\r
} else {\r
Base = NoRangeLimit + 1;\r
}\r
- } while (Found);\r
+ } while (Base < Limit);\r
DEBUG((DEBUG_CACHE, "%016lx\n\n", Base - 1));\r
);\r
}\r