/** @file\r
MTRR setting library\r
\r
- Copyright (c) 2008 - 2016, Intel Corporation. All rights reserved.<BR>\r
+ @par Note: \r
+ Most of the services in this library instance should be invoked by the BSP only,
+ except for MtrrSetAllMtrrs() which is used to sync BSP's MTRR setting to APs.\r
+\r
+ Copyright (c) 2008 - 2017, Intel Corporation. All rights reserved.<BR>\r
This program and the accompanying materials\r
are licensed and made available under the terms and conditions of the BSD License\r
which accompanies this distribution. The full text of the license may be found at\r
\r
#include <Base.h>\r
\r
+#include <Register/Cpuid.h>\r
+#include <Register/Msr.h>\r
+\r
#include <Library/MtrrLib.h>\r
#include <Library/BaseLib.h>\r
#include <Library/CpuLib.h>\r
#include <Library/BaseMemoryLib.h>\r
#include <Library/DebugLib.h>\r
\r
+#define OR_SEED 0x0101010101010101ull\r
+#define CLEAR_SEED 0xFFFFFFFFFFFFFFFFull\r
+\r
//\r
// Context to save and restore when MTRRs are programmed\r
//\r
)\r
{\r
UINT32 MsrNum;\r
- UINT32 ByteShift;\r
+ UINT32 LeftByteShift;\r
+ UINT32 RightByteShift;\r
UINT64 OrMask;\r
UINT64 ClearMask;\r
+ UINT64 SubLength;\r
\r
- OrMask = 0;\r
- ClearMask = 0;\r
-\r
+ //\r
+ // Find the fixed MTRR index to be programmed\r
+ //\r
for (MsrNum = *LastMsrNum + 1; MsrNum < MTRR_NUMBER_OF_FIXED_MTRR; MsrNum++) {\r
if ((*Base >= mMtrrLibFixedMtrrTable[MsrNum].BaseAddress) &&\r
(*Base <\r
}\r
\r
//\r
- // We found the fixed MTRR to be programmed\r
+ // Find the beginning offset in the fixed MTRR and calculate the byte offset of the left shift
//\r
- ByteShift = ((UINT32)*Base - mMtrrLibFixedMtrrTable[MsrNum].BaseAddress)\r
+ LeftByteShift = ((UINT32)*Base - mMtrrLibFixedMtrrTable[MsrNum].BaseAddress)\r
/ mMtrrLibFixedMtrrTable[MsrNum].Length;\r
\r
- if (ByteShift >= 8) {\r
+ if (LeftByteShift >= 8) {\r
return RETURN_UNSUPPORTED;\r
}\r
\r
- for (\r
- ;\r
- ((ByteShift < 8) && (*Length >= mMtrrLibFixedMtrrTable[MsrNum].Length));\r
- ByteShift++\r
- ) {\r
- OrMask |= LShiftU64 ((UINT64) MemoryCacheType, (UINT32) (ByteShift * 8));\r
- ClearMask |= LShiftU64 ((UINT64) 0xFF, (UINT32) (ByteShift * 8));\r
- *Length -= mMtrrLibFixedMtrrTable[MsrNum].Length;\r
- *Base += mMtrrLibFixedMtrrTable[MsrNum].Length;\r
+ //\r
+ // Find the end offset in fixed MTRR and calculate byte offset of right shift\r
+ //\r
+ SubLength = mMtrrLibFixedMtrrTable[MsrNum].Length * (8 - LeftByteShift);\r
+ if (*Length >= SubLength) {\r
+ RightByteShift = 0;\r
+ } else {\r
+ RightByteShift = 8 - LeftByteShift -\r
+ (UINT32)(*Length) / mMtrrLibFixedMtrrTable[MsrNum].Length;\r
+ if ((LeftByteShift >= 8) ||\r
+ (((UINT32)(*Length) % mMtrrLibFixedMtrrTable[MsrNum].Length) != 0)\r
+ ) {\r
+ return RETURN_UNSUPPORTED;\r
+ }\r
+ //\r
+ // Update SubLength with the actual remaining length
+ //\r
+ SubLength = *Length;\r
+ }\r
+\r
+ ClearMask = CLEAR_SEED;\r
+ OrMask = MultU64x32 (OR_SEED, (UINT32)MemoryCacheType);\r
+\r
+ if (LeftByteShift != 0) {\r
+ //\r
+ // Clear the low bits by LeftByteShift\r
+ //\r
+ ClearMask &= LShiftU64 (ClearMask, LeftByteShift * 8);\r
+ OrMask &= LShiftU64 (OrMask, LeftByteShift * 8);\r
}\r
\r
- if (ByteShift < 8 && (*Length != 0)) {\r
- return RETURN_UNSUPPORTED;\r
+ if (RightByteShift != 0) {\r
+ //\r
+ // Clear the high bits by RightByteShift\r
+ //\r
+ ClearMask &= RShiftU64 (ClearMask, RightByteShift * 8);\r
+ OrMask &= RShiftU64 (OrMask, RightByteShift * 8);\r
}\r
\r
+ *Length -= SubLength;\r
+ *Base += SubLength;\r
+\r
*LastMsrNum = MsrNum;\r
*ReturnClearMask = ClearMask;\r
*ReturnOrMask = OrMask;\r
@param[in] MtrrValidAddressMask The valid address mask for MTRR\r
@param[out] VariableMtrr The array to shadow variable MTRRs content\r
\r
- @return The return value of this paramter indicates the\r
+ @return The return value of this parameter indicates the\r
number of MTRRs which has been used.\r
\r
**/\r
continue;\r
} else {\r
//\r
- // The cache type is different, but the range is convered by one MTRR\r
+ // The cache type is different, but the range is covered by one MTRR\r
//\r
if (VariableMtrr[Index].BaseAddress == *Base && MtrrEnd == EndAddress) {\r
InvalidateShadowMtrr (Index, VariableMtrr, UsedMtrr);\r
/**\r
Worker function prints all MTRRs for debugging.\r
\r
- If MtrrSetting is not NULL, print MTRR settings from from input MTRR\r
+ If MtrrSetting is not NULL, print MTRR settings from input MTRR\r
settings buffer.\r
If MtrrSetting is NULL, print MTRR settings from MTRRs.\r
\r
\r
PostMtrrChangeEnableCache (&MtrrContext);\r
\r
- MtrrDebugPrintAllMtrrs ();\r
-\r
return MtrrSetting;\r
}\r
\r
VOID\r
)\r
{\r
- UINT32 RegEdx;\r
- UINT64 MtrrCap;\r
+ CPUID_VERSION_INFO_EDX Edx;\r
+ MSR_IA32_MTRRCAP_REGISTER MtrrCap;\r
\r
//\r
// Check CPUID(1).EDX[12] for MTRR capability\r
//\r
- AsmCpuid (1, NULL, NULL, NULL, &RegEdx);\r
- if (BitFieldRead32 (RegEdx, 12, 12) == 0) {\r
+ AsmCpuid (CPUID_VERSION_INFO, NULL, NULL, NULL, &Edx.Uint32);\r
+ if (Edx.Bits.MTRR == 0) {\r
return FALSE;\r
}\r
\r
//\r
- // Check IA32_MTRRCAP.[0..7] for number of variable MTRRs and IA32_MTRRCAP[8] for\r
- // fixed MTRRs existence. If number of variable MTRRs is zero, or fixed MTRRs do not\r
+ // Check number of variable MTRRs and fixed MTRRs existence.\r
+ // If number of variable MTRRs is zero, or fixed MTRRs do not\r
// exist, return false.\r
//\r
- MtrrCap = AsmReadMsr64 (MTRR_LIB_IA32_MTRR_CAP);\r
- if ((BitFieldRead64 (MtrrCap, 0, 7) == 0) || (BitFieldRead64 (MtrrCap, 8, 8) == 0)) {\r
+ MtrrCap.Uint64 = AsmReadMsr64 (MSR_IA32_MTRRCAP);\r
+ if ((MtrrCap.Bits.VCNT == 0) || (MtrrCap.Bits.FIX == 0)) {\r
return FALSE;\r
}\r
-\r
return TRUE;\r
}\r