/** @file

  Copyright (c) 2014 - 2021, Intel Corporation. All rights reserved.<BR>
  SPDX-License-Identifier: BSD-2-Clause-Patent

**/

#include <Uefi.h>
#include <Library/BaseLib.h>
#include <Library/CacheLib.h>
#include <Library/CacheAsRamLib.h>
#include "CacheLibInternal.h"

/**
  Search the memory cache type for specific memory from MTRR.

  @param[in]  MemoryAddress         the address of target memory
  @param[in]  MemoryLength          the length of target memory
  @param[in]  ValidMtrrAddressMask  the MTRR address mask
  @param[out] UsedMsrNum            the used MSR number
  @param[out] UsedMemoryCacheType   the cache type for the target memory

  @retval EFI_SUCCESS    The memory is found in MTRR and cache type is returned
  @retval EFI_NOT_FOUND  The memory is not found in MTRR

**/
EFI_STATUS
SearchForExactMtrr (
  IN  EFI_PHYSICAL_ADDRESS     MemoryAddress,
  IN  UINT64                   MemoryLength,
  IN  UINT64                   ValidMtrrAddressMask,
  OUT UINT32                   *UsedMsrNum,
  OUT EFI_MEMORY_CACHE_TYPE    *UsedMemoryCacheType
  );

/**
  Check if MemoryCacheType matches the current default MTRR setting.

  @param[in] MemoryCacheType  Input cache type to be checked.

  @retval TRUE   MemoryCacheType is the default MTRR setting.
  @retval FALSE  MemoryCacheType is NOT the default MTRR setting.
**/
BOOLEAN
IsDefaultType (
  IN EFI_MEMORY_CACHE_TYPE  MemoryCacheType
  );

/**
  Return MTRR alignment requirement for base address and size.

  @param[in] BaseAddress  Base address.
  @param[in] Size         Size.

  @retval Zero      Aligned.
  @retval Non-Zero  Not aligned.

**/
UINT32
CheckMtrrAlignment (
  IN UINT64  BaseAddress,
  IN UINT64  Size
  );

typedef struct {
  UINT32    Msr;
  UINT32    BaseAddress;
  UINT32    Length;
} EFI_FIXED_MTRR;

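//
// Each fixed-range MTRR MSR holds eight 8-bit type fields; field n controls
// caching for the Length-byte sub-range starting at BaseAddress + n * Length.
//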
EFI_FIXED_MTRR  mFixedMtrrTable[] = {
  { EFI_MSR_IA32_MTRR_FIX64K_00000, 0,       0x10000 },
  { EFI_MSR_IA32_MTRR_FIX16K_80000, 0x80000, 0x4000  },
  { EFI_MSR_IA32_MTRR_FIX16K_A0000, 0xA0000, 0x4000  },
  { EFI_MSR_IA32_MTRR_FIX4K_C0000,  0xC0000, 0x1000  },
  { EFI_MSR_IA32_MTRR_FIX4K_C8000,  0xC8000, 0x1000  },
  { EFI_MSR_IA32_MTRR_FIX4K_D0000,  0xD0000, 0x1000  },
  { EFI_MSR_IA32_MTRR_FIX4K_D8000,  0xD8000, 0x1000  },
  { EFI_MSR_IA32_MTRR_FIX4K_E0000,  0xE0000, 0x1000  },
  { EFI_MSR_IA32_MTRR_FIX4K_E8000,  0xE8000, 0x1000  },
  { EFI_MSR_IA32_MTRR_FIX4K_F0000,  0xF0000, 0x1000  },
  { EFI_MSR_IA32_MTRR_FIX4K_F8000,  0xF8000, 0x1000  }
};

/**
  Given the length of the memory range to program, check whether fewer MTRRs
  are needed by programming positively (covering the range directly) or
  subtractively (covering a larger range and carving out the remainder).

  @param[in] Input  Length of Memory to program MTRR.

  @retval Zero      Do positive.
  @retval Non-Zero  Do subtractive.

**/
INT8
CheckDirection (
  IN UINT64  Input
  )
{
  return 0;
}

/**
  Disable cache and its mtrr.

  @param[out] OldMtrr  To return the Old MTRR value

**/
VOID
EfiDisableCacheMtrr (
  OUT UINT64  *OldMtrr
  )
{
  UINT64  TempQword;

  //
  // Disable Cache MTRR
  //
  *OldMtrr  = AsmReadMsr64 (EFI_MSR_CACHE_IA32_MTRR_DEF_TYPE);
  TempQword = (*OldMtrr) & ~B_EFI_MSR_GLOBAL_MTRR_ENABLE & ~B_EFI_MSR_FIXED_MTRR_ENABLE;
  AsmWriteMsr64 (EFI_MSR_CACHE_IA32_MTRR_DEF_TYPE, TempQword);
  AsmDisableCache ();
}

/**
  Recover cache MTRR.

  @param[in] EnableMtrr  Whether to enable the MTRR
  @param[in] OldMtrr     The saved old MTRR value to restore when not to enable the MTRR

**/
VOID
EfiRecoverCacheMtrr (
  IN BOOLEAN  EnableMtrr,
  IN UINT64   OldMtrr
  )
{
  UINT64  TempQword;

  //
  // Enable Cache MTRR
  //
  if (EnableMtrr) {
    TempQword  = AsmReadMsr64 (EFI_MSR_CACHE_IA32_MTRR_DEF_TYPE);
    TempQword |= (UINT64)(B_EFI_MSR_GLOBAL_MTRR_ENABLE | B_EFI_MSR_FIXED_MTRR_ENABLE);
  } else {
    TempQword = OldMtrr;
  }

  AsmWriteMsr64 (EFI_MSR_CACHE_IA32_MTRR_DEF_TYPE, TempQword);

  AsmEnableCache ();
}

/**
  Program an MTRR according to the memory address, length, and cache type.

  @param[in] MtrrNumber            the variable MTRR index number
  @param[in] MemoryAddress         the address of target memory
  @param[in] MemoryLength          the length of target memory
  @param[in] MemoryCacheType       the cache type of target memory
  @param[in] ValidMtrrAddressMask  the MTRR address mask

**/
VOID
EfiProgramMtrr (
  IN UINT32                 MtrrNumber,
  IN EFI_PHYSICAL_ADDRESS   MemoryAddress,
  IN UINT64                 MemoryLength,
  IN EFI_MEMORY_CACHE_TYPE  MemoryCacheType,
  IN UINT64                 ValidMtrrAddressMask
  )
{
  UINT64  TempQword;
  UINT64  OldMtrr;

  if (MemoryLength == 0) {
    return;
  }

  EfiDisableCacheMtrr (&OldMtrr);

  //
  // MTRR Physical Base
  //
  TempQword = (MemoryAddress & ValidMtrrAddressMask) | MemoryCacheType;
  AsmWriteMsr64 (MtrrNumber, TempQword);

  //
  // MTRR Physical Mask
  //
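  // The mask computation below assumes MemoryLength is a power of two, so that
  // ~(MemoryLength - 1) yields a contiguous high-order mask; SetCacheAttributes
  // splits ranges into power-of-two chunks (via Power2MaxMemory) before
  // programming variable MTRRs.
  //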
  TempQword = ~(MemoryLength - 1);
  AsmWriteMsr64 (MtrrNumber + 1, (TempQword & ValidMtrrAddressMask) | B_EFI_MSR_CACHE_MTRR_VALID);

  EfiRecoverCacheMtrr (TRUE, OldMtrr);
}

/**
  Calculate the maximum power of two that is less than or equal to MemoryLength
  and that meets the MTRR alignment requirement for MemoryAddress.

  @param[in] MemoryAddress  Memory address.
  @param[in] MemoryLength   Length of the memory range.

  @return The maximum power of two that is less than or equal to MemoryLength
          and can be mapped at MemoryAddress.

**/
UINT64
Power2MaxMemory (
  IN UINT64  MemoryAddress,
  IN UINT64  MemoryLength
  )
{
  UINT64  Result;

  if (MemoryLength == 0) {
    return EFI_INVALID_PARAMETER;
  }

  //
  // Compute initial power of 2 size to return
  //
  Result = GetPowerOfTwo64 (MemoryLength);

  //
  // Special case base of 0 as all ranges are valid
  //
  if (MemoryAddress == 0) {
    return Result;
  }

  //
  // Loop till a value that can be mapped to this base address is found
  //
  while (CheckMtrrAlignment (MemoryAddress, Result) != 0) {
    //
    // Need to try the next smaller power of 2
    //
    Result = RShiftU64 (Result, 1);
  }

  return Result;
}

/**
  Return MTRR alignment requirement for base address and size.

  @param[in] BaseAddress  Base address.
  @param[in] Size         Size.

  @retval Zero      Aligned.
  @retval Non-Zero  Not aligned.

**/
UINT32
CheckMtrrAlignment (
  IN UINT64  BaseAddress,
  IN UINT64  Size
  )
{
  UINT32  ShiftedBase;
  UINT32  ShiftedSize;

  //
  // Shift base and size right 12 bits to allow for larger memory sizes. The
  // MTRRs do not use the first 12 bits so this is safe for now. Only supports
  // up to 52 bits of physical address space.
  //
  ShiftedBase = (UINT32)RShiftU64 (BaseAddress, 12);
  ShiftedSize = (UINT32)RShiftU64 (Size, 12);

  //
  // Return the remainder; zero means BaseAddress is aligned to Size.
  //
  return ShiftedBase % ShiftedSize;
}

/**
  Program the fixed MTRR registers.

  @param[in]      MemoryCacheType  The memory type to set.
  @param[in, out] Base             The base address of the memory range; advanced
                                   past the portion that was programmed.
  @param[in, out] Len              The length of the memory range; reduced by the
                                   amount that was programmed.

  @retval EFI_SUCCESS       The cache type was updated successfully.
  @retval EFI_DEVICE_ERROR  The requested range was invalid for the fixed MTRRs.

**/
EFI_STATUS
ProgramFixedMtrr (
  IN     EFI_MEMORY_CACHE_TYPE  MemoryCacheType,
  IN OUT UINT64                 *Base,
  IN OUT UINT64                 *Len
  )
{
  UINT32  MsrNum;
  UINT32  ByteShift;
  UINT64  TempQword;
  UINT64  OrMask;
  UINT64  ClearMask;

  TempQword = 0;
  OrMask    = 0;
  ClearMask = 0;

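  //
  // Locate the fixed-range MTRR MSR whose eight type fields cover *Base.
  //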
  for (MsrNum = 0; MsrNum < V_EFI_FIXED_MTRR_NUMBER; MsrNum++) {
    if ((*Base >= mFixedMtrrTable[MsrNum].BaseAddress) &&
        (*Base < (mFixedMtrrTable[MsrNum].BaseAddress + 8 * mFixedMtrrTable[MsrNum].Length)))
    {
      break;
    }
  }

  if (MsrNum == V_EFI_FIXED_MTRR_NUMBER) {
    return EFI_DEVICE_ERROR;
  }

  //
  // We found the fixed MTRR to be programmed
  //
  for (ByteShift = 0; ByteShift < 8; ByteShift++) {
    if (*Base == (mFixedMtrrTable[MsrNum].BaseAddress + ByteShift * mFixedMtrrTable[MsrNum].Length)) {
      break;
    }
  }

  if (ByteShift == 8) {
    return EFI_DEVICE_ERROR;
  }

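  //
  // Build the per-byte type pattern (OrMask) and clear pattern (ClearMask) for
  // every sub-range fully covered by the request, consuming *Len and advancing
  // *Base as each sub-range is accounted for.
  //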
  for ( ; ((ByteShift < 8) && (*Len >= mFixedMtrrTable[MsrNum].Length)); ByteShift++) {
    OrMask    |= LShiftU64 ((UINT64)MemoryCacheType, (UINT32)(ByteShift * 8));
    ClearMask |= LShiftU64 ((UINT64)0xFF, (UINT32)(ByteShift * 8));
    *Len      -= mFixedMtrrTable[MsrNum].Length;
    *Base     += mFixedMtrrTable[MsrNum].Length;
  }

  TempQword = (AsmReadMsr64 (mFixedMtrrTable[MsrNum].Msr) & (~ClearMask)) | OrMask;
  AsmWriteMsr64 (mFixedMtrrTable[MsrNum].Msr, TempQword);

  return EFI_SUCCESS;
}

/**
  Check if there is a valid variable MTRR that overlaps the given range.

  @param[in] Start  Base Address of the range to check.
  @param[in] End    End address of the range to check.

  @retval TRUE   Mtrr overlap.
  @retval FALSE  Mtrr not overlap.
**/
BOOLEAN
CheckMtrrOverlap (
  IN EFI_PHYSICAL_ADDRESS  Start,
  IN EFI_PHYSICAL_ADDRESS  End
  )
{
  return FALSE;
}

/**
  Given the memory range and cache type, programs the MTRRs.

  @param[in] MemoryAddress    Base Address of Memory to program MTRR.
  @param[in] MemoryLength     Length of Memory to program MTRR.
  @param[in] MemoryCacheType  Cache Type.

  @retval EFI_SUCCESS            MTRRs are set successfully.
  @retval EFI_LOAD_ERROR         No empty MTRRs to use.
  @retval EFI_INVALID_PARAMETER  The input parameter is not valid.
  @retval others                 An error occurred when setting the MTRRs.

**/
EFI_STATUS
EFIAPI
SetCacheAttributes (
  IN EFI_PHYSICAL_ADDRESS   MemoryAddress,
  IN UINT64                 MemoryLength,
  IN EFI_MEMORY_CACHE_TYPE  MemoryCacheType
  )
{
  EFI_STATUS             Status;
  UINT32                 MsrNum, MsrNumEnd;
  UINT64                 TempQword;
  UINT32                 LastVariableMtrrForBios;
  UINT64                 OldMtrr;
  UINT32                 UsedMsrNum;
  EFI_MEMORY_CACHE_TYPE  UsedMemoryCacheType;
  UINT64                 ValidMtrrAddressMask;
  UINT32                 Cpuid_RegEax;

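  //
  // Derive the valid MTRR address mask from the CPU's physical address width
  // (CPUID leaf 80000008h, EAX[7:0]), falling back to 36 address bits when the
  // extended leaf is not available.
  //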
  AsmCpuid (CPUID_EXTENDED_FUNCTION, &Cpuid_RegEax, NULL, NULL, NULL);
  if (Cpuid_RegEax >= CPUID_VIR_PHY_ADDRESS_SIZE) {
    AsmCpuid (CPUID_VIR_PHY_ADDRESS_SIZE, &Cpuid_RegEax, NULL, NULL, NULL);
    ValidMtrrAddressMask = (LShiftU64 ((UINT64)1, (Cpuid_RegEax & 0xFF)) - 1) & (~(UINT64)0x0FFF);
  } else {
    ValidMtrrAddressMask = (LShiftU64 ((UINT64)1, 36) - 1) & (~(UINT64)0x0FFF);
  }

  //
  // Check for invalid parameter
  //
  if (((MemoryAddress & ~ValidMtrrAddressMask) != 0) || ((MemoryLength & ~ValidMtrrAddressMask) != 0)) {
    return EFI_INVALID_PARAMETER;
  }

  if (MemoryLength == 0) {
    return EFI_INVALID_PARAMETER;
  }

  switch (MemoryCacheType) {
    case EFI_CACHE_UNCACHEABLE:
    case EFI_CACHE_WRITECOMBINING:
    case EFI_CACHE_WRITETHROUGH:
    case EFI_CACHE_WRITEPROTECTED:
    case EFI_CACHE_WRITEBACK:
      break;

    default:
      return EFI_INVALID_PARAMETER;
  }

  //
  // Check if Fixed MTRR
  //
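  // Ranges that fall entirely below 1 MB are handled by the fixed-range MTRRs.
  //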
  if ((MemoryAddress + MemoryLength) <= (1 << 20)) {
    Status = EFI_SUCCESS;
    EfiDisableCacheMtrr (&OldMtrr);
    while ((MemoryLength > 0) && (Status == EFI_SUCCESS)) {
      Status = ProgramFixedMtrr (MemoryCacheType, &MemoryAddress, &MemoryLength);
    }

    EfiRecoverCacheMtrr (TRUE, OldMtrr);
    return Status;
  }

  //
  // Search if the range attribute has been set before
  //
  Status = SearchForExactMtrr (
             MemoryAddress,
             MemoryLength,
             ValidMtrrAddressMask,
             &UsedMsrNum,
             &UsedMemoryCacheType
             );

  if (!EFI_ERROR (Status)) {
    //
    // Compare if it has the same type as current setting
    //
    if (UsedMemoryCacheType == MemoryCacheType) {
      return EFI_SUCCESS;
    } else {
      //
      // Different type
      //

      //
      // Check if the set type is the same as Default Type
      //
      if (IsDefaultType (MemoryCacheType)) {
        //
        // Clear the MTRR
        //
        AsmWriteMsr64 (UsedMsrNum, 0);
        AsmWriteMsr64 (UsedMsrNum + 1, 0);

        return EFI_SUCCESS;
      } else {
        //
        // Modify the MTRR type
        //
        EfiProgramMtrr (
          UsedMsrNum,
          MemoryAddress,
          MemoryLength,
          MemoryCacheType,
          ValidMtrrAddressMask
          );
        return EFI_SUCCESS;
      }
    }
  }

#if 0
  //
  // @bug - Need to create memory map so that when checking for overlap we
  //        can determine if an overlap exists based on all caching requests.
  //
  // Don't waste a variable MTRR if the caching attrib is same as default in MTRR_DEF_TYPE
  //
  if (MemoryCacheType == (AsmReadMsr64 (EFI_MSR_CACHE_IA32_MTRR_DEF_TYPE) & B_EFI_MSR_CACHE_MEMORY_TYPE)) {
    if (!CheckMtrrOverlap (MemoryAddress, MemoryAddress+MemoryLength-1)) {
      return EFI_SUCCESS;
    }
  }

#endif

  //
  // Find first unused MTRR
  //
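  // The VCNT field of IA32_MTRR_CAP gives the number of variable MTRR pairs;
  // each pair occupies two consecutive MSRs (PHYSBASE at MsrNum, PHYSMASK at
  // MsrNum + 1), and a pair is free when its PHYSMASK valid bit is clear.
  //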
  MsrNumEnd = EFI_MSR_CACHE_VARIABLE_MTRR_BASE + (2 * (UINT32)(AsmReadMsr64 (EFI_MSR_IA32_MTRR_CAP) & B_EFI_MSR_IA32_MTRR_CAP_VARIABLE_SUPPORT));
  for (MsrNum = EFI_MSR_CACHE_VARIABLE_MTRR_BASE; MsrNum < MsrNumEnd; MsrNum += 2) {
    if ((AsmReadMsr64 (MsrNum+1) & B_EFI_MSR_CACHE_MTRR_VALID) == 0) {
      break;
    }
  }

  //
  // Reserve variable MTRR pairs for OS use (EFI_CACHE_NUM_VAR_MTRR_PAIRS_FOR_OS).
  //
  LastVariableMtrrForBios = MsrNumEnd - 1 - (EFI_CACHE_NUM_VAR_MTRR_PAIRS_FOR_OS * 2);
  if (MsrNum > LastVariableMtrrForBios) {
    return EFI_LOAD_ERROR;
  }

  //
  // Special case for 1 MB base address
  //
  if (MemoryAddress == BASE_1MB) {
    MemoryAddress = 0;
  }

  //
  // Program MTRRs
  //
  TempQword = MemoryLength;

  if (TempQword == Power2MaxMemory (MemoryAddress, TempQword)) {
    EfiProgramMtrr (
      MsrNum,
      MemoryAddress,
      MemoryLength,
      MemoryCacheType,
      ValidMtrrAddressMask
      );
  } else {
    //
    // Fill in MTRRs with values. Direction can not be checked for this method
    // as we are using WB as the default cache type and only setting areas to UC.
    //
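    // Illustrative example: a 0xC0000000-byte range at base 0 is split into a
    // 0x80000000-byte chunk followed by a 0x40000000-byte chunk, each mapped
    // by its own variable MTRR pair.
    //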
    do {
      //
      // Do boundary check so we don't go past last MTRR register
      // for BIOS use. Leave one MTRR pair for OS use.
      //
      if (MsrNum > LastVariableMtrrForBios) {
        return EFI_LOAD_ERROR;
      }

      //
      // Set next power of 2 region
      //
      MemoryLength = Power2MaxMemory (MemoryAddress, TempQword);
      EfiProgramMtrr (
        MsrNum,
        MemoryAddress,
        MemoryLength,
        MemoryCacheType,
        ValidMtrrAddressMask
        );
      MemoryAddress += MemoryLength;
      TempQword     -= MemoryLength;
      MsrNum        += 2;
    } while (TempQword != 0);
  }

  return EFI_SUCCESS;
}

/**
  Reset all the MTRRs to a known state.

  @retval EFI_SUCCESS  All MTRRs have been reset successfully.

**/
EFI_STATUS
EFIAPI
ResetCacheAttributes (
  VOID
  )
{
  UINT32   MsrNum, MsrNumEnd;
  UINT16   Index;
  UINT64   OldMtrr;
  UINT64   CacheType;
  BOOLEAN  DisableCar;

  Index      = 0;
  DisableCar = TRUE;

  //
  // Determine default cache type
  //
  CacheType = EFI_CACHE_UNCACHEABLE;

  //
  // Set default cache type
  //
  AsmWriteMsr64 (EFI_MSR_CACHE_IA32_MTRR_DEF_TYPE, CacheType);

  //
  // Disable CAR
  //
  DisableCacheAsRam (DisableCar);

  EfiDisableCacheMtrr (&OldMtrr);

  //
  // Reset Fixed MTRRs
  //
  for (Index = 0; Index < V_EFI_FIXED_MTRR_NUMBER; Index++) {
    AsmWriteMsr64 (mFixedMtrrTable[Index].Msr, 0);
  }

  //
  // Reset Variable MTRRs
  //
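  // The loop below steps one MSR at a time, so both the PHYSBASE and PHYSMASK
  // MSRs of every variable MTRR pair are cleared.
  //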
  MsrNumEnd = EFI_MSR_CACHE_VARIABLE_MTRR_BASE + (2 * (UINT32)(AsmReadMsr64 (EFI_MSR_IA32_MTRR_CAP) & B_EFI_MSR_IA32_MTRR_CAP_VARIABLE_SUPPORT));
  for (MsrNum = EFI_MSR_CACHE_VARIABLE_MTRR_BASE; MsrNum < MsrNumEnd; MsrNum++) {
    AsmWriteMsr64 (MsrNum, 0);
  }

  //
  // Enable Fixed and Variable MTRRs
  //
  EfiRecoverCacheMtrr (TRUE, OldMtrr);

  return EFI_SUCCESS;
}

/**
  Search the memory cache type for specific memory from MTRR.

  @param[in]  MemoryAddress         the address of target memory
  @param[in]  MemoryLength          the length of target memory
  @param[in]  ValidMtrrAddressMask  the MTRR address mask
  @param[out] UsedMsrNum            the used MSR number
  @param[out] UsedMemoryCacheType   the cache type for the target memory

  @retval EFI_SUCCESS    The memory is found in MTRR and cache type is returned
  @retval EFI_NOT_FOUND  The memory is not found in MTRR

**/
EFI_STATUS
SearchForExactMtrr (
  IN  EFI_PHYSICAL_ADDRESS     MemoryAddress,
  IN  UINT64                   MemoryLength,
  IN  UINT64                   ValidMtrrAddressMask,
  OUT UINT32                   *UsedMsrNum,
  OUT EFI_MEMORY_CACHE_TYPE    *UsedMemoryCacheType
  )
{
  UINT32  MsrNum, MsrNumEnd;
  UINT64  TempQword;

  if (MemoryLength == 0) {
    return EFI_INVALID_PARAMETER;
  }

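  //
  // Walk the variable MTRR pairs: a pair matches when its PHYSMASK MSR is
  // valid and encodes the same power-of-two length, and its PHYSBASE MSR
  // matches the requested base address.
  //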
  MsrNumEnd = EFI_MSR_CACHE_VARIABLE_MTRR_BASE + (2 * (UINT32)(AsmReadMsr64 (EFI_MSR_IA32_MTRR_CAP) & B_EFI_MSR_IA32_MTRR_CAP_VARIABLE_SUPPORT));
  for (MsrNum = EFI_MSR_CACHE_VARIABLE_MTRR_BASE; MsrNum < MsrNumEnd; MsrNum += 2) {
    TempQword = AsmReadMsr64 (MsrNum+1);
    if ((TempQword & B_EFI_MSR_CACHE_MTRR_VALID) == 0) {
      continue;
    }

    if ((TempQword & ValidMtrrAddressMask) != ((~(MemoryLength - 1)) & ValidMtrrAddressMask)) {
      continue;
    }

    TempQword = AsmReadMsr64 (MsrNum);
    if ((TempQword & ValidMtrrAddressMask) != (MemoryAddress & ValidMtrrAddressMask)) {
      continue;
    }

    *UsedMemoryCacheType = (EFI_MEMORY_CACHE_TYPE)(TempQword & B_EFI_MSR_CACHE_MEMORY_TYPE);
    *UsedMsrNum          = MsrNum;

    return EFI_SUCCESS;
  }

  return EFI_NOT_FOUND;
}

/**
  Check if MemoryCacheType matches the current default MTRR setting.

  @param[in] MemoryCacheType  Input cache type to be checked.

  @retval TRUE   MemoryCacheType is the default MTRR setting.
  @retval FALSE  MemoryCacheType is NOT the default MTRR setting.
**/
BOOLEAN
IsDefaultType (
  IN EFI_MEMORY_CACHE_TYPE  MemoryCacheType
  )
{
  if ((AsmReadMsr64 (EFI_MSR_CACHE_IA32_MTRR_DEF_TYPE) & B_EFI_MSR_CACHE_MEMORY_TYPE) != MemoryCacheType) {
    return FALSE;
  }

  return TRUE;
}