/** @file

  Copyright (c) 2014 - 2015, Intel Corporation. All rights reserved.<BR>
  SPDX-License-Identifier: BSD-2-Clause-Patent

**/

#include <Uefi.h>
#include <Library/BaseLib.h>
#include <Library/CacheLib.h>
#include <Library/CacheAsRamLib.h>
#include "CacheLibInternal.h"

/**
  Search the memory cache type for specific memory from MTRR.

  @param[in]  MemoryAddress         the address of target memory
  @param[in]  MemoryLength          the length of target memory
  @param[in]  ValidMtrrAddressMask  the MTRR address mask
  @param[out] UsedMsrNum            the used MSR number
  @param[out] UsedMemoryCacheType   the cache type for the target memory

  @retval EFI_SUCCESS    The memory is found in MTRR and cache type is returned
  @retval EFI_NOT_FOUND  The memory is not found in MTRR

**/
EFI_STATUS
SearchForExactMtrr (
  IN  EFI_PHYSICAL_ADDRESS   MemoryAddress,
  IN  UINT64                 MemoryLength,
  IN  UINT64                 ValidMtrrAddressMask,
  OUT UINT32                 *UsedMsrNum,
  OUT EFI_MEMORY_CACHE_TYPE  *UsedMemoryCacheType
  );

/**
  Check if CacheType matches the current default MTRR setting.

  @param[in] MemoryCacheType  input cache type to be checked.

  @retval TRUE   MemoryCacheType is the default MTRR setting.
  @retval FALSE  MemoryCacheType is NOT the default MTRR setting.
**/
BOOLEAN
IsDefaultType (
  IN EFI_MEMORY_CACHE_TYPE  MemoryCacheType
  );

/**
  Return MTRR alignment requirement for base address and size.

  @param[in] BaseAddress  Base address.
  @param[in] Size         Size.

  @retval Zero      Aligned.
  @retval Non-Zero  Not aligned.

**/
UINT32
CheckMtrrAlignment (
  IN UINT64  BaseAddress,
  IN UINT64  Size
  );

typedef struct {
  UINT32  Msr;
  UINT32  BaseAddress;
  UINT32  Length;
} EFI_FIXED_MTRR;

EFI_FIXED_MTRR  mFixedMtrrTable[] = {
  { EFI_MSR_IA32_MTRR_FIX64K_00000, 0,       0x10000 },
  { EFI_MSR_IA32_MTRR_FIX16K_80000, 0x80000, 0x4000  },
  { EFI_MSR_IA32_MTRR_FIX16K_A0000, 0xA0000, 0x4000  },
  { EFI_MSR_IA32_MTRR_FIX4K_C0000,  0xC0000, 0x1000  },
  { EFI_MSR_IA32_MTRR_FIX4K_C8000,  0xC8000, 0x1000  },
  { EFI_MSR_IA32_MTRR_FIX4K_D0000,  0xD0000, 0x1000  },
  { EFI_MSR_IA32_MTRR_FIX4K_D8000,  0xD8000, 0x1000  },
  { EFI_MSR_IA32_MTRR_FIX4K_E0000,  0xE0000, 0x1000  },
  { EFI_MSR_IA32_MTRR_FIX4K_E8000,  0xE8000, 0x1000  },
  { EFI_MSR_IA32_MTRR_FIX4K_F0000,  0xF0000, 0x1000  },
  { EFI_MSR_IA32_MTRR_FIX4K_F8000,  0xF8000, 0x1000  }
};
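
//
// Each fixed-range MTRR above packs eight one-byte cache-type fields, so one
// MSR covers eight consecutive sub-ranges of Length bytes (for example,
// EFI_MSR_IA32_MTRR_FIX64K_00000 covers 0x00000..0x7FFFF in 64 KB chunks).
//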

/**
  Given the input memory length, check whether fewer MTRRs are needed when the
  range is programmed positively (adding cacheable regions) or subtractively
  (carving regions out of a larger power-of-two region).

  @param[in] Input  Length of Memory to program MTRR.

  @retval Zero      do positive.
  @retval Non-Zero  do subtractive.

**/
INT8
CheckDirection (
  IN UINT64  Input
  )
{
  return 0;
}

/**
  Disable cache and its MTRR.

  @param[out] OldMtrr  To return the Old MTRR value

**/
VOID
EfiDisableCacheMtrr (
  OUT UINT64  *OldMtrr
  )
{
  UINT64  TempQword;

  //
  // Disable Cache MTRR
  //
  *OldMtrr = AsmReadMsr64 (EFI_MSR_CACHE_IA32_MTRR_DEF_TYPE);
  TempQword = (*OldMtrr) & ~B_EFI_MSR_GLOBAL_MTRR_ENABLE & ~B_EFI_MSR_FIXED_MTRR_ENABLE;
  AsmWriteMsr64 (EFI_MSR_CACHE_IA32_MTRR_DEF_TYPE, TempQword);
  AsmDisableCache ();
}

/**
  Recover cache MTRR.

  @param[in] EnableMtrr  Whether to enable the MTRR
  @param[in] OldMtrr     The saved old MTRR value to restore when not to enable the MTRR

**/
VOID
EfiRecoverCacheMtrr (
  IN BOOLEAN  EnableMtrr,
  IN UINT64   OldMtrr
  )
{
  UINT64  TempQword;

  //
  // Enable Cache MTRR
  //
  if (EnableMtrr) {
    TempQword = AsmReadMsr64 (EFI_MSR_CACHE_IA32_MTRR_DEF_TYPE);
    TempQword |= (UINT64)(B_EFI_MSR_GLOBAL_MTRR_ENABLE | B_EFI_MSR_FIXED_MTRR_ENABLE);
  } else {
    TempQword = OldMtrr;
  }

  AsmWriteMsr64 (EFI_MSR_CACHE_IA32_MTRR_DEF_TYPE, TempQword);

  AsmEnableCache ();
}

/**
  Program the variable MTRR according to memory address, length, and type.

  @param[in] MtrrNumber            the variable MTRR index number
  @param[in] MemoryAddress         the address of target memory
  @param[in] MemoryLength          the length of target memory
  @param[in] MemoryCacheType       the cache type of target memory
  @param[in] ValidMtrrAddressMask  the MTRR address mask

**/
VOID
EfiProgramMtrr (
  IN UINTN                  MtrrNumber,
  IN EFI_PHYSICAL_ADDRESS   MemoryAddress,
  IN UINT64                 MemoryLength,
  IN EFI_MEMORY_CACHE_TYPE  MemoryCacheType,
  IN UINT64                 ValidMtrrAddressMask
  )
{
  UINT64  TempQword;
  UINT64  OldMtrr;

  if (MemoryLength == 0) {
    return;
  }

  EfiDisableCacheMtrr (&OldMtrr);

  //
  // MTRR Physical Base
  //
  TempQword = (MemoryAddress & ValidMtrrAddressMask) | MemoryCacheType;
  AsmWriteMsr64 (MtrrNumber, TempQword);

  //
  // MTRR Physical Mask
  //
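  // The mask is the complement of (MemoryLength - 1), limited to the valid
  // address bits, so an address hits this MTRR only when its upper bits match
  // the programmed base (e.g. a 16 MB range gives ~(0x1000000 - 1), i.e. all
  // address bits above bit 23 must match).
  //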
  TempQword = ~(MemoryLength - 1);
  AsmWriteMsr64 (MtrrNumber + 1, (TempQword & ValidMtrrAddressMask) | B_EFI_MSR_CACHE_MTRR_VALID);

  EfiRecoverCacheMtrr (TRUE, OldMtrr);
}

/**
  Calculate the maximum value which is a power of 2, is not greater than
  MemoryLength, and keeps MemoryAddress aligned on the returned size.

  @param[in] MemoryAddress  Memory address.
  @param[in] MemoryLength   The number to pass in.

  @return The maximum power-of-2 value that is not greater than MemoryLength.

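  Example (illustrative only): Power2MaxMemory (0x120000, 0xA0000) returns
  0x20000, the largest power of two not exceeding 0xA0000 on which 0x120000
  is still aligned.
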
**/
UINT64
Power2MaxMemory (
  IN UINT64  MemoryAddress,
  IN UINT64  MemoryLength
  )
{
  UINT64  Result;

  if (MemoryLength == 0) {
    return EFI_INVALID_PARAMETER;
  }

  //
  // Compute initial power of 2 size to return
  //
  Result = GetPowerOfTwo64 (MemoryLength);

  //
  // Special case base of 0 as all ranges are valid
  //
  if (MemoryAddress == 0) {
    return Result;
  }

  //
  // Loop till a value that can be mapped to this base address is found
  //
  while (CheckMtrrAlignment (MemoryAddress, Result) != 0) {
    //
    // Need to try the next smaller power of 2
    //
    Result = RShiftU64 (Result, 1);
  }

  return Result;
}

/**
  Return MTRR alignment requirement for base address and size.

  @param[in] BaseAddress  Base address.
  @param[in] Size         Size.

  @retval Zero      Aligned.
  @retval Non-Zero  Not aligned.

**/
UINT32
CheckMtrrAlignment (
  IN UINT64  BaseAddress,
  IN UINT64  Size
  )
{
  UINT32  ShiftedBase;
  UINT32  ShiftedSize;

  //
  // Shift base and size right 12 bits to allow for larger memory sizes. The
  // MTRRs do not use the first 12 bits so this is safe for now. Only supports
  // up to 52 bits of physical address space.
  //
  ShiftedBase = (UINT32) RShiftU64 (BaseAddress, 12);
  ShiftedSize = (UINT32) RShiftU64 (Size, 12);

  //
  // Return the remainder of the MOD to the caller; zero means aligned.
  //
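  // For example (illustrative only), BaseAddress 0xC8000000 with Size
  // 0x10000000 gives 0xC8000 % 0x10000 = 0x8000, so that pairing is not
  // MTRR-aligned, while Size 0x8000000 gives 0xC8000 % 0x8000 = 0.
  //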
  return ShiftedBase % ShiftedSize;
}

/**
  Program the fixed MTRR registers.

  @param[in]      MemoryCacheType  The memory type to set.
  @param[in, out] Base             The base address of the memory range; updated
                                   past the sub-ranges that were programmed.
  @param[in, out] Len              The remaining length of the memory range;
                                   reduced by the amount that was programmed.

  @retval EFI_SUCCESS       The cache type was updated successfully.
  @retval EFI_DEVICE_ERROR  The requested range or cache type was invalid
                            for the fixed MTRRs.

**/
EFI_STATUS
ProgramFixedMtrr (
  IN     EFI_MEMORY_CACHE_TYPE  MemoryCacheType,
  IN OUT UINT64                 *Base,
  IN OUT UINT64                 *Len
  )
{
  UINT32  MsrNum;
  UINT32  ByteShift;
  UINT64  TempQword;
  UINT64  OrMask;
  UINT64  ClearMask;

  TempQword = 0;
  OrMask    = 0;
  ClearMask = 0;

  for (MsrNum = 0; MsrNum < V_EFI_FIXED_MTRR_NUMBER; MsrNum++) {
    if ((*Base >= mFixedMtrrTable[MsrNum].BaseAddress) &&
        (*Base < (mFixedMtrrTable[MsrNum].BaseAddress + 8 * mFixedMtrrTable[MsrNum].Length))) {
      break;
    }
  }
  if (MsrNum == V_EFI_FIXED_MTRR_NUMBER) {
    return EFI_DEVICE_ERROR;
  }
  //
  // We found the fixed MTRR to be programmed
  //
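  // Each fixed-range MTRR packs eight one-byte cache-type fields, one per
  // Length-sized sub-range; ByteShift below selects the field that covers
  // *Base within this MSR.
  //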
  for (ByteShift = 0; ByteShift < 8; ByteShift++) {
    if (*Base == (mFixedMtrrTable[MsrNum].BaseAddress + ByteShift * mFixedMtrrTable[MsrNum].Length)) {
      break;
    }
  }
  if (ByteShift == 8) {
    return EFI_DEVICE_ERROR;
  }
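  //
  // Program as many consecutive sub-ranges as fit in this MSR; *Base and *Len
  // are advanced so the caller can continue with the next fixed MTRR.
  //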
  for (; ((ByteShift < 8) && (*Len >= mFixedMtrrTable[MsrNum].Length)); ByteShift++) {
    OrMask    |= LShiftU64 ((UINT64) MemoryCacheType, (UINT32) (ByteShift * 8));
    ClearMask |= LShiftU64 ((UINT64) 0xFF, (UINT32) (ByteShift * 8));
    *Len  -= mFixedMtrrTable[MsrNum].Length;
    *Base += mFixedMtrrTable[MsrNum].Length;
  }
  TempQword = (AsmReadMsr64 (mFixedMtrrTable[MsrNum].Msr) & (~ClearMask)) | OrMask;
  AsmWriteMsr64 (mFixedMtrrTable[MsrNum].Msr, TempQword);

  return EFI_SUCCESS;
}

/**
  Check if there is a valid variable MTRR that overlaps the given range.

  @param[in] Start  Base address of the range to check.
  @param[in] End    End address of the range to check.

  @retval TRUE   An MTRR overlaps the range.
  @retval FALSE  No MTRR overlaps the range.
**/
BOOLEAN
CheckMtrrOverlap (
  IN EFI_PHYSICAL_ADDRESS  Start,
  IN EFI_PHYSICAL_ADDRESS  End
  )
{
  return FALSE;
}

/**
  Given the memory range and cache type, program the MTRRs.

  @param[in] MemoryAddress    Base address of memory to program MTRR.
  @param[in] MemoryLength     Length of memory to program MTRR.
  @param[in] MemoryCacheType  Cache Type.

  @retval EFI_SUCCESS            MTRRs are set successfully.
  @retval EFI_LOAD_ERROR         No empty MTRRs to use.
  @retval EFI_INVALID_PARAMETER  The input parameter is not valid.
  @retval others                 An error occurs when setting MTRR.

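  Example (illustrative only):
    Status = SetCacheAttributes (0xFE000000, 0x1000000, EFI_CACHE_UNCACHEABLE);
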
**/
EFI_STATUS
EFIAPI
SetCacheAttributes (
  IN EFI_PHYSICAL_ADDRESS   MemoryAddress,
  IN UINT64                 MemoryLength,
  IN EFI_MEMORY_CACHE_TYPE  MemoryCacheType
  )
{
  EFI_STATUS             Status;
  UINT32                 MsrNum, MsrNumEnd;
  UINT64                 TempQword;
  UINT32                 LastVariableMtrrForBios;
  UINT64                 OldMtrr;
  UINT32                 UsedMsrNum;
  EFI_MEMORY_CACHE_TYPE  UsedMemoryCacheType;
  UINT64                 ValidMtrrAddressMask;
  UINT32                 Cpuid_RegEax;

  AsmCpuid (CPUID_EXTENDED_FUNCTION, &Cpuid_RegEax, NULL, NULL, NULL);
  if (Cpuid_RegEax >= CPUID_VIR_PHY_ADDRESS_SIZE) {
    AsmCpuid (CPUID_VIR_PHY_ADDRESS_SIZE, &Cpuid_RegEax, NULL, NULL, NULL);
    ValidMtrrAddressMask = (LShiftU64 ((UINT64) 1, (Cpuid_RegEax & 0xFF)) - 1) & (~(UINT64) 0x0FFF);
  } else {
    ValidMtrrAddressMask = (LShiftU64 ((UINT64) 1, 36) - 1) & (~(UINT64) 0x0FFF);
  }
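  //
  // With a 36-bit physical address space, for example, ValidMtrrAddressMask
  // is 0xFFFFFF000: only bits 12 through 35 are significant in the MTRR
  // base and mask values.
  //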

  //
  // Check for invalid parameter
  //
  if ((MemoryAddress & ~ValidMtrrAddressMask) != 0 || (MemoryLength & ~ValidMtrrAddressMask) != 0) {
    return EFI_INVALID_PARAMETER;
  }

  if (MemoryLength == 0) {
    return EFI_INVALID_PARAMETER;
  }

  switch (MemoryCacheType) {
    case EFI_CACHE_UNCACHEABLE:
    case EFI_CACHE_WRITECOMBINING:
    case EFI_CACHE_WRITETHROUGH:
    case EFI_CACHE_WRITEPROTECTED:
    case EFI_CACHE_WRITEBACK:
      break;

    default:
      return EFI_INVALID_PARAMETER;
  }

  //
  // Check if Fixed MTRR
  //
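  // Ranges that end at or below 1 MB are handled entirely by the fixed-range
  // MTRRs; ProgramFixedMtrr is called repeatedly until the whole range has
  // been consumed.
  //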
  if ((MemoryAddress + MemoryLength) <= (1 << 20)) {
    Status = EFI_SUCCESS;
    EfiDisableCacheMtrr (&OldMtrr);
    while ((MemoryLength > 0) && (Status == EFI_SUCCESS)) {
      Status = ProgramFixedMtrr (MemoryCacheType, &MemoryAddress, &MemoryLength);
    }
    EfiRecoverCacheMtrr (TRUE, OldMtrr);
    return Status;
  }

  //
  // Search if the range attribute has been set before
  //
  Status = SearchForExactMtrr (
             MemoryAddress,
             MemoryLength,
             ValidMtrrAddressMask,
             &UsedMsrNum,
             &UsedMemoryCacheType
             );

  if (!EFI_ERROR (Status)) {
    //
    // Compare if it has the same type as current setting
    //
    if (UsedMemoryCacheType == MemoryCacheType) {
      return EFI_SUCCESS;
    } else {
      //
      // Different type
      //

      //
      // Check if the set type is the same as Default Type
      //
      if (IsDefaultType (MemoryCacheType)) {
        //
        // Clear the MTRR
        //
        AsmWriteMsr64 (UsedMsrNum, 0);
        AsmWriteMsr64 (UsedMsrNum + 1, 0);

        return EFI_SUCCESS;
      } else {
        //
        // Modify the MTRR type
        //
        EfiProgramMtrr (
          UsedMsrNum,
          MemoryAddress,
          MemoryLength,
          MemoryCacheType,
          ValidMtrrAddressMask
          );
        return EFI_SUCCESS;
      }
    }
  }

#if 0
  //
  // @bug - Need to create memory map so that when checking for overlap we
  //        can determine if an overlap exists based on all caching requests.
  //
  // Don't waste a variable MTRR if the caching attrib is same as default in MTRR_DEF_TYPE
  //
  if (MemoryCacheType == (AsmReadMsr64 (EFI_MSR_CACHE_IA32_MTRR_DEF_TYPE) & B_EFI_MSR_CACHE_MEMORY_TYPE)) {
    if (!CheckMtrrOverlap (MemoryAddress, MemoryAddress + MemoryLength - 1)) {
      return EFI_SUCCESS;
    }
  }
#endif

  //
  // Find first unused MTRR
  //
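  // IA32_MTRRCAP[7:0] reports the number of variable MTRR pairs. Each pair
  // occupies two consecutive MSRs (PHYSBASEn, PHYSMASKn), so the scan below
  // steps by two and a pair is free when the valid bit in its PHYSMASK MSR
  // is clear.
  //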
  MsrNumEnd = EFI_MSR_CACHE_VARIABLE_MTRR_BASE + (2 * (UINT32)(AsmReadMsr64 (EFI_MSR_IA32_MTRR_CAP) & B_EFI_MSR_IA32_MTRR_CAP_VARIABLE_SUPPORT));
  for (MsrNum = EFI_MSR_CACHE_VARIABLE_MTRR_BASE; MsrNum < MsrNumEnd; MsrNum += 2) {
    if ((AsmReadMsr64 (MsrNum + 1) & B_EFI_MSR_CACHE_MTRR_VALID) == 0) {
      break;
    }
  }

  //
  // Reserve 1 MTRR pair for OS.
  //
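  // LastVariableMtrrForBios is the last MSR offset that BIOS may claim;
  // stepping back two MSRs per reserved pair keeps the top
  // EFI_CACHE_NUM_VAR_MTRR_PAIRS_FOR_OS pair(s) untouched for the OS.
  //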
  LastVariableMtrrForBios = MsrNumEnd - 1 - (EFI_CACHE_NUM_VAR_MTRR_PAIRS_FOR_OS * 2);
  if (MsrNum > LastVariableMtrrForBios) {
    return EFI_LOAD_ERROR;
  }

  //
  // Special case for 1 MB base address
  //
  if (MemoryAddress == BASE_1MB) {
    MemoryAddress = 0;
  }

  //
  // Program MTRRs
  //
  TempQword = MemoryLength;

  if (TempQword == Power2MaxMemory (MemoryAddress, TempQword)) {
    EfiProgramMtrr (
      MsrNum,
      MemoryAddress,
      MemoryLength,
      MemoryCacheType,
      ValidMtrrAddressMask
      );

  } else {
    //
    // Fill in MTRRs with values. Direction cannot be checked for this method
    // as we are using WB as the default cache type and only setting areas to UC.
    //
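    // For example (illustrative only), a 0x30000000-byte range at 0x80000000
    // is split into a 0x20000000 chunk followed by a 0x10000000 chunk, each
    // consuming one variable MTRR pair.
    //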
    do {
      //
      // Do boundary check so we don't go past last MTRR register
      // for BIOS use. Leave one MTRR pair for OS use.
      //
      if (MsrNum > LastVariableMtrrForBios) {
        return EFI_LOAD_ERROR;
      }

      //
      // Set next power of 2 region
      //
      MemoryLength = Power2MaxMemory (MemoryAddress, TempQword);
      EfiProgramMtrr (
        MsrNum,
        MemoryAddress,
        MemoryLength,
        MemoryCacheType,
        ValidMtrrAddressMask
        );
      MemoryAddress += MemoryLength;
      TempQword     -= MemoryLength;
      MsrNum        += 2;
    } while (TempQword != 0);
  }

  return EFI_SUCCESS;
}

/**
  Reset all the MTRRs to a known state.

  @retval EFI_SUCCESS  All MTRRs have been reset successfully.

**/
EFI_STATUS
EFIAPI
ResetCacheAttributes (
  VOID
  )
{
  UINT32   MsrNum, MsrNumEnd;
  UINT16   Index;
  UINT64   OldMtrr;
  UINT64   CacheType;
  BOOLEAN  DisableCar;

  Index      = 0;
  DisableCar = TRUE;

  //
  // Determine default cache type
  //
  CacheType = EFI_CACHE_UNCACHEABLE;

  //
  // Set default cache type
  //
  AsmWriteMsr64 (EFI_MSR_CACHE_IA32_MTRR_DEF_TYPE, CacheType);

  //
  // Disable CAR
  //
  DisableCacheAsRam (DisableCar);

  EfiDisableCacheMtrr (&OldMtrr);

  //
  // Reset Fixed MTRRs
  //
  for (Index = 0; Index < V_EFI_FIXED_MTRR_NUMBER; Index++) {
    AsmWriteMsr64 (mFixedMtrrTable[Index].Msr, 0);
  }

  //
  // Reset Variable MTRRs
  //
  MsrNumEnd = EFI_MSR_CACHE_VARIABLE_MTRR_BASE + (2 * (UINT32)(AsmReadMsr64 (EFI_MSR_IA32_MTRR_CAP) & B_EFI_MSR_IA32_MTRR_CAP_VARIABLE_SUPPORT));
  for (MsrNum = EFI_MSR_CACHE_VARIABLE_MTRR_BASE; MsrNum < MsrNumEnd; MsrNum++) {
    AsmWriteMsr64 (MsrNum, 0);
  }

  //
  // Enable Fixed and Variable MTRRs
  //
  EfiRecoverCacheMtrr (TRUE, OldMtrr);

  return EFI_SUCCESS;
}

/**
  Search the memory cache type for specific memory from MTRR.

  @param[in]  MemoryAddress         the address of target memory
  @param[in]  MemoryLength          the length of target memory
  @param[in]  ValidMtrrAddressMask  the MTRR address mask
  @param[out] UsedMsrNum            the used MSR number
  @param[out] UsedMemoryCacheType   the cache type for the target memory

  @retval EFI_SUCCESS    The memory is found in MTRR and cache type is returned
  @retval EFI_NOT_FOUND  The memory is not found in MTRR

**/
EFI_STATUS
SearchForExactMtrr (
  IN  EFI_PHYSICAL_ADDRESS   MemoryAddress,
  IN  UINT64                 MemoryLength,
  IN  UINT64                 ValidMtrrAddressMask,
  OUT UINT32                 *UsedMsrNum,
  OUT EFI_MEMORY_CACHE_TYPE  *UsedMemoryCacheType
  )
{
  UINT32  MsrNum, MsrNumEnd;
  UINT64  TempQword;

  if (MemoryLength == 0) {
    return EFI_INVALID_PARAMETER;
  }

  MsrNumEnd = EFI_MSR_CACHE_VARIABLE_MTRR_BASE + (2 * (UINT32)(AsmReadMsr64 (EFI_MSR_IA32_MTRR_CAP) & B_EFI_MSR_IA32_MTRR_CAP_VARIABLE_SUPPORT));
  for (MsrNum = EFI_MSR_CACHE_VARIABLE_MTRR_BASE; MsrNum < MsrNumEnd; MsrNum += 2) {
    TempQword = AsmReadMsr64 (MsrNum + 1);
    if ((TempQword & B_EFI_MSR_CACHE_MTRR_VALID) == 0) {
      continue;
    }

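    //
    // An exact match requires the programmed PHYSMASK to equal
    // ~(MemoryLength - 1) and the programmed PHYSBASE to equal MemoryAddress,
    // both restricted to the valid physical-address bits.
    //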
    if ((TempQword & ValidMtrrAddressMask) != ((~(MemoryLength - 1)) & ValidMtrrAddressMask)) {
      continue;
    }

    TempQword = AsmReadMsr64 (MsrNum);
    if ((TempQword & ValidMtrrAddressMask) != (MemoryAddress & ValidMtrrAddressMask)) {
      continue;
    }

    *UsedMemoryCacheType = (EFI_MEMORY_CACHE_TYPE)(TempQword & B_EFI_MSR_CACHE_MEMORY_TYPE);
    *UsedMsrNum          = MsrNum;

    return EFI_SUCCESS;
  }

  return EFI_NOT_FOUND;
}

/**
  Check if CacheType matches the current default MTRR setting.

  @param[in] MemoryCacheType  input cache type to be checked.

  @retval TRUE   MemoryCacheType is the default MTRR setting.
  @retval FALSE  MemoryCacheType is NOT the default MTRR setting.
**/
BOOLEAN
IsDefaultType (
  IN EFI_MEMORY_CACHE_TYPE  MemoryCacheType
  )
{
  if ((AsmReadMsr64 (EFI_MSR_CACHE_IA32_MTRR_DEF_TYPE) & B_EFI_MSR_CACHE_MEMORY_TYPE) != MemoryCacheType) {
    return FALSE;
  }

  return TRUE;
}