1 /** @file
2 MTRR setting library
3
4 Copyright (c) 2008 - 2011, Intel Corporation. All rights reserved.<BR>
5 This program and the accompanying materials
6 are licensed and made available under the terms and conditions of the BSD License
7 which accompanies this distribution. The full text of the license may be found at
8 http://opensource.org/licenses/bsd-license.php
9
10 THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
11 WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
12
13 **/
14
15 #include <Base.h>
16
17 #include <Library/MtrrLib.h>
18 #include <Library/BaseLib.h>
19 #include <Library/CpuLib.h>
20 #include <Library/BaseMemoryLib.h>
21 #include <Library/DebugLib.h>
22
23 //
24 // This table defines the offset, base and length of the fixed MTRRs
25 //
26 CONST FIXED_MTRR mMtrrLibFixedMtrrTable[] = {
27 {
28 MTRR_LIB_IA32_MTRR_FIX64K_00000,
29 0,
30 SIZE_64KB
31 },
32 {
33 MTRR_LIB_IA32_MTRR_FIX16K_80000,
34 0x80000,
35 SIZE_16KB
36 },
37 {
38 MTRR_LIB_IA32_MTRR_FIX16K_A0000,
39 0xA0000,
40 SIZE_16KB
41 },
42 {
43 MTRR_LIB_IA32_MTRR_FIX4K_C0000,
44 0xC0000,
45 SIZE_4KB
46 },
47 {
48 MTRR_LIB_IA32_MTRR_FIX4K_C8000,
49 0xC8000,
50 SIZE_4KB
51 },
52 {
53 MTRR_LIB_IA32_MTRR_FIX4K_D0000,
54 0xD0000,
55 SIZE_4KB
56 },
57 {
58 MTRR_LIB_IA32_MTRR_FIX4K_D8000,
59 0xD8000,
60 SIZE_4KB
61 },
62 {
63 MTRR_LIB_IA32_MTRR_FIX4K_E0000,
64 0xE0000,
65 SIZE_4KB
66 },
67 {
68 MTRR_LIB_IA32_MTRR_FIX4K_E8000,
69 0xE8000,
70 SIZE_4KB
71 },
72 {
73 MTRR_LIB_IA32_MTRR_FIX4K_F0000,
74 0xF0000,
75 SIZE_4KB
76 },
77 {
78 MTRR_LIB_IA32_MTRR_FIX4K_F8000,
79 0xF8000,
80 SIZE_4KB
81 },
82 };
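//
// For illustration: each fixed MTRR above controls eight consecutive sub-ranges
// of Length bytes starting at BaseAddress, one per byte field of the MSR. For
// example, the MTRR_LIB_IA32_MTRR_FIX16K_80000 entry covers 0x80000-0x9FFFF as
// eight 16 KB fields, and the eleven entries together span the first 1 MB.
//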
83
84 //
85 // Lookup table used to print MTRRs
86 //
87 GLOBAL_REMOVE_IF_UNREFERENCED CONST CHAR8 *mMtrrMemoryCacheTypeShortName[] = {
88 "UC", // CacheUncacheable
89 "WC", // CacheWriteCombining
90 "R*", // Invalid
91 "R*", // Invalid
92 "WT", // CacheWriteThrough
93 "WP", // CacheWriteProtected
94 "WB", // CacheWriteBack
95 "R*" // Invalid
96 };
97
98 /**
99 Returns the variable MTRR count for the CPU.
100
101 @return Variable MTRR count
102
103 **/
104 UINT32
105 EFIAPI
106 GetVariableMtrrCount (
107 VOID
108 )
109 {
110 UINT32 VariableMtrrCount;
111
112 if (!IsMtrrSupported ()) {
113 return 0;
114 }
115
116 VariableMtrrCount = (UINT32)(AsmReadMsr64 (MTRR_LIB_IA32_MTRR_CAP) & MTRR_LIB_IA32_MTRR_CAP_VCNT_MASK);
117 ASSERT (VariableMtrrCount <= MTRR_NUMBER_OF_VARIABLE_MTRR);
118
119 return VariableMtrrCount;
120 }
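//
// For illustration: the VCNT field occupies bits 7:0 of IA32_MTRRCAP, so a
// value such as MTRRCAP = 0x508 would report 8 variable MTRR pairs (with bit 8
// indicating fixed MTRR support and bit 10 write-combining support).
//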
121
122 /**
123 Returns the firmware usable variable MTRR count for the CPU.
124
125 @return Firmware usable variable MTRR count
126
127 **/
128 UINT32
129 EFIAPI
130 GetFirmwareVariableMtrrCount (
131 VOID
132 )
133 {
134 UINT32 VariableMtrrCount;
135
136 VariableMtrrCount = GetVariableMtrrCount ();
137 if (VariableMtrrCount < RESERVED_FIRMWARE_VARIABLE_MTRR_NUMBER) {
138 return 0;
139 }
140
141 return VariableMtrrCount - RESERVED_FIRMWARE_VARIABLE_MTRR_NUMBER;
142 }
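//
// For illustration: on a CPU reporting 8 variable MTRRs, and if
// RESERVED_FIRMWARE_VARIABLE_MTRR_NUMBER were 2, this function would return 6,
// leaving two variable MTRRs untouched for use outside this library.
//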
143
144 /**
145 Returns the default MTRR cache type for the system.
146
147 @return The default MTRR cache type.
148
149 **/
150 MTRR_MEMORY_CACHE_TYPE
151 EFIAPI
152 MtrrGetDefaultMemoryType (
153 VOID
154 )
155 {
156 if (!IsMtrrSupported ()) {
157 return CacheUncacheable;
158 }
159
160 return (MTRR_MEMORY_CACHE_TYPE) (AsmReadMsr64 (MTRR_LIB_IA32_MTRR_DEF_TYPE) & 0x7);
161 }
162
163 /**
164 Preparation before programming MTRR.
165
166 This function will do some preparation for programming MTRRs:
167 disable the cache, invalidate the cache, and disable the MTRR caching functionality
168
169 @return CR4 value before changing.
170
171 **/
172 UINTN
173 PreMtrrChange (
174 VOID
175 )
176 {
177 UINTN Value;
178
179 //
180 // Enter no fill cache mode, CD=1(Bit30), NW=0 (Bit29)
181 //
182 AsmDisableCache ();
183
184 //
185 // Save original CR4 value and clear PGE flag (Bit 7)
186 //
187 Value = AsmReadCr4 ();
188 AsmWriteCr4 (Value & (~BIT7));
189
190 //
191 // Flush all TLBs
192 //
193 CpuFlushTlb ();
194
195 //
196 // Disable MTRRs by clearing the E and FE bits (bits 11:10) of IA32_MTRR_DEF_TYPE
197 //
198 AsmMsrBitFieldWrite64 (MTRR_LIB_IA32_MTRR_DEF_TYPE, 10, 11, 0);
199
200 //
201 // Return original CR4 value
202 //
203 return Value;
204 }
205
206
207 /**
208 Cleaning up after programming MTRRs.
209
210 This function will do some cleanup after programming MTRRs:
211 enable the MTRR caching functionality, and enable the cache
212
213 @param Cr4 CR4 value to restore
214
215 **/
216 VOID
217 PostMtrrChange (
218 UINTN Cr4
219 )
220 {
221 //
222 // Re-enable MTRRs by setting the E and FE bits (bits 11:10) of IA32_MTRR_DEF_TYPE
223 //
224 AsmMsrBitFieldWrite64 (MTRR_LIB_IA32_MTRR_DEF_TYPE, 10, 11, 3);
225
226 //
227 // Flush all TLBs
228 //
229 CpuFlushTlb ();
230
231 //
232 // Enable Normal Mode caching CD=NW=0, CD(Bit30), NW(Bit29)
233 //
234 AsmEnableCache ();
235
236 //
237 // Restore original CR4 value
238 //
239 AsmWriteCr4 (Cr4);
240 }
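//
// Note: PreMtrrChange/PostMtrrChange loosely follow the MTRR update sequence
// recommended by the IA-32 SDM: enter no-fill cache mode, clear CR4.PGE and
// flush TLBs, disable MTRRs, make the change, then re-enable MTRRs, flush
// again, and restore normal caching and the original CR4 value.
//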
241
242
243 /**
244 Programs fixed MTRRs registers.
245
246 @param MemoryCacheType The memory type to set.
247 @param Base The base address of memory range.
248 @param Length The length of memory range.
249
250 @retval RETURN_SUCCESS The cache type was updated successfully
251 @retval RETURN_UNSUPPORTED The requested range or cache type was invalid
252 for the fixed MTRRs.
253
254 **/
255 RETURN_STATUS
256 ProgramFixedMtrr (
257 IN UINT64 MemoryCacheType,
258 IN OUT UINT64 *Base,
259 IN OUT UINT64 *Length
260 )
261 {
262 UINT32 MsrNum;
263 UINT32 ByteShift;
264 UINT64 TempQword;
265 UINT64 OrMask;
266 UINT64 ClearMask;
267
268 TempQword = 0;
269 OrMask = 0;
270 ClearMask = 0;
271
272 for (MsrNum = 0; MsrNum < MTRR_NUMBER_OF_FIXED_MTRR; MsrNum++) {
273 if ((*Base >= mMtrrLibFixedMtrrTable[MsrNum].BaseAddress) &&
274 (*Base <
275 (
276 mMtrrLibFixedMtrrTable[MsrNum].BaseAddress +
277 (8 * mMtrrLibFixedMtrrTable[MsrNum].Length)
278 )
279 )
280 ) {
281 break;
282 }
283 }
284
285 if (MsrNum == MTRR_NUMBER_OF_FIXED_MTRR) {
286 return RETURN_UNSUPPORTED;
287 }
288
289 //
290 // We found the fixed MTRR to be programmed
291 //
292 for (ByteShift = 0; ByteShift < 8; ByteShift++) {
293 if (*Base ==
294 (
295 mMtrrLibFixedMtrrTable[MsrNum].BaseAddress +
296 (ByteShift * mMtrrLibFixedMtrrTable[MsrNum].Length)
297 )
298 ) {
299 break;
300 }
301 }
302
303 if (ByteShift == 8) {
304 return RETURN_UNSUPPORTED;
305 }
306
307 for (
308 ;
309 ((ByteShift < 8) && (*Length >= mMtrrLibFixedMtrrTable[MsrNum].Length));
310 ByteShift++
311 ) {
312 OrMask |= LShiftU64 ((UINT64) MemoryCacheType, (UINT32) (ByteShift * 8));
313 ClearMask |= LShiftU64 ((UINT64) 0xFF, (UINT32) (ByteShift * 8));
314 *Length -= mMtrrLibFixedMtrrTable[MsrNum].Length;
315 *Base += mMtrrLibFixedMtrrTable[MsrNum].Length;
316 }
317
318 if (ByteShift < 8 && (*Length != 0)) {
319 return RETURN_UNSUPPORTED;
320 }
321
322 TempQword =
323 (AsmReadMsr64 (mMtrrLibFixedMtrrTable[MsrNum].Msr) & ~ClearMask) | OrMask;
324 AsmWriteMsr64 (mMtrrLibFixedMtrrTable[MsrNum].Msr, TempQword);
325 return RETURN_SUCCESS;
326 }
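//
// Illustrative walk-through (not executed here): programming 0xD0000-0xD7FFF
// (32 KB) as write-through (4) selects MTRR_LIB_IA32_MTRR_FIX4K_D0000, covers
// byte fields 0 through 7, and therefore builds OrMask = 0x0404040404040404
// with ClearMask = 0xFFFFFFFFFFFFFFFF before the read-modify-write of the MSR.
//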
327
328
329 /**
330 Get the attribute of variable MTRRs.
331
332 This function shadows the content of variable MTRRs into an
333 internal array: VariableMtrr.
334
335 @param MtrrValidBitsMask The mask for the valid bit of the MTRR
336 @param MtrrValidAddressMask The valid address mask for MTRR
337 @param VariableMtrr The array to shadow variable MTRRs content
338
339 @return The return value indicates the number of
340 variable MTRRs that have already been used.
341
342 **/
343 UINT32
344 EFIAPI
345 MtrrGetMemoryAttributeInVariableMtrr (
346 IN UINT64 MtrrValidBitsMask,
347 IN UINT64 MtrrValidAddressMask,
348 OUT VARIABLE_MTRR *VariableMtrr
349 )
350 {
351 UINTN Index;
352 UINT32 MsrNum;
353 UINT32 UsedMtrr;
354 UINT32 FirmwareVariableMtrrCount;
355 UINT32 VariableMtrrEnd;
356
357 if (!IsMtrrSupported ()) {
358 return 0;
359 }
360
361 FirmwareVariableMtrrCount = GetFirmwareVariableMtrrCount ();
362 VariableMtrrEnd = MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (2 * GetVariableMtrrCount ()) - 1;
363
364 ZeroMem (VariableMtrr, sizeof (VARIABLE_MTRR) * MTRR_NUMBER_OF_VARIABLE_MTRR);
365 UsedMtrr = 0;
366
367 for (MsrNum = MTRR_LIB_IA32_VARIABLE_MTRR_BASE, Index = 0;
368 (
369 (MsrNum < VariableMtrrEnd) &&
370 (Index < FirmwareVariableMtrrCount)
371 );
372 MsrNum += 2
373 ) {
374 if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) != 0) {
375 VariableMtrr[Index].Msr = MsrNum;
376 VariableMtrr[Index].BaseAddress = (AsmReadMsr64 (MsrNum) &
377 MtrrValidAddressMask);
378 VariableMtrr[Index].Length = ((~(AsmReadMsr64 (MsrNum + 1) &
379 MtrrValidAddressMask)
380 ) &
381 MtrrValidBitsMask
382 ) + 1;
383 VariableMtrr[Index].Type = (AsmReadMsr64 (MsrNum) & 0x0ff);
384 VariableMtrr[Index].Valid = TRUE;
385 VariableMtrr[Index].Used = TRUE;
386 UsedMtrr = UsedMtrr + 1;
387 Index++;
388 }
389 }
390 return UsedMtrr;
391 }
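//
// Decoding example (assuming a 36-bit physical address space, so
// MtrrValidBitsMask = 0xFFFFFFFFF and MtrrValidAddressMask = 0xFFFFFF000):
// a PhysBase MSR of 0x0000000080000006 and PhysMask MSR of 0x0000000FC0000800
// would be shadowed as BaseAddress = 0x80000000, Type = 6 (WB), and
// Length = ((~0xFC0000000) & 0xFFFFFFFFF) + 1 = 0x40000000 (1 GB).
//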
392
393
394 /**
395 Checks overlap between given memory range and MTRRs.
396
397 @param Start The start address of memory range.
398 @param End The end address of memory range.
399 @param VariableMtrr The array to shadow variable MTRRs content
400
401 @retval TRUE Overlap exists.
402 @retval FALSE No overlap.
403
404 **/
405 BOOLEAN
406 CheckMemoryAttributeOverlap (
407 IN PHYSICAL_ADDRESS Start,
408 IN PHYSICAL_ADDRESS End,
409 IN VARIABLE_MTRR *VariableMtrr
410 )
411 {
412 UINT32 Index;
413
414 for (Index = 0; Index < 6; Index++) {
415 if (
416 VariableMtrr[Index].Valid &&
417 !(
418 (Start > (VariableMtrr[Index].BaseAddress +
419 VariableMtrr[Index].Length - 1)
420 ) ||
421 (End < VariableMtrr[Index].BaseAddress)
422 )
423 ) {
424 return TRUE;
425 }
426 }
427
428 return FALSE;
429 }
430
431
432 /**
433 Marks a variable MTRR as non-valid.
434
435 @param Index The index of the array VariableMtrr to be invalidated
436 @param VariableMtrr The array to shadow variable MTRRs content
437 @param UsedMtrr The number of MTRRs which has already been used
438
439 **/
440 VOID
441 InvalidateShadowMtrr (
442 IN UINTN Index,
443 IN VARIABLE_MTRR *VariableMtrr,
444 OUT UINT32 *UsedMtrr
445 )
446 {
447 VariableMtrr[Index].Valid = FALSE;
448 *UsedMtrr = *UsedMtrr - 1;
449 }
450
451
452 /**
453 Combine memory attributes.
454
455 If overlap exists between the given memory range and the existing MTRRs, try to combine them.
456
457 @param Attributes The memory type to set.
458 @param Base The base address of memory range.
459 @param Length The length of memory range.
460 @param VariableMtrr The array to shadow variable MTRRs content
461 @param UsedMtrr The number of MTRRs which has already been used
462 @param OverwriteExistingMtrr Returns whether an existing MTRR was used
463
464 @retval EFI_SUCCESS Memory region successfully combined.
465 @retval EFI_ACCESS_DENIED Memory region cannot be combined.
466
467 **/
468 RETURN_STATUS
469 CombineMemoryAttribute (
470 IN UINT64 Attributes,
471 IN OUT UINT64 *Base,
472 IN OUT UINT64 *Length,
473 IN VARIABLE_MTRR *VariableMtrr,
474 IN OUT UINT32 *UsedMtrr,
475 OUT BOOLEAN *OverwriteExistingMtrr
476 )
477 {
478 UINT32 Index;
479 UINT64 CombineStart;
480 UINT64 CombineEnd;
481 UINT64 MtrrEnd;
482 UINT64 EndAddress;
483 UINT32 FirmwareVariableMtrrCount;
484 BOOLEAN CoveredByExistingMtrr;
485
486 FirmwareVariableMtrrCount = GetFirmwareVariableMtrrCount ();
487
488 *OverwriteExistingMtrr = FALSE;
489 CoveredByExistingMtrr = FALSE;
490 EndAddress = *Base + *Length - 1;
491
492 for (Index = 0; Index < FirmwareVariableMtrrCount; Index++) {
493
494 MtrrEnd = VariableMtrr[Index].BaseAddress + VariableMtrr[Index].Length - 1;
495 if (
496 !VariableMtrr[Index].Valid ||
497 (
498 *Base > (MtrrEnd) ||
499 (EndAddress < VariableMtrr[Index].BaseAddress)
500 )
501 ) {
502 continue;
503 }
504
505 //
506 // Combine same attribute MTRR range
507 //
508 if (Attributes == VariableMtrr[Index].Type) {
509 //
510 // If the MTRR range contains the requested range, set a flag, then continue to
511 // invalidate any MTRR covering the same requested range with a higher priority cache type.
512 //
513 if (VariableMtrr[Index].BaseAddress <= *Base && MtrrEnd >= EndAddress) {
514 CoveredByExistingMtrr = TRUE;
515 continue;
516 }
517 //
518 // Invalidate this MTRR, and program the combined range
519 //
520 CombineStart =
521 (*Base) < VariableMtrr[Index].BaseAddress ?
522 (*Base) :
523 VariableMtrr[Index].BaseAddress;
524 CombineEnd = EndAddress > MtrrEnd ? EndAddress : MtrrEnd;
525
526 //
527 // Record the MTRR usage status in VariableMtrr array.
528 //
529 InvalidateShadowMtrr (Index, VariableMtrr, UsedMtrr);
530 *Base = CombineStart;
531 *Length = CombineEnd - CombineStart + 1;
532 EndAddress = CombineEnd;
533 *OverwriteExistingMtrr = TRUE;
534 continue;
535 } else {
536 //
537 // The cache type is different, but the range is covered exactly by one MTRR
538 //
539 if (VariableMtrr[Index].BaseAddress == *Base && MtrrEnd == EndAddress) {
540 InvalidateShadowMtrr (Index, VariableMtrr, UsedMtrr);
541 continue;
542 }
543
544 }
545
546 if ((Attributes == MTRR_CACHE_WRITE_THROUGH &&
547 VariableMtrr[Index].Type == MTRR_CACHE_WRITE_BACK) ||
548 (Attributes == MTRR_CACHE_WRITE_BACK &&
549 VariableMtrr[Index].Type == MTRR_CACHE_WRITE_THROUGH) ||
550 (Attributes == MTRR_CACHE_UNCACHEABLE) ||
551 (VariableMtrr[Index].Type == MTRR_CACHE_UNCACHEABLE)
552 ) {
553 *OverwriteExistingMtrr = TRUE;
554 continue;
555 }
556 //
557 // Overlap with any other combination of memory types is invalid
558 //
559 return RETURN_ACCESS_DENIED;
560 }
561
562 if (CoveredByExistingMtrr) {
563 *Length = 0;
564 }
565
566 return RETURN_SUCCESS;
567 }
568
569
570 /**
571 Calculate the maximum value which is a power of 2 and is less than or equal to MemoryLength.
572
573 @param MemoryLength The number to pass in.
574 @return The maximum power-of-2 value that is less than or equal to MemoryLength.
575
576 **/
577 UINT64
578 Power2MaxMemory (
579 IN UINT64 MemoryLength
580 )
581 {
582 UINT64 Result;
583
584 if (RShiftU64 (MemoryLength, 32) != 0) {
585 Result = LShiftU64 (
586 (UINT64) GetPowerOfTwo32 (
587 (UINT32) RShiftU64 (MemoryLength, 32)
588 ),
589 32
590 );
591 } else {
592 Result = (UINT64) GetPowerOfTwo32 ((UINT32) MemoryLength);
593 }
594
595 return Result;
596 }
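//
// For example: Power2MaxMemory (0x180000) returns 0x100000, and
// Power2MaxMemory (0x300000000) returns 0x200000000; the upper 32 bits are
// handled separately only because GetPowerOfTwo32 operates on 32-bit values.
//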
597
598
599 /**
600 Determine the number of MTRRs needed to program a memory range.
601
602 This function first checks the alignment of the base address. If the alignment of the base address <= Length,
603 cover the memory range (BaseAddress, alignment) with an MTRR, then BaseAddress += alignment and Length -= alignment.
604 Repeat the step until alignment > Length.
605
606 Then this function determines which direction of programming the variable MTRRs for the remaining length
607 will use fewer MTRRs.
608
609 @param BaseAddress Base address of the memory region to program MTRRs for
610 @param Length Length of the memory region to program MTRRs for
611 @param MtrrNumber Pointer to the number of necessary MTRRs
612
613 @retval TRUE Positive direction is better.
614 FALSE Negative direction is better.
615
616 **/
617 BOOLEAN
618 GetMtrrNumberAndDirection (
619 IN UINT64 BaseAddress,
620 IN UINT64 Length,
621 IN UINTN *MtrrNumber
622 )
623 {
624 UINT64 TempQword;
625 UINT64 Alignment;
626 UINT32 Positive;
627 UINT32 Subtractive;
628
629 *MtrrNumber = 0;
630
631 if (BaseAddress != 0) {
632 do {
633 //
634 // Calculate the alignment of the base address.
635 //
636 Alignment = LShiftU64 (1, (UINTN)LowBitSet64 (BaseAddress));
637
638 if (Alignment > Length) {
639 break;
640 }
641
642 (*MtrrNumber)++;
643 BaseAddress += Alignment;
644 Length -= Alignment;
645 } while (TRUE);
646
647 if (Length == 0) {
648 return TRUE;
649 }
650 }
651
652 TempQword = Length;
653 Positive = 0;
654 Subtractive = 0;
655
656 do {
657 TempQword -= Power2MaxMemory (TempQword);
658 Positive++;
659 } while (TempQword != 0);
660
661 TempQword = Power2MaxMemory (LShiftU64 (Length, 1)) - Length;
662 Subtractive++;
663 do {
664 TempQword -= Power2MaxMemory (TempQword);
665 Subtractive++;
666 } while (TempQword != 0);
667
668 if (Positive <= Subtractive) {
669 *MtrrNumber += Positive;
670 return TRUE;
671 } else {
672 *MtrrNumber += Subtractive;
673 return FALSE;
674 }
675 }
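//
// Worked example (hypothetical): BaseAddress = 0, Length = 0x1C0000 (1.75 MB).
// The positive direction needs 0x100000 + 0x80000 + 0x40000 = 3 MTRRs, while
// the subtractive direction needs only 2 (a 0x200000 range of the requested
// type plus a 0x40000 "hole"), so the function returns FALSE with
// *MtrrNumber = 2.
//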
676
677 /**
678 Invalidate variable MTRRs according to the values in the shadow array.
679
680 This function zeroes the variable MTRRs that the shadow array marks
681 as used but no longer valid.
682
683 @param VariableMtrr The array to shadow variable MTRRs content
684
685 **/
686 VOID
687 InvalidateMtrr (
688 IN VARIABLE_MTRR *VariableMtrr
689 )
690 {
691 UINTN Index;
692 UINTN Cr4;
693 UINTN VariableMtrrCount;
694
695 Cr4 = PreMtrrChange ();
696 Index = 0;
697 VariableMtrrCount = GetVariableMtrrCount ();
698 while (Index < VariableMtrrCount) {
699 if (!VariableMtrr[Index].Valid && VariableMtrr[Index].Used) {
700 AsmWriteMsr64 (VariableMtrr[Index].Msr, 0);
701 AsmWriteMsr64 (VariableMtrr[Index].Msr + 1, 0);
702 VariableMtrr[Index].Used = FALSE;
703 }
704 Index ++;
705 }
706 PostMtrrChange (Cr4);
707 }
708
709
710 /**
711 Programs variable MTRRs
712
713 This function programs variable MTRRs
714
715 @param MtrrNumber MSR number of the variable MTRR pair (PhysBase MSR) to program.
716 @param BaseAddress Base address of memory region.
717 @param Length Length of memory region.
718 @param MemoryCacheType Memory type to set.
719 @param MtrrValidAddressMask The valid address mask for MTRR
720
721 **/
722 VOID
723 ProgramVariableMtrr (
724 IN UINTN MtrrNumber,
725 IN PHYSICAL_ADDRESS BaseAddress,
726 IN UINT64 Length,
727 IN UINT64 MemoryCacheType,
728 IN UINT64 MtrrValidAddressMask
729 )
730 {
731 UINT64 TempQword;
732 UINTN Cr4;
733
734 Cr4 = PreMtrrChange ();
735
736 //
737 // MTRR Physical Base
738 //
739 TempQword = (BaseAddress & MtrrValidAddressMask) | MemoryCacheType;
740 AsmWriteMsr64 ((UINT32) MtrrNumber, TempQword);
741
742 //
743 // MTRR Physical Mask
744 //
745 TempQword = ~(Length - 1);
746 AsmWriteMsr64 (
747 (UINT32) (MtrrNumber + 1),
748 (TempQword & MtrrValidAddressMask) | MTRR_LIB_CACHE_MTRR_ENABLED
749 );
750
751 PostMtrrChange (Cr4);
752 }
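//
// Illustrative register values (assuming a 36-bit physical address space and
// MTRR_LIB_CACHE_MTRR_ENABLED being the PhysMask valid bit, BIT11): programming
// BaseAddress 0x80000000, Length 0x40000000 (1 GB) as write-back (6) writes
// PhysBase = 0x0000000080000006 and PhysMask = 0x0000000FC0000800.
//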
753
754
755 /**
756 Convert the memory attribute value to MTRR_MEMORY_CACHE_TYPE.
757
758 @param MtrrType MTRR memory type
759
760 @return The enum item in MTRR_MEMORY_CACHE_TYPE
761
762 **/
763 MTRR_MEMORY_CACHE_TYPE
764 GetMemoryCacheTypeFromMtrrType (
765 IN UINT64 MtrrType
766 )
767 {
768 switch (MtrrType) {
769 case MTRR_CACHE_UNCACHEABLE:
770 return CacheUncacheable;
771 case MTRR_CACHE_WRITE_COMBINING:
772 return CacheWriteCombining;
773 case MTRR_CACHE_WRITE_THROUGH:
774 return CacheWriteThrough;
775 case MTRR_CACHE_WRITE_PROTECTED:
776 return CacheWriteProtected;
777 case MTRR_CACHE_WRITE_BACK:
778 return CacheWriteBack;
779 default:
780 //
781 // MtrrType is MTRR_CACHE_INVALID_TYPE, which means
782 // no MTRR covers the range
783 //
784 return CacheUncacheable;
785 }
786 }
787
788 /**
789 Initializes the valid bits mask and valid address mask for MTRRs.
790
791 This function initializes the valid bits mask and valid address mask for MTRRs.
792
793 @param MtrrValidBitsMask The mask for the valid bit of the MTRR
794 @param MtrrValidAddressMask The valid address mask for the MTRR
795
796 **/
797 VOID
798 MtrrLibInitializeMtrrMask (
799 OUT UINT64 *MtrrValidBitsMask,
800 OUT UINT64 *MtrrValidAddressMask
801 )
802 {
803 UINT32 RegEax;
804 UINT8 PhysicalAddressBits;
805
806 AsmCpuid (0x80000000, &RegEax, NULL, NULL, NULL);
807
808 if (RegEax >= 0x80000008) {
809 AsmCpuid (0x80000008, &RegEax, NULL, NULL, NULL);
810
811 PhysicalAddressBits = (UINT8) RegEax;
812
813 *MtrrValidBitsMask = LShiftU64 (1, PhysicalAddressBits) - 1;
814 *MtrrValidAddressMask = *MtrrValidBitsMask & 0xfffffffffffff000ULL;
815 } else {
816 *MtrrValidBitsMask = MTRR_LIB_CACHE_VALID_ADDRESS;
817 *MtrrValidAddressMask = 0xFFFFFFFF;
818 }
819 }
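//
// For example, a CPU reporting 36 physical address bits in CPUID 0x80000008
// EAX[7:0] yields MtrrValidBitsMask = 0xFFFFFFFFF and MtrrValidAddressMask =
// 0xFFFFFF000 (the low 12 bits are dropped because MTRR ranges are 4 KB
// aligned).
//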
820
821
822 /**
823 Determine the real attribute of a memory range.
824
825 This function arbitrates the real attribute of a memory range when
826 two MTRRs cover the same memory range. For further details,
827 please refer to the IA-32 Software Developer's Manual, Volume 3,
828 Section 10.11.4.1.
829
830 @param MtrrType1 the first kind of Memory type
831 @param MtrrType2 the second kind of memory type
832
833 **/
834 UINT64
835 MtrrPrecedence (
836 UINT64 MtrrType1,
837 UINT64 MtrrType2
838 )
839 {
840 UINT64 MtrrType;
841
842 MtrrType = MTRR_CACHE_INVALID_TYPE;
843 switch (MtrrType1) {
844 case MTRR_CACHE_UNCACHEABLE:
845 MtrrType = MTRR_CACHE_UNCACHEABLE;
846 break;
847 case MTRR_CACHE_WRITE_COMBINING:
848 if (
849 MtrrType2 == MTRR_CACHE_WRITE_COMBINING ||
850 MtrrType2 == MTRR_CACHE_UNCACHEABLE
851 ) {
852 MtrrType = MtrrType2;
853 }
854 break;
855 case MTRR_CACHE_WRITE_THROUGH:
856 if (
857 MtrrType2 == MTRR_CACHE_WRITE_THROUGH ||
858 MtrrType2 == MTRR_CACHE_WRITE_BACK
859 ) {
860 MtrrType = MTRR_CACHE_WRITE_THROUGH;
861 } else if (MtrrType2 == MTRR_CACHE_UNCACHEABLE) {
862 MtrrType = MTRR_CACHE_UNCACHEABLE;
863 }
864 break;
865 case MTRR_CACHE_WRITE_PROTECTED:
866 if (MtrrType2 == MTRR_CACHE_WRITE_PROTECTED ||
867 MtrrType2 == MTRR_CACHE_UNCACHEABLE) {
868 MtrrType = MtrrType2;
869 }
870 break;
871 case MTRR_CACHE_WRITE_BACK:
872 if (
873 MtrrType2 == MTRR_CACHE_UNCACHEABLE ||
874 MtrrType2 == MTRR_CACHE_WRITE_THROUGH ||
875 MtrrType2 == MTRR_CACHE_WRITE_BACK
876 ) {
877 MtrrType = MtrrType2;
878 }
879 break;
880 case MTRR_CACHE_INVALID_TYPE:
881 MtrrType = MtrrType2;
882 break;
883 default:
884 break;
885 }
886
887 if (MtrrType2 == MTRR_CACHE_INVALID_TYPE) {
888 MtrrType = MtrrType1;
889 }
890 return MtrrType;
891 }
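//
// Examples of the precedence rules implemented above: UC combined with any
// other type yields UC; WT combined with WB yields WT; any type combined with
// MTRR_CACHE_INVALID_TYPE (no overlapping MTRR) yields the other type.
//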
892
893
894 /**
895 This function attempts to set the attributes for a memory range.
896
897 @param BaseAddress The physical address that is the start
898 address of a memory region.
899 @param Length The size in bytes of the memory region.
900 @param Attribute The memory cache type to set for the
901 memory region.
902
903 @retval RETURN_SUCCESS The attributes were set for the memory
904 region.
905 @retval RETURN_INVALID_PARAMETER Length is zero.
906 @retval RETURN_UNSUPPORTED The processor does not support one or
907 more bytes of the memory resource range
908 specified by BaseAddress and Length.
909 @retval RETURN_UNSUPPORTED The bit mask of attributes is not supported
910 for the memory resource range specified
911 by BaseAddress and Length.
912 @retval RETURN_ACCESS_DENIED The attributes for the memory resource
913 range specified by BaseAddress and Length
914 cannot be modified.
915 @retval RETURN_OUT_OF_RESOURCES There are not enough system resources to
916 modify the attributes of the memory
917 resource range.
918
919 **/
920 RETURN_STATUS
921 EFIAPI
922 MtrrSetMemoryAttribute (
923 IN PHYSICAL_ADDRESS BaseAddress,
924 IN UINT64 Length,
925 IN MTRR_MEMORY_CACHE_TYPE Attribute
926 )
927 {
928 UINT64 TempQword;
929 RETURN_STATUS Status;
930 UINT64 MemoryType;
931 UINT64 Alignment;
932 BOOLEAN OverLap;
933 BOOLEAN Positive;
934 UINT32 MsrNum;
935 UINTN MtrrNumber;
936 VARIABLE_MTRR VariableMtrr[MTRR_NUMBER_OF_VARIABLE_MTRR];
937 UINT32 UsedMtrr;
938 UINT64 MtrrValidBitsMask;
939 UINT64 MtrrValidAddressMask;
940 UINTN Cr4;
941 BOOLEAN OverwriteExistingMtrr;
942 UINT32 FirmwareVariableMtrrCount;
943 UINT32 VariableMtrrEnd;
944
945 DEBUG((DEBUG_CACHE, "MtrrSetMemoryAttribute() %a:%016lx-%016lx\n", mMtrrMemoryCacheTypeShortName[Attribute], BaseAddress, Length));
946
947 if (!IsMtrrSupported ()) {
948 Status = RETURN_UNSUPPORTED;
949 goto Done;
950 }
951
952 FirmwareVariableMtrrCount = GetFirmwareVariableMtrrCount ();
953 VariableMtrrEnd = MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (2 * GetVariableMtrrCount ()) - 1;
954
955 MtrrLibInitializeMtrrMask(&MtrrValidBitsMask, &MtrrValidAddressMask);
956
957 TempQword = 0;
958 MemoryType = (UINT64)Attribute;
959 OverwriteExistingMtrr = FALSE;
960
961 //
962 // Check for an invalid parameter
963 //
964 if (Length == 0) {
965 Status = RETURN_INVALID_PARAMETER;
966 goto Done;
967 }
968
969 if (
970 (BaseAddress & ~MtrrValidAddressMask) != 0 ||
971 (Length & ~MtrrValidAddressMask) != 0
972 ) {
973 Status = RETURN_UNSUPPORTED;
974 goto Done;
975 }
976
977 //
978 // Check if Fixed MTRR
979 //
980 Status = RETURN_SUCCESS;
981 while ((BaseAddress < BASE_1MB) && (Length > 0) && Status == RETURN_SUCCESS) {
982 Cr4 = PreMtrrChange ();
983 Status = ProgramFixedMtrr (MemoryType, &BaseAddress, &Length);
984 PostMtrrChange (Cr4);
985 if (RETURN_ERROR (Status)) {
986 goto Done;
987 }
988 }
989
990 if (Length == 0) {
991 //
992 // A Length of 0 can only make sense for fixed MTRR ranges.
993 // Since we just handled the fixed MTRRs, we can skip the
994 // variable MTRR section.
995 //
996 goto Done;
997 }
998
999 //
1000 // Since memory ranges below 1MB will be overridden by the fixed MTRRs,
1001 // we can set the base to 0 to save variable MTRRs.
1002 //
1003 if (BaseAddress == BASE_1MB) {
1004 BaseAddress = 0;
1005 Length += SIZE_1MB;
1006 }
1007
1008 //
1009 // Check for overlap
1010 //
1011 UsedMtrr = MtrrGetMemoryAttributeInVariableMtrr (MtrrValidBitsMask, MtrrValidAddressMask, VariableMtrr);
1012 OverLap = CheckMemoryAttributeOverlap (BaseAddress, BaseAddress + Length - 1, VariableMtrr);
1013 if (OverLap) {
1014 Status = CombineMemoryAttribute (MemoryType, &BaseAddress, &Length, VariableMtrr, &UsedMtrr, &OverwriteExistingMtrr);
1015 if (RETURN_ERROR (Status)) {
1016 goto Done;
1017 }
1018
1019 if (Length == 0) {
1020 //
1021 // Combined successfully, invalidate the now-unused MTRRs
1022 //
1023 InvalidateMtrr(VariableMtrr);
1024 Status = RETURN_SUCCESS;
1025 goto Done;
1026 }
1027 }
1028
1029 //
1030 // Program Variable MTRRs
1031 //
1032 // Avoid hardcoding here and read the data dynamically
1033 //
1034 if (UsedMtrr >= FirmwareVariableMtrrCount) {
1035 Status = RETURN_OUT_OF_RESOURCES;
1036 goto Done;
1037 }
1038
1039 //
1040 // The memory type is the same as the default type specified by
1041 // MTRR_LIB_IA32_MTRR_DEF_TYPE.
1042 //
1043 if ((!OverwriteExistingMtrr) && (Attribute == MtrrGetDefaultMemoryType ())) {
1044 //
1045 // Invalidate the now-unused MTRRs
1046 //
1047 InvalidateMtrr(VariableMtrr);
1048 goto Done;
1049 }
1050
1051 Positive = GetMtrrNumberAndDirection (BaseAddress, Length, &MtrrNumber);
1052
1053 if ((UsedMtrr + MtrrNumber) > FirmwareVariableMtrrCount) {
1054 Status = RETURN_OUT_OF_RESOURCES;
1055 goto Done;
1056 }
1057
1058 //
1059 // Invalidate the now-unused MTRRs
1060 //
1061 InvalidateMtrr(VariableMtrr);
1062
1063 //
1064 // Find first unused MTRR
1065 //
1066 for (MsrNum = MTRR_LIB_IA32_VARIABLE_MTRR_BASE;
1067 MsrNum < VariableMtrrEnd;
1068 MsrNum += 2
1069 ) {
1070 if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {
1071 break;
1072 }
1073 }
1074
1075 if (BaseAddress != 0) {
1076 do {
1077 //
1078 // Calculate the alignment of the base address.
1079 //
1080 Alignment = LShiftU64 (1, (UINTN)LowBitSet64 (BaseAddress));
1081
1082 if (Alignment > Length) {
1083 break;
1084 }
1085
1086 //
1087 // Find unused MTRR
1088 //
1089 for (; MsrNum < VariableMtrrEnd; MsrNum += 2) {
1090 if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {
1091 break;
1092 }
1093 }
1094
1095 ProgramVariableMtrr (
1096 MsrNum,
1097 BaseAddress,
1098 Alignment,
1099 MemoryType,
1100 MtrrValidAddressMask
1101 );
1102 BaseAddress += Alignment;
1103 Length -= Alignment;
1104 } while (TRUE);
1105
1106 if (Length == 0) {
1107 goto Done;
1108 }
1109 }
1110
1111 TempQword = Length;
1112
1113 if (!Positive) {
1114 Length = Power2MaxMemory (LShiftU64 (TempQword, 1));
1115
1116 //
1117 // Find unused MTRR
1118 //
1119 for (; MsrNum < VariableMtrrEnd; MsrNum += 2) {
1120 if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {
1121 break;
1122 }
1123 }
1124
1125 ProgramVariableMtrr (
1126 MsrNum,
1127 BaseAddress,
1128 Length,
1129 MemoryType,
1130 MtrrValidAddressMask
1131 );
1132 BaseAddress += Length;
1133 TempQword = Length - TempQword;
1134 MemoryType = MTRR_CACHE_UNCACHEABLE;
1135 }
1136
1137 do {
1138 //
1139 // Find unused MTRR
1140 //
1141 for (; MsrNum < VariableMtrrEnd; MsrNum += 2) {
1142 if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {
1143 break;
1144 }
1145 }
1146
1147 Length = Power2MaxMemory (TempQword);
1148 if (!Positive) {
1149 BaseAddress -= Length;
1150 }
1151
1152 ProgramVariableMtrr (
1153 MsrNum,
1154 BaseAddress,
1155 Length,
1156 MemoryType,
1157 MtrrValidAddressMask
1158 );
1159
1160 if (Positive) {
1161 BaseAddress += Length;
1162 }
1163 TempQword -= Length;
1164
1165 } while (TempQword > 0);
1166
1167 Done:
1168 DEBUG((DEBUG_CACHE, " Status = %r\n", Status));
1169 if (!RETURN_ERROR (Status)) {
1170 MtrrDebugPrintAllMtrrs ();
1171 }
1172
1173 return Status;
1174 }
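//
// Typical usage sketch (hypothetical addresses): a caller that wants a 256 MB
// graphics aperture mapped as write-combining might invoke
//
//   Status = MtrrSetMemoryAttribute (0xC0000000, 0x10000000, CacheWriteCombining);
//
// which, for an aligned power-of-two range like this, consumes a single
// variable MTRR.
//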
1175
1176
1177 /**
1178 This function will get the memory cache type of the specific address.
1179
1180 This function is mainly for debug purposes.
1181
1182 @param Address The specific address
1183
1184 @return Memory cache type of the specific address
1185
1186 **/
1187 MTRR_MEMORY_CACHE_TYPE
1188 EFIAPI
1189 MtrrGetMemoryAttribute (
1190 IN PHYSICAL_ADDRESS Address
1191 )
1192 {
1193 UINT64 TempQword;
1194 UINTN Index;
1195 UINTN SubIndex;
1196 UINT64 MtrrType;
1197 UINT64 TempMtrrType;
1198 MTRR_MEMORY_CACHE_TYPE CacheType;
1199 VARIABLE_MTRR VariableMtrr[MTRR_NUMBER_OF_VARIABLE_MTRR];
1200 UINT64 MtrrValidBitsMask;
1201 UINT64 MtrrValidAddressMask;
1202 UINTN VariableMtrrCount;
1203
1204 if (!IsMtrrSupported ()) {
1205 return CacheUncacheable;
1206 }
1207
1208 //
1209 // Check if MTRR is enabled, if not, return UC as attribute
1210 //
1211 TempQword = AsmReadMsr64 (MTRR_LIB_IA32_MTRR_DEF_TYPE);
1212 MtrrType = MTRR_CACHE_INVALID_TYPE;
1213
1214 if ((TempQword & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {
1215 return CacheUncacheable;
1216 }
1217
1218 //
1219 // If address is less than 1M, then try to go through the fixed MTRR
1220 //
1221 if (Address < BASE_1MB) {
1222 if ((TempQword & MTRR_LIB_CACHE_FIXED_MTRR_ENABLED) != 0) {
1223 //
1224 // Go through the fixed MTRR
1225 //
1226 for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {
1227 if (Address >= mMtrrLibFixedMtrrTable[Index].BaseAddress &&
1228 Address < (
1229 mMtrrLibFixedMtrrTable[Index].BaseAddress +
1230 (mMtrrLibFixedMtrrTable[Index].Length * 8)
1231 )
1232 ) {
1233 SubIndex =
1234 ((UINTN)Address - mMtrrLibFixedMtrrTable[Index].BaseAddress) /
1235 mMtrrLibFixedMtrrTable[Index].Length;
1236 TempQword = AsmReadMsr64 (mMtrrLibFixedMtrrTable[Index].Msr);
1237 MtrrType = RShiftU64 (TempQword, SubIndex * 8) & 0xFF;
1238 return GetMemoryCacheTypeFromMtrrType (MtrrType);
1239 }
1240 }
1241 }
1242 }
1243 MtrrLibInitializeMtrrMask(&MtrrValidBitsMask, &MtrrValidAddressMask);
1244 MtrrGetMemoryAttributeInVariableMtrr(
1245 MtrrValidBitsMask,
1246 MtrrValidAddressMask,
1247 VariableMtrr
1248 );
1249
1250 //
1251 // Go through the variable MTRR
1252 //
1253 VariableMtrrCount = GetVariableMtrrCount ();
1254 ASSERT (VariableMtrrCount <= MTRR_NUMBER_OF_VARIABLE_MTRR);
1255
1256 for (Index = 0; Index < VariableMtrrCount; Index++) {
1257 if (VariableMtrr[Index].Valid) {
1258 if (Address >= VariableMtrr[Index].BaseAddress &&
1259 Address < VariableMtrr[Index].BaseAddress+VariableMtrr[Index].Length) {
1260 TempMtrrType = VariableMtrr[Index].Type;
1261 MtrrType = MtrrPrecedence (MtrrType, TempMtrrType);
1262 }
1263 }
1264 }
1265 CacheType = GetMemoryCacheTypeFromMtrrType (MtrrType);
1266
1267 return CacheType;
1268 }
1269
1270
1271 /**
1272 This function will get the raw value in variable MTRRs
1273
1274 @param VariableSettings A buffer to hold variable MTRRs content.
1275
1276 @return The VariableSettings input pointer
1277
1278 **/
1279 MTRR_VARIABLE_SETTINGS*
1280 EFIAPI
1281 MtrrGetVariableMtrr (
1282 OUT MTRR_VARIABLE_SETTINGS *VariableSettings
1283 )
1284 {
1285 UINT32 Index;
1286 UINT32 VariableMtrrCount;
1287
1288 if (!IsMtrrSupported ()) {
1289 return VariableSettings;
1290 }
1291
1292 VariableMtrrCount = GetVariableMtrrCount ();
1293 ASSERT (VariableMtrrCount <= MTRR_NUMBER_OF_VARIABLE_MTRR);
1294
1295 for (Index = 0; Index < VariableMtrrCount; Index++) {
1296 VariableSettings->Mtrr[Index].Base =
1297 AsmReadMsr64 (MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (Index << 1));
1298 VariableSettings->Mtrr[Index].Mask =
1299 AsmReadMsr64 (MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (Index << 1) + 1);
1300 }
1301
1302 return VariableSettings;
1303 }
1304
1305
1306 /**
1307 Worker function setting variable MTRRs
1308
1309 @param VariableSettings A buffer to hold variable MTRRs content.
1310
1311 **/
1312 VOID
1313 MtrrSetVariableMtrrWorker (
1314 IN MTRR_VARIABLE_SETTINGS *VariableSettings
1315 )
1316 {
1317 UINT32 Index;
1318 UINT32 VariableMtrrCount;
1319
1320 VariableMtrrCount = GetVariableMtrrCount ();
1321 ASSERT (VariableMtrrCount <= MTRR_NUMBER_OF_VARIABLE_MTRR);
1322
1323 for (Index = 0; Index < VariableMtrrCount; Index++) {
1324 AsmWriteMsr64 (
1325 MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (Index << 1),
1326 VariableSettings->Mtrr[Index].Base
1327 );
1328 AsmWriteMsr64 (
1329 MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (Index << 1) + 1,
1330 VariableSettings->Mtrr[Index].Mask
1331 );
1332 }
1333 }
1334
1335
1336 /**
1337 This function sets variable MTRRs
1338
1339 @param VariableSettings A buffer to hold variable MTRRs content.
1340
1341 @return The pointer of VariableSettings
1342
1343 **/
1344 MTRR_VARIABLE_SETTINGS*
1345 EFIAPI
1346 MtrrSetVariableMtrr (
1347 IN MTRR_VARIABLE_SETTINGS *VariableSettings
1348 )
1349 {
1350 UINTN Cr4;
1351
1352 if (!IsMtrrSupported ()) {
1353 return VariableSettings;
1354 }
1355
1356 Cr4 = PreMtrrChange ();
1357 MtrrSetVariableMtrrWorker (VariableSettings);
1358 PostMtrrChange (Cr4);
1359 return VariableSettings;
1360 }
1361
1362
1363 /**
1364 This function gets the content in fixed MTRRs
1365
1366 @param FixedSettings A buffer to hold fixed Mtrrs content.
1367
1368 @retval The pointer of FixedSettings
1369
1370 **/
1371 MTRR_FIXED_SETTINGS*
1372 EFIAPI
1373 MtrrGetFixedMtrr (
1374 OUT MTRR_FIXED_SETTINGS *FixedSettings
1375 )
1376 {
1377 UINT32 Index;
1378
1379 if (!IsMtrrSupported ()) {
1380 return FixedSettings;
1381 }
1382
1383 for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {
1384 FixedSettings->Mtrr[Index] =
1385 AsmReadMsr64 (mMtrrLibFixedMtrrTable[Index].Msr);
1386 };
1387
1388 return FixedSettings;
1389 }
1390
1391 /**
1392 Worker function setting fixed MTRRs
1393
1394 @param FixedSettings A buffer to hold fixed Mtrrs content.
1395
1396 **/
1397 VOID
1398 MtrrSetFixedMtrrWorker (
1399 IN MTRR_FIXED_SETTINGS *FixedSettings
1400 )
1401 {
1402 UINT32 Index;
1403
1404 for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {
1405 AsmWriteMsr64 (
1406 mMtrrLibFixedMtrrTable[Index].Msr,
1407 FixedSettings->Mtrr[Index]
1408 );
1409 }
1410 }
1411
1412
1413 /**
1414 This function sets fixed MTRRs
1415
1416 @param FixedSettings A buffer to hold fixed Mtrrs content.
1417
1418 @retval The pointer of FixedSettings
1419
1420 **/
1421 MTRR_FIXED_SETTINGS*
1422 EFIAPI
1423 MtrrSetFixedMtrr (
1424 IN MTRR_FIXED_SETTINGS *FixedSettings
1425 )
1426 {
1427 UINTN Cr4;
1428
1429 if (!IsMtrrSupported ()) {
1430 return FixedSettings;
1431 }
1432
1433 Cr4 = PreMtrrChange ();
1434 MtrrSetFixedMtrrWorker (FixedSettings);
1435 PostMtrrChange (Cr4);
1436
1437 return FixedSettings;
1438 }
1439
1440
1441 /**
1442 This function gets the content in all MTRRs (variable and fixed)
1443
1444 @param MtrrSetting A buffer to hold all Mtrrs content.
1445
1446 @retval The pointer of MtrrSetting
1447
1448 **/
1449 MTRR_SETTINGS *
1450 EFIAPI
1451 MtrrGetAllMtrrs (
1452 OUT MTRR_SETTINGS *MtrrSetting
1453 )
1454 {
1455 if (!IsMtrrSupported ()) {
1456 return MtrrSetting;
1457 }
1458
1459 //
1460 // Get fixed MTRRs
1461 //
1462 MtrrGetFixedMtrr (&MtrrSetting->Fixed);
1463
1464 //
1465 // Get variable MTRRs
1466 //
1467 MtrrGetVariableMtrr (&MtrrSetting->Variables);
1468
1469 //
1470 // Get MTRR_DEF_TYPE value
1471 //
1472 MtrrSetting->MtrrDefType = AsmReadMsr64 (MTRR_LIB_IA32_MTRR_DEF_TYPE);
1473
1474 return MtrrSetting;
1475 }
1476
1477
1478 /**
1479 This function sets all MTRRs (variable and fixed)
1480
1481 @param MtrrSetting A buffer holding all MTRRs content.
1482
1483 @retval The pointer of MtrrSetting
1484
1485 **/
1486 MTRR_SETTINGS *
1487 EFIAPI
1488 MtrrSetAllMtrrs (
1489 IN MTRR_SETTINGS *MtrrSetting
1490 )
1491 {
1492 UINTN Cr4;
1493
1494 if (!IsMtrrSupported ()) {
1495 return MtrrSetting;
1496 }
1497
1498 Cr4 = PreMtrrChange ();
1499
1500 //
1501 // Set fixed MTRRs
1502 //
1503 MtrrSetFixedMtrrWorker (&MtrrSetting->Fixed);
1504
1505 //
1506 // Set variable MTRRs
1507 //
1508 MtrrSetVariableMtrrWorker (&MtrrSetting->Variables);
1509
1510 //
1511 // Set MTRR_DEF_TYPE value
1512 //
1513 AsmWriteMsr64 (MTRR_LIB_IA32_MTRR_DEF_TYPE, MtrrSetting->MtrrDefType);
1514
1515 PostMtrrChange (Cr4);
1516
1517 return MtrrSetting;
1518 }
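//
// Usage sketch (hypothetical): MtrrGetAllMtrrs/MtrrSetAllMtrrs are typically
// used together to replicate the BSP's MTRR configuration onto application
// processors, e.g.:
//
//   MTRR_SETTINGS MtrrSettings;
//   MtrrGetAllMtrrs (&MtrrSettings);   // on the BSP
//   MtrrSetAllMtrrs (&MtrrSettings);   // later, on each AP
//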
1519
1520 /**
1521 This function prints all MTRRs for debugging.
1522 **/
1523 VOID
1524 EFIAPI
1525 MtrrDebugPrintAllMtrrs (
1526 VOID
1527 )
1528 {
1529 DEBUG_CODE (
1530 MTRR_SETTINGS MtrrSettings;
1531 UINTN Index;
1532 UINTN Index1;
1533 UINTN VariableMtrrCount;
1534 UINT64 Base;
1535 UINT64 Limit;
1536 UINT64 MtrrBase;
1537 UINT64 MtrrLimit;
1538 UINT64 RangeBase;
1539 UINT64 RangeLimit;
1540 UINT64 NoRangeBase;
1541 UINT64 NoRangeLimit;
1542 UINT32 RegEax;
1543 UINTN MemoryType;
1544 UINTN PreviousMemoryType;
1545 BOOLEAN Found;
1546
1547 if (!IsMtrrSupported ()) {
1548 return;
1549 }
1550
1551 DEBUG((DEBUG_CACHE, "MTRR Settings\n"));
1552 DEBUG((DEBUG_CACHE, "=============\n"));
1553
1554 MtrrGetAllMtrrs (&MtrrSettings);
1555 DEBUG((DEBUG_CACHE, "MTRR Default Type: %016lx\n", MtrrSettings.MtrrDefType));
1556 for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {
1557 DEBUG((DEBUG_CACHE, "Fixed MTRR[%02d] : %016lx\n", Index, MtrrSettings.Fixed.Mtrr[Index]));
1558 }
1559
1560 VariableMtrrCount = GetVariableMtrrCount ();
1561 for (Index = 0; Index < VariableMtrrCount; Index++) {
1562 DEBUG((DEBUG_CACHE, "Variable MTRR[%02d]: Base=%016lx Mask=%016lx\n",
1563 Index,
1564 MtrrSettings.Variables.Mtrr[Index].Base,
1565 MtrrSettings.Variables.Mtrr[Index].Mask
1566 ));
1567 }
1568 DEBUG((DEBUG_CACHE, "\n"));
1569 DEBUG((DEBUG_CACHE, "MTRR Ranges\n"));
1570 DEBUG((DEBUG_CACHE, "====================================\n"));
1571
1572 Base = 0;
1573 PreviousMemoryType = MTRR_CACHE_INVALID_TYPE;
1574 for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {
1575 Base = mMtrrLibFixedMtrrTable[Index].BaseAddress;
1576 for (Index1 = 0; Index1 < 8; Index1++) {
1577 MemoryType = (UINTN)(RShiftU64 (MtrrSettings.Fixed.Mtrr[Index], Index1 * 8) & 0xff);
1578 if (MemoryType > CacheWriteBack) {
1579 MemoryType = MTRR_CACHE_INVALID_TYPE;
1580 }
1581 if (MemoryType != PreviousMemoryType) {
1582 if (PreviousMemoryType != MTRR_CACHE_INVALID_TYPE) {
1583 DEBUG((DEBUG_CACHE, "%016lx\n", Base - 1));
1584 }
1585 PreviousMemoryType = MemoryType;
1586 DEBUG((DEBUG_CACHE, "%a:%016lx-", mMtrrMemoryCacheTypeShortName[MemoryType], Base));
1587 }
1588 Base += mMtrrLibFixedMtrrTable[Index].Length;
1589 }
1590 }
1591 DEBUG((DEBUG_CACHE, "%016lx\n", Base - 1));
1592
1593 VariableMtrrCount = GetVariableMtrrCount ();
1594
1595 Base = BASE_1MB;
1596 PreviousMemoryType = MTRR_CACHE_INVALID_TYPE;
1597 do {
1598 MemoryType = MtrrGetMemoryAttribute (Base);
1599 if (MemoryType > CacheWriteBack) {
1600 MemoryType = MTRR_CACHE_INVALID_TYPE;
1601 }
1602
1603 if (MemoryType != PreviousMemoryType) {
1604 if (PreviousMemoryType != MTRR_CACHE_INVALID_TYPE) {
1605 DEBUG((DEBUG_CACHE, "%016lx\n", Base - 1));
1606 }
1607 PreviousMemoryType = MemoryType;
1608 DEBUG((DEBUG_CACHE, "%a:%016lx-", mMtrrMemoryCacheTypeShortName[MemoryType], Base));
1609 }
1610
1611 RangeBase = BASE_1MB;
1612 NoRangeBase = BASE_1MB;
1613 Limit = BIT36 - 1;
1614 AsmCpuid (0x80000000, &RegEax, NULL, NULL, NULL);
1615 if (RegEax >= 0x80000008) {
1616 AsmCpuid (0x80000008, &RegEax, NULL, NULL, NULL);
1617 Limit = LShiftU64 (1, RegEax & 0xff) - 1;
1618 }
1619 RangeLimit = Limit;
1620 NoRangeLimit = Limit;
1621
1622 for (Index = 0, Found = FALSE; Index < VariableMtrrCount; Index++) {
1623 if ((MtrrSettings.Variables.Mtrr[Index].Mask & BIT11) == 0) {
1624 //
1625 // If mask is not valid, then do not display range
1626 //
1627 continue;
1628 }
1629 MtrrBase = (MtrrSettings.Variables.Mtrr[Index].Base & (~(SIZE_4KB - 1)));
1630 MtrrLimit = MtrrBase + ((~(MtrrSettings.Variables.Mtrr[Index].Mask & (~(SIZE_4KB - 1)))) & Limit);
1631
1632 if (Base >= MtrrBase && Base < MtrrLimit) {
1633 Found = TRUE;
1634 }
1635
1636 if (Base >= MtrrBase && MtrrBase > RangeBase) {
1637 RangeBase = MtrrBase;
1638 }
1639 if (Base > MtrrLimit && MtrrLimit > RangeBase) {
1640 RangeBase = MtrrLimit + 1;
1641 }
1642 if (Base < MtrrBase && MtrrBase < RangeLimit) {
1643 RangeLimit = MtrrBase - 1;
1644 }
1645 if (Base < MtrrLimit && MtrrLimit <= RangeLimit) {
1646 RangeLimit = MtrrLimit;
1647 }
1648
1649 if (Base > MtrrLimit && NoRangeBase < MtrrLimit) {
1650 NoRangeBase = MtrrLimit + 1;
1651 }
1652 if (Base < MtrrBase && NoRangeLimit > MtrrBase) {
1653 NoRangeLimit = MtrrBase - 1;
1654 }
1655 }
1656
1657 if (Found) {
1658 Base = RangeLimit + 1;
1659 } else {
1660 Base = NoRangeLimit + 1;
1661 }
1662 } while (Found);
1663 DEBUG((DEBUG_CACHE, "%016lx\n\n", Base - 1));
1664 );
1665 }
1666
1667 /**
1668 Checks if MTRR is supported.
1669
1670 @retval TRUE MTRR is supported.
1671 @retval FALSE MTRR is not supported.
1672
1673 **/
1674 BOOLEAN
1675 EFIAPI
1676 IsMtrrSupported (
1677 VOID
1678 )
1679 {
1680 UINT32 RegEdx;
1681 UINT64 MtrrCap;
1682
1683 //
1684 // Check CPUID(1).EDX[12] for MTRR capability
1685 //
1686 AsmCpuid (1, NULL, NULL, NULL, &RegEdx);
1687 if (BitFieldRead32 (RegEdx, 12, 12) == 0) {
1688 return FALSE;
1689 }
1690
1691 //
1692 // Check IA32_MTRRCAP[7:0] for the number of variable MTRRs and IA32_MTRRCAP[8] for
1693 // fixed MTRR support. If the number of variable MTRRs is zero, or fixed MTRRs are
1694 // not supported, return FALSE.
1695 //
1696 MtrrCap = AsmReadMsr64 (MTRR_LIB_IA32_MTRR_CAP);
1697 if ((BitFieldRead64 (MtrrCap, 0, 7) == 0) || (BitFieldRead64 (MtrrCap, 8, 8) == 0)) {
1698 return FALSE;
1699 }
1700
1701 return TRUE;
1702 }