1 /** @file
2 MTRR setting library
3
4 Copyright (c) 2008 - 2015, Intel Corporation. All rights reserved.<BR>
5 This program and the accompanying materials
6 are licensed and made available under the terms and conditions of the BSD License
7 which accompanies this distribution. The full text of the license may be found at
8 http://opensource.org/licenses/bsd-license.php
9
10 THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
11 WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
12
13 **/
14
15 #include <Base.h>
16
17 #include <Library/MtrrLib.h>
18 #include <Library/BaseLib.h>
19 #include <Library/CpuLib.h>
20 #include <Library/BaseMemoryLib.h>
21 #include <Library/DebugLib.h>
22
23 //
24 // Context to save and restore when MTRRs are programmed
25 //
26 typedef struct {
27 UINTN Cr4;
28 BOOLEAN InterruptState;
29 } MTRR_CONTEXT;
30
31 //
32 // This table defines the offset, base and length of the fixed MTRRs
33 //
34 CONST FIXED_MTRR mMtrrLibFixedMtrrTable[] = {
35 {
36 MTRR_LIB_IA32_MTRR_FIX64K_00000,
37 0,
38 SIZE_64KB
39 },
40 {
41 MTRR_LIB_IA32_MTRR_FIX16K_80000,
42 0x80000,
43 SIZE_16KB
44 },
45 {
46 MTRR_LIB_IA32_MTRR_FIX16K_A0000,
47 0xA0000,
48 SIZE_16KB
49 },
50 {
51 MTRR_LIB_IA32_MTRR_FIX4K_C0000,
52 0xC0000,
53 SIZE_4KB
54 },
55 {
56 MTRR_LIB_IA32_MTRR_FIX4K_C8000,
57 0xC8000,
58 SIZE_4KB
59 },
60 {
61 MTRR_LIB_IA32_MTRR_FIX4K_D0000,
62 0xD0000,
63 SIZE_4KB
64 },
65 {
66 MTRR_LIB_IA32_MTRR_FIX4K_D8000,
67 0xD8000,
68 SIZE_4KB
69 },
70 {
71 MTRR_LIB_IA32_MTRR_FIX4K_E0000,
72 0xE0000,
73 SIZE_4KB
74 },
75 {
76 MTRR_LIB_IA32_MTRR_FIX4K_E8000,
77 0xE8000,
78 SIZE_4KB
79 },
80 {
81 MTRR_LIB_IA32_MTRR_FIX4K_F0000,
82 0xF0000,
83 SIZE_4KB
84 },
85 {
86 MTRR_LIB_IA32_MTRR_FIX4K_F8000,
87 0xF8000,
88 SIZE_4KB
89 }
90 };
91
92 //
93 // Lookup table used to print MTRRs
94 //
95 GLOBAL_REMOVE_IF_UNREFERENCED CONST CHAR8 *mMtrrMemoryCacheTypeShortName[] = {
96 "UC", // CacheUncacheable
97 "WC", // CacheWriteCombining
98 "R*", // Invalid
99 "R*", // Invalid
100 "WT", // CacheWriteThrough
101 "WP", // CacheWriteProtected
102 "WB", // CacheWriteBack
103 "R*" // Invalid
104 };
105
106 /**
107 Worker function returns the variable MTRR count for the CPU.
108
109 @return Variable MTRR count
110
111 **/
112 UINT32
113 GetVariableMtrrCountWorker (
114 VOID
115 )
116 {
117 UINT32 VariableMtrrCount;
118
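  //
  // IA32_MTRRCAP[7:0] (VCNT) reports the number of variable range MTRR pairs
  // supported by the processor.
  //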
119 VariableMtrrCount = (UINT32)(AsmReadMsr64 (MTRR_LIB_IA32_MTRR_CAP) & MTRR_LIB_IA32_MTRR_CAP_VCNT_MASK);
120 ASSERT (VariableMtrrCount <= MTRR_NUMBER_OF_VARIABLE_MTRR);
121 return VariableMtrrCount;
122 }
123
124 /**
125 Returns the variable MTRR count for the CPU.
126
127 @return Variable MTRR count
128
129 **/
130 UINT32
131 EFIAPI
132 GetVariableMtrrCount (
133 VOID
134 )
135 {
136 if (!IsMtrrSupported ()) {
137 return 0;
138 }
139 return GetVariableMtrrCountWorker ();
140 }
141
142 /**
143 Worker function returns the firmware usable variable MTRR count for the CPU.
144
145 @return Firmware usable variable MTRR count
146
147 **/
148 UINT32
149 GetFirmwareVariableMtrrCountWorker (
150 VOID
151 )
152 {
153 UINT32 VariableMtrrCount;
154 UINT32 ReservedMtrrNumber;
155
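  //
  // The PcdCpuNumberOfReservedVariableMtrrs variable MTRRs are excluded from the
  // firmware-usable count so they remain available to other components.
  //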
156 VariableMtrrCount = GetVariableMtrrCountWorker ();
157 ReservedMtrrNumber = PcdGet32 (PcdCpuNumberOfReservedVariableMtrrs);
158 if (VariableMtrrCount < ReservedMtrrNumber) {
159 return 0;
160 }
161
162 return VariableMtrrCount - ReservedMtrrNumber;
163 }
164
165 /**
166 Returns the firmware usable variable MTRR count for the CPU.
167
168 @return Firmware usable variable MTRR count
169
170 **/
171 UINT32
172 EFIAPI
173 GetFirmwareVariableMtrrCount (
174 VOID
175 )
176 {
177 if (!IsMtrrSupported ()) {
178 return 0;
179 }
180 return GetFirmwareVariableMtrrCountWorker ();
181 }
182
183 /**
184 Worker function returns the default MTRR cache type for the system.
185
186 @return The default MTRR cache type.
187
188 **/
189 MTRR_MEMORY_CACHE_TYPE
190 MtrrGetDefaultMemoryTypeWorker (
191 VOID
192 )
193 {
194 return (MTRR_MEMORY_CACHE_TYPE) (AsmReadMsr64 (MTRR_LIB_IA32_MTRR_DEF_TYPE) & 0x7);
195 }
196
197
198 /**
199 Returns the default MTRR cache type for the system.
200
201 @return The default MTRR cache type.
202
203 **/
204 MTRR_MEMORY_CACHE_TYPE
205 EFIAPI
206 MtrrGetDefaultMemoryType (
207 VOID
208 )
209 {
210 if (!IsMtrrSupported ()) {
211 return CacheUncacheable;
212 }
213 return MtrrGetDefaultMemoryTypeWorker ();
214 }
215
216 /**
217 Preparation before programming MTRR.
218
219 This function will do some preparation for programming MTRRs:
220 disable cache, invalidate cache, and disable MTRR caching functionality.
221
222 @param[out] MtrrContext Pointer to context to save
223
224 **/
225 VOID
226 PreMtrrChange (
227 OUT MTRR_CONTEXT *MtrrContext
228 )
229 {
230 //
231 // Disable interrupts and save current interrupt state
232 //
233 MtrrContext->InterruptState = SaveAndDisableInterrupts();
234
235 //
236 // Enter no fill cache mode, CD=1(Bit30), NW=0 (Bit29)
237 //
238 AsmDisableCache ();
239
240 //
241 // Save original CR4 value and clear PGE flag (Bit 7)
242 //
243 MtrrContext->Cr4 = AsmReadCr4 ();
244 AsmWriteCr4 (MtrrContext->Cr4 & (~BIT7));
245
246 //
247 // Flush all TLBs
248 //
249 CpuFlushTlb ();
250
251 //
252 // Disable MTRRs
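//   by clearing IA32_MTRR_DEF_TYPE.FE (bit 10) and IA32_MTRR_DEF_TYPE.E (bit 11)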
253 //
254 AsmMsrBitFieldWrite64 (MTRR_LIB_IA32_MTRR_DEF_TYPE, 10, 11, 0);
255 }
256
257 /**
258 Cleaning up after programming MTRRs.
259
260 This function will do some clean up after programming MTRRs:
261 Flush all TLBs, re-enable caching, restore CR4.
262
263 @param[in] MtrrContext Pointer to context to restore
264
265 **/
266 VOID
267 PostMtrrChangeEnableCache (
268 IN MTRR_CONTEXT *MtrrContext
269 )
270 {
271 //
272 // Flush all TLBs
273 //
274 CpuFlushTlb ();
275
276 //
277 // Enable Normal Mode caching CD=NW=0, CD(Bit30), NW(Bit29)
278 //
279 AsmEnableCache ();
280
281 //
282 // Restore original CR4 value
283 //
284 AsmWriteCr4 (MtrrContext->Cr4);
285
286 //
287 // Restore original interrupt state
288 //
289 SetInterruptState (MtrrContext->InterruptState);
290 }
291
292 /**
293 Cleaning up after programming MTRRs.
294
295 This function will do some clean up after programming MTRRs:
296 enable MTRR caching functionality, and enable cache
297
298 @param[in] MtrrContext Pointer to context to restore
299
300 **/
301 VOID
302 PostMtrrChange (
303 IN MTRR_CONTEXT *MtrrContext
304 )
305 {
306 //
307 // Enable Cache MTRR
308 //
309 AsmMsrBitFieldWrite64 (MTRR_LIB_IA32_MTRR_DEF_TYPE, 10, 11, 3);
310
311 PostMtrrChangeEnableCache (MtrrContext);
312 }
313
314
315 /**
316 Programs fixed MTRRs registers.
317
318 @param[in] MemoryCacheType The memory type to set.
319 @param[in, out] Base The base address of memory range.
320 @param[in, out] Length The length of memory range.
321
322 @retval RETURN_SUCCESS The cache type was updated successfully
323 @retval RETURN_UNSUPPORTED The requested range or cache type was invalid
324 for the fixed MTRRs.
325
326 **/
327 RETURN_STATUS
328 ProgramFixedMtrr (
329 IN UINT64 MemoryCacheType,
330 IN OUT UINT64 *Base,
331 IN OUT UINT64 *Length
332 )
333 {
334 UINT32 MsrNum;
335 UINT32 ByteShift;
336 UINT64 TempQword;
337 UINT64 OrMask;
338 UINT64 ClearMask;
339
340 TempQword = 0;
341 OrMask = 0;
342 ClearMask = 0;
343
344 for (MsrNum = 0; MsrNum < MTRR_NUMBER_OF_FIXED_MTRR; MsrNum++) {
345 if ((*Base >= mMtrrLibFixedMtrrTable[MsrNum].BaseAddress) &&
346 (*Base <
347 (
348 mMtrrLibFixedMtrrTable[MsrNum].BaseAddress +
349 (8 * mMtrrLibFixedMtrrTable[MsrNum].Length)
350 )
351 )
352 ) {
353 break;
354 }
355 }
356
357 if (MsrNum == MTRR_NUMBER_OF_FIXED_MTRR) {
358 return RETURN_UNSUPPORTED;
359 }
360
361 //
362 // We found the fixed MTRR to be programmed
363 //
364 for (ByteShift = 0; ByteShift < 8; ByteShift++) {
365 if (*Base ==
366 (
367 mMtrrLibFixedMtrrTable[MsrNum].BaseAddress +
368 (ByteShift * mMtrrLibFixedMtrrTable[MsrNum].Length)
369 )
370 ) {
371 break;
372 }
373 }
374
375 if (ByteShift == 8) {
376 return RETURN_UNSUPPORTED;
377 }
378
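  //
  // Accumulate one byte-wide type field per covered sub-range: ClearMask zeroes
  // the old type and OrMask supplies the new one for the read-modify-write of
  // the MSR below.
  //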
379 for (
380 ;
381 ((ByteShift < 8) && (*Length >= mMtrrLibFixedMtrrTable[MsrNum].Length));
382 ByteShift++
383 ) {
384 OrMask |= LShiftU64 ((UINT64) MemoryCacheType, (UINT32) (ByteShift * 8));
385 ClearMask |= LShiftU64 ((UINT64) 0xFF, (UINT32) (ByteShift * 8));
386 *Length -= mMtrrLibFixedMtrrTable[MsrNum].Length;
387 *Base += mMtrrLibFixedMtrrTable[MsrNum].Length;
388 }
389
390 if (ByteShift < 8 && (*Length != 0)) {
391 return RETURN_UNSUPPORTED;
392 }
393
394 TempQword =
395 (AsmReadMsr64 (mMtrrLibFixedMtrrTable[MsrNum].Msr) & ~ClearMask) | OrMask;
396 AsmWriteMsr64 (mMtrrLibFixedMtrrTable[MsrNum].Msr, TempQword);
397 return RETURN_SUCCESS;
398 }
399
400
401 /**
402 Gets the attribute of variable MTRRs.
403
404 This function shadows the content of variable MTRRs into an
405 internal array: VariableMtrr.
406
407 @param[in] MtrrValidBitsMask The mask for the valid bit of the MTRR
408 @param[in] MtrrValidAddressMask The valid address mask for MTRR
409 @param[out] VariableMtrr The array to shadow variable MTRRs content
410
411 @return The return value of this parameter indicates the
412 number of MTRRs which have been used.
413
414 **/
415 UINT32
416 EFIAPI
417 MtrrGetMemoryAttributeInVariableMtrr (
418 IN UINT64 MtrrValidBitsMask,
419 IN UINT64 MtrrValidAddressMask,
420 OUT VARIABLE_MTRR *VariableMtrr
421 )
422 {
423 UINTN Index;
424 UINT32 MsrNum;
425 UINT32 UsedMtrr;
426 UINT32 FirmwareVariableMtrrCount;
427 UINT32 VariableMtrrEnd;
428
429 if (!IsMtrrSupported ()) {
430 return 0;
431 }
432
433 FirmwareVariableMtrrCount = GetFirmwareVariableMtrrCount ();
434 VariableMtrrEnd = MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (2 * GetVariableMtrrCount ()) - 1;
435
436 ZeroMem (VariableMtrr, sizeof (VARIABLE_MTRR) * MTRR_NUMBER_OF_VARIABLE_MTRR);
437 UsedMtrr = 0;
438
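  //
  // Walk the variable MTRR base/mask MSR pairs (base at even offsets, mask at odd
  // offsets from MTRR_LIB_IA32_VARIABLE_MTRR_BASE). For enabled entries, the range
  // size is recovered from the mask: Length = (~(Mask & AddressMask) & ValidBitsMask) + 1.
  //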
439 for (MsrNum = MTRR_LIB_IA32_VARIABLE_MTRR_BASE, Index = 0;
440 (
441 (MsrNum < VariableMtrrEnd) &&
442 (Index < FirmwareVariableMtrrCount)
443 );
444 MsrNum += 2
445 ) {
446 if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) != 0) {
447 VariableMtrr[Index].Msr = MsrNum;
448 VariableMtrr[Index].BaseAddress = (AsmReadMsr64 (MsrNum) &
449 MtrrValidAddressMask);
450 VariableMtrr[Index].Length = ((~(AsmReadMsr64 (MsrNum + 1) &
451 MtrrValidAddressMask)
452 ) &
453 MtrrValidBitsMask
454 ) + 1;
455 VariableMtrr[Index].Type = (AsmReadMsr64 (MsrNum) & 0x0ff);
456 VariableMtrr[Index].Valid = TRUE;
457 VariableMtrr[Index].Used = TRUE;
458 UsedMtrr = UsedMtrr + 1;
459 Index++;
460 }
461 }
462 return UsedMtrr;
463 }
464
465
466 /**
467 Checks overlap between given memory range and MTRRs.
468
469 @param[in] Start The start address of memory range.
470 @param[in] End The end address of memory range.
471 @param[in] VariableMtrr The array to shadow variable MTRRs content
472
473 @retval TRUE Overlap exists.
474 @retval FALSE No overlap.
475
476 **/
477 BOOLEAN
478 CheckMemoryAttributeOverlap (
479 IN PHYSICAL_ADDRESS Start,
480 IN PHYSICAL_ADDRESS End,
481 IN VARIABLE_MTRR *VariableMtrr
482 )
483 {
484 UINT32 Index;
485
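  //
  // Two ranges overlap unless one ends before the other begins, i.e. unless
  // Start > MtrrEnd or End < MtrrBase.
  //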
486 for (Index = 0; Index < MTRR_NUMBER_OF_VARIABLE_MTRR; Index++) {
487 if (
488 VariableMtrr[Index].Valid &&
489 !(
490 (Start > (VariableMtrr[Index].BaseAddress +
491 VariableMtrr[Index].Length - 1)
492 ) ||
493 (End < VariableMtrr[Index].BaseAddress)
494 )
495 ) {
496 return TRUE;
497 }
498 }
499
500 return FALSE;
501 }
502
503
504 /**
505 Marks a variable MTRR as non-valid.
506
507 @param[in] Index The index of the array VariableMtrr to be invalidated
508 @param[in] VariableMtrr The array to shadow variable MTRRs content
509 @param[out] UsedMtrr The number of MTRRs which has already been used
510
511 **/
512 VOID
513 InvalidateShadowMtrr (
514 IN UINTN Index,
515 IN VARIABLE_MTRR *VariableMtrr,
516 OUT UINT32 *UsedMtrr
517 )
518 {
519 VariableMtrr[Index].Valid = FALSE;
520 *UsedMtrr = *UsedMtrr - 1;
521 }
522
523
524 /**
525 Combines memory attributes.
526
527 If overlap exists between given memory range and MTRRs, try to combine them.
528
529 @param[in] Attributes The memory type to set.
530 @param[in, out] Base The base address of memory range.
531 @param[in, out] Length The length of memory range.
532 @param[in] VariableMtrr The array to shadow variable MTRRs content
533 @param[in, out] UsedMtrr The number of MTRRs which has already been used
534 @param[out] OverwriteExistingMtrr Returns whether an existing MTRR was used
535
536 @retval EFI_SUCCESS Memory region successfully combined.
537 @retval EFI_ACCESS_DENIED Memory region cannot be combined.
538
539 **/
540 RETURN_STATUS
541 CombineMemoryAttribute (
542 IN UINT64 Attributes,
543 IN OUT UINT64 *Base,
544 IN OUT UINT64 *Length,
545 IN VARIABLE_MTRR *VariableMtrr,
546 IN OUT UINT32 *UsedMtrr,
547 OUT BOOLEAN *OverwriteExistingMtrr
548 )
549 {
550 UINT32 Index;
551 UINT64 CombineStart;
552 UINT64 CombineEnd;
553 UINT64 MtrrEnd;
554 UINT64 EndAddress;
555 UINT32 FirmwareVariableMtrrCount;
556 BOOLEAN CoveredByExistingMtrr;
557
558 FirmwareVariableMtrrCount = GetFirmwareVariableMtrrCount ();
559
560 *OverwriteExistingMtrr = FALSE;
561 CoveredByExistingMtrr = FALSE;
562 EndAddress = *Base + *Length - 1;
563
564 for (Index = 0; Index < FirmwareVariableMtrrCount; Index++) {
565
566 MtrrEnd = VariableMtrr[Index].BaseAddress + VariableMtrr[Index].Length - 1;
567 if (
568 !VariableMtrr[Index].Valid ||
569 (
570 *Base > (MtrrEnd) ||
571 (EndAddress < VariableMtrr[Index].BaseAddress)
572 )
573 ) {
574 continue;
575 }
576
577 //
578 // Combine same attribute MTRR range
579 //
580 if (Attributes == VariableMtrr[Index].Type) {
581 //
582 // If the MTRR range contains the request range, set a flag, then continue to
583 // invalidate any MTRR of the same request range with a higher priority cache type.
584 //
585 if (VariableMtrr[Index].BaseAddress <= *Base && MtrrEnd >= EndAddress) {
586 CoveredByExistingMtrr = TRUE;
587 continue;
588 }
589 //
590 // Invalidate this MTRR, and program the combined range
591 //
592 CombineStart =
593 (*Base) < VariableMtrr[Index].BaseAddress ?
594 (*Base) :
595 VariableMtrr[Index].BaseAddress;
596 CombineEnd = EndAddress > MtrrEnd ? EndAddress : MtrrEnd;
597
598 //
599 // Record the MTRR usage status in VariableMtrr array.
600 //
601 InvalidateShadowMtrr (Index, VariableMtrr, UsedMtrr);
602 *Base = CombineStart;
603 *Length = CombineEnd - CombineStart + 1;
604 EndAddress = CombineEnd;
605 *OverwriteExistingMtrr = TRUE;
606 continue;
607 } else {
608 //
609 // The cache type is different, but the range is covered by one MTRR
610 //
611 if (VariableMtrr[Index].BaseAddress == *Base && MtrrEnd == EndAddress) {
612 InvalidateShadowMtrr (Index, VariableMtrr, UsedMtrr);
613 continue;
614 }
615
616 }
617
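  //
  // The remaining overlap cases are the architecturally well-defined ones: UC
  // combined with anything yields UC, and WT combined with WB yields WT, so the
  // new range may simply overlap the existing MTRR.
  //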
618 if ((Attributes == MTRR_CACHE_WRITE_THROUGH &&
619 VariableMtrr[Index].Type == MTRR_CACHE_WRITE_BACK) ||
620 (Attributes == MTRR_CACHE_WRITE_BACK &&
621 VariableMtrr[Index].Type == MTRR_CACHE_WRITE_THROUGH) ||
622 (Attributes == MTRR_CACHE_UNCACHEABLE) ||
623 (VariableMtrr[Index].Type == MTRR_CACHE_UNCACHEABLE)
624 ) {
625 *OverwriteExistingMtrr = TRUE;
626 continue;
627 }
628 //
629 // Other type memory overlap is invalid
630 //
631 return RETURN_ACCESS_DENIED;
632 }
633
634 if (CoveredByExistingMtrr) {
635 *Length = 0;
636 }
637
638 return RETURN_SUCCESS;
639 }
640
641
642 /**
643 Calculates the maximum power of 2 value that is not greater than MemoryLength.
644
645 @param[in] MemoryLength The number to pass in.
646
647 @return The maximum power of 2 value that is not greater than MemoryLength.
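   For example, Power2MaxMemory (0x1800000) returns 0x1000000, and
   Power2MaxMemory (0x1000000) returns 0x1000000.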
648
649 **/
650 UINT64
651 Power2MaxMemory (
652 IN UINT64 MemoryLength
653 )
654 {
655 UINT64 Result;
656
657 if (RShiftU64 (MemoryLength, 32) != 0) {
658 Result = LShiftU64 (
659 (UINT64) GetPowerOfTwo32 (
660 (UINT32) RShiftU64 (MemoryLength, 32)
661 ),
662 32
663 );
664 } else {
665 Result = (UINT64) GetPowerOfTwo32 ((UINT32) MemoryLength);
666 }
667
668 return Result;
669 }
670
671
672 /**
673 Determines the MTRR numbers used to program a memory range.
674
675 This function first checks the alignment of the base address.
676 While the alignment of the base address is less than or equal to Length, cover
677 the memory range (BaseAddress, alignment) with one MTRR, then advance BaseAddress
678 by the alignment and reduce Length by the alignment. Repeat until alignment > Length.
679
680 Then this function determines which direction of programming the variable
681 MTRRs for the remaining length will use fewer MTRRs.
682
683 @param[in] BaseAddress Base address of memory to program MTRR
684 @param[in] Length Length of memory to program MTRR
685 @param[out] MtrrNumber Pointer to the number of necessary MTRRs
686
687 @retval TRUE Positive direction is better.
688 FALSE Negative direction is better.
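
   For example, after any base-alignment chunks have been peeled off, a remaining
   length of 0xE00000 (14 MB) needs three MTRRs in the positive direction
   (8 MB + 4 MB + 2 MB) but only two in the subtractive direction (a 16 MB range
   of the requested type with a 2 MB uncacheable range carved out of it), so the
   subtractive direction is chosen.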
689
690 **/
691 BOOLEAN
692 GetMtrrNumberAndDirection (
693 IN UINT64 BaseAddress,
694 IN UINT64 Length,
695 OUT UINTN *MtrrNumber
696 )
697 {
698 UINT64 TempQword;
699 UINT64 Alignment;
700 UINT32 Positive;
701 UINT32 Subtractive;
702
703 *MtrrNumber = 0;
704
705 if (BaseAddress != 0) {
706 do {
707 //
708 // Calculate the alignment of the base address.
709 //
710 Alignment = LShiftU64 (1, (UINTN)LowBitSet64 (BaseAddress));
711
712 if (Alignment > Length) {
713 break;
714 }
715
716 (*MtrrNumber)++;
717 BaseAddress += Alignment;
718 Length -= Alignment;
719 } while (TRUE);
720
721 if (Length == 0) {
722 return TRUE;
723 }
724 }
725
726 TempQword = Length;
727 Positive = 0;
728 Subtractive = 0;
729
730 do {
731 TempQword -= Power2MaxMemory (TempQword);
732 Positive++;
733 } while (TempQword != 0);
734
735 TempQword = Power2MaxMemory (LShiftU64 (Length, 1)) - Length;
736 Subtractive++;
737 do {
738 TempQword -= Power2MaxMemory (TempQword);
739 Subtractive++;
740 } while (TempQword != 0);
741
742 if (Positive <= Subtractive) {
743 *MtrrNumber += Positive;
744 return TRUE;
745 } else {
746 *MtrrNumber += Subtractive;
747 return FALSE;
748 }
749 }
750
751 /**
752 Invalidate variable MTRRs according to the value in the shadow array.
753
754 This function zeroes the MTRR base/mask register pairs that the shadow array
755 marks as used but no longer valid.
756
757 @param[in, out] VariableMtrr Shadow of variable MTRR contents
758
759 **/
760 VOID
761 InvalidateMtrr (
762 IN OUT VARIABLE_MTRR *VariableMtrr
763 )
764 {
765 UINTN Index;
766 UINTN VariableMtrrCount;
767 MTRR_CONTEXT MtrrContext;
768
769 PreMtrrChange (&MtrrContext);
770 Index = 0;
771 VariableMtrrCount = GetVariableMtrrCount ();
772 while (Index < VariableMtrrCount) {
773 if (!VariableMtrr[Index].Valid && VariableMtrr[Index].Used) {
774 AsmWriteMsr64 (VariableMtrr[Index].Msr, 0);
775 AsmWriteMsr64 (VariableMtrr[Index].Msr + 1, 0);
776 VariableMtrr[Index].Used = FALSE;
777 }
778 Index++;
779 }
780 PostMtrrChange (&MtrrContext);
781 }
782
783
784 /**
785 Programs variable MTRRs
786
787 This function programs variable MTRRs
788
789 @param[in] MtrrNumber Index of MTRR to program.
790 @param[in] BaseAddress Base address of memory region.
791 @param[in] Length Length of memory region.
792 @param[in] MemoryCacheType Memory type to set.
793 @param[in] MtrrValidAddressMask The valid address mask for MTRR
794
795 **/
796 VOID
797 ProgramVariableMtrr (
798 IN UINTN MtrrNumber,
799 IN PHYSICAL_ADDRESS BaseAddress,
800 IN UINT64 Length,
801 IN UINT64 MemoryCacheType,
802 IN UINT64 MtrrValidAddressMask
803 )
804 {
805 UINT64 TempQword;
806 MTRR_CONTEXT MtrrContext;
807
808 PreMtrrChange (&MtrrContext);
809
810 //
811 // MTRR Physical Base
812 //
813 TempQword = (BaseAddress & MtrrValidAddressMask) | MemoryCacheType;
814 AsmWriteMsr64 ((UINT32) MtrrNumber, TempQword);
815
816 //
817 // MTRR Physical Mask
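//   For a power-of-two, naturally aligned range the mask is ~(Length - 1);
//   ORing in MTRR_LIB_CACHE_MTRR_ENABLED sets the Valid bit that enables the pair.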
818 //
819 TempQword = ~(Length - 1);
820 AsmWriteMsr64 (
821 (UINT32) (MtrrNumber + 1),
822 (TempQword & MtrrValidAddressMask) | MTRR_LIB_CACHE_MTRR_ENABLED
823 );
824
825 PostMtrrChange (&MtrrContext);
826 }
827
828
829 /**
830 Converts the Memory attribute value to MTRR_MEMORY_CACHE_TYPE.
831
832 @param[in] MtrrType MTRR memory type
833
834 @return The enum item in MTRR_MEMORY_CACHE_TYPE
835
836 **/
837 MTRR_MEMORY_CACHE_TYPE
838 GetMemoryCacheTypeFromMtrrType (
839 IN UINT64 MtrrType
840 )
841 {
842 switch (MtrrType) {
843 case MTRR_CACHE_UNCACHEABLE:
844 return CacheUncacheable;
845 case MTRR_CACHE_WRITE_COMBINING:
846 return CacheWriteCombining;
847 case MTRR_CACHE_WRITE_THROUGH:
848 return CacheWriteThrough;
849 case MTRR_CACHE_WRITE_PROTECTED:
850 return CacheWriteProtected;
851 case MTRR_CACHE_WRITE_BACK:
852 return CacheWriteBack;
853 default:
854 //
855 // MtrrType is MTRR_CACHE_INVALID_TYPE, that means
856 // no MTRR covers the range
857 //
858 return MtrrGetDefaultMemoryType ();
859 }
860 }
861
862 /**
863 Initializes the valid bits mask and valid address mask for MTRRs.
864
865 This function initializes the valid bits mask and valid address mask for MTRRs.
866
867 @param[out] MtrrValidBitsMask The mask for the valid bit of the MTRR
868 @param[out] MtrrValidAddressMask The valid address mask for the MTRR
869
870 **/
871 VOID
872 MtrrLibInitializeMtrrMask (
873 OUT UINT64 *MtrrValidBitsMask,
874 OUT UINT64 *MtrrValidAddressMask
875 )
876 {
877 UINT32 RegEax;
878 UINT8 PhysicalAddressBits;
879
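  //
  // CPUID leaf 0x80000000 returns the highest extended leaf in EAX; if leaf
  // 0x80000008 is available, its EAX[7:0] reports the physical address width
  // in bits.
  //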
880 AsmCpuid (0x80000000, &RegEax, NULL, NULL, NULL);
881
882 if (RegEax >= 0x80000008) {
883 AsmCpuid (0x80000008, &RegEax, NULL, NULL, NULL);
884
885 PhysicalAddressBits = (UINT8) RegEax;
886
887 *MtrrValidBitsMask = LShiftU64 (1, PhysicalAddressBits) - 1;
888 *MtrrValidAddressMask = *MtrrValidBitsMask & 0xfffffffffffff000ULL;
889 } else {
890 *MtrrValidBitsMask = MTRR_LIB_MSR_VALID_MASK;
891 *MtrrValidAddressMask = MTRR_LIB_CACHE_VALID_ADDRESS;
892 }
893 }
894
895
896 /**
897 Determines the real attribute of a memory range.
898
899 This function is to arbitrate the real attribute of the memory when
900 there are 2 MTRRs covers the same memory range. For further details,
901 please refer the IA32 Software Developer's Manual, Volume 3,
902 Section 10.11.4.1.
903
904 @param[in] MtrrType1 The first kind of Memory type
905 @param[in] MtrrType2 The second kind of memory type
906
907 **/
908 UINT64
909 MtrrPrecedence (
910 IN UINT64 MtrrType1,
911 IN UINT64 MtrrType2
912 )
913 {
914 UINT64 MtrrType;
915
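  //
  // Combining rules for overlapping ranges: identical types combine to themselves,
  // UC takes precedence over every other type, and WT takes precedence over WB;
  // if either type is MTRR_CACHE_INVALID_TYPE the other type is used, and any
  // remaining combination is left as MTRR_CACHE_INVALID_TYPE.
  //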
916 MtrrType = MTRR_CACHE_INVALID_TYPE;
917 switch (MtrrType1) {
918 case MTRR_CACHE_UNCACHEABLE:
919 MtrrType = MTRR_CACHE_UNCACHEABLE;
920 break;
921 case MTRR_CACHE_WRITE_COMBINING:
922 if (
923 MtrrType2 == MTRR_CACHE_WRITE_COMBINING ||
924 MtrrType2 == MTRR_CACHE_UNCACHEABLE
925 ) {
926 MtrrType = MtrrType2;
927 }
928 break;
929 case MTRR_CACHE_WRITE_THROUGH:
930 if (
931 MtrrType2 == MTRR_CACHE_WRITE_THROUGH ||
932 MtrrType2 == MTRR_CACHE_WRITE_BACK
933 ) {
934 MtrrType = MTRR_CACHE_WRITE_THROUGH;
935 } else if (MtrrType2 == MTRR_CACHE_UNCACHEABLE) {
936 MtrrType = MTRR_CACHE_UNCACHEABLE;
937 }
938 break;
939 case MTRR_CACHE_WRITE_PROTECTED:
940 if (MtrrType2 == MTRR_CACHE_WRITE_PROTECTED ||
941 MtrrType2 == MTRR_CACHE_UNCACHEABLE) {
942 MtrrType = MtrrType2;
943 }
944 break;
945 case MTRR_CACHE_WRITE_BACK:
946 if (
947 MtrrType2 == MTRR_CACHE_UNCACHEABLE ||
948 MtrrType2 == MTRR_CACHE_WRITE_THROUGH ||
949 MtrrType2 == MTRR_CACHE_WRITE_BACK
950 ) {
951 MtrrType = MtrrType2;
952 }
953 break;
954 case MTRR_CACHE_INVALID_TYPE:
955 MtrrType = MtrrType2;
956 break;
957 default:
958 break;
959 }
960
961 if (MtrrType2 == MTRR_CACHE_INVALID_TYPE) {
962 MtrrType = MtrrType1;
963 }
964 return MtrrType;
965 }
966
967
968 /**
969 This function attempts to set the attributes for a memory range.
970
971 @param[in] BaseAddress The physical address that is the start
972 address of a memory region.
973 @param[in] Length The size in bytes of the memory region.
974 @param[in] Attribute The bit mask of attributes to set for the
975 memory region.
976
977 @retval RETURN_SUCCESS The attributes were set for the memory
978 region.
979 @retval RETURN_INVALID_PARAMETER Length is zero.
980 @retval RETURN_UNSUPPORTED The processor does not support one or
981 more bytes of the memory resource range
982 specified by BaseAddress and Length.
983 @retval RETURN_UNSUPPORTED The bit mask of attributes is not supported
984 for the memory resource range specified
985 by BaseAddress and Length.
986 @retval RETURN_ACCESS_DENIED The attributes for the memory resource
987 range specified by BaseAddress and Length
988 cannot be modified.
989 @retval RETURN_OUT_OF_RESOURCES There are not enough system resources to
990 modify the attributes of the memory
991 resource range.
992
993 **/
994 RETURN_STATUS
995 EFIAPI
996 MtrrSetMemoryAttribute (
997 IN PHYSICAL_ADDRESS BaseAddress,
998 IN UINT64 Length,
999 IN MTRR_MEMORY_CACHE_TYPE Attribute
1000 )
1001 {
1002 UINT64 TempQword;
1003 RETURN_STATUS Status;
1004 UINT64 MemoryType;
1005 UINT64 Alignment;
1006 BOOLEAN OverLap;
1007 BOOLEAN Positive;
1008 UINT32 MsrNum;
1009 UINTN MtrrNumber;
1010 VARIABLE_MTRR VariableMtrr[MTRR_NUMBER_OF_VARIABLE_MTRR];
1011 UINT32 UsedMtrr;
1012 UINT64 MtrrValidBitsMask;
1013 UINT64 MtrrValidAddressMask;
1014 BOOLEAN OverwriteExistingMtrr;
1015 UINT32 FirmwareVariableMtrrCount;
1016 UINT32 VariableMtrrEnd;
1017 MTRR_CONTEXT MtrrContext;
1018
1019 DEBUG((DEBUG_CACHE, "MtrrSetMemoryAttribute() %a:%016lx-%016lx\n", mMtrrMemoryCacheTypeShortName[Attribute], BaseAddress, Length));
1020
1021 if (!IsMtrrSupported ()) {
1022 Status = RETURN_UNSUPPORTED;
1023 goto Done;
1024 }
1025
1026 FirmwareVariableMtrrCount = GetFirmwareVariableMtrrCount ();
1027 VariableMtrrEnd = MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (2 * GetVariableMtrrCount ()) - 1;
1028
1029 MtrrLibInitializeMtrrMask(&MtrrValidBitsMask, &MtrrValidAddressMask);
1030
1031 TempQword = 0;
1032 MemoryType = (UINT64)Attribute;
1033 OverwriteExistingMtrr = FALSE;
1034
1035 //
1036 // Check for an invalid parameter
1037 //
1038 if (Length == 0) {
1039 Status = RETURN_INVALID_PARAMETER;
1040 goto Done;
1041 }
1042
1043 if (
1044 (BaseAddress & ~MtrrValidAddressMask) != 0 ||
1045 (Length & ~MtrrValidAddressMask) != 0
1046 ) {
1047 Status = RETURN_UNSUPPORTED;
1048 goto Done;
1049 }
1050
1051 //
1052 // Check if Fixed MTRR
1053 //
1054 Status = RETURN_SUCCESS;
1055 while ((BaseAddress < BASE_1MB) && (Length > 0) && Status == RETURN_SUCCESS) {
1056 PreMtrrChange (&MtrrContext);
1057 Status = ProgramFixedMtrr (MemoryType, &BaseAddress, &Length);
1058 PostMtrrChange (&MtrrContext);
1059 if (RETURN_ERROR (Status)) {
1060 goto Done;
1061 }
1062 }
1063
1064 if (Length == 0) {
1065 //
1066 // A Length of 0 can only make sense for fixed MTRR ranges.
1067 // Since we just handled the fixed MTRRs, we can skip the
1068 // variable MTRR section.
1069 //
1070 goto Done;
1071 }
1072
1073 //
1074 // Since memory ranges below 1MB will be overridden by the fixed MTRRs,
1075 // we can set the base to 0 to save variable MTRRs.
1076 //
1077 if (BaseAddress == BASE_1MB) {
1078 BaseAddress = 0;
1079 Length += SIZE_1MB;
1080 }
1081
1082 //
1083 // Check for overlap
1084 //
1085 UsedMtrr = MtrrGetMemoryAttributeInVariableMtrr (MtrrValidBitsMask, MtrrValidAddressMask, VariableMtrr);
1086 OverLap = CheckMemoryAttributeOverlap (BaseAddress, BaseAddress + Length - 1, VariableMtrr);
1087 if (OverLap) {
1088 Status = CombineMemoryAttribute (MemoryType, &BaseAddress, &Length, VariableMtrr, &UsedMtrr, &OverwriteExistingMtrr);
1089 if (RETURN_ERROR (Status)) {
1090 goto Done;
1091 }
1092
1093 if (Length == 0) {
1094 //
1095 // Combined successfully, invalidate the now-unused MTRRs
1096 //
1097 InvalidateMtrr(VariableMtrr);
1098 Status = RETURN_SUCCESS;
1099 goto Done;
1100 }
1101 }
1102
1103 //
1104 // The memory type is the same as the type specified by
1105 // MTRR_LIB_IA32_MTRR_DEF_TYPE.
1106 //
1107 if ((!OverwriteExistingMtrr) && (Attribute == MtrrGetDefaultMemoryType ())) {
1108 //
1109 // Invalidate the now-unused MTRRs
1110 //
1111 InvalidateMtrr(VariableMtrr);
1112 goto Done;
1113 }
1114
1115 Positive = GetMtrrNumberAndDirection (BaseAddress, Length, &MtrrNumber);
1116
1117 if ((UsedMtrr + MtrrNumber) > FirmwareVariableMtrrCount) {
1118 Status = RETURN_OUT_OF_RESOURCES;
1119 goto Done;
1120 }
1121
1122 //
1123 // Invalidate the now-unused MTRRs
1124 //
1125 InvalidateMtrr(VariableMtrr);
1126
1127 //
1128 // Find first unused MTRR
1129 //
1130 for (MsrNum = MTRR_LIB_IA32_VARIABLE_MTRR_BASE;
1131 MsrNum < VariableMtrrEnd;
1132 MsrNum += 2
1133 ) {
1134 if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {
1135 break;
1136 }
1137 }
1138
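  //
  // Peel naturally aligned, power-of-two sized chunks off the bottom of the range
  // until the remaining length is smaller than the alignment of the base address.
  //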
1139 if (BaseAddress != 0) {
1140 do {
1141 //
1142 // Calculate the alignment of the base address.
1143 //
1144 Alignment = LShiftU64 (1, (UINTN)LowBitSet64 (BaseAddress));
1145
1146 if (Alignment > Length) {
1147 break;
1148 }
1149
1150 //
1151 // Find unused MTRR
1152 //
1153 for (; MsrNum < VariableMtrrEnd; MsrNum += 2) {
1154 if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {
1155 break;
1156 }
1157 }
1158
1159 ProgramVariableMtrr (
1160 MsrNum,
1161 BaseAddress,
1162 Alignment,
1163 MemoryType,
1164 MtrrValidAddressMask
1165 );
1166 BaseAddress += Alignment;
1167 Length -= Alignment;
1168 } while (TRUE);
1169
1170 if (Length == 0) {
1171 goto Done;
1172 }
1173 }
1174
1175 TempQword = Length;
1176
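  //
  // In the subtractive direction, first program one larger power-of-two range with
  // the requested type; the loop below then carves the excess off the top with
  // UNCACHEABLE ranges working downward. In the positive direction the loop below
  // simply adds power-of-two ranges working upward.
  //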
1177 if (!Positive) {
1178 Length = Power2MaxMemory (LShiftU64 (TempQword, 1));
1179
1180 //
1181 // Find unused MTRR
1182 //
1183 for (; MsrNum < VariableMtrrEnd; MsrNum += 2) {
1184 if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {
1185 break;
1186 }
1187 }
1188
1189 ProgramVariableMtrr (
1190 MsrNum,
1191 BaseAddress,
1192 Length,
1193 MemoryType,
1194 MtrrValidAddressMask
1195 );
1196 BaseAddress += Length;
1197 TempQword = Length - TempQword;
1198 MemoryType = MTRR_CACHE_UNCACHEABLE;
1199 }
1200
1201 do {
1202 //
1203 // Find unused MTRR
1204 //
1205 for (; MsrNum < VariableMtrrEnd; MsrNum += 2) {
1206 if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {
1207 break;
1208 }
1209 }
1210
1211 Length = Power2MaxMemory (TempQword);
1212 if (!Positive) {
1213 BaseAddress -= Length;
1214 }
1215
1216 ProgramVariableMtrr (
1217 MsrNum,
1218 BaseAddress,
1219 Length,
1220 MemoryType,
1221 MtrrValidAddressMask
1222 );
1223
1224 if (Positive) {
1225 BaseAddress += Length;
1226 }
1227 TempQword -= Length;
1228
1229 } while (TempQword > 0);
1230
1231 Done:
1232 DEBUG((DEBUG_CACHE, " Status = %r\n", Status));
1233 if (!RETURN_ERROR (Status)) {
1234 MtrrDebugPrintAllMtrrs ();
1235 }
1236
1237 return Status;
1238 }
1239
1240
1241 /**
1242 This function will get the memory cache type of the specific address.
1243
1244 This function is mainly for debug purposes.
1245
1246 @param[in] Address The specific address
1247
1248 @return Memory cache type of the specific address
1249
1250 **/
1251 MTRR_MEMORY_CACHE_TYPE
1252 EFIAPI
1253 MtrrGetMemoryAttribute (
1254 IN PHYSICAL_ADDRESS Address
1255 )
1256 {
1257 UINT64 TempQword;
1258 UINTN Index;
1259 UINTN SubIndex;
1260 UINT64 MtrrType;
1261 UINT64 TempMtrrType;
1262 MTRR_MEMORY_CACHE_TYPE CacheType;
1263 VARIABLE_MTRR VariableMtrr[MTRR_NUMBER_OF_VARIABLE_MTRR];
1264 UINT64 MtrrValidBitsMask;
1265 UINT64 MtrrValidAddressMask;
1266 UINTN VariableMtrrCount;
1267
1268 if (!IsMtrrSupported ()) {
1269 return CacheUncacheable;
1270 }
1271
1272 //
1273 // Check if MTRR is enabled, if not, return UC as attribute
1274 //
1275 TempQword = AsmReadMsr64 (MTRR_LIB_IA32_MTRR_DEF_TYPE);
1276 MtrrType = MTRR_CACHE_INVALID_TYPE;
1277
1278 if ((TempQword & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {
1279 return CacheUncacheable;
1280 }
1281
1282 //
1283 // If address is less than 1M, then try to go through the fixed MTRR
1284 //
1285 if (Address < BASE_1MB) {
1286 if ((TempQword & MTRR_LIB_CACHE_FIXED_MTRR_ENABLED) != 0) {
1287 //
1288 // Go through the fixed MTRR
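//   Each fixed MTRR MSR packs eight one-byte type fields, one per sub-range of
//   .Length bytes; SubIndex selects the byte that describes Address.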
1289 //
1290 for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {
1291 if (Address >= mMtrrLibFixedMtrrTable[Index].BaseAddress &&
1292 Address < (
1293 mMtrrLibFixedMtrrTable[Index].BaseAddress +
1294 (mMtrrLibFixedMtrrTable[Index].Length * 8)
1295 )
1296 ) {
1297 SubIndex =
1298 ((UINTN)Address - mMtrrLibFixedMtrrTable[Index].BaseAddress) /
1299 mMtrrLibFixedMtrrTable[Index].Length;
1300 TempQword = AsmReadMsr64 (mMtrrLibFixedMtrrTable[Index].Msr);
1301 MtrrType = RShiftU64 (TempQword, SubIndex * 8) & 0xFF;
1302 return GetMemoryCacheTypeFromMtrrType (MtrrType);
1303 }
1304 }
1305 }
1306 }
1307 MtrrLibInitializeMtrrMask(&MtrrValidBitsMask, &MtrrValidAddressMask);
1308 MtrrGetMemoryAttributeInVariableMtrr(
1309 MtrrValidBitsMask,
1310 MtrrValidAddressMask,
1311 VariableMtrr
1312 );
1313
1314 //
1315 // Go through the variable MTRR
1316 //
1317 VariableMtrrCount = GetVariableMtrrCount ();
1318 ASSERT (VariableMtrrCount <= MTRR_NUMBER_OF_VARIABLE_MTRR);
1319
1320 for (Index = 0; Index < VariableMtrrCount; Index++) {
1321 if (VariableMtrr[Index].Valid) {
1322 if (Address >= VariableMtrr[Index].BaseAddress &&
1323 Address < VariableMtrr[Index].BaseAddress+VariableMtrr[Index].Length) {
1324 TempMtrrType = VariableMtrr[Index].Type;
1325 MtrrType = MtrrPrecedence (MtrrType, TempMtrrType);
1326 }
1327 }
1328 }
1329 CacheType = GetMemoryCacheTypeFromMtrrType (MtrrType);
1330
1331 return CacheType;
1332 }
1333
1334
1335 /**
1336 Worker function will get the raw value in variable MTRRs
1337
1338 @param[out] VariableSettings A buffer to hold variable MTRRs content.
1339
1340 @return The VariableSettings input pointer
1341
1342 **/
1343 MTRR_VARIABLE_SETTINGS*
1344 MtrrGetVariableMtrrWorker (
1345 OUT MTRR_VARIABLE_SETTINGS *VariableSettings
1346 )
1347 {
1348 UINT32 Index;
1349 UINT32 VariableMtrrCount;
1350
1351 VariableMtrrCount = GetVariableMtrrCount ();
1352 ASSERT (VariableMtrrCount <= MTRR_NUMBER_OF_VARIABLE_MTRR);
1353
1354 for (Index = 0; Index < VariableMtrrCount; Index++) {
1355 VariableSettings->Mtrr[Index].Base =
1356 AsmReadMsr64 (MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (Index << 1));
1357 VariableSettings->Mtrr[Index].Mask =
1358 AsmReadMsr64 (MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (Index << 1) + 1);
1359 }
1360
1361 return VariableSettings;
1362 }
1363
1364 /**
1365 This function will get the raw value in variable MTRRs
1366
1367 @param[out] VariableSettings A buffer to hold variable MTRRs content.
1368
1369 @return The VariableSettings input pointer
1370
1371 **/
1372 MTRR_VARIABLE_SETTINGS*
1373 EFIAPI
1374 MtrrGetVariableMtrr (
1375 OUT MTRR_VARIABLE_SETTINGS *VariableSettings
1376 )
1377 {
1378 if (!IsMtrrSupported ()) {
1379 return VariableSettings;
1380 }
1381
1382 return MtrrGetVariableMtrrWorker (
1383 VariableSettings
1384 );
1385 }
1386
1387
1388 /**
1389 Worker function setting variable MTRRs
1390
1391 @param[in] VariableSettings A buffer to hold variable MTRRs content.
1392
1393 **/
1394 VOID
1395 MtrrSetVariableMtrrWorker (
1396 IN MTRR_VARIABLE_SETTINGS *VariableSettings
1397 )
1398 {
1399 UINT32 Index;
1400 UINT32 VariableMtrrCount;
1401
1402 VariableMtrrCount = GetVariableMtrrCount ();
1403 ASSERT (VariableMtrrCount <= MTRR_NUMBER_OF_VARIABLE_MTRR);
1404
1405 for (Index = 0; Index < VariableMtrrCount; Index++) {
1406 AsmWriteMsr64 (
1407 MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (Index << 1),
1408 VariableSettings->Mtrr[Index].Base
1409 );
1410 AsmWriteMsr64 (
1411 MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (Index << 1) + 1,
1412 VariableSettings->Mtrr[Index].Mask
1413 );
1414 }
1415 }
1416
1417
1418 /**
1419 This function sets variable MTRRs
1420
1421 @param[in] VariableSettings A buffer to hold variable MTRRs content.
1422
1423 @return The pointer to VariableSettings
1424
1425 **/
1426 MTRR_VARIABLE_SETTINGS*
1427 EFIAPI
1428 MtrrSetVariableMtrr (
1429 IN MTRR_VARIABLE_SETTINGS *VariableSettings
1430 )
1431 {
1432 MTRR_CONTEXT MtrrContext;
1433
1434 if (!IsMtrrSupported ()) {
1435 return VariableSettings;
1436 }
1437
1438 PreMtrrChange (&MtrrContext);
1439 MtrrSetVariableMtrrWorker (VariableSettings);
1440 PostMtrrChange (&MtrrContext);
1441 return VariableSettings;
1442 }
1443
1444 /**
1445 Worker function gets the content in fixed MTRRs
1446
1447 @param[out] FixedSettings A buffer to hold fixed MTRRs content.
1448
1449 @return The pointer to FixedSettings
1450
1451 **/
1452 MTRR_FIXED_SETTINGS*
1453 MtrrGetFixedMtrrWorker (
1454 OUT MTRR_FIXED_SETTINGS *FixedSettings
1455 )
1456 {
1457 UINT32 Index;
1458
1459 for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {
1460 FixedSettings->Mtrr[Index] =
1461 AsmReadMsr64 (mMtrrLibFixedMtrrTable[Index].Msr);
1462 }
1463
1464 return FixedSettings;
1465 }
1466
1467
1468 /**
1469 This function gets the content in fixed MTRRs
1470
1471 @param[out] FixedSettings A buffer to hold fixed MTRRs content.
1472
1473 @return The pointer to FixedSettings
1474
1475 **/
1476 MTRR_FIXED_SETTINGS*
1477 EFIAPI
1478 MtrrGetFixedMtrr (
1479 OUT MTRR_FIXED_SETTINGS *FixedSettings
1480 )
1481 {
1482 if (!IsMtrrSupported ()) {
1483 return FixedSettings;
1484 }
1485
1486 return MtrrGetFixedMtrrWorker (FixedSettings);
1487 }
1488
1489 /**
1490 Worker function setting fixed MTRRs
1491
1492 @param[in] FixedSettings A buffer to hold fixed Mtrrs content.
1493
1494 **/
1495 VOID
1496 MtrrSetFixedMtrrWorker (
1497 IN MTRR_FIXED_SETTINGS *FixedSettings
1498 )
1499 {
1500 UINT32 Index;
1501
1502 for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {
1503 AsmWriteMsr64 (
1504 mMtrrLibFixedMtrrTable[Index].Msr,
1505 FixedSettings->Mtrr[Index]
1506 );
1507 }
1508 }
1509
1510
1511 /**
1512 This function sets fixed MTRRs
1513
1514 @param[in] FixedSettings A buffer to hold fixed Mtrrs content.
1515
1516 @return The pointer to FixedSettings
1517
1518 **/
1519 MTRR_FIXED_SETTINGS*
1520 EFIAPI
1521 MtrrSetFixedMtrr (
1522 IN MTRR_FIXED_SETTINGS *FixedSettings
1523 )
1524 {
1525 MTRR_CONTEXT MtrrContext;
1526
1527 if (!IsMtrrSupported ()) {
1528 return FixedSettings;
1529 }
1530
1531 PreMtrrChange (&MtrrContext);
1532 MtrrSetFixedMtrrWorker (FixedSettings);
1533 PostMtrrChange (&MtrrContext);
1534
1535 return FixedSettings;
1536 }
1537
1538
1539 /**
1540 This function gets the content in all MTRRs (variable and fixed)
1541
1542 @param[out] MtrrSetting A buffer to hold all Mtrrs content.
1543
1544 @return The pointer to MtrrSetting
1545
1546 **/
1547 MTRR_SETTINGS *
1548 EFIAPI
1549 MtrrGetAllMtrrs (
1550 OUT MTRR_SETTINGS *MtrrSetting
1551 )
1552 {
1553 if (!IsMtrrSupported ()) {
1554 return MtrrSetting;
1555 }
1556
1557 //
1558 // Get fixed MTRRs
1559 //
1560 MtrrGetFixedMtrr (&MtrrSetting->Fixed);
1561
1562 //
1563 // Get variable MTRRs
1564 //
1565 MtrrGetVariableMtrr (&MtrrSetting->Variables);
1566
1567 //
1568 // Get MTRR_DEF_TYPE value
1569 //
1570 MtrrSetting->MtrrDefType = AsmReadMsr64 (MTRR_LIB_IA32_MTRR_DEF_TYPE);
1571
1572 return MtrrSetting;
1573 }
1574
1575
1576 /**
1577 This function sets all MTRRs (variable and fixed)
1578
1579 @param[in] MtrrSetting A buffer holding all MTRRs content.
1580
1581 @return The pointer to MtrrSetting
1582
1583 **/
1584 MTRR_SETTINGS *
1585 EFIAPI
1586 MtrrSetAllMtrrs (
1587 IN MTRR_SETTINGS *MtrrSetting
1588 )
1589 {
1590 MTRR_CONTEXT MtrrContext;
1591
1592 if (!IsMtrrSupported ()) {
1593 return MtrrSetting;
1594 }
1595
1596 PreMtrrChange (&MtrrContext);
1597
1598 //
1599 // Set fixed MTRRs
1600 //
1601 MtrrSetFixedMtrrWorker (&MtrrSetting->Fixed);
1602
1603 //
1604 // Set variable MTRRs
1605 //
1606 MtrrSetVariableMtrrWorker (&MtrrSetting->Variables);
1607
1608 //
1609 // Set MTRR_DEF_TYPE value
1610 //
1611 AsmWriteMsr64 (MTRR_LIB_IA32_MTRR_DEF_TYPE, MtrrSetting->MtrrDefType);
1612
1613 PostMtrrChangeEnableCache (&MtrrContext);
1614
1615 return MtrrSetting;
1616 }
1617
1618 /**
1619 This function prints all MTRRs for debugging.
1620 **/
1621 VOID
1622 EFIAPI
1623 MtrrDebugPrintAllMtrrs (
1624 VOID
1625 )
1626 {
1627 DEBUG_CODE (
1628 MTRR_SETTINGS MtrrSettings;
1629 UINTN Index;
1630 UINTN Index1;
1631 UINTN VariableMtrrCount;
1632 UINT64 Base;
1633 UINT64 Limit;
1634 UINT64 MtrrBase;
1635 UINT64 MtrrLimit;
1636 UINT64 RangeBase;
1637 UINT64 RangeLimit;
1638 UINT64 NoRangeBase;
1639 UINT64 NoRangeLimit;
1640 UINT32 RegEax;
1641 UINTN MemoryType;
1642 UINTN PreviousMemoryType;
1643 BOOLEAN Found;
1644
1645 if (!IsMtrrSupported ()) {
1646 return;
1647 }
1648
1649 DEBUG((DEBUG_CACHE, "MTRR Settings\n"));
1650 DEBUG((DEBUG_CACHE, "=============\n"));
1651
1652 MtrrGetAllMtrrs (&MtrrSettings);
1653 DEBUG((DEBUG_CACHE, "MTRR Default Type: %016lx\n", MtrrSettings.MtrrDefType));
1654 for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {
1655 DEBUG((DEBUG_CACHE, "Fixed MTRR[%02d] : %016lx\n", Index, MtrrSettings.Fixed.Mtrr[Index]));
1656 }
1657
1658 VariableMtrrCount = GetVariableMtrrCount ();
1659 for (Index = 0; Index < VariableMtrrCount; Index++) {
1660 DEBUG((DEBUG_CACHE, "Variable MTRR[%02d]: Base=%016lx Mask=%016lx\n",
1661 Index,
1662 MtrrSettings.Variables.Mtrr[Index].Base,
1663 MtrrSettings.Variables.Mtrr[Index].Mask
1664 ));
1665 }
1666 DEBUG((DEBUG_CACHE, "\n"));
1667 DEBUG((DEBUG_CACHE, "MTRR Ranges\n"));
1668 DEBUG((DEBUG_CACHE, "====================================\n"));
1669
1670 Base = 0;
1671 PreviousMemoryType = MTRR_CACHE_INVALID_TYPE;
1672 for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {
1673 Base = mMtrrLibFixedMtrrTable[Index].BaseAddress;
1674 for (Index1 = 0; Index1 < 8; Index1++) {
1675 MemoryType = (UINTN)(RShiftU64 (MtrrSettings.Fixed.Mtrr[Index], Index1 * 8) & 0xff);
1676 if (MemoryType > CacheWriteBack) {
1677 MemoryType = MTRR_CACHE_INVALID_TYPE;
1678 }
1679 if (MemoryType != PreviousMemoryType) {
1680 if (PreviousMemoryType != MTRR_CACHE_INVALID_TYPE) {
1681 DEBUG((DEBUG_CACHE, "%016lx\n", Base - 1));
1682 }
1683 PreviousMemoryType = MemoryType;
1684 DEBUG((DEBUG_CACHE, "%a:%016lx-", mMtrrMemoryCacheTypeShortName[MemoryType], Base));
1685 }
1686 Base += mMtrrLibFixedMtrrTable[Index].Length;
1687 }
1688 }
1689 DEBUG((DEBUG_CACHE, "%016lx\n", Base - 1));
1690
1691 VariableMtrrCount = GetVariableMtrrCount ();
1692
1693 Limit = BIT36 - 1;
1694 AsmCpuid (0x80000000, &RegEax, NULL, NULL, NULL);
1695 if (RegEax >= 0x80000008) {
1696 AsmCpuid (0x80000008, &RegEax, NULL, NULL, NULL);
1697 Limit = LShiftU64 (1, RegEax & 0xff) - 1;
1698 }
1699 Base = BASE_1MB;
1700 PreviousMemoryType = MTRR_CACHE_INVALID_TYPE;
1701 do {
1702 MemoryType = MtrrGetMemoryAttribute (Base);
1703 if (MemoryType > CacheWriteBack) {
1704 MemoryType = MTRR_CACHE_INVALID_TYPE;
1705 }
1706
1707 if (MemoryType != PreviousMemoryType) {
1708 if (PreviousMemoryType != MTRR_CACHE_INVALID_TYPE) {
1709 DEBUG((DEBUG_CACHE, "%016lx\n", Base - 1));
1710 }
1711 PreviousMemoryType = MemoryType;
1712 DEBUG((DEBUG_CACHE, "%a:%016lx-", mMtrrMemoryCacheTypeShortName[MemoryType], Base));
1713 }
1714
1715 RangeBase = BASE_1MB;
1716 NoRangeBase = BASE_1MB;
1717 RangeLimit = Limit;
1718 NoRangeLimit = Limit;
1719
1720 for (Index = 0, Found = FALSE; Index < VariableMtrrCount; Index++) {
1721 if ((MtrrSettings.Variables.Mtrr[Index].Mask & BIT11) == 0) {
1722 //
1723 // If mask is not valid, then do not display range
1724 //
1725 continue;
1726 }
1727 MtrrBase = (MtrrSettings.Variables.Mtrr[Index].Base & (~(SIZE_4KB - 1)));
1728 MtrrLimit = MtrrBase + ((~(MtrrSettings.Variables.Mtrr[Index].Mask & (~(SIZE_4KB - 1)))) & Limit);
1729
1730 if (Base >= MtrrBase && Base < MtrrLimit) {
1731 Found = TRUE;
1732 }
1733
1734 if (Base >= MtrrBase && MtrrBase > RangeBase) {
1735 RangeBase = MtrrBase;
1736 }
1737 if (Base > MtrrLimit && MtrrLimit > RangeBase) {
1738 RangeBase = MtrrLimit + 1;
1739 }
1740 if (Base < MtrrBase && MtrrBase < RangeLimit) {
1741 RangeLimit = MtrrBase - 1;
1742 }
1743 if (Base < MtrrLimit && MtrrLimit <= RangeLimit) {
1744 RangeLimit = MtrrLimit;
1745 }
1746
1747 if (Base > MtrrLimit && NoRangeBase < MtrrLimit) {
1748 NoRangeBase = MtrrLimit + 1;
1749 }
1750 if (Base < MtrrBase && NoRangeLimit > MtrrBase) {
1751 NoRangeLimit = MtrrBase - 1;
1752 }
1753 }
1754
1755 if (Found) {
1756 Base = RangeLimit + 1;
1757 } else {
1758 Base = NoRangeLimit + 1;
1759 }
1760 } while (Base < Limit);
1761 DEBUG((DEBUG_CACHE, "%016lx\n\n", Base - 1));
1762 );
1763 }
1764
1765 /**
1766 Checks if MTRR is supported.
1767
1768 @retval TRUE MTRR is supported.
1769 @retval FALSE MTRR is not supported.
1770
1771 **/
1772 BOOLEAN
1773 EFIAPI
1774 IsMtrrSupported (
1775 VOID
1776 )
1777 {
1778 UINT32 RegEdx;
1779 UINT64 MtrrCap;
1780
1781 //
1782 // Check CPUID(1).EDX[12] for MTRR capability
1783 //
1784 AsmCpuid (1, NULL, NULL, NULL, &RegEdx);
1785 if (BitFieldRead32 (RegEdx, 12, 12) == 0) {
1786 return FALSE;
1787 }
1788
1789 //
1790 // Check IA32_MTRRCAP[7:0] for the number of variable MTRRs and IA32_MTRRCAP[8] for
1791 // fixed MTRR existence. If the number of variable MTRRs is zero, or fixed MTRRs do
1792 // not exist, return FALSE.
1793 //
1794 MtrrCap = AsmReadMsr64 (MTRR_LIB_IA32_MTRR_CAP);
1795 if ((BitFieldRead64 (MtrrCap, 0, 7) == 0) || (BitFieldRead64 (MtrrCap, 8, 8) == 0)) {
1796 return FALSE;
1797 }
1798
1799 return TRUE;
1800 }