1 /** @file
2 MTRR setting library
3
4 Copyright (c) 2008 - 2015, Intel Corporation. All rights reserved.<BR>
5 This program and the accompanying materials
6 are licensed and made available under the terms and conditions of the BSD License
7 which accompanies this distribution. The full text of the license may be found at
8 http://opensource.org/licenses/bsd-license.php
9
10 THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
11 WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
12
13 **/
14
15 #include <Base.h>
16
17 #include <Library/MtrrLib.h>
18 #include <Library/BaseLib.h>
19 #include <Library/CpuLib.h>
20 #include <Library/BaseMemoryLib.h>
21 #include <Library/DebugLib.h>
#include <Library/PcdLib.h>  // Assumed needed here for PcdGet32 (PcdCpuNumberOfReservedVariableMtrrs) used below
22
23 //
24 // Context to save and restore when MTRRs are programmed
25 //
26 typedef struct {
27 UINTN Cr4;
28 BOOLEAN InterruptState;
29 } MTRR_CONTEXT;
30
31 //
32 // This table defines the offset, base and length of the fixed MTRRs
33 //
34 CONST FIXED_MTRR mMtrrLibFixedMtrrTable[] = {
35 {
36 MTRR_LIB_IA32_MTRR_FIX64K_00000,
37 0,
38 SIZE_64KB
39 },
40 {
41 MTRR_LIB_IA32_MTRR_FIX16K_80000,
42 0x80000,
43 SIZE_16KB
44 },
45 {
46 MTRR_LIB_IA32_MTRR_FIX16K_A0000,
47 0xA0000,
48 SIZE_16KB
49 },
50 {
51 MTRR_LIB_IA32_MTRR_FIX4K_C0000,
52 0xC0000,
53 SIZE_4KB
54 },
55 {
56 MTRR_LIB_IA32_MTRR_FIX4K_C8000,
57 0xC8000,
58 SIZE_4KB
59 },
60 {
61 MTRR_LIB_IA32_MTRR_FIX4K_D0000,
62 0xD0000,
63 SIZE_4KB
64 },
65 {
66 MTRR_LIB_IA32_MTRR_FIX4K_D8000,
67 0xD8000,
68 SIZE_4KB
69 },
70 {
71 MTRR_LIB_IA32_MTRR_FIX4K_E0000,
72 0xE0000,
73 SIZE_4KB
74 },
75 {
76 MTRR_LIB_IA32_MTRR_FIX4K_E8000,
77 0xE8000,
78 SIZE_4KB
79 },
80 {
81 MTRR_LIB_IA32_MTRR_FIX4K_F0000,
82 0xF0000,
83 SIZE_4KB
84 },
85 {
86 MTRR_LIB_IA32_MTRR_FIX4K_F8000,
87 0xF8000,
88 SIZE_4KB
89 }
90 };
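
//
// Layout summary of the table above (per the Intel SDM fixed-range MTRR map):
// one 64 KB-granular MSR covers 0x00000-0x7FFFF, two 16 KB-granular MSRs cover
// 0x80000-0xBFFFF, and eight 4 KB-granular MSRs cover 0xC0000-0xFFFFF. Each
// MSR holds 8 one-byte type fields, so 11 MSRs * 8 fields = 88 ranges that
// together describe the first 1 MB of physical memory.
//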
91
92 //
93 // Lookup table used to print MTRRs
94 //
95 GLOBAL_REMOVE_IF_UNREFERENCED CONST CHAR8 *mMtrrMemoryCacheTypeShortName[] = {
96 "UC", // CacheUncacheable
97 "WC", // CacheWriteCombining
98 "R*", // Invalid
99 "R*", // Invalid
100 "WT", // CacheWriteThrough
101 "WP", // CacheWriteProtected
102 "WB", // CacheWriteBack
103 "R*" // Invalid
104 };
105
106 /**
107 Worker function returns the variable MTRR count for the CPU.
108
109 @return Variable MTRR count
110
111 **/
112 UINT32
113 GetVariableMtrrCountWorker (
114 VOID
115 )
116 {
117 UINT32 VariableMtrrCount;
118
119 VariableMtrrCount = (UINT32)(AsmReadMsr64 (MTRR_LIB_IA32_MTRR_CAP) & MTRR_LIB_IA32_MTRR_CAP_VCNT_MASK);
120 ASSERT (VariableMtrrCount <= MTRR_NUMBER_OF_VARIABLE_MTRR);
121 return VariableMtrrCount;
122 }
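
//
// Note: IA32_MTRRCAP (MSR 0xFE) reports the variable MTRR count in bits [7:0]
// (VCNT); bit 8 indicates fixed-range MTRR support and bit 10 indicates
// write-combining support. Only the VCNT field is read by this worker.
//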
123
124 /**
125 Returns the variable MTRR count for the CPU.
126
127 @return Variable MTRR count
128
129 **/
130 UINT32
131 EFIAPI
132 GetVariableMtrrCount (
133 VOID
134 )
135 {
136 if (!IsMtrrSupported ()) {
137 return 0;
138 }
139 return GetVariableMtrrCountWorker ();
140 }
141
142 /**
143 Worker function returns the firmware usable variable MTRR count for the CPU.
144
145 @return Firmware usable variable MTRR count
146
147 **/
148 UINT32
149 GetFirmwareVariableMtrrCountWorker (
150 VOID
151 )
152 {
153 UINT32 VariableMtrrCount;
154 UINT32 ReservedMtrrNumber;
155
156 VariableMtrrCount = GetVariableMtrrCountWorker ();
157 ReservedMtrrNumber = PcdGet32 (PcdCpuNumberOfReservedVariableMtrrs);
158 if (VariableMtrrCount < ReservedMtrrNumber) {
159 return 0;
160 }
161
162 return VariableMtrrCount - ReservedMtrrNumber;
163 }
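
//
// Illustrative example (values are hypothetical): if IA32_MTRRCAP.VCNT reports
// 10 variable MTRRs and the platform sets PcdCpuNumberOfReservedVariableMtrrs
// to 2, GetFirmwareVariableMtrrCountWorker () returns 8, i.e. the number of
// variable MTRRs this library is allowed to program.
//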
164
165 /**
166 Returns the firmware usable variable MTRR count for the CPU.
167
168 @return Firmware usable variable MTRR count
169
170 **/
171 UINT32
172 EFIAPI
173 GetFirmwareVariableMtrrCount (
174 VOID
175 )
176 {
177 if (!IsMtrrSupported ()) {
178 return 0;
179 }
180 return GetFirmwareVariableMtrrCountWorker ();
181 }
182
183 /**
184 Worker function returns the default MTRR cache type for the system.
185
186 @return The default MTRR cache type.
187
188 **/
189 MTRR_MEMORY_CACHE_TYPE
190 MtrrGetDefaultMemoryTypeWorker (
191 VOID
192 )
193 {
194 return (MTRR_MEMORY_CACHE_TYPE) (AsmReadMsr64 (MTRR_LIB_IA32_MTRR_DEF_TYPE) & 0x7);
195 }
196
197
198 /**
199 Returns the default MTRR cache type for the system.
200
201 @return The default MTRR cache type.
202
203 **/
204 MTRR_MEMORY_CACHE_TYPE
205 EFIAPI
206 MtrrGetDefaultMemoryType (
207 VOID
208 )
209 {
210 if (!IsMtrrSupported ()) {
211 return CacheUncacheable;
212 }
213 return MtrrGetDefaultMemoryTypeWorker ();
214 }
215
216 /**
217 Preparation before programming MTRR.
218
219 This function will do some preparation for programming MTRRs:
220 disable and invalidate the cache, and disable the MTRR caching functionality.
221
222 @param[out] MtrrContext Pointer to context to save
223
224 **/
225 VOID
226 PreMtrrChange (
227 OUT MTRR_CONTEXT *MtrrContext
228 )
229 {
230 //
231 // Disable interrupts and save current interrupt state
232 //
233 MtrrContext->InterruptState = SaveAndDisableInterrupts();
234
235 //
236 // Enter no fill cache mode, CD=1(Bit30), NW=0 (Bit29)
237 //
238 AsmDisableCache ();
239
240 //
241 // Save original CR4 value and clear PGE flag (Bit 7)
242 //
243 MtrrContext->Cr4 = AsmReadCr4 ();
244 AsmWriteCr4 (MtrrContext->Cr4 & (~BIT7));
245
246 //
247 // Flush all TLBs
248 //
249 CpuFlushTlb ();
250
251 //
252 // Disable MTRRs
253 //
254 AsmMsrBitFieldWrite64 (MTRR_LIB_IA32_MTRR_DEF_TYPE, 10, 11, 0);
255 }
256
257 /**
258 Cleaning up after programming MTRRs.
259
260 This function will do some clean up after programming MTRRs:
261 Flush all TLBs, re-enable caching, restore CR4.
262
263 @param[in] MtrrContext Pointer to context to restore
264
265 **/
266 VOID
267 PostMtrrChangeEnableCache (
268 IN MTRR_CONTEXT *MtrrContext
269 )
270 {
271 //
272 // Flush all TLBs
273 //
274 CpuFlushTlb ();
275
276 //
277 // Enable Normal Mode caching CD=NW=0, CD(Bit30), NW(Bit29)
278 //
279 AsmEnableCache ();
280
281 //
282 // Restore original CR4 value
283 //
284 AsmWriteCr4 (MtrrContext->Cr4);
285
286 //
287 // Restore original interrupt state
288 //
289 SetInterruptState (MtrrContext->InterruptState);
290 }
291
292 /**
293 Cleaning up after programming MTRRs.
294
295 This function will do some clean up after programming MTRRs:
296 enable MTRR caching functionality, and enable cache
297
298 @param[in] MtrrContext Pointer to context to restore
299
300 **/
301 VOID
302 PostMtrrChange (
303 IN MTRR_CONTEXT *MtrrContext
304 )
305 {
306 //
307 // Enable Cache MTRR
308 //
309 AsmMsrBitFieldWrite64 (MTRR_LIB_IA32_MTRR_DEF_TYPE, 10, 11, 3);
310
311 PostMtrrChangeEnableCache (MtrrContext);
312 }
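
//
// Typical usage of the three helpers above inside this library (sketch only,
// mirroring the pattern used by the public MtrrSet* functions below):
//
//   MTRR_CONTEXT  MtrrContext;
//
//   PreMtrrChange (&MtrrContext);       // cache off, TLB flushed, MTRRs disabled
//   AsmWriteMsr64 (Msr, Value);         // program fixed and/or variable MTRR MSRs
//   PostMtrrChange (&MtrrContext);      // MTRRs re-enabled, cache and CR4 restored
//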
313
314 /**
315 Worker function gets the content in fixed MTRRs
316
317 @param[out] FixedSettings A buffer to hold fixed MTRRs content.
318
319 @retval The pointer of FixedSettings
320
321 **/
322 MTRR_FIXED_SETTINGS*
323 MtrrGetFixedMtrrWorker (
324 OUT MTRR_FIXED_SETTINGS *FixedSettings
325 )
326 {
327 UINT32 Index;
328
329 for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {
330 FixedSettings->Mtrr[Index] =
331 AsmReadMsr64 (mMtrrLibFixedMtrrTable[Index].Msr);
332 }
333
334 return FixedSettings;
335 }
336
337
338 /**
339 This function gets the content in fixed MTRRs
340
341 @param[out] FixedSettings A buffer to hold fixed MTRRs content.
342
343 @retval The pointer of FixedSettings
344
345 **/
346 MTRR_FIXED_SETTINGS*
347 EFIAPI
348 MtrrGetFixedMtrr (
349 OUT MTRR_FIXED_SETTINGS *FixedSettings
350 )
351 {
352 if (!IsMtrrSupported ()) {
353 return FixedSettings;
354 }
355
356 return MtrrGetFixedMtrrWorker (FixedSettings);
357 }
358
359
360 /**
361 Worker function will get the raw value in variable MTRRs
362
363 @param[out] VariableSettings A buffer to hold variable MTRRs content.
364
365 @return The VariableSettings input pointer
366
367 **/
368 MTRR_VARIABLE_SETTINGS*
369 MtrrGetVariableMtrrWorker (
370 OUT MTRR_VARIABLE_SETTINGS *VariableSettings
371 )
372 {
373 UINT32 Index;
374 UINT32 VariableMtrrCount;
375
376 VariableMtrrCount = GetVariableMtrrCount ();
377 ASSERT (VariableMtrrCount <= MTRR_NUMBER_OF_VARIABLE_MTRR);
378
379 for (Index = 0; Index < VariableMtrrCount; Index++) {
380 VariableSettings->Mtrr[Index].Base =
381 AsmReadMsr64 (MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (Index << 1));
382 VariableSettings->Mtrr[Index].Mask =
383 AsmReadMsr64 (MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (Index << 1) + 1);
384 }
385
386 return VariableSettings;
387 }
388
389 /**
390 This function will get the raw value in variable MTRRs
391
392 @param[out] VariableSettings A buffer to hold variable MTRRs content.
393
394 @return The VariableSettings input pointer
395
396 **/
397 MTRR_VARIABLE_SETTINGS*
398 EFIAPI
399 MtrrGetVariableMtrr (
400 OUT MTRR_VARIABLE_SETTINGS *VariableSettings
401 )
402 {
403 if (!IsMtrrSupported ()) {
404 return VariableSettings;
405 }
406
407 return MtrrGetVariableMtrrWorker (
408 VariableSettings
409 );
410 }
411
412 /**
413 Programs fixed MTRRs registers.
414
415 @param[in] MemoryCacheType The memory type to set.
416 @param[in, out] Base The base address of memory range.
417 @param[in, out] Length The length of memory range.
418
419 @retval RETURN_SUCCESS The cache type was updated successfully
420 @retval RETURN_UNSUPPORTED The requested range or cache type was invalid
421 for the fixed MTRRs.
422
423 **/
424 RETURN_STATUS
425 ProgramFixedMtrr (
426 IN UINT64 MemoryCacheType,
427 IN OUT UINT64 *Base,
428 IN OUT UINT64 *Length
429 )
430 {
431 UINT32 MsrNum;
432 UINT32 ByteShift;
433 UINT64 TempQword;
434 UINT64 OrMask;
435 UINT64 ClearMask;
436
437 TempQword = 0;
438 OrMask = 0;
439 ClearMask = 0;
440
441 for (MsrNum = 0; MsrNum < MTRR_NUMBER_OF_FIXED_MTRR; MsrNum++) {
442 if ((*Base >= mMtrrLibFixedMtrrTable[MsrNum].BaseAddress) &&
443 (*Base <
444 (
445 mMtrrLibFixedMtrrTable[MsrNum].BaseAddress +
446 (8 * mMtrrLibFixedMtrrTable[MsrNum].Length)
447 )
448 )
449 ) {
450 break;
451 }
452 }
453
454 if (MsrNum == MTRR_NUMBER_OF_FIXED_MTRR) {
455 return RETURN_UNSUPPORTED;
456 }
457
458 //
459 // We found the fixed MTRR to be programmed
460 //
461 for (ByteShift = 0; ByteShift < 8; ByteShift++) {
462 if (*Base ==
463 (
464 mMtrrLibFixedMtrrTable[MsrNum].BaseAddress +
465 (ByteShift * mMtrrLibFixedMtrrTable[MsrNum].Length)
466 )
467 ) {
468 break;
469 }
470 }
471
472 if (ByteShift == 8) {
473 return RETURN_UNSUPPORTED;
474 }
475
476 for (
477 ;
478 ((ByteShift < 8) && (*Length >= mMtrrLibFixedMtrrTable[MsrNum].Length));
479 ByteShift++
480 ) {
481 OrMask |= LShiftU64 ((UINT64) MemoryCacheType, (UINT32) (ByteShift * 8));
482 ClearMask |= LShiftU64 ((UINT64) 0xFF, (UINT32) (ByteShift * 8));
483 *Length -= mMtrrLibFixedMtrrTable[MsrNum].Length;
484 *Base += mMtrrLibFixedMtrrTable[MsrNum].Length;
485 }
486
487 if (ByteShift < 8 && (*Length != 0)) {
488 return RETURN_UNSUPPORTED;
489 }
490
491 TempQword =
492 (AsmReadMsr64 (mMtrrLibFixedMtrrTable[MsrNum].Msr) & ~ClearMask) | OrMask;
493 AsmWriteMsr64 (mMtrrLibFixedMtrrTable[MsrNum].Msr, TempQword);
494 return RETURN_SUCCESS;
495 }
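
//
// Worked example for ProgramFixedMtrr () (illustrative values): a request of
// *Base = 0xA0000, *Length = 0x20000, MemoryCacheType = MTRR_CACHE_WRITE_THROUGH
// selects MTRR_LIB_IA32_MTRR_FIX16K_A0000 (8 fields of 16 KB each). ByteShift
// starts at 0 and all 8 fields are consumed, so ClearMask = 0xFFFFFFFFFFFFFFFF,
// OrMask = 0x0404040404040404 (WT = 4 in every byte), and *Length becomes 0,
// which terminates the caller's fixed-MTRR loop.
//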
496
497
498 /**
499 Gets the attribute of variable MTRRs.
500
501 This function shadows the content of variable MTRRs into an
502 internal array: VariableMtrr.
503
504 @param[in] MtrrValidBitsMask The mask for the valid bit of the MTRR
505 @param[in] MtrrValidAddressMask The valid address mask for MTRR
506 @param[out] VariableMtrr The array to shadow variable MTRRs content
507
508 @return The number of variable MTRRs that are in use
509 (i.e. marked as valid) on this processor.
510
511 **/
512 UINT32
513 EFIAPI
514 MtrrGetMemoryAttributeInVariableMtrr (
515 IN UINT64 MtrrValidBitsMask,
516 IN UINT64 MtrrValidAddressMask,
517 OUT VARIABLE_MTRR *VariableMtrr
518 )
519 {
520 UINTN Index;
521 UINT32 MsrNum;
522 UINT32 UsedMtrr;
523 UINT32 FirmwareVariableMtrrCount;
524 UINT32 VariableMtrrEnd;
525
526 if (!IsMtrrSupported ()) {
527 return 0;
528 }
529
530 FirmwareVariableMtrrCount = GetFirmwareVariableMtrrCount ();
531 VariableMtrrEnd = MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (2 * GetVariableMtrrCount ()) - 1;
532
533 ZeroMem (VariableMtrr, sizeof (VARIABLE_MTRR) * MTRR_NUMBER_OF_VARIABLE_MTRR);
534 UsedMtrr = 0;
535
536 for (MsrNum = MTRR_LIB_IA32_VARIABLE_MTRR_BASE, Index = 0;
537 (
538 (MsrNum < VariableMtrrEnd) &&
539 (Index < FirmwareVariableMtrrCount)
540 );
541 MsrNum += 2
542 ) {
543 if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) != 0) {
544 VariableMtrr[Index].Msr = MsrNum;
545 VariableMtrr[Index].BaseAddress = (AsmReadMsr64 (MsrNum) &
546 MtrrValidAddressMask);
547 VariableMtrr[Index].Length = ((~(AsmReadMsr64 (MsrNum + 1) &
548 MtrrValidAddressMask)
549 ) &
550 MtrrValidBitsMask
551 ) + 1;
552 VariableMtrr[Index].Type = (AsmReadMsr64 (MsrNum) & 0x0ff);
553 VariableMtrr[Index].Valid = TRUE;
554 VariableMtrr[Index].Used = TRUE;
555 UsedMtrr = UsedMtrr + 1;
556 Index++;
557 }
558 }
559 return UsedMtrr;
560 }
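
//
// How the Length above is derived from the PhysMask MSR, with hypothetical
// numbers: on a CPU with 36 physical address bits, MtrrValidBitsMask is
// 0xFFFFFFFFF and MtrrValidAddressMask is 0xFFFFFF000. For a mask MSR whose
// address field is 0xFFFC00000, ~(0xFFFC00000) & 0xFFFFFFFFF = 0x3FFFFF, and
// adding 1 yields a range length of 0x400000 (4 MB).
//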
561
562
563 /**
564 Checks overlap between given memory range and MTRRs.
565
566 @param[in] Start The start address of memory range.
567 @param[in] End The end address of memory range.
568 @param[in] VariableMtrr The array to shadow variable MTRRs content
569
570 @retval TRUE Overlap exists.
571 @retval FALSE No overlap.
572
573 **/
574 BOOLEAN
575 CheckMemoryAttributeOverlap (
576 IN PHYSICAL_ADDRESS Start,
577 IN PHYSICAL_ADDRESS End,
578 IN VARIABLE_MTRR *VariableMtrr
579 )
580 {
581 UINT32 Index;
582 
//
// Check all firmware-usable variable MTRR entries; the shadow array is
// filled up to GetFirmwareVariableMtrrCount () entries by the caller,
// so a hard-coded bound of 6 could miss overlapping entries.
//
583 for (Index = 0; Index < GetFirmwareVariableMtrrCount (); Index++) {
584 if (
585 VariableMtrr[Index].Valid &&
586 !(
587 (Start > (VariableMtrr[Index].BaseAddress +
588 VariableMtrr[Index].Length - 1)
589 ) ||
590 (End < VariableMtrr[Index].BaseAddress)
591 )
592 ) {
593 return TRUE;
594 }
595 }
596
597 return FALSE;
598 }
599
600
601 /**
602 Marks a variable MTRR as non-valid.
603
604 @param[in] Index The index of the array VariableMtrr to be invalidated
605 @param[in] VariableMtrr The array to shadow variable MTRRs content
606 @param[in, out] UsedMtrr The number of MTRRs in use; decremented by one on return
607
608 **/
609 VOID
610 InvalidateShadowMtrr (
611 IN UINTN Index,
612 IN VARIABLE_MTRR *VariableMtrr,
613 IN OUT UINT32 *UsedMtrr
614 )
615 {
616 VariableMtrr[Index].Valid = FALSE;
617 *UsedMtrr = *UsedMtrr - 1;
618 }
619
620
621 /**
622 Combines memory attributes.
623
624 If overlap exists between given memory range and MTRRs, try to combine them.
625
626 @param[in] Attributes The memory type to set.
627 @param[in, out] Base The base address of memory range.
628 @param[in, out] Length The length of memory range.
629 @param[in] VariableMtrr The array to shadow variable MTRRs content
630 @param[in, out] UsedMtrr The number of MTRRs which has already been used
631 @param[out] OverwriteExistingMtrr Returns whether an existing MTRR was used
632
633 @retval RETURN_SUCCESS Memory region successfully combined.
634 @retval RETURN_ACCESS_DENIED Memory region cannot be combined.
635
636 **/
637 RETURN_STATUS
638 CombineMemoryAttribute (
639 IN UINT64 Attributes,
640 IN OUT UINT64 *Base,
641 IN OUT UINT64 *Length,
642 IN VARIABLE_MTRR *VariableMtrr,
643 IN OUT UINT32 *UsedMtrr,
644 OUT BOOLEAN *OverwriteExistingMtrr
645 )
646 {
647 UINT32 Index;
648 UINT64 CombineStart;
649 UINT64 CombineEnd;
650 UINT64 MtrrEnd;
651 UINT64 EndAddress;
652 UINT32 FirmwareVariableMtrrCount;
653 BOOLEAN CoveredByExistingMtrr;
654
655 FirmwareVariableMtrrCount = GetFirmwareVariableMtrrCount ();
656
657 *OverwriteExistingMtrr = FALSE;
658 CoveredByExistingMtrr = FALSE;
659 EndAddress = *Base + *Length - 1;
660
661 for (Index = 0; Index < FirmwareVariableMtrrCount; Index++) {
662
663 MtrrEnd = VariableMtrr[Index].BaseAddress + VariableMtrr[Index].Length - 1;
664 if (
665 !VariableMtrr[Index].Valid ||
666 (
667 *Base > (MtrrEnd) ||
668 (EndAddress < VariableMtrr[Index].BaseAddress)
669 )
670 ) {
671 continue;
672 }
673
674 //
675 // Combine same attribute MTRR range
676 //
677 if (Attributes == VariableMtrr[Index].Type) {
678 //
679 // If the existing MTRR range contains the requested range, set a flag and continue,
680 // so that any other MTRR covering the same range with a higher-priority cache type is still invalidated.
681 //
682 if (VariableMtrr[Index].BaseAddress <= *Base && MtrrEnd >= EndAddress) {
683 CoveredByExistingMtrr = TRUE;
684 continue;
685 }
686 //
687 // Invalidate this MTRR, and program the combined range
688 //
689 CombineStart =
690 (*Base) < VariableMtrr[Index].BaseAddress ?
691 (*Base) :
692 VariableMtrr[Index].BaseAddress;
693 CombineEnd = EndAddress > MtrrEnd ? EndAddress : MtrrEnd;
694
695 //
696 // Record the MTRR usage status in VariableMtrr array.
697 //
698 InvalidateShadowMtrr (Index, VariableMtrr, UsedMtrr);
699 *Base = CombineStart;
700 *Length = CombineEnd - CombineStart + 1;
701 EndAddress = CombineEnd;
702 *OverwriteExistingMtrr = TRUE;
703 continue;
704 } else {
705 //
706 // The cache type is different, but the range is covered exactly by one MTRR
707 //
708 if (VariableMtrr[Index].BaseAddress == *Base && MtrrEnd == EndAddress) {
709 InvalidateShadowMtrr (Index, VariableMtrr, UsedMtrr);
710 continue;
711 }
712
713 }
714
715 if ((Attributes == MTRR_CACHE_WRITE_THROUGH &&
716 VariableMtrr[Index].Type == MTRR_CACHE_WRITE_BACK) ||
717 (Attributes == MTRR_CACHE_WRITE_BACK &&
718 VariableMtrr[Index].Type == MTRR_CACHE_WRITE_THROUGH) ||
719 (Attributes == MTRR_CACHE_UNCACHEABLE) ||
720 (VariableMtrr[Index].Type == MTRR_CACHE_UNCACHEABLE)
721 ) {
722 *OverwriteExistingMtrr = TRUE;
723 continue;
724 }
725 //
726 // Other type memory overlap is invalid
727 //
728 return RETURN_ACCESS_DENIED;
729 }
730
731 if (CoveredByExistingMtrr) {
732 *Length = 0;
733 }
734
735 return RETURN_SUCCESS;
736 }
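
//
// Illustrative example of the combining logic (hypothetical addresses): if an
// existing WB MTRR covers [2 MB, 4 MB) and the caller requests WB for
// [1 MB, 3 MB), the shadow entry is invalidated and *Base/*Length are widened
// to the union [1 MB, 4 MB), which is then programmed as a single new range.
// A UC request overlapping any other type simply sets OverwriteExistingMtrr,
// because UC always wins the precedence arbitration.
//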
737
738
739 /**
740 Calculates the maximum power of 2 that is less than or equal to MemoryLength.
741 
742 @param[in] MemoryLength The memory length in bytes.
743 
744 @return The maximum power of 2 that is less than or equal to MemoryLength.
745
746 **/
747 UINT64
748 Power2MaxMemory (
749 IN UINT64 MemoryLength
750 )
751 {
752 UINT64 Result;
753
754 if (RShiftU64 (MemoryLength, 32) != 0) {
755 Result = LShiftU64 (
756 (UINT64) GetPowerOfTwo32 (
757 (UINT32) RShiftU64 (MemoryLength, 32)
758 ),
759 32
760 );
761 } else {
762 Result = (UINT64) GetPowerOfTwo32 ((UINT32) MemoryLength);
763 }
764
765 return Result;
766 }
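
//
// Example: Power2MaxMemory (0x180000) returns 0x100000, the largest power of
// two not exceeding 1.5 MB; Power2MaxMemory (0x100000) returns 0x100000 itself.
//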
767
768
769 /**
770 Determines the MTRR numbers used to program a memory range.
771
772 This function first checks the alignment of the base address.
773 If the alignment of the base address <= Length, cover the memory range
774 (BaseAddress, alignment) by a MTRR, then BaseAddress += alignment and
775 Length -= alignment. Repeat the step until alignment > Length.
776
777 Then this function determines which direction of programming the variable
778 MTRRs for the remaining length will use fewer MTRRs.
779
780 @param[in]  BaseAddress Base address of the memory region to program.
781 @param[in]  Length      Length of the memory region to program.
782 @param[out] MtrrNumber  Return the number of MTRRs required to cover the region.
783 
784 @retval TRUE  Positive direction is better.
785 @retval FALSE Negative (subtractive) direction is better.
786
787 **/
788 BOOLEAN
789 GetMtrrNumberAndDirection (
790 IN UINT64 BaseAddress,
791 IN UINT64 Length,
792 OUT UINTN *MtrrNumber
793 )
794 {
795 UINT64 TempQword;
796 UINT64 Alignment;
797 UINT32 Positive;
798 UINT32 Subtractive;
799
800 *MtrrNumber = 0;
801
802 if (BaseAddress != 0) {
803 do {
804 //
805 // Calculate the alignment of the base address.
806 //
807 Alignment = LShiftU64 (1, (UINTN)LowBitSet64 (BaseAddress));
808
809 if (Alignment > Length) {
810 break;
811 }
812
813 (*MtrrNumber)++;
814 BaseAddress += Alignment;
815 Length -= Alignment;
816 } while (TRUE);
817
818 if (Length == 0) {
819 return TRUE;
820 }
821 }
822
823 TempQword = Length;
824 Positive = 0;
825 Subtractive = 0;
826
827 do {
828 TempQword -= Power2MaxMemory (TempQword);
829 Positive++;
830 } while (TempQword != 0);
831
832 TempQword = Power2MaxMemory (LShiftU64 (Length, 1)) - Length;
833 Subtractive++;
834 do {
835 TempQword -= Power2MaxMemory (TempQword);
836 Subtractive++;
837 } while (TempQword != 0);
838
839 if (Positive <= Subtractive) {
840 *MtrrNumber += Positive;
841 return TRUE;
842 } else {
843 *MtrrNumber += Subtractive;
844 return FALSE;
845 }
846 }
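
//
// Worked example (hypothetical request): for BaseAddress = 0 and
// Length = 0xE0000 (896 KB), the positive direction needs three MTRRs
// (512 KB + 256 KB + 128 KB) while the subtractive direction needs only two
// (a 1 MB range of the requested type minus a 128 KB UC range at the top),
// so the function stores 2 in *MtrrNumber and returns FALSE.
//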
847
848 /**
849 Invalidate variable MTRRs according to the values in the shadow array.
850 
851 This function clears any variable MTRR whose shadow entry is marked as
852 no longer valid but still in use, and updates the shadow array accordingly.
853
854 @param[in, out] VariableMtrr Shadow of variable MTRR contents
855
856 **/
857 VOID
858 InvalidateMtrr (
859 IN OUT VARIABLE_MTRR *VariableMtrr
860 )
861 {
862 UINTN Index;
863 UINTN VariableMtrrCount;
864 MTRR_CONTEXT MtrrContext;
865
866 PreMtrrChange (&MtrrContext);
867 Index = 0;
868 VariableMtrrCount = GetVariableMtrrCount ();
869 while (Index < VariableMtrrCount) {
870 if (!VariableMtrr[Index].Valid && VariableMtrr[Index].Used) {
871 AsmWriteMsr64 (VariableMtrr[Index].Msr, 0);
872 AsmWriteMsr64 (VariableMtrr[Index].Msr + 1, 0);
873 VariableMtrr[Index].Used = FALSE;
874 }
875 Index ++;
876 }
877 PostMtrrChange (&MtrrContext);
878 }
879
880
881 /**
882 Programs variable MTRRs
883
884 This function programs variable MTRRs
885
886 @param[in] MtrrNumber Index of MTRR to program.
887 @param[in] BaseAddress Base address of memory region.
888 @param[in] Length Length of memory region.
889 @param[in] MemoryCacheType Memory type to set.
890 @param[in] MtrrValidAddressMask The valid address mask for MTRR
891
892 **/
893 VOID
894 ProgramVariableMtrr (
895 IN UINTN MtrrNumber,
896 IN PHYSICAL_ADDRESS BaseAddress,
897 IN UINT64 Length,
898 IN UINT64 MemoryCacheType,
899 IN UINT64 MtrrValidAddressMask
900 )
901 {
902 UINT64 TempQword;
903 MTRR_CONTEXT MtrrContext;
904
905 PreMtrrChange (&MtrrContext);
906
907 //
908 // MTRR Physical Base
909 //
910 TempQword = (BaseAddress & MtrrValidAddressMask) | MemoryCacheType;
911 AsmWriteMsr64 ((UINT32) MtrrNumber, TempQword);
912
913 //
914 // MTRR Physical Mask
915 //
916 TempQword = ~(Length - 1);
917 AsmWriteMsr64 (
918 (UINT32) (MtrrNumber + 1),
919 (TempQword & MtrrValidAddressMask) | MTRR_LIB_CACHE_MTRR_ENABLED
920 );
921
922 PostMtrrChange (&MtrrContext);
923 }
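
//
// Example MSR pair produced by ProgramVariableMtrr () (hypothetical values,
// 36-bit physical address space): for a 2 MB write-back range at 0xC0000000,
// PHYSBASE = 0xC0000000 | 6 = 0xC0000006 and
// PHYSMASK = (~(SIZE_2MB - 1) & 0xFFFFFF000) | BIT11 = 0xFFFE00800,
// where bit 11 is the per-MTRR valid bit (MTRR_LIB_CACHE_MTRR_ENABLED).
//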
924
925
926 /**
927 Converts the Memory attribute value to MTRR_MEMORY_CACHE_TYPE.
928
929 @param[in] MtrrType MTRR memory type
930
931 @return The enum item in MTRR_MEMORY_CACHE_TYPE
932
933 **/
934 MTRR_MEMORY_CACHE_TYPE
935 GetMemoryCacheTypeFromMtrrType (
936 IN UINT64 MtrrType
937 )
938 {
939 switch (MtrrType) {
940 case MTRR_CACHE_UNCACHEABLE:
941 return CacheUncacheable;
942 case MTRR_CACHE_WRITE_COMBINING:
943 return CacheWriteCombining;
944 case MTRR_CACHE_WRITE_THROUGH:
945 return CacheWriteThrough;
946 case MTRR_CACHE_WRITE_PROTECTED:
947 return CacheWriteProtected;
948 case MTRR_CACHE_WRITE_BACK:
949 return CacheWriteBack;
950 default:
951 //
952 // MtrrType is MTRR_CACHE_INVALID_TYPE, that means
953 // no MTRR covers the range
954 //
955 return MtrrGetDefaultMemoryType ();
956 }
957 }
958
959 /**
960 Initializes the valid bits mask and valid address mask for MTRRs.
961
962 This function initializes the valid bits mask and valid address mask for MTRRs.
963
964 @param[out] MtrrValidBitsMask The mask for the valid bit of the MTRR
965 @param[out] MtrrValidAddressMask The valid address mask for the MTRR
966
967 **/
968 VOID
969 MtrrLibInitializeMtrrMask (
970 OUT UINT64 *MtrrValidBitsMask,
971 OUT UINT64 *MtrrValidAddressMask
972 )
973 {
974 UINT32 RegEax;
975 UINT8 PhysicalAddressBits;
976
977 AsmCpuid (0x80000000, &RegEax, NULL, NULL, NULL);
978
979 if (RegEax >= 0x80000008) {
980 AsmCpuid (0x80000008, &RegEax, NULL, NULL, NULL);
981
982 PhysicalAddressBits = (UINT8) RegEax;
983
984 *MtrrValidBitsMask = LShiftU64 (1, PhysicalAddressBits) - 1;
985 *MtrrValidAddressMask = *MtrrValidBitsMask & 0xfffffffffffff000ULL;
986 } else {
987 *MtrrValidBitsMask = MTRR_LIB_MSR_VALID_MASK;
988 *MtrrValidAddressMask = MTRR_LIB_CACHE_VALID_ADDRESS;
989 }
990 }
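
//
// Example (hypothetical CPU): if CPUID.80000008h:EAX[7:0] reports 36 physical
// address bits, MtrrValidBitsMask becomes 0xFFFFFFFFF and MtrrValidAddressMask
// becomes 0xFFFFFF000 (bits 35:12), matching the PHYSBASE/PHYSMASK address
// fields of the variable MTRR MSR pairs.
//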
991
992
993 /**
994 Determines the real attribute of a memory range.
995
996 This function arbitrates the effective memory attribute when two MTRRs
997 cover the same memory range. For further details, please refer to the
998 Intel 64 and IA-32 Architectures Software Developer's Manual, Volume 3,
999 Section 10.11.4.1.
1000 
1001 @param[in] MtrrType1 The first kind of memory type
1002 @param[in] MtrrType2 The second kind of memory type
1003 
@return The effective memory type after arbitration
1004 **/
1005 UINT64
1006 MtrrPrecedence (
1007 IN UINT64 MtrrType1,
1008 IN UINT64 MtrrType2
1009 )
1010 {
1011 UINT64 MtrrType;
1012
1013 MtrrType = MTRR_CACHE_INVALID_TYPE;
1014 switch (MtrrType1) {
1015 case MTRR_CACHE_UNCACHEABLE:
1016 MtrrType = MTRR_CACHE_UNCACHEABLE;
1017 break;
1018 case MTRR_CACHE_WRITE_COMBINING:
1019 if (
1020 MtrrType2 == MTRR_CACHE_WRITE_COMBINING ||
1021 MtrrType2 == MTRR_CACHE_UNCACHEABLE
1022 ) {
1023 MtrrType = MtrrType2;
1024 }
1025 break;
1026 case MTRR_CACHE_WRITE_THROUGH:
1027 if (
1028 MtrrType2 == MTRR_CACHE_WRITE_THROUGH ||
1029 MtrrType2 == MTRR_CACHE_WRITE_BACK
1030 ) {
1031 MtrrType = MTRR_CACHE_WRITE_THROUGH;
1032 } else if (MtrrType2 == MTRR_CACHE_UNCACHEABLE) {
1033 MtrrType = MTRR_CACHE_UNCACHEABLE;
1034 }
1035 break;
1036 case MTRR_CACHE_WRITE_PROTECTED:
1037 if (MtrrType2 == MTRR_CACHE_WRITE_PROTECTED ||
1038 MtrrType2 == MTRR_CACHE_UNCACHEABLE) {
1039 MtrrType = MtrrType2;
1040 }
1041 break;
1042 case MTRR_CACHE_WRITE_BACK:
1043 if (
1044 MtrrType2 == MTRR_CACHE_UNCACHEABLE ||
1045 MtrrType2 == MTRR_CACHE_WRITE_THROUGH ||
1046 MtrrType2 == MTRR_CACHE_WRITE_BACK
1047 ) {
1048 MtrrType = MtrrType2;
1049 }
1050 break;
1051 case MTRR_CACHE_INVALID_TYPE:
1052 MtrrType = MtrrType2;
1053 break;
1054 default:
1055 break;
1056 }
1057
1058 if (MtrrType2 == MTRR_CACHE_INVALID_TYPE) {
1059 MtrrType = MtrrType1;
1060 }
1061 return MtrrType;
1062 }
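
//
// The rules implemented above follow the SDM overlap table: UC combined with
// anything yields UC; WT combined with WB yields WT; identical types combine
// to themselves; MTRR_CACHE_INVALID_TYPE acts as "no MTRR covers the range"
// and defers to the other type; any other combination is left as
// MTRR_CACHE_INVALID_TYPE (undefined behavior per the SDM).
//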
1063
1064
1065
1066 /**
1067 This function will get the memory cache type of the specific address.
1068
1069 This function is mainly for debug purposes.
1070
1071 @param[in] Address The specific address
1072
1073 @return Memory cache type of the specific address
1074
1075 **/
1076 MTRR_MEMORY_CACHE_TYPE
1077 EFIAPI
1078 MtrrGetMemoryAttribute (
1079 IN PHYSICAL_ADDRESS Address
1080 )
1081 {
1082 UINT64 TempQword;
1083 UINTN Index;
1084 UINTN SubIndex;
1085 UINT64 MtrrType;
1086 UINT64 TempMtrrType;
1087 MTRR_MEMORY_CACHE_TYPE CacheType;
1088 VARIABLE_MTRR VariableMtrr[MTRR_NUMBER_OF_VARIABLE_MTRR];
1089 UINT64 MtrrValidBitsMask;
1090 UINT64 MtrrValidAddressMask;
1091 UINTN VariableMtrrCount;
1092
1093 if (!IsMtrrSupported ()) {
1094 return CacheUncacheable;
1095 }
1096
1097 //
1098 // Check if MTRR is enabled, if not, return UC as attribute
1099 //
1100 TempQword = AsmReadMsr64 (MTRR_LIB_IA32_MTRR_DEF_TYPE);
1101 MtrrType = MTRR_CACHE_INVALID_TYPE;
1102
1103 if ((TempQword & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {
1104 return CacheUncacheable;
1105 }
1106
1107 //
1108 // If address is less than 1M, then try to go through the fixed MTRR
1109 //
1110 if (Address < BASE_1MB) {
1111 if ((TempQword & MTRR_LIB_CACHE_FIXED_MTRR_ENABLED) != 0) {
1112 //
1113 // Go through the fixed MTRR
1114 //
1115 for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {
1116 if (Address >= mMtrrLibFixedMtrrTable[Index].BaseAddress &&
1117 Address < (
1118 mMtrrLibFixedMtrrTable[Index].BaseAddress +
1119 (mMtrrLibFixedMtrrTable[Index].Length * 8)
1120 )
1121 ) {
1122 SubIndex =
1123 ((UINTN)Address - mMtrrLibFixedMtrrTable[Index].BaseAddress) /
1124 mMtrrLibFixedMtrrTable[Index].Length;
1125 TempQword = AsmReadMsr64 (mMtrrLibFixedMtrrTable[Index].Msr);
1126 MtrrType = RShiftU64 (TempQword, SubIndex * 8) & 0xFF;
1127 return GetMemoryCacheTypeFromMtrrType (MtrrType);
1128 }
1129 }
1130 }
1131 }
1132 MtrrLibInitializeMtrrMask(&MtrrValidBitsMask, &MtrrValidAddressMask);
1133 MtrrGetMemoryAttributeInVariableMtrr(
1134 MtrrValidBitsMask,
1135 MtrrValidAddressMask,
1136 VariableMtrr
1137 );
1138
1139 //
1140 // Go through the variable MTRR
1141 //
1142 VariableMtrrCount = GetVariableMtrrCount ();
1143 ASSERT (VariableMtrrCount <= MTRR_NUMBER_OF_VARIABLE_MTRR);
1144
1145 for (Index = 0; Index < VariableMtrrCount; Index++) {
1146 if (VariableMtrr[Index].Valid) {
1147 if (Address >= VariableMtrr[Index].BaseAddress &&
1148 Address < VariableMtrr[Index].BaseAddress+VariableMtrr[Index].Length) {
1149 TempMtrrType = VariableMtrr[Index].Type;
1150 MtrrType = MtrrPrecedence (MtrrType, TempMtrrType);
1151 }
1152 }
1153 }
1154 CacheType = GetMemoryCacheTypeFromMtrrType (MtrrType);
1155
1156 return CacheType;
1157 }
1158
1159
1160
1161 /**
1162 This function prints all MTRRs for debugging.
1163 **/
1164 VOID
1165 EFIAPI
1166 MtrrDebugPrintAllMtrrs (
1167 VOID
1168 )
1169 {
1170 DEBUG_CODE (
1171 MTRR_SETTINGS MtrrSettings;
1172 UINTN Index;
1173 UINTN Index1;
1174 UINTN VariableMtrrCount;
1175 UINT64 Base;
1176 UINT64 Limit;
1177 UINT64 MtrrBase;
1178 UINT64 MtrrLimit;
1179 UINT64 RangeBase;
1180 UINT64 RangeLimit;
1181 UINT64 NoRangeBase;
1182 UINT64 NoRangeLimit;
1183 UINT32 RegEax;
1184 UINTN MemoryType;
1185 UINTN PreviousMemoryType;
1186 BOOLEAN Found;
1187
1188 if (!IsMtrrSupported ()) {
1189 return;
1190 }
1191
1192 DEBUG((DEBUG_CACHE, "MTRR Settings\n"));
1193 DEBUG((DEBUG_CACHE, "=============\n"));
1194
1195 MtrrGetAllMtrrs (&MtrrSettings);
1196 DEBUG((DEBUG_CACHE, "MTRR Default Type: %016lx\n", MtrrSettings.MtrrDefType));
1197 for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {
1198 DEBUG((DEBUG_CACHE, "Fixed MTRR[%02d] : %016lx\n", Index, MtrrSettings.Fixed.Mtrr[Index]));
1199 }
1200
1201 VariableMtrrCount = GetVariableMtrrCount ();
1202 for (Index = 0; Index < VariableMtrrCount; Index++) {
1203 DEBUG((DEBUG_CACHE, "Variable MTRR[%02d]: Base=%016lx Mask=%016lx\n",
1204 Index,
1205 MtrrSettings.Variables.Mtrr[Index].Base,
1206 MtrrSettings.Variables.Mtrr[Index].Mask
1207 ));
1208 }
1209 DEBUG((DEBUG_CACHE, "\n"));
1210 DEBUG((DEBUG_CACHE, "MTRR Ranges\n"));
1211 DEBUG((DEBUG_CACHE, "====================================\n"));
1212
1213 Base = 0;
1214 PreviousMemoryType = MTRR_CACHE_INVALID_TYPE;
1215 for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {
1216 Base = mMtrrLibFixedMtrrTable[Index].BaseAddress;
1217 for (Index1 = 0; Index1 < 8; Index1++) {
1218 MemoryType = (UINTN)(RShiftU64 (MtrrSettings.Fixed.Mtrr[Index], Index1 * 8) & 0xff);
1219 if (MemoryType > CacheWriteBack) {
1220 MemoryType = MTRR_CACHE_INVALID_TYPE;
1221 }
1222 if (MemoryType != PreviousMemoryType) {
1223 if (PreviousMemoryType != MTRR_CACHE_INVALID_TYPE) {
1224 DEBUG((DEBUG_CACHE, "%016lx\n", Base - 1));
1225 }
1226 PreviousMemoryType = MemoryType;
1227 DEBUG((DEBUG_CACHE, "%a:%016lx-", mMtrrMemoryCacheTypeShortName[MemoryType], Base));
1228 }
1229 Base += mMtrrLibFixedMtrrTable[Index].Length;
1230 }
1231 }
1232 DEBUG((DEBUG_CACHE, "%016lx\n", Base - 1));
1233
1234 VariableMtrrCount = GetVariableMtrrCount ();
1235
1236 Limit = BIT36 - 1;
1237 AsmCpuid (0x80000000, &RegEax, NULL, NULL, NULL);
1238 if (RegEax >= 0x80000008) {
1239 AsmCpuid (0x80000008, &RegEax, NULL, NULL, NULL);
1240 Limit = LShiftU64 (1, RegEax & 0xff) - 1;
1241 }
1242 Base = BASE_1MB;
1243 PreviousMemoryType = MTRR_CACHE_INVALID_TYPE;
1244 do {
1245 MemoryType = MtrrGetMemoryAttribute (Base);
1246 if (MemoryType > CacheWriteBack) {
1247 MemoryType = MTRR_CACHE_INVALID_TYPE;
1248 }
1249
1250 if (MemoryType != PreviousMemoryType) {
1251 if (PreviousMemoryType != MTRR_CACHE_INVALID_TYPE) {
1252 DEBUG((DEBUG_CACHE, "%016lx\n", Base - 1));
1253 }
1254 PreviousMemoryType = MemoryType;
1255 DEBUG((DEBUG_CACHE, "%a:%016lx-", mMtrrMemoryCacheTypeShortName[MemoryType], Base));
1256 }
1257
1258 RangeBase = BASE_1MB;
1259 NoRangeBase = BASE_1MB;
1260 RangeLimit = Limit;
1261 NoRangeLimit = Limit;
1262
1263 for (Index = 0, Found = FALSE; Index < VariableMtrrCount; Index++) {
1264 if ((MtrrSettings.Variables.Mtrr[Index].Mask & BIT11) == 0) {
1265 //
1266 // If mask is not valid, then do not display range
1267 //
1268 continue;
1269 }
1270 MtrrBase = (MtrrSettings.Variables.Mtrr[Index].Base & (~(SIZE_4KB - 1)));
1271 MtrrLimit = MtrrBase + ((~(MtrrSettings.Variables.Mtrr[Index].Mask & (~(SIZE_4KB - 1)))) & Limit);
1272
1273 if (Base >= MtrrBase && Base < MtrrLimit) {
1274 Found = TRUE;
1275 }
1276
1277 if (Base >= MtrrBase && MtrrBase > RangeBase) {
1278 RangeBase = MtrrBase;
1279 }
1280 if (Base > MtrrLimit && MtrrLimit > RangeBase) {
1281 RangeBase = MtrrLimit + 1;
1282 }
1283 if (Base < MtrrBase && MtrrBase < RangeLimit) {
1284 RangeLimit = MtrrBase - 1;
1285 }
1286 if (Base < MtrrLimit && MtrrLimit <= RangeLimit) {
1287 RangeLimit = MtrrLimit;
1288 }
1289
1290 if (Base > MtrrLimit && NoRangeBase < MtrrLimit) {
1291 NoRangeBase = MtrrLimit + 1;
1292 }
1293 if (Base < MtrrBase && NoRangeLimit > MtrrBase) {
1294 NoRangeLimit = MtrrBase - 1;
1295 }
1296 }
1297
1298 if (Found) {
1299 Base = RangeLimit + 1;
1300 } else {
1301 Base = NoRangeLimit + 1;
1302 }
1303 } while (Base < Limit);
1304 DEBUG((DEBUG_CACHE, "%016lx\n\n", Base - 1));
1305 );
1306 }
1307 /**
1308 This function attempts to set the attributes for a memory range.
1309
1310 @param[in] BaseAddress The physical address that is the start
1311 address of a memory region.
1312 @param[in] Length The size in bytes of the memory region.
1313 @param[in] Attribute The bit mask of attributes to set for the
1314 memory region.
1315
1316 @retval RETURN_SUCCESS The attributes were set for the memory
1317 region.
1318 @retval RETURN_INVALID_PARAMETER Length is zero.
1319 @retval RETURN_UNSUPPORTED The processor does not support one or
1320 more bytes of the memory resource range
1321 specified by BaseAddress and Length.
1322 @retval RETURN_UNSUPPORTED The bit mask of attributes is not support
1323 for the memory resource range specified
1324 by BaseAddress and Length.
1325 @retval RETURN_ACCESS_DENIED The attributes for the memory resource
1326 range specified by BaseAddress and Length
1327 cannot be modified.
1328 @retval RETURN_OUT_OF_RESOURCES There are not enough system resources to
1329 modify the attributes of the memory
1330 resource range.
1331
1332 **/
1333 RETURN_STATUS
1334 EFIAPI
1335 MtrrSetMemoryAttribute (
1336 IN PHYSICAL_ADDRESS BaseAddress,
1337 IN UINT64 Length,
1338 IN MTRR_MEMORY_CACHE_TYPE Attribute
1339 )
1340 {
1341 UINT64 TempQword;
1342 RETURN_STATUS Status;
1343 UINT64 MemoryType;
1344 UINT64 Alignment;
1345 BOOLEAN OverLap;
1346 BOOLEAN Positive;
1347 UINT32 MsrNum;
1348 UINTN MtrrNumber;
1349 VARIABLE_MTRR VariableMtrr[MTRR_NUMBER_OF_VARIABLE_MTRR];
1350 UINT32 UsedMtrr;
1351 UINT64 MtrrValidBitsMask;
1352 UINT64 MtrrValidAddressMask;
1353 BOOLEAN OverwriteExistingMtrr;
1354 UINT32 FirmwareVariableMtrrCount;
1355 UINT32 VariableMtrrEnd;
1356 MTRR_CONTEXT MtrrContext;
1357
1358 DEBUG((DEBUG_CACHE, "MtrrSetMemoryAttribute() %a:%016lx-%016lx\n", mMtrrMemoryCacheTypeShortName[Attribute], BaseAddress, Length));
1359
1360 if (!IsMtrrSupported ()) {
1361 Status = RETURN_UNSUPPORTED;
1362 goto Done;
1363 }
1364
1365 FirmwareVariableMtrrCount = GetFirmwareVariableMtrrCount ();
1366 VariableMtrrEnd = MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (2 * GetVariableMtrrCount ()) - 1;
1367
1368 MtrrLibInitializeMtrrMask(&MtrrValidBitsMask, &MtrrValidAddressMask);
1369
1370 TempQword = 0;
1371 MemoryType = (UINT64)Attribute;
1372 OverwriteExistingMtrr = FALSE;
1373
1374 //
1375 // Check for an invalid parameter
1376 //
1377 if (Length == 0) {
1378 Status = RETURN_INVALID_PARAMETER;
1379 goto Done;
1380 }
1381
1382 if (
1383 (BaseAddress & ~MtrrValidAddressMask) != 0 ||
1384 (Length & ~MtrrValidAddressMask) != 0
1385 ) {
1386 Status = RETURN_UNSUPPORTED;
1387 goto Done;
1388 }
1389
1390 //
1391 // Check if Fixed MTRR
1392 //
1393 Status = RETURN_SUCCESS;
1394 while ((BaseAddress < BASE_1MB) && (Length > 0) && Status == RETURN_SUCCESS) {
1395 PreMtrrChange (&MtrrContext);
1396 Status = ProgramFixedMtrr (MemoryType, &BaseAddress, &Length);
1397 PostMtrrChange (&MtrrContext);
1398 if (RETURN_ERROR (Status)) {
1399 goto Done;
1400 }
1401 }
1402
1403 if (Length == 0) {
1404 //
1405 // A Length of 0 can only make sense for fixed MTRR ranges.
1406 // Since we just handled the fixed MTRRs, we can skip the
1407 // variable MTRR section.
1408 //
1409 goto Done;
1410 }
1411
1412 //
1413 // Since memory ranges below 1MB will be overridden by the fixed MTRRs,
1414 // we can set the base to 0 to save variable MTRRs.
1415 //
1416 if (BaseAddress == BASE_1MB) {
1417 BaseAddress = 0;
1418 Length += SIZE_1MB;
1419 }
1420
1421 //
1422 // Check for overlap
1423 //
1424 UsedMtrr = MtrrGetMemoryAttributeInVariableMtrr (MtrrValidBitsMask, MtrrValidAddressMask, VariableMtrr);
1425 OverLap = CheckMemoryAttributeOverlap (BaseAddress, BaseAddress + Length - 1, VariableMtrr);
1426 if (OverLap) {
1427 Status = CombineMemoryAttribute (MemoryType, &BaseAddress, &Length, VariableMtrr, &UsedMtrr, &OverwriteExistingMtrr);
1428 if (RETURN_ERROR (Status)) {
1429 goto Done;
1430 }
1431
1432 if (Length == 0) {
1433 //
1434 // Combined successfully, invalidate the now-unused MTRRs
1435 //
1436 InvalidateMtrr(VariableMtrr);
1437 Status = RETURN_SUCCESS;
1438 goto Done;
1439 }
1440 }
1441
1442 //
1443 // The memory type is the same with the type specified by
1444 // MTRR_LIB_IA32_MTRR_DEF_TYPE.
1445 //
1446 if ((!OverwriteExistingMtrr) && (Attribute == MtrrGetDefaultMemoryType ())) {
1447 //
1448 // Invalidate the now-unused MTRRs
1449 //
1450 InvalidateMtrr(VariableMtrr);
1451 goto Done;
1452 }
1453
1454 Positive = GetMtrrNumberAndDirection (BaseAddress, Length, &MtrrNumber);
1455
1456 if ((UsedMtrr + MtrrNumber) > FirmwareVariableMtrrCount) {
1457 Status = RETURN_OUT_OF_RESOURCES;
1458 goto Done;
1459 }
1460
1461 //
1462 // Invalidate the now-unused MTRRs
1463 //
1464 InvalidateMtrr(VariableMtrr);
1465
1466 //
1467 // Find first unused MTRR
1468 //
1469 for (MsrNum = MTRR_LIB_IA32_VARIABLE_MTRR_BASE;
1470 MsrNum < VariableMtrrEnd;
1471 MsrNum += 2
1472 ) {
1473 if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {
1474 break;
1475 }
1476 }
1477
1478 if (BaseAddress != 0) {
1479 do {
1480 //
1481 // Calculate the alignment of the base address.
1482 //
1483 Alignment = LShiftU64 (1, (UINTN)LowBitSet64 (BaseAddress));
1484
1485 if (Alignment > Length) {
1486 break;
1487 }
1488
1489 //
1490 // Find unused MTRR
1491 //
1492 for (; MsrNum < VariableMtrrEnd; MsrNum += 2) {
1493 if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {
1494 break;
1495 }
1496 }
1497
1498 ProgramVariableMtrr (
1499 MsrNum,
1500 BaseAddress,
1501 Alignment,
1502 MemoryType,
1503 MtrrValidAddressMask
1504 );
1505 BaseAddress += Alignment;
1506 Length -= Alignment;
1507 } while (TRUE);
1508
1509 if (Length == 0) {
1510 goto Done;
1511 }
1512 }
1513
1514 TempQword = Length;
1515
1516 if (!Positive) {
1517 Length = Power2MaxMemory (LShiftU64 (TempQword, 1));
1518
1519 //
1520 // Find unused MTRR
1521 //
1522 for (; MsrNum < VariableMtrrEnd; MsrNum += 2) {
1523 if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {
1524 break;
1525 }
1526 }
1527
1528 ProgramVariableMtrr (
1529 MsrNum,
1530 BaseAddress,
1531 Length,
1532 MemoryType,
1533 MtrrValidAddressMask
1534 );
1535 BaseAddress += Length;
1536 TempQword = Length - TempQword;
1537 MemoryType = MTRR_CACHE_UNCACHEABLE;
1538 }
1539
1540 do {
1541 //
1542 // Find unused MTRR
1543 //
1544 for (; MsrNum < VariableMtrrEnd; MsrNum += 2) {
1545 if ((AsmReadMsr64 (MsrNum + 1) & MTRR_LIB_CACHE_MTRR_ENABLED) == 0) {
1546 break;
1547 }
1548 }
1549
1550 Length = Power2MaxMemory (TempQword);
1551 if (!Positive) {
1552 BaseAddress -= Length;
1553 }
1554
1555 ProgramVariableMtrr (
1556 MsrNum,
1557 BaseAddress,
1558 Length,
1559 MemoryType,
1560 MtrrValidAddressMask
1561 );
1562
1563 if (Positive) {
1564 BaseAddress += Length;
1565 }
1566 TempQword -= Length;
1567
1568 } while (TempQword > 0);
1569
1570 Done:
1571 DEBUG((DEBUG_CACHE, " Status = %r\n", Status));
1572 if (!RETURN_ERROR (Status)) {
1573 MtrrDebugPrintAllMtrrs ();
1574 }
1575
1576 return Status;
1577 }
1578 /**
1579 Worker function setting variable MTRRs
1580
1581 @param[in] VariableSettings A buffer to hold variable MTRRs content.
1582
1583 **/
1584 VOID
1585 MtrrSetVariableMtrrWorker (
1586 IN MTRR_VARIABLE_SETTINGS *VariableSettings
1587 )
1588 {
1589 UINT32 Index;
1590 UINT32 VariableMtrrCount;
1591
1592 VariableMtrrCount = GetVariableMtrrCount ();
1593 ASSERT (VariableMtrrCount <= MTRR_NUMBER_OF_VARIABLE_MTRR);
1594
1595 for (Index = 0; Index < VariableMtrrCount; Index++) {
1596 AsmWriteMsr64 (
1597 MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (Index << 1),
1598 VariableSettings->Mtrr[Index].Base
1599 );
1600 AsmWriteMsr64 (
1601 MTRR_LIB_IA32_VARIABLE_MTRR_BASE + (Index << 1) + 1,
1602 VariableSettings->Mtrr[Index].Mask
1603 );
1604 }
1605 }
1606
1607
1608 /**
1609 This function sets variable MTRRs
1610
1611 @param[in] VariableSettings A buffer to hold variable MTRRs content.
1612
1613 @return The pointer of VariableSettings
1614
1615 **/
1616 MTRR_VARIABLE_SETTINGS*
1617 EFIAPI
1618 MtrrSetVariableMtrr (
1619 IN MTRR_VARIABLE_SETTINGS *VariableSettings
1620 )
1621 {
1622 MTRR_CONTEXT MtrrContext;
1623
1624 if (!IsMtrrSupported ()) {
1625 return VariableSettings;
1626 }
1627
1628 PreMtrrChange (&MtrrContext);
1629 MtrrSetVariableMtrrWorker (VariableSettings);
1630 PostMtrrChange (&MtrrContext);
1631 return VariableSettings;
1632 }
1633
1634 /**
1635 Worker function setting fixed MTRRs
1636
1637 @param[in] FixedSettings A buffer to hold fixed Mtrrs content.
1638
1639 **/
1640 VOID
1641 MtrrSetFixedMtrrWorker (
1642 IN MTRR_FIXED_SETTINGS *FixedSettings
1643 )
1644 {
1645 UINT32 Index;
1646
1647 for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {
1648 AsmWriteMsr64 (
1649 mMtrrLibFixedMtrrTable[Index].Msr,
1650 FixedSettings->Mtrr[Index]
1651 );
1652 }
1653 }
1654
1655
1656 /**
1657 This function sets fixed MTRRs
1658
1659 @param[in] FixedSettings A buffer to hold fixed Mtrrs content.
1660
1661 @retval The pointer of FixedSettings
1662
1663 **/
1664 MTRR_FIXED_SETTINGS*
1665 EFIAPI
1666 MtrrSetFixedMtrr (
1667 IN MTRR_FIXED_SETTINGS *FixedSettings
1668 )
1669 {
1670 MTRR_CONTEXT MtrrContext;
1671
1672 if (!IsMtrrSupported ()) {
1673 return FixedSettings;
1674 }
1675
1676 PreMtrrChange (&MtrrContext);
1677 MtrrSetFixedMtrrWorker (FixedSettings);
1678 PostMtrrChange (&MtrrContext);
1679
1680 return FixedSettings;
1681 }
1682
1683
1684 /**
1685 This function gets the content in all MTRRs (variable and fixed)
1686
1687 @param[out] MtrrSetting A buffer to hold all Mtrrs content.
1688
1689 @retval the pointer of MtrrSetting
1690
1691 **/
1692 MTRR_SETTINGS *
1693 EFIAPI
1694 MtrrGetAllMtrrs (
1695 OUT MTRR_SETTINGS *MtrrSetting
1696 )
1697 {
1698 if (!IsMtrrSupported ()) {
1699 return MtrrSetting;
1700 }
1701
1702 //
1703 // Get fixed MTRRs
1704 //
1705 MtrrGetFixedMtrr (&MtrrSetting->Fixed);
1706
1707 //
1708 // Get variable MTRRs
1709 //
1710 MtrrGetVariableMtrr (&MtrrSetting->Variables);
1711
1712 //
1713 // Get MTRR_DEF_TYPE value
1714 //
1715 MtrrSetting->MtrrDefType = AsmReadMsr64 (MTRR_LIB_IA32_MTRR_DEF_TYPE);
1716
1717 return MtrrSetting;
1718 }
1719
1720
1721 /**
1722 This function sets all MTRRs (variable and fixed)
1723
1724 @param[in] MtrrSetting A buffer holding all MTRRs content.
1725
1726 @retval The pointer of MtrrSetting
1727
1728 **/
1729 MTRR_SETTINGS *
1730 EFIAPI
1731 MtrrSetAllMtrrs (
1732 IN MTRR_SETTINGS *MtrrSetting
1733 )
1734 {
1735 MTRR_CONTEXT MtrrContext;
1736
1737 if (!IsMtrrSupported ()) {
1738 return MtrrSetting;
1739 }
1740
1741 PreMtrrChange (&MtrrContext);
1742
1743 //
1744 // Set fixed MTRRs
1745 //
1746 MtrrSetFixedMtrrWorker (&MtrrSetting->Fixed);
1747
1748 //
1749 // Set variable MTRRs
1750 //
1751 MtrrSetVariableMtrrWorker (&MtrrSetting->Variables);
1752
1753 //
1754 // Set MTRR_DEF_TYPE value
1755 //
1756 AsmWriteMsr64 (MTRR_LIB_IA32_MTRR_DEF_TYPE, MtrrSetting->MtrrDefType);
1757
1758 PostMtrrChangeEnableCache (&MtrrContext);
1759
1760 return MtrrSetting;
1761 }
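
//
// MtrrGetAllMtrrs ()/MtrrSetAllMtrrs () are typically used as a pair to keep
// MTRRs consistent across processors: the BSP captures its settings into an
// MTRR_SETTINGS buffer and each AP replays them, for example:
//
//   MTRR_SETTINGS  MtrrSettings;
//
//   MtrrGetAllMtrrs (&MtrrSettings);    // on the BSP
//   MtrrSetAllMtrrs (&MtrrSettings);    // on each AP
//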
1762
1763 /**
1764 Checks if MTRR is supported.
1765
1766 @retval TRUE MTRR is supported.
1767 @retval FALSE MTRR is not supported.
1768
1769 **/
1770 BOOLEAN
1771 EFIAPI
1772 IsMtrrSupported (
1773 VOID
1774 )
1775 {
1776 UINT32 RegEdx;
1777 UINT64 MtrrCap;
1778
1779 //
1780 // Check CPUID(1).EDX[12] for MTRR capability
1781 //
1782 AsmCpuid (1, NULL, NULL, NULL, &RegEdx);
1783 if (BitFieldRead32 (RegEdx, 12, 12) == 0) {
1784 return FALSE;
1785 }
1786
1787 //
1788 // Check IA32_MTRRCAP[7:0] for the number of variable MTRRs and IA32_MTRRCAP[8] for
1789 // fixed MTRR support. If the number of variable MTRRs is zero, or fixed MTRRs are
1790 // not supported, return FALSE.
1791 //
1792 MtrrCap = AsmReadMsr64 (MTRR_LIB_IA32_MTRR_CAP);
1793 if ((BitFieldRead64 (MtrrCap, 0, 7) == 0) || (BitFieldRead64 (MtrrCap, 8, 8) == 0)) {
1794 return FALSE;
1795 }
1796
1797 return TRUE;
1798 }