]>
Commit | Line | Data |
---|---|---|
1 | /** @file\r | |
2 | MTRR setting library\r | |
3 | \r | |
4 | @par Note:\r | |
5 | Most of services in this library instance are suggested to be invoked by BSP only,\r | |
6 | except for MtrrSetAllMtrrs() which is used to sync BSP's MTRR setting to APs.\r | |
7 | \r | |
8 | Copyright (c) 2008 - 2020, Intel Corporation. All rights reserved.<BR>\r | |
9 | SPDX-License-Identifier: BSD-2-Clause-Patent\r | |
10 | \r | |
11 | **/\r | |
12 | \r | |
13 | #include <Uefi.h>\r | |
14 | #include <Register/Intel/Cpuid.h>\r | |
15 | #include <Register/Intel/Msr.h>\r | |
16 | \r | |
17 | #include <Library/MtrrLib.h>\r | |
18 | #include <Library/BaseLib.h>\r | |
19 | #include <Library/CpuLib.h>\r | |
20 | #include <Library/BaseMemoryLib.h>\r | |
21 | #include <Library/DebugLib.h>\r | |
22 | \r | |
//
// OR_SEED replicates a one-byte cache type into all 8 fixed-MTRR sub-fields;
// CLEAR_SEED is the all-ones mask the clear-mask is carved from.
//
#define OR_SEED              0x0101010101010101ull
#define CLEAR_SEED           0xFFFFFFFFFFFFFFFFull
#define MAX_WEIGHT           MAX_UINT8
#define SCRATCH_BUFFER_SIZE  (4 * SIZE_4KB)
//
// Assert that B is aligned on an L boundary (L is a power of two).
// Arguments are fully parenthesized so the check stays correct when B or L
// is a compound expression; the trailing semicolon is kept intentionally for
// compatibility with existing call sites.
//
#define MTRR_LIB_ASSERT_ALIGNED(B, L)  ASSERT (((B) & ~((L) - 1)) == (B));

//
// Matrix index helpers used by the optimal-MTRR-calculation graph algorithm:
// M() addresses element (x,y), O() the transposed element (y,x).
//
#define M(x, y)  ((x) * VertexCount + (y))
#define O(x, y)  ((y) * VertexCount + (x))
31 | \r | |
//
// Context to save and restore when MTRRs are programmed
//
typedef struct {
  UINTN      Cr4;               // CR4 value saved before programming (PGE bit is cleared while MTRRs change)
  BOOLEAN    InterruptState;    // Interrupt-enable state saved before programming, restored afterwards
} MTRR_CONTEXT;
39 | \r | |
//
// Describes one address range endpoint used while computing MTRR settings.
//
typedef struct {
  UINT64                    Address;      // Range start address
  UINT64                    Alignment;    // Biggest alignment (lowest set bit) of Address
  UINT64                    Length;       // Range length in bytes
  MTRR_MEMORY_CACHE_TYPE    Type : 7;     // Cache type of the range (7 bits to share a byte with Visited)

  //
  // Temprary use for calculating the best MTRR settings.
  //
  BOOLEAN                   Visited : 1;  // TRUE once the shortest-path search has visited this vertex
  UINT8                     Weight;       // Number of MTRRs needed to reach this vertex
  UINT16                    Previous;     // Index of the preceding vertex on the cheapest path
} MTRR_LIB_ADDRESS;
53 | \r | |
//
// This table defines the offset, base and length of the fixed MTRRs.
// Each MSR covers 8 consecutive sub-ranges of the given Length starting at
// BaseAddress, so one entry spans BaseAddress .. BaseAddress + 8 * Length.
// Entries are sorted by BaseAddress and together cover 0 .. 1MB.
//
CONST FIXED_MTRR  mMtrrLibFixedMtrrTable[] = {
  {
    MSR_IA32_MTRR_FIX64K_00000,   // MSR, base, per-sub-range length
    0,
    SIZE_64KB
  },
  {
    MSR_IA32_MTRR_FIX16K_80000,
    0x80000,
    SIZE_16KB
  },
  {
    MSR_IA32_MTRR_FIX16K_A0000,
    0xA0000,
    SIZE_16KB
  },
  {
    MSR_IA32_MTRR_FIX4K_C0000,
    0xC0000,
    SIZE_4KB
  },
  {
    MSR_IA32_MTRR_FIX4K_C8000,
    0xC8000,
    SIZE_4KB
  },
  {
    MSR_IA32_MTRR_FIX4K_D0000,
    0xD0000,
    SIZE_4KB
  },
  {
    MSR_IA32_MTRR_FIX4K_D8000,
    0xD8000,
    SIZE_4KB
  },
  {
    MSR_IA32_MTRR_FIX4K_E0000,
    0xE0000,
    SIZE_4KB
  },
  {
    MSR_IA32_MTRR_FIX4K_E8000,
    0xE8000,
    SIZE_4KB
  },
  {
    MSR_IA32_MTRR_FIX4K_F0000,
    0xF0000,
    SIZE_4KB
  },
  {
    MSR_IA32_MTRR_FIX4K_F8000,
    0xF8000,
    SIZE_4KB
  }
};
114 | \r | |
//
// Lookup table used to print MTRRs.
// Indexed by the MTRR_MEMORY_CACHE_TYPE encoding value (0-7); the three
// reserved encodings print as "R*".
//
GLOBAL_REMOVE_IF_UNREFERENCED CONST CHAR8  *mMtrrMemoryCacheTypeShortName[] = {
  "UC", // CacheUncacheable
  "WC", // CacheWriteCombining
  "R*", // Invalid
  "R*", // Invalid
  "WT", // CacheWriteThrough
  "WP", // CacheWriteProtected
  "WB", // CacheWriteBack
  "R*"  // Invalid
};
128 | \r | |
129 | /**\r | |
130 | Worker function prints all MTRRs for debugging.\r | |
131 | \r | |
132 | If MtrrSetting is not NULL, print MTRR settings from input MTRR\r | |
133 | settings buffer.\r | |
134 | If MtrrSetting is NULL, print MTRR settings from MTRRs.\r | |
135 | \r | |
136 | @param MtrrSetting A buffer holding all MTRRs content.\r | |
137 | **/\r | |
138 | VOID\r | |
139 | MtrrDebugPrintAllMtrrsWorker (\r | |
140 | IN MTRR_SETTINGS *MtrrSetting\r | |
141 | );\r | |
142 | \r | |
143 | /**\r | |
144 | Worker function returns the variable MTRR count for the CPU.\r | |
145 | \r | |
146 | @return Variable MTRR count\r | |
147 | \r | |
148 | **/\r | |
149 | UINT32\r | |
150 | GetVariableMtrrCountWorker (\r | |
151 | VOID\r | |
152 | )\r | |
153 | {\r | |
154 | MSR_IA32_MTRRCAP_REGISTER MtrrCap;\r | |
155 | \r | |
156 | MtrrCap.Uint64 = AsmReadMsr64 (MSR_IA32_MTRRCAP);\r | |
157 | ASSERT (MtrrCap.Bits.VCNT <= ARRAY_SIZE (((MTRR_VARIABLE_SETTINGS *)0)->Mtrr));\r | |
158 | return MtrrCap.Bits.VCNT;\r | |
159 | }\r | |
160 | \r | |
161 | /**\r | |
162 | Returns the variable MTRR count for the CPU.\r | |
163 | \r | |
164 | @return Variable MTRR count\r | |
165 | \r | |
166 | **/\r | |
167 | UINT32\r | |
168 | EFIAPI\r | |
169 | GetVariableMtrrCount (\r | |
170 | VOID\r | |
171 | )\r | |
172 | {\r | |
173 | if (!IsMtrrSupported ()) {\r | |
174 | return 0;\r | |
175 | }\r | |
176 | \r | |
177 | return GetVariableMtrrCountWorker ();\r | |
178 | }\r | |
179 | \r | |
180 | /**\r | |
181 | Worker function returns the firmware usable variable MTRR count for the CPU.\r | |
182 | \r | |
183 | @return Firmware usable variable MTRR count\r | |
184 | \r | |
185 | **/\r | |
186 | UINT32\r | |
187 | GetFirmwareVariableMtrrCountWorker (\r | |
188 | VOID\r | |
189 | )\r | |
190 | {\r | |
191 | UINT32 VariableMtrrCount;\r | |
192 | UINT32 ReservedMtrrNumber;\r | |
193 | \r | |
194 | VariableMtrrCount = GetVariableMtrrCountWorker ();\r | |
195 | ReservedMtrrNumber = PcdGet32 (PcdCpuNumberOfReservedVariableMtrrs);\r | |
196 | if (VariableMtrrCount < ReservedMtrrNumber) {\r | |
197 | return 0;\r | |
198 | }\r | |
199 | \r | |
200 | return VariableMtrrCount - ReservedMtrrNumber;\r | |
201 | }\r | |
202 | \r | |
203 | /**\r | |
204 | Returns the firmware usable variable MTRR count for the CPU.\r | |
205 | \r | |
206 | @return Firmware usable variable MTRR count\r | |
207 | \r | |
208 | **/\r | |
209 | UINT32\r | |
210 | EFIAPI\r | |
211 | GetFirmwareVariableMtrrCount (\r | |
212 | VOID\r | |
213 | )\r | |
214 | {\r | |
215 | if (!IsMtrrSupported ()) {\r | |
216 | return 0;\r | |
217 | }\r | |
218 | \r | |
219 | return GetFirmwareVariableMtrrCountWorker ();\r | |
220 | }\r | |
221 | \r | |
222 | /**\r | |
223 | Worker function returns the default MTRR cache type for the system.\r | |
224 | \r | |
225 | If MtrrSetting is not NULL, returns the default MTRR cache type from input\r | |
226 | MTRR settings buffer.\r | |
227 | If MtrrSetting is NULL, returns the default MTRR cache type from MSR.\r | |
228 | \r | |
229 | @param[in] MtrrSetting A buffer holding all MTRRs content.\r | |
230 | \r | |
231 | @return The default MTRR cache type.\r | |
232 | \r | |
233 | **/\r | |
234 | MTRR_MEMORY_CACHE_TYPE\r | |
235 | MtrrGetDefaultMemoryTypeWorker (\r | |
236 | IN MTRR_SETTINGS *MtrrSetting\r | |
237 | )\r | |
238 | {\r | |
239 | MSR_IA32_MTRR_DEF_TYPE_REGISTER DefType;\r | |
240 | \r | |
241 | if (MtrrSetting == NULL) {\r | |
242 | DefType.Uint64 = AsmReadMsr64 (MSR_IA32_MTRR_DEF_TYPE);\r | |
243 | } else {\r | |
244 | DefType.Uint64 = MtrrSetting->MtrrDefType;\r | |
245 | }\r | |
246 | \r | |
247 | return (MTRR_MEMORY_CACHE_TYPE)DefType.Bits.Type;\r | |
248 | }\r | |
249 | \r | |
250 | /**\r | |
251 | Returns the default MTRR cache type for the system.\r | |
252 | \r | |
253 | @return The default MTRR cache type.\r | |
254 | \r | |
255 | **/\r | |
256 | MTRR_MEMORY_CACHE_TYPE\r | |
257 | EFIAPI\r | |
258 | MtrrGetDefaultMemoryType (\r | |
259 | VOID\r | |
260 | )\r | |
261 | {\r | |
262 | if (!IsMtrrSupported ()) {\r | |
263 | return CacheUncacheable;\r | |
264 | }\r | |
265 | \r | |
266 | return MtrrGetDefaultMemoryTypeWorker (NULL);\r | |
267 | }\r | |
268 | \r | |
/**
  Preparation before programming MTRR.

  This function will do some preparation for programming MTRRs:
  disable cache, invalid cache and disable MTRR caching functionality

  The step order below follows the MTRR update sequence in the IA32 SDM;
  do not reorder these operations.

  @param[out] MtrrContext  Pointer to context to save

**/
VOID
MtrrLibPreMtrrChange (
  OUT MTRR_CONTEXT  *MtrrContext
  )
{
  MSR_IA32_MTRR_DEF_TYPE_REGISTER  DefType;

  //
  // Disable interrupts and save current interrupt state
  //
  MtrrContext->InterruptState = SaveAndDisableInterrupts ();

  //
  // Enter no fill cache mode, CD=1(Bit30), NW=0 (Bit29)
  //
  AsmDisableCache ();

  //
  // Save original CR4 value and clear PGE flag (Bit 7)
  //
  MtrrContext->Cr4 = AsmReadCr4 ();
  AsmWriteCr4 (MtrrContext->Cr4 & (~BIT7));

  //
  // Flush all TLBs (toggling CR4.PGE above invalidates global entries too)
  //
  CpuFlushTlb ();

  //
  // Disable MTRRs by clearing IA32_MTRR_DEF_TYPE.E
  //
  DefType.Uint64 = AsmReadMsr64 (MSR_IA32_MTRR_DEF_TYPE);
  DefType.Bits.E = 0;
  AsmWriteMsr64 (MSR_IA32_MTRR_DEF_TYPE, DefType.Uint64);
}
313 | \r | |
/**
  Cleaning up after programming MTRRs.

  This function will do some clean up after programming MTRRs:
  Flush all TLBs, re-enable caching, restore CR4.

  Counterpart of MtrrLibPreMtrrChange(); the step order mirrors the MTRR
  update sequence in the IA32 SDM and must not be changed.

  @param[in] MtrrContext  Pointer to context to restore

**/
VOID
MtrrLibPostMtrrChangeEnableCache (
  IN MTRR_CONTEXT  *MtrrContext
  )
{
  //
  // Flush all TLBs
  //
  CpuFlushTlb ();

  //
  // Enable Normal Mode caching CD=NW=0, CD(Bit30), NW(Bit29)
  //
  AsmEnableCache ();

  //
  // Restore original CR4 value (re-enables PGE if it was set)
  //
  AsmWriteCr4 (MtrrContext->Cr4);

  //
  // Restore original interrupt state
  //
  SetInterruptState (MtrrContext->InterruptState);
}
348 | \r | |
/**
  Cleaning up after programming MTRRs.

  This function will do some clean up after programming MTRRs:
  enable MTRR caching functionality, and enable cache

  Re-enables both variable (E) and fixed (FE) MTRRs before delegating the
  cache/CR4/interrupt restore to MtrrLibPostMtrrChangeEnableCache().

  @param[in] MtrrContext  Pointer to context to restore

**/
VOID
MtrrLibPostMtrrChange (
  IN MTRR_CONTEXT  *MtrrContext
  )
{
  MSR_IA32_MTRR_DEF_TYPE_REGISTER  DefType;

  //
  // Enable Cache MTRR
  // Note: FE is unconditionally set here; fixed MTRR range support is
  // architectural on processors that support MTRRs.
  //
  DefType.Uint64 = AsmReadMsr64 (MSR_IA32_MTRR_DEF_TYPE);
  DefType.Bits.E  = 1;
  DefType.Bits.FE = 1;
  AsmWriteMsr64 (MSR_IA32_MTRR_DEF_TYPE, DefType.Uint64);

  MtrrLibPostMtrrChangeEnableCache (MtrrContext);
}
375 | \r | |
376 | /**\r | |
377 | Worker function gets the content in fixed MTRRs\r | |
378 | \r | |
379 | @param[out] FixedSettings A buffer to hold fixed MTRRs content.\r | |
380 | \r | |
381 | @retval The pointer of FixedSettings\r | |
382 | \r | |
383 | **/\r | |
384 | MTRR_FIXED_SETTINGS *\r | |
385 | MtrrGetFixedMtrrWorker (\r | |
386 | OUT MTRR_FIXED_SETTINGS *FixedSettings\r | |
387 | )\r | |
388 | {\r | |
389 | UINT32 Index;\r | |
390 | \r | |
391 | for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {\r | |
392 | FixedSettings->Mtrr[Index] =\r | |
393 | AsmReadMsr64 (mMtrrLibFixedMtrrTable[Index].Msr);\r | |
394 | }\r | |
395 | \r | |
396 | return FixedSettings;\r | |
397 | }\r | |
398 | \r | |
399 | /**\r | |
400 | This function gets the content in fixed MTRRs\r | |
401 | \r | |
402 | @param[out] FixedSettings A buffer to hold fixed MTRRs content.\r | |
403 | \r | |
404 | @retval The pointer of FixedSettings\r | |
405 | \r | |
406 | **/\r | |
407 | MTRR_FIXED_SETTINGS *\r | |
408 | EFIAPI\r | |
409 | MtrrGetFixedMtrr (\r | |
410 | OUT MTRR_FIXED_SETTINGS *FixedSettings\r | |
411 | )\r | |
412 | {\r | |
413 | if (!IsMtrrSupported ()) {\r | |
414 | return FixedSettings;\r | |
415 | }\r | |
416 | \r | |
417 | return MtrrGetFixedMtrrWorker (FixedSettings);\r | |
418 | }\r | |
419 | \r | |
420 | /**\r | |
421 | Worker function will get the raw value in variable MTRRs\r | |
422 | \r | |
423 | If MtrrSetting is not NULL, gets the variable MTRRs raw value from input\r | |
424 | MTRR settings buffer.\r | |
425 | If MtrrSetting is NULL, gets the variable MTRRs raw value from MTRRs.\r | |
426 | \r | |
427 | @param[in] MtrrSetting A buffer holding all MTRRs content.\r | |
428 | @param[in] VariableMtrrCount Number of variable MTRRs.\r | |
429 | @param[out] VariableSettings A buffer to hold variable MTRRs content.\r | |
430 | \r | |
431 | @return The VariableSettings input pointer\r | |
432 | \r | |
433 | **/\r | |
434 | MTRR_VARIABLE_SETTINGS *\r | |
435 | MtrrGetVariableMtrrWorker (\r | |
436 | IN MTRR_SETTINGS *MtrrSetting,\r | |
437 | IN UINT32 VariableMtrrCount,\r | |
438 | OUT MTRR_VARIABLE_SETTINGS *VariableSettings\r | |
439 | )\r | |
440 | {\r | |
441 | UINT32 Index;\r | |
442 | \r | |
443 | ASSERT (VariableMtrrCount <= ARRAY_SIZE (VariableSettings->Mtrr));\r | |
444 | \r | |
445 | for (Index = 0; Index < VariableMtrrCount; Index++) {\r | |
446 | if (MtrrSetting == NULL) {\r | |
447 | VariableSettings->Mtrr[Index].Base =\r | |
448 | AsmReadMsr64 (MSR_IA32_MTRR_PHYSBASE0 + (Index << 1));\r | |
449 | VariableSettings->Mtrr[Index].Mask =\r | |
450 | AsmReadMsr64 (MSR_IA32_MTRR_PHYSMASK0 + (Index << 1));\r | |
451 | } else {\r | |
452 | VariableSettings->Mtrr[Index].Base = MtrrSetting->Variables.Mtrr[Index].Base;\r | |
453 | VariableSettings->Mtrr[Index].Mask = MtrrSetting->Variables.Mtrr[Index].Mask;\r | |
454 | }\r | |
455 | }\r | |
456 | \r | |
457 | return VariableSettings;\r | |
458 | }\r | |
459 | \r | |
/**
  Programs fixed MTRRs registers.

  Computes the clear/or masks needed to set the fixed MTRR MSR that covers
  *Base to the given cache Type, consuming as much of the range as that MSR
  covers. Each fixed MTRR MSR holds 8 one-byte type fields, one per
  sub-range of mMtrrLibFixedMtrrTable[MsrIndex].Length bytes.

  @param[in]      Type             The memory type to set.
  @param[in, out] Base             The base address of memory range.
  @param[in, out] Length           The length of memory range.
  @param[in, out] LastMsrIndex     On input, the last index of the fixed MTRR MSR to program.
                                   On return, the current index of the fixed MTRR MSR to program.
  @param[out]     ClearMask        The bits to clear in the fixed MTRR MSR.
  @param[out]     OrMask           The bits to set in the fixed MTRR MSR.

  @retval RETURN_SUCCESS      The cache type was updated successfully
  @retval RETURN_UNSUPPORTED  The requested range or cache type was invalid
                              for the fixed MTRRs.

**/
RETURN_STATUS
MtrrLibProgramFixedMtrr (
  IN     MTRR_MEMORY_CACHE_TYPE  Type,
  IN OUT UINT64                  *Base,
  IN OUT UINT64                  *Length,
  IN OUT UINT32                  *LastMsrIndex,
  OUT    UINT64                  *ClearMask,
  OUT    UINT64                  *OrMask
  )
{
  UINT32  MsrIndex;
  UINT32  LeftByteShift;
  UINT32  RightByteShift;
  UINT64  SubLength;

  //
  // Find the fixed MTRR index to be programmed
  // (search resumes after *LastMsrIndex so successive calls walk forward).
  //
  for (MsrIndex = *LastMsrIndex + 1; MsrIndex < ARRAY_SIZE (mMtrrLibFixedMtrrTable); MsrIndex++) {
    if ((*Base >= mMtrrLibFixedMtrrTable[MsrIndex].BaseAddress) &&
        (*Base <
         (
          mMtrrLibFixedMtrrTable[MsrIndex].BaseAddress +
          (8 * mMtrrLibFixedMtrrTable[MsrIndex].Length)
         )
        )
        )
    {
      break;
    }
  }

  ASSERT (MsrIndex != ARRAY_SIZE (mMtrrLibFixedMtrrTable));

  //
  // Find the begin offset in fixed MTRR and calculate byte offset of left shift
  //
  if ((((UINT32)*Base - mMtrrLibFixedMtrrTable[MsrIndex].BaseAddress) % mMtrrLibFixedMtrrTable[MsrIndex].Length) != 0) {
    //
    // Base address should be aligned to the begin of a certain Fixed MTRR range.
    //
    return RETURN_UNSUPPORTED;
  }

  // LeftByteShift = index (0-7) of the first sub-range byte to program.
  LeftByteShift = ((UINT32)*Base - mMtrrLibFixedMtrrTable[MsrIndex].BaseAddress) / mMtrrLibFixedMtrrTable[MsrIndex].Length;
  ASSERT (LeftByteShift < 8);

  //
  // Find the end offset in fixed MTRR and calculate byte offset of right shift
  //
  SubLength = mMtrrLibFixedMtrrTable[MsrIndex].Length * (8 - LeftByteShift);
  if (*Length >= SubLength) {
    // The range covers the rest of this MSR; no high bytes to skip.
    RightByteShift = 0;
  } else {
    if (((UINT32)(*Length) % mMtrrLibFixedMtrrTable[MsrIndex].Length) != 0) {
      //
      // Length should be aligned to the end of a certain Fixed MTRR range.
      //
      return RETURN_UNSUPPORTED;
    }

    // RightByteShift = number of trailing sub-range bytes left untouched.
    RightByteShift = 8 - LeftByteShift - (UINT32)(*Length) / mMtrrLibFixedMtrrTable[MsrIndex].Length;
    //
    // Update SubLength by actual length
    //
    SubLength = *Length;
  }

  //
  // Build full-width masks, then trim the bytes outside [Left, 8-Right).
  // OR_SEED replicates the one-byte Type into all 8 byte lanes.
  //
  *ClearMask = CLEAR_SEED;
  *OrMask    = MultU64x32 (OR_SEED, (UINT32)Type);

  if (LeftByteShift != 0) {
    //
    // Clear the low bits by LeftByteShift
    //
    *ClearMask &= LShiftU64 (*ClearMask, LeftByteShift * 8);
    *OrMask    &= LShiftU64 (*OrMask, LeftByteShift * 8);
  }

  if (RightByteShift != 0) {
    //
    // Clear the high bits by RightByteShift
    //
    *ClearMask &= RShiftU64 (*ClearMask, RightByteShift * 8);
    *OrMask    &= RShiftU64 (*OrMask, RightByteShift * 8);
  }

  // Consume the portion of the range handled by this MSR.
  *Length -= SubLength;
  *Base   += SubLength;

  *LastMsrIndex = MsrIndex;

  return RETURN_SUCCESS;
}
570 | \r | |
571 | /**\r | |
572 | Worker function gets the attribute of variable MTRRs.\r | |
573 | \r | |
574 | This function shadows the content of variable MTRRs into an\r | |
575 | internal array: VariableMtrr.\r | |
576 | \r | |
577 | @param[in] VariableSettings The variable MTRR values to shadow\r | |
578 | @param[in] VariableMtrrCount The number of variable MTRRs\r | |
579 | @param[in] MtrrValidBitsMask The mask for the valid bit of the MTRR\r | |
580 | @param[in] MtrrValidAddressMask The valid address mask for MTRR\r | |
581 | @param[out] VariableMtrr The array to shadow variable MTRRs content\r | |
582 | \r | |
583 | @return Number of MTRRs which has been used.\r | |
584 | \r | |
585 | **/\r | |
586 | UINT32\r | |
587 | MtrrGetMemoryAttributeInVariableMtrrWorker (\r | |
588 | IN MTRR_VARIABLE_SETTINGS *VariableSettings,\r | |
589 | IN UINTN VariableMtrrCount,\r | |
590 | IN UINT64 MtrrValidBitsMask,\r | |
591 | IN UINT64 MtrrValidAddressMask,\r | |
592 | OUT VARIABLE_MTRR *VariableMtrr\r | |
593 | )\r | |
594 | {\r | |
595 | UINTN Index;\r | |
596 | UINT32 UsedMtrr;\r | |
597 | \r | |
598 | ZeroMem (VariableMtrr, sizeof (VARIABLE_MTRR) * ARRAY_SIZE (VariableSettings->Mtrr));\r | |
599 | for (Index = 0, UsedMtrr = 0; Index < VariableMtrrCount; Index++) {\r | |
600 | if (((MSR_IA32_MTRR_PHYSMASK_REGISTER *)&VariableSettings->Mtrr[Index].Mask)->Bits.V != 0) {\r | |
601 | VariableMtrr[Index].Msr = (UINT32)Index;\r | |
602 | VariableMtrr[Index].BaseAddress = (VariableSettings->Mtrr[Index].Base & MtrrValidAddressMask);\r | |
603 | VariableMtrr[Index].Length =\r | |
604 | ((~(VariableSettings->Mtrr[Index].Mask & MtrrValidAddressMask)) & MtrrValidBitsMask) + 1;\r | |
605 | VariableMtrr[Index].Type = (VariableSettings->Mtrr[Index].Base & 0x0ff);\r | |
606 | VariableMtrr[Index].Valid = TRUE;\r | |
607 | VariableMtrr[Index].Used = TRUE;\r | |
608 | UsedMtrr++;\r | |
609 | }\r | |
610 | }\r | |
611 | \r | |
612 | return UsedMtrr;\r | |
613 | }\r | |
614 | \r | |
615 | /**\r | |
616 | Convert variable MTRRs to a RAW MTRR_MEMORY_RANGE array.\r | |
617 | One MTRR_MEMORY_RANGE element is created for each MTRR setting.\r | |
618 | The routine doesn't remove the overlap or combine the near-by region.\r | |
619 | \r | |
620 | @param[in] VariableSettings The variable MTRR values to shadow\r | |
621 | @param[in] VariableMtrrCount The number of variable MTRRs\r | |
622 | @param[in] MtrrValidBitsMask The mask for the valid bit of the MTRR\r | |
623 | @param[in] MtrrValidAddressMask The valid address mask for MTRR\r | |
624 | @param[out] VariableMtrr The array to shadow variable MTRRs content\r | |
625 | \r | |
626 | @return Number of MTRRs which has been used.\r | |
627 | \r | |
628 | **/\r | |
629 | UINT32\r | |
630 | MtrrLibGetRawVariableRanges (\r | |
631 | IN MTRR_VARIABLE_SETTINGS *VariableSettings,\r | |
632 | IN UINTN VariableMtrrCount,\r | |
633 | IN UINT64 MtrrValidBitsMask,\r | |
634 | IN UINT64 MtrrValidAddressMask,\r | |
635 | OUT MTRR_MEMORY_RANGE *VariableMtrr\r | |
636 | )\r | |
637 | {\r | |
638 | UINTN Index;\r | |
639 | UINT32 UsedMtrr;\r | |
640 | \r | |
641 | ZeroMem (VariableMtrr, sizeof (MTRR_MEMORY_RANGE) * ARRAY_SIZE (VariableSettings->Mtrr));\r | |
642 | for (Index = 0, UsedMtrr = 0; Index < VariableMtrrCount; Index++) {\r | |
643 | if (((MSR_IA32_MTRR_PHYSMASK_REGISTER *)&VariableSettings->Mtrr[Index].Mask)->Bits.V != 0) {\r | |
644 | VariableMtrr[Index].BaseAddress = (VariableSettings->Mtrr[Index].Base & MtrrValidAddressMask);\r | |
645 | VariableMtrr[Index].Length =\r | |
646 | ((~(VariableSettings->Mtrr[Index].Mask & MtrrValidAddressMask)) & MtrrValidBitsMask) + 1;\r | |
647 | VariableMtrr[Index].Type = (MTRR_MEMORY_CACHE_TYPE)(VariableSettings->Mtrr[Index].Base & 0x0ff);\r | |
648 | UsedMtrr++;\r | |
649 | }\r | |
650 | }\r | |
651 | \r | |
652 | return UsedMtrr;\r | |
653 | }\r | |
654 | \r | |
655 | /**\r | |
656 | Gets the attribute of variable MTRRs.\r | |
657 | \r | |
658 | This function shadows the content of variable MTRRs into an\r | |
659 | internal array: VariableMtrr.\r | |
660 | \r | |
661 | @param[in] MtrrValidBitsMask The mask for the valid bit of the MTRR\r | |
662 | @param[in] MtrrValidAddressMask The valid address mask for MTRR\r | |
663 | @param[out] VariableMtrr The array to shadow variable MTRRs content\r | |
664 | \r | |
665 | @return The return value of this parameter indicates the\r | |
666 | number of MTRRs which has been used.\r | |
667 | \r | |
668 | **/\r | |
669 | UINT32\r | |
670 | EFIAPI\r | |
671 | MtrrGetMemoryAttributeInVariableMtrr (\r | |
672 | IN UINT64 MtrrValidBitsMask,\r | |
673 | IN UINT64 MtrrValidAddressMask,\r | |
674 | OUT VARIABLE_MTRR *VariableMtrr\r | |
675 | )\r | |
676 | {\r | |
677 | MTRR_VARIABLE_SETTINGS VariableSettings;\r | |
678 | \r | |
679 | if (!IsMtrrSupported ()) {\r | |
680 | return 0;\r | |
681 | }\r | |
682 | \r | |
683 | MtrrGetVariableMtrrWorker (\r | |
684 | NULL,\r | |
685 | GetVariableMtrrCountWorker (),\r | |
686 | &VariableSettings\r | |
687 | );\r | |
688 | \r | |
689 | return MtrrGetMemoryAttributeInVariableMtrrWorker (\r | |
690 | &VariableSettings,\r | |
691 | GetFirmwareVariableMtrrCountWorker (),\r | |
692 | MtrrValidBitsMask,\r | |
693 | MtrrValidAddressMask,\r | |
694 | VariableMtrr\r | |
695 | );\r | |
696 | }\r | |
697 | \r | |
698 | /**\r | |
699 | Return the biggest alignment (lowest set bit) of address.\r | |
700 | The function is equivalent to: 1 << LowBitSet64 (Address).\r | |
701 | \r | |
702 | @param Address The address to return the alignment.\r | |
703 | @param Alignment0 The alignment to return when Address is 0.\r | |
704 | \r | |
705 | @return The least alignment of the Address.\r | |
706 | **/\r | |
707 | UINT64\r | |
708 | MtrrLibBiggestAlignment (\r | |
709 | UINT64 Address,\r | |
710 | UINT64 Alignment0\r | |
711 | )\r | |
712 | {\r | |
713 | if (Address == 0) {\r | |
714 | return Alignment0;\r | |
715 | }\r | |
716 | \r | |
717 | return Address & ((~Address) + 1);\r | |
718 | }\r | |
719 | \r | |
720 | /**\r | |
721 | Return whether the left MTRR type precedes the right MTRR type.\r | |
722 | \r | |
723 | The MTRR type precedence rules are:\r | |
724 | 1. UC precedes any other type\r | |
725 | 2. WT precedes WB\r | |
726 | For further details, please refer the IA32 Software Developer's Manual,\r | |
727 | Volume 3, Section "MTRR Precedences".\r | |
728 | \r | |
729 | @param Left The left MTRR type.\r | |
730 | @param Right The right MTRR type.\r | |
731 | \r | |
732 | @retval TRUE Left precedes Right.\r | |
733 | @retval FALSE Left doesn't precede Right.\r | |
734 | **/\r | |
735 | BOOLEAN\r | |
736 | MtrrLibTypeLeftPrecedeRight (\r | |
737 | IN MTRR_MEMORY_CACHE_TYPE Left,\r | |
738 | IN MTRR_MEMORY_CACHE_TYPE Right\r | |
739 | )\r | |
740 | {\r | |
741 | return (BOOLEAN)(Left == CacheUncacheable || (Left == CacheWriteThrough && Right == CacheWriteBack));\r | |
742 | }\r | |
743 | \r | |
744 | /**\r | |
745 | Initializes the valid bits mask and valid address mask for MTRRs.\r | |
746 | \r | |
747 | This function initializes the valid bits mask and valid address mask for MTRRs.\r | |
748 | \r | |
749 | @param[out] MtrrValidBitsMask The mask for the valid bit of the MTRR\r | |
750 | @param[out] MtrrValidAddressMask The valid address mask for the MTRR\r | |
751 | \r | |
752 | **/\r | |
753 | VOID\r | |
754 | MtrrLibInitializeMtrrMask (\r | |
755 | OUT UINT64 *MtrrValidBitsMask,\r | |
756 | OUT UINT64 *MtrrValidAddressMask\r | |
757 | )\r | |
758 | {\r | |
759 | UINT32 MaxExtendedFunction;\r | |
760 | CPUID_VIR_PHY_ADDRESS_SIZE_EAX VirPhyAddressSize;\r | |
761 | \r | |
762 | AsmCpuid (CPUID_EXTENDED_FUNCTION, &MaxExtendedFunction, NULL, NULL, NULL);\r | |
763 | \r | |
764 | if (MaxExtendedFunction >= CPUID_VIR_PHY_ADDRESS_SIZE) {\r | |
765 | AsmCpuid (CPUID_VIR_PHY_ADDRESS_SIZE, &VirPhyAddressSize.Uint32, NULL, NULL, NULL);\r | |
766 | } else {\r | |
767 | VirPhyAddressSize.Bits.PhysicalAddressBits = 36;\r | |
768 | }\r | |
769 | \r | |
770 | *MtrrValidBitsMask = LShiftU64 (1, VirPhyAddressSize.Bits.PhysicalAddressBits) - 1;\r | |
771 | *MtrrValidAddressMask = *MtrrValidBitsMask & 0xfffffffffffff000ULL;\r | |
772 | }\r | |
773 | \r | |
774 | /**\r | |
775 | Determines the real attribute of a memory range.\r | |
776 | \r | |
777 | This function is to arbitrate the real attribute of the memory when\r | |
778 | there are 2 MTRRs covers the same memory range. For further details,\r | |
779 | please refer the IA32 Software Developer's Manual, Volume 3,\r | |
780 | Section "MTRR Precedences".\r | |
781 | \r | |
782 | @param[in] MtrrType1 The first kind of Memory type\r | |
783 | @param[in] MtrrType2 The second kind of memory type\r | |
784 | \r | |
785 | **/\r | |
786 | MTRR_MEMORY_CACHE_TYPE\r | |
787 | MtrrLibPrecedence (\r | |
788 | IN MTRR_MEMORY_CACHE_TYPE MtrrType1,\r | |
789 | IN MTRR_MEMORY_CACHE_TYPE MtrrType2\r | |
790 | )\r | |
791 | {\r | |
792 | if (MtrrType1 == MtrrType2) {\r | |
793 | return MtrrType1;\r | |
794 | }\r | |
795 | \r | |
796 | ASSERT (\r | |
797 | MtrrLibTypeLeftPrecedeRight (MtrrType1, MtrrType2) ||\r | |
798 | MtrrLibTypeLeftPrecedeRight (MtrrType2, MtrrType1)\r | |
799 | );\r | |
800 | \r | |
801 | if (MtrrLibTypeLeftPrecedeRight (MtrrType1, MtrrType2)) {\r | |
802 | return MtrrType1;\r | |
803 | } else {\r | |
804 | return MtrrType2;\r | |
805 | }\r | |
806 | }\r | |
807 | \r | |
/**
  Worker function will get the memory cache type of the specific address.

  If MtrrSetting is not NULL, gets the memory cache type from input
  MTRR settings buffer.
  If MtrrSetting is NULL, gets the memory cache type from MTRRs.

  Lookup order: MTRR-enable check, then fixed MTRRs (below 1MB, when FE is
  set), then all overlapping variable MTRRs arbitrated by precedence, then
  the default type.

  @param[in]  MtrrSetting   A buffer holding all MTRRs content.
  @param[in]  Address       The specific address

  @return Memory cache type of the specific address

**/
MTRR_MEMORY_CACHE_TYPE
MtrrGetMemoryAttributeByAddressWorker (
  IN MTRR_SETTINGS     *MtrrSetting,
  IN PHYSICAL_ADDRESS  Address
  )
{
  MSR_IA32_MTRR_DEF_TYPE_REGISTER  DefType;
  UINT64                           FixedMtrr;
  UINTN                            Index;
  UINTN                            SubIndex;
  MTRR_MEMORY_CACHE_TYPE           MtrrType;
  MTRR_MEMORY_RANGE                VariableMtrr[ARRAY_SIZE (MtrrSetting->Variables.Mtrr)];
  UINT64                           MtrrValidBitsMask;
  UINT64                           MtrrValidAddressMask;
  UINT32                           VariableMtrrCount;
  MTRR_VARIABLE_SETTINGS           VariableSettings;

  //
  // Check if MTRR is enabled, if not, return UC as attribute
  //
  if (MtrrSetting == NULL) {
    DefType.Uint64 = AsmReadMsr64 (MSR_IA32_MTRR_DEF_TYPE);
  } else {
    DefType.Uint64 = MtrrSetting->MtrrDefType;
  }

  if (DefType.Bits.E == 0) {
    return CacheUncacheable;
  }

  //
  // If address is less than 1M, then try to go through the fixed MTRR
  //
  if (Address < BASE_1MB) {
    if (DefType.Bits.FE != 0) {
      //
      // Go through the fixed MTRR: find the entry covering Address, then
      // extract the one-byte type field for the sub-range within it.
      //
      for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {
        if ((Address >= mMtrrLibFixedMtrrTable[Index].BaseAddress) &&
            (Address < mMtrrLibFixedMtrrTable[Index].BaseAddress +
             (mMtrrLibFixedMtrrTable[Index].Length * 8)))
        {
          // SubIndex selects which of the 8 byte lanes in the MSR applies.
          SubIndex =
            ((UINTN)Address - mMtrrLibFixedMtrrTable[Index].BaseAddress) /
            mMtrrLibFixedMtrrTable[Index].Length;
          if (MtrrSetting == NULL) {
            FixedMtrr = AsmReadMsr64 (mMtrrLibFixedMtrrTable[Index].Msr);
          } else {
            FixedMtrr = MtrrSetting->Fixed.Mtrr[Index];
          }

          // Fixed MTRRs take precedence over variable MTRRs for this range.
          return (MTRR_MEMORY_CACHE_TYPE)(RShiftU64 (FixedMtrr, SubIndex * 8) & 0xFF);
        }
      }
    }
  }

  //
  // Shadow the variable MTRRs into decoded base/length/type ranges.
  //
  VariableMtrrCount = GetVariableMtrrCountWorker ();
  ASSERT (VariableMtrrCount <= ARRAY_SIZE (MtrrSetting->Variables.Mtrr));
  MtrrGetVariableMtrrWorker (MtrrSetting, VariableMtrrCount, &VariableSettings);

  MtrrLibInitializeMtrrMask (&MtrrValidBitsMask, &MtrrValidAddressMask);
  MtrrLibGetRawVariableRanges (
    &VariableSettings,
    VariableMtrrCount,
    MtrrValidBitsMask,
    MtrrValidAddressMask,
    VariableMtrr
    );

  //
  // Go through the variable MTRR; when several ranges overlap the address,
  // MtrrLibPrecedence arbitrates (UC beats all, WT beats WB).
  //
  MtrrType = CacheInvalid;
  for (Index = 0; Index < VariableMtrrCount; Index++) {
    if (VariableMtrr[Index].Length != 0) {
      if ((Address >= VariableMtrr[Index].BaseAddress) &&
          (Address < VariableMtrr[Index].BaseAddress + VariableMtrr[Index].Length))
      {
        if (MtrrType == CacheInvalid) {
          MtrrType = (MTRR_MEMORY_CACHE_TYPE)VariableMtrr[Index].Type;
        } else {
          MtrrType = MtrrLibPrecedence (MtrrType, (MTRR_MEMORY_CACHE_TYPE)VariableMtrr[Index].Type);
        }
      }
    }
  }

  //
  // If there is no MTRR which covers the Address, use the default MTRR type.
  //
  if (MtrrType == CacheInvalid) {
    MtrrType = (MTRR_MEMORY_CACHE_TYPE)DefType.Bits.Type;
  }

  return MtrrType;
}
919 | \r | |
920 | /**\r | |
921 | This function will get the memory cache type of the specific address.\r | |
922 | \r | |
923 | This function is mainly for debug purpose.\r | |
924 | \r | |
925 | @param[in] Address The specific address\r | |
926 | \r | |
927 | @return Memory cache type of the specific address\r | |
928 | \r | |
929 | **/\r | |
930 | MTRR_MEMORY_CACHE_TYPE\r | |
931 | EFIAPI\r | |
932 | MtrrGetMemoryAttribute (\r | |
933 | IN PHYSICAL_ADDRESS Address\r | |
934 | )\r | |
935 | {\r | |
936 | if (!IsMtrrSupported ()) {\r | |
937 | return CacheUncacheable;\r | |
938 | }\r | |
939 | \r | |
940 | return MtrrGetMemoryAttributeByAddressWorker (NULL, Address);\r | |
941 | }\r | |
942 | \r | |
/**
  Update the Ranges array to change the specified range identified by
  BaseAddress and Length to Type.

  The Ranges array is kept sorted and gap-free: the range containing
  BaseAddress may be split on the left, the range containing
  (BaseAddress + Length) may be split on the right, and any ranges fully
  covered in between are removed. Adjacent ranges that end up with the
  same type are merged.

  @param Ranges      Array holding memory type settings for all memory regions.
  @param Capacity    The maximum count of memory ranges the array can hold.
  @param Count       Return the new memory range count in the array.
  @param BaseAddress The base address of the memory range to change type.
  @param Length      The length of the memory range to change type.
  @param Type        The new type of the specified memory range.

  @retval RETURN_SUCCESS          The type of the specified memory range is
                                  changed successfully.
  @retval RETURN_ALREADY_STARTED  The type of the specified memory range equals
                                  to the desired type.
  @retval RETURN_OUT_OF_RESOURCES The new type set causes the count of memory
                                  range exceeds capacity.
**/
RETURN_STATUS
MtrrLibSetMemoryType (
  IN MTRR_MEMORY_RANGE       *Ranges,
  IN UINTN                   Capacity,
  IN OUT UINTN               *Count,
  IN UINT64                  BaseAddress,
  IN UINT64                  Length,
  IN MTRR_MEMORY_CACHE_TYPE  Type
  )
{
  UINTN   Index;
  UINT64  Limit;
  UINT64  LengthLeft;
  UINT64  LengthRight;
  UINTN   StartIndex;
  UINTN   EndIndex;
  UINTN   DeltaCount;

  //
  // Locate the range containing BaseAddress (StartIndex) and the range
  // containing Limit (EndIndex). LengthLeft/LengthRight are the portions of
  // those ranges that lie outside [BaseAddress, Limit) and must be preserved.
  //
  LengthRight = 0;
  LengthLeft  = 0;
  Limit       = BaseAddress + Length;
  StartIndex  = *Count;
  EndIndex    = *Count;
  for (Index = 0; Index < *Count; Index++) {
    if ((StartIndex == *Count) &&
        (Ranges[Index].BaseAddress <= BaseAddress) &&
        (BaseAddress < Ranges[Index].BaseAddress + Ranges[Index].Length))
    {
      StartIndex = Index;
      LengthLeft = BaseAddress - Ranges[Index].BaseAddress;
    }

    if ((EndIndex == *Count) &&
        (Ranges[Index].BaseAddress < Limit) &&
        (Limit <= Ranges[Index].BaseAddress + Ranges[Index].Length))
    {
      EndIndex    = Index;
      LengthRight = Ranges[Index].BaseAddress + Ranges[Index].Length - Limit;
      break;
    }
  }

  //
  // The request is expected to fall entirely inside the tracked address space.
  //
  ASSERT (StartIndex != *Count && EndIndex != *Count);
  if ((StartIndex == EndIndex) && (Ranges[StartIndex].Type == Type)) {
    return RETURN_ALREADY_STARTED;
  }

  //
  // The type change may cause merging with previous range or next range.
  // Update the StartIndex, EndIndex, BaseAddress, Length so that following
  // logic doesn't need to consider merging.
  //
  if (StartIndex != 0) {
    if ((LengthLeft == 0) && (Ranges[StartIndex - 1].Type == Type)) {
      StartIndex--;
      Length      += Ranges[StartIndex].Length;
      BaseAddress -= Ranges[StartIndex].Length;
    }
  }

  if (EndIndex != (*Count) - 1) {
    if ((LengthRight == 0) && (Ranges[EndIndex + 1].Type == Type)) {
      EndIndex++;
      Length += Ranges[EndIndex].Length;
    }
  }

  //
  // DeltaCount is the number of array entries removed by this operation
  // (negative means entries are inserted for the left/right splits).
  //
  // |- 0 -|- 1 -|- 2 -|- 3 -| StartIndex EndIndex DeltaCount  Count (Count = 4)
  //       |++++++++++++++++++|  0         3         1=3-0-2    3
  //       |+++++++|             0         1        -1=1-0-2    5
  //       |+|                   0         0        -2=0-0-2    6
  // |+++|                       0         0        -1=0-0-2+1  5
  //
  //
  DeltaCount = EndIndex - StartIndex - 2;
  if (LengthLeft == 0) {
    DeltaCount++;
  }

  if (LengthRight == 0) {
    DeltaCount++;
  }

  if (*Count - DeltaCount > Capacity) {
    return RETURN_OUT_OF_RESOURCES;
  }

  //
  // Reserve (-DeltaCount) space
  //
  CopyMem (&Ranges[EndIndex + 1 - DeltaCount], &Ranges[EndIndex + 1], (*Count - EndIndex - 1) * sizeof (Ranges[0]));
  *Count -= DeltaCount;

  //
  // Shrink the left neighbor to the preserved prefix; the new range then
  // starts at the following slot.
  //
  if (LengthLeft != 0) {
    Ranges[StartIndex].Length = LengthLeft;
    StartIndex++;
  }

  //
  // Create the preserved suffix of the old right neighbor (its type is
  // still at index EndIndex because entries above were shifted, not this one).
  //
  if (LengthRight != 0) {
    Ranges[EndIndex - DeltaCount].BaseAddress = BaseAddress + Length;
    Ranges[EndIndex - DeltaCount].Length      = LengthRight;
    Ranges[EndIndex - DeltaCount].Type        = Ranges[EndIndex].Type;
  }

  Ranges[StartIndex].BaseAddress = BaseAddress;
  Ranges[StartIndex].Length      = Length;
  Ranges[StartIndex].Type        = Type;
  return RETURN_SUCCESS;
}
1071 | \r | |
1072 | /**\r | |
1073 | Return the number of memory types in range [BaseAddress, BaseAddress + Length).\r | |
1074 | \r | |
1075 | @param Ranges Array holding memory type settings for all memory regions.\r | |
1076 | @param RangeCount The count of memory ranges the array holds.\r | |
1077 | @param BaseAddress Base address.\r | |
1078 | @param Length Length.\r | |
1079 | @param Types Return bit mask to indicate all memory types in the specified range.\r | |
1080 | \r | |
1081 | @retval Number of memory types.\r | |
1082 | **/\r | |
1083 | UINT8\r | |
1084 | MtrrLibGetNumberOfTypes (\r | |
1085 | IN CONST MTRR_MEMORY_RANGE *Ranges,\r | |
1086 | IN UINTN RangeCount,\r | |
1087 | IN UINT64 BaseAddress,\r | |
1088 | IN UINT64 Length,\r | |
1089 | IN OUT UINT8 *Types OPTIONAL\r | |
1090 | )\r | |
1091 | {\r | |
1092 | UINTN Index;\r | |
1093 | UINT8 TypeCount;\r | |
1094 | UINT8 LocalTypes;\r | |
1095 | \r | |
1096 | TypeCount = 0;\r | |
1097 | LocalTypes = 0;\r | |
1098 | for (Index = 0; Index < RangeCount; Index++) {\r | |
1099 | if ((Ranges[Index].BaseAddress <= BaseAddress) &&\r | |
1100 | (BaseAddress < Ranges[Index].BaseAddress + Ranges[Index].Length)\r | |
1101 | )\r | |
1102 | {\r | |
1103 | if ((LocalTypes & (1 << Ranges[Index].Type)) == 0) {\r | |
1104 | LocalTypes |= (UINT8)(1 << Ranges[Index].Type);\r | |
1105 | TypeCount++;\r | |
1106 | }\r | |
1107 | \r | |
1108 | if (BaseAddress + Length > Ranges[Index].BaseAddress + Ranges[Index].Length) {\r | |
1109 | Length -= Ranges[Index].BaseAddress + Ranges[Index].Length - BaseAddress;\r | |
1110 | BaseAddress = Ranges[Index].BaseAddress + Ranges[Index].Length;\r | |
1111 | } else {\r | |
1112 | break;\r | |
1113 | }\r | |
1114 | }\r | |
1115 | }\r | |
1116 | \r | |
1117 | if (Types != NULL) {\r | |
1118 | *Types = LocalTypes;\r | |
1119 | }\r | |
1120 | \r | |
1121 | return TypeCount;\r | |
1122 | }\r | |
1123 | \r | |
/**
  Calculate the least MTRR number from vertex Start to Stop and update
  the Previous of all vertices from Start to Stop is updated to reflect
  how the memory range is covered by MTRR.

  This is a Dijkstra-style shortest-path search over the Weight matrix:
  M(x, y) holds the mandatory edge weight and O(x, y) the optional edge
  weight between vertices x and y; MAX_WEIGHT marks a missing edge.

  @param VertexCount     The count of vertices in the graph.
  @param Vertices        Array holding all vertices.
  @param Weight          2-dimention array holding weights between vertices.
  @param Start           Start vertex.
  @param Stop            Stop vertex.
  @param IncludeOptional TRUE to count the optional weight.
**/
VOID
MtrrLibCalculateLeastMtrrs (
  IN UINT16            VertexCount,
  IN MTRR_LIB_ADDRESS  *Vertices,
  IN OUT CONST UINT8   *Weight,
  IN UINT16            Start,
  IN UINT16            Stop,
  IN BOOLEAN           IncludeOptional
  )
{
  UINT16  Index;
  UINT8   MinWeight;
  UINT16  MinI;
  UINT8   Mandatory;
  UINT8   Optional;

  //
  // Initialize each vertex weight to its direct edge weight from Start.
  // Optional weight is only added when the mandatory edge exists.
  //
  for (Index = Start; Index <= Stop; Index++) {
    Vertices[Index].Visited = FALSE;
    Mandatory               = Weight[M (Start, Index)];
    Vertices[Index].Weight  = Mandatory;
    if (Mandatory != MAX_WEIGHT) {
      Optional                = IncludeOptional ? Weight[O (Start, Index)] : 0;
      Vertices[Index].Weight += Optional;
      // Guard against UINT8 overflow of Mandatory + Optional.
      ASSERT (Vertices[Index].Weight >= Optional);
    }
  }

  MinI      = Start;
  MinWeight = 0;
  while (!Vertices[Stop].Visited) {
    //
    // Update the weight from the shortest vertex to other unvisited vertices
    //
    for (Index = Start + 1; Index <= Stop; Index++) {
      if (!Vertices[Index].Visited) {
        Mandatory = Weight[M (MinI, Index)];
        if (Mandatory != MAX_WEIGHT) {
          Optional = IncludeOptional ? Weight[O (MinI, Index)] : 0;
          // "<=" (not "<") makes ties prefer the later-relaxed predecessor.
          if (MinWeight + Mandatory + Optional <= Vertices[Index].Weight) {
            Vertices[Index].Weight   = MinWeight + Mandatory + Optional;
            Vertices[Index].Previous = MinI; // Previous is Start based.
          }
        }
      }
    }

    //
    // Find the shortest vertex from Start
    //
    MinI      = VertexCount;
    MinWeight = MAX_WEIGHT;
    for (Index = Start + 1; Index <= Stop; Index++) {
      if (!Vertices[Index].Visited && (MinWeight > Vertices[Index].Weight)) {
        MinI      = Index;
        MinWeight = Vertices[Index].Weight;
      }
    }

    //
    // Mark the shortest vertex from Start as visited
    //
    Vertices[MinI].Visited = TRUE;
  }
}
1200 | \r | |
1201 | /**\r | |
1202 | Append the MTRR setting to MTRR setting array.\r | |
1203 | \r | |
1204 | @param Mtrrs Array holding all MTRR settings.\r | |
1205 | @param MtrrCapacity Capacity of the MTRR array.\r | |
1206 | @param MtrrCount The count of MTRR settings in array.\r | |
1207 | @param BaseAddress Base address.\r | |
1208 | @param Length Length.\r | |
1209 | @param Type Memory type.\r | |
1210 | \r | |
1211 | @retval RETURN_SUCCESS MTRR setting is appended to array.\r | |
1212 | @retval RETURN_OUT_OF_RESOURCES Array is full.\r | |
1213 | **/\r | |
1214 | RETURN_STATUS\r | |
1215 | MtrrLibAppendVariableMtrr (\r | |
1216 | IN OUT MTRR_MEMORY_RANGE *Mtrrs,\r | |
1217 | IN UINT32 MtrrCapacity,\r | |
1218 | IN OUT UINT32 *MtrrCount,\r | |
1219 | IN UINT64 BaseAddress,\r | |
1220 | IN UINT64 Length,\r | |
1221 | IN MTRR_MEMORY_CACHE_TYPE Type\r | |
1222 | )\r | |
1223 | {\r | |
1224 | if (*MtrrCount == MtrrCapacity) {\r | |
1225 | return RETURN_OUT_OF_RESOURCES;\r | |
1226 | }\r | |
1227 | \r | |
1228 | Mtrrs[*MtrrCount].BaseAddress = BaseAddress;\r | |
1229 | Mtrrs[*MtrrCount].Length = Length;\r | |
1230 | Mtrrs[*MtrrCount].Type = Type;\r | |
1231 | (*MtrrCount)++;\r | |
1232 | return RETURN_SUCCESS;\r | |
1233 | }\r | |
1234 | \r | |
1235 | /**\r | |
1236 | Return the memory type that has the least precedence.\r | |
1237 | \r | |
1238 | @param TypeBits Bit mask of memory type.\r | |
1239 | \r | |
1240 | @retval Memory type that has the least precedence.\r | |
1241 | **/\r | |
1242 | MTRR_MEMORY_CACHE_TYPE\r | |
1243 | MtrrLibLowestType (\r | |
1244 | IN UINT8 TypeBits\r | |
1245 | )\r | |
1246 | {\r | |
1247 | INT8 Type;\r | |
1248 | \r | |
1249 | ASSERT (TypeBits != 0);\r | |
1250 | for (Type = 7; (INT8)TypeBits > 0; Type--, TypeBits <<= 1) {\r | |
1251 | }\r | |
1252 | \r | |
1253 | return (MTRR_MEMORY_CACHE_TYPE)Type;\r | |
1254 | }\r | |
1255 | \r | |
1256 | /**\r | |
1257 | Return TRUE when the Operand is exactly power of 2.\r | |
1258 | \r | |
1259 | @retval TRUE Operand is exactly power of 2.\r | |
1260 | @retval FALSE Operand is not power of 2.\r | |
1261 | **/\r | |
1262 | BOOLEAN\r | |
1263 | MtrrLibIsPowerOfTwo (\r | |
1264 | IN UINT64 Operand\r | |
1265 | )\r | |
1266 | {\r | |
1267 | ASSERT (Operand != 0);\r | |
1268 | return (BOOLEAN)((Operand & (Operand - 1)) == 0);\r | |
1269 | }\r | |
1270 | \r | |
/**
  Calculate the subtractive path from vertex Start to Stop.

  A "subtractive" description covers [Start, Stop) with one MTRR of the
  lowest-precedence type and then overlays MTRRs for the higher-precedence
  sub-ranges inside it. When Mtrrs is NULL, only the Weight matrix entries
  for the (Start, Stop) edge are computed; otherwise the actual MTRR
  settings are appended to Mtrrs.

  @param DefaultType  Default memory type.
  @param A0           Alignment to use when base address is 0.
  @param Ranges       Array holding memory type settings for all memory regions.
  @param RangeCount   The count of memory ranges the array holds.
  @param VertexCount  The count of vertices in the graph.
  @param Vertices     Array holding all vertices.
  @param Weight       2-dimention array holding weights between vertices.
  @param Start        Start vertex.
  @param Stop         Stop vertex.
  @param Types        Type bit mask of memory range from Start to Stop.
  @param TypeCount    Number of different memory types from Start to Stop.
  @param Mtrrs        Array holding all MTRR settings.
  @param MtrrCapacity Capacity of the MTRR array.
  @param MtrrCount    The count of MTRR settings in array.

  @retval RETURN_SUCCESS          The subtractive path is calculated successfully.
  @retval RETURN_OUT_OF_RESOURCES The MTRR setting array is full.

**/
RETURN_STATUS
MtrrLibCalculateSubtractivePath (
  IN MTRR_MEMORY_CACHE_TYPE   DefaultType,
  IN UINT64                   A0,
  IN CONST MTRR_MEMORY_RANGE  *Ranges,
  IN UINTN                    RangeCount,
  IN UINT16                   VertexCount,
  IN MTRR_LIB_ADDRESS         *Vertices,
  IN OUT UINT8                *Weight,
  IN UINT16                   Start,
  IN UINT16                   Stop,
  IN UINT8                    Types,
  IN UINT8                    TypeCount,
  IN OUT MTRR_MEMORY_RANGE    *Mtrrs OPTIONAL,
  IN UINT32                   MtrrCapacity OPTIONAL,
  IN OUT UINT32               *MtrrCount OPTIONAL
  )
{
  RETURN_STATUS           Status;
  UINT64                  Base;
  UINT64                  Length;
  UINT8                   PrecedentTypes;
  UINTN                   Index;
  UINT64                  HBase;
  UINT64                  HLength;
  UINT64                  SubLength;
  UINT16                  SubStart;
  UINT16                  SubStop;
  UINT16                  Cur;
  UINT16                  Pre;
  MTRR_MEMORY_CACHE_TYPE  LowestType;
  MTRR_MEMORY_CACHE_TYPE  LowestPrecedentType;

  Base   = Vertices[Start].Address;
  Length = Vertices[Stop].Address - Base;

  LowestType = MtrrLibLowestType (Types);

  //
  // Clear the lowest type (highest bit) to get the precedent types
  //
  PrecedentTypes      = ~(1 << LowestType) & Types;
  LowestPrecedentType = MtrrLibLowestType (PrecedentTypes);

  if (Mtrrs == NULL) {
    //
    // Weight-only mode: the single covering MTRR is mandatory unless its
    // type equals the default type, in which case it is optional (the
    // default type already provides it).
    //
    Weight[M (Start, Stop)] = ((LowestType == DefaultType) ? 0 : 1);
    Weight[O (Start, Stop)] = ((LowestType == DefaultType) ? 1 : 0);
  }

  // Add all high level ranges
  HBase   = MAX_UINT64;
  HLength = 0;
  for (Index = 0; Index < RangeCount; Index++) {
    if (Length == 0) {
      break;
    }

    if ((Base < Ranges[Index].BaseAddress) || (Ranges[Index].BaseAddress + Ranges[Index].Length <= Base)) {
      continue;
    }

    //
    // Base is in the Range[Index]
    //
    if (Base + Length > Ranges[Index].BaseAddress + Ranges[Index].Length) {
      SubLength = Ranges[Index].BaseAddress + Ranges[Index].Length - Base;
    } else {
      SubLength = Length;
    }

    if (((1 << Ranges[Index].Type) & PrecedentTypes) != 0) {
      //
      // Meet a range whose types take precedence.
      // Update the [HBase, HBase + HLength) to include the range,
      // [HBase, HBase + HLength) may contain sub ranges with 2 different types, and both take precedence.
      //
      if (HBase == MAX_UINT64) {
        HBase = Base;
      }

      HLength += SubLength;
    }

    Base   += SubLength;
    Length -= SubLength;

    if (HLength == 0) {
      continue;
    }

    if ((Ranges[Index].Type == LowestType) || (Length == 0)) {
      // meet low type or end

      //
      // Add the MTRRs for each high priority type range
      // the range[HBase, HBase + HLength) contains only two types.
      // We might use positive or subtractive, depending on which way uses less MTRR
      //
      // Locate the vertices whose addresses bound [HBase, HBase + HLength).
      //
      for (SubStart = Start; SubStart <= Stop; SubStart++) {
        if (Vertices[SubStart].Address == HBase) {
          break;
        }
      }

      for (SubStop = SubStart; SubStop <= Stop; SubStop++) {
        if (Vertices[SubStop].Address == HBase + HLength) {
          break;
        }
      }

      ASSERT (Vertices[SubStart].Address == HBase);
      ASSERT (Vertices[SubStop].Address == HBase + HLength);

      if ((TypeCount == 2) || (SubStart == SubStop - 1)) {
        //
        // add subtractive MTRRs for [HBase, HBase + HLength)
        // [HBase, HBase + HLength) contains only one type.
        // while - loop is to split the range to MTRR - compliant aligned range.
        //
        if (Mtrrs == NULL) {
          // One MTRR per vertex segment inside the high-priority range.
          Weight[M (Start, Stop)] += (UINT8)(SubStop - SubStart);
        } else {
          while (SubStart != SubStop) {
            Status = MtrrLibAppendVariableMtrr (
                       Mtrrs,
                       MtrrCapacity,
                       MtrrCount,
                       Vertices[SubStart].Address,
                       Vertices[SubStart].Length,
                       Vertices[SubStart].Type
                       );
            if (RETURN_ERROR (Status)) {
              return Status;
            }

            SubStart++;
          }
        }
      } else {
        //
        // Three types within [Start, Stop): solve the inner two-type
        // sub-problem [SubStart, SubStop) with a shortest-path search
        // that also counts optional weights.
        //
        ASSERT (TypeCount == 3);
        MtrrLibCalculateLeastMtrrs (VertexCount, Vertices, Weight, SubStart, SubStop, TRUE);

        if (Mtrrs == NULL) {
          Weight[M (Start, Stop)] += Vertices[SubStop].Weight;
        } else {
          // When we need to collect the optimal path from SubStart to SubStop
          while (SubStop != SubStart) {
            Cur     = SubStop;
            Pre     = Vertices[Cur].Previous;
            SubStop = Pre;

            if (Weight[M (Pre, Cur)] + Weight[O (Pre, Cur)] != 0) {
              // A non-adjacent hop is itself a subtractive cover, so it
              // uses the lowest of the precedent types.
              Status = MtrrLibAppendVariableMtrr (
                         Mtrrs,
                         MtrrCapacity,
                         MtrrCount,
                         Vertices[Pre].Address,
                         Vertices[Cur].Address - Vertices[Pre].Address,
                         (Pre != Cur - 1) ? LowestPrecedentType : Vertices[Pre].Type
                         );
              if (RETURN_ERROR (Status)) {
                return Status;
              }
            }

            if (Pre != Cur - 1) {
              //
              // Recurse to emit the overlay MTRRs inside the hop.
              //
              Status = MtrrLibCalculateSubtractivePath (
                         DefaultType,
                         A0,
                         Ranges,
                         RangeCount,
                         VertexCount,
                         Vertices,
                         Weight,
                         Pre,
                         Cur,
                         PrecedentTypes,
                         2,
                         Mtrrs,
                         MtrrCapacity,
                         MtrrCount
                         );
              if (RETURN_ERROR (Status)) {
                return Status;
              }
            }
          }
        }
      }

      //
      // Reset HBase, HLength
      //
      HBase   = MAX_UINT64;
      HLength = 0;
    }
  }

  return RETURN_SUCCESS;
}
1493 | \r | |
/**
  Calculate MTRR settings to cover the specified memory ranges.

  The address space is first split into "vertices" at every MTRR-compliant
  alignment boundary; a weight matrix between vertices is then built (upper
  triangle M() = mandatory MTRR count, lower triangle O() = optional count),
  and a shortest-path walk selects the minimal set of MTRRs.

  @param DefaultType  Default memory type.
  @param A0           Alignment to use when base address is 0.
  @param Ranges       Memory range array holding the memory type
                      settings for all memory address.
  @param RangeCount   Count of memory ranges.
  @param Scratch      A temporary scratch buffer that is used to perform the calculation.
                      This is an optional parameter that may be NULL.
  @param ScratchSize  Pointer to the size in bytes of the scratch buffer.
                      It may be updated to the actual required size when the calculation
                      needs more scratch buffer.
  @param Mtrrs        Array holding all MTRR settings.
  @param MtrrCapacity Capacity of the MTRR array.
  @param MtrrCount    The count of MTRR settings in array.

  @retval RETURN_SUCCESS          Variable MTRRs are allocated successfully.
  @retval RETURN_OUT_OF_RESOURCES Count of variable MTRRs exceeds capacity.
  @retval RETURN_BUFFER_TOO_SMALL The scratch buffer is too small for MTRR calculation.
**/
RETURN_STATUS
MtrrLibCalculateMtrrs (
  IN MTRR_MEMORY_CACHE_TYPE   DefaultType,
  IN UINT64                   A0,
  IN CONST MTRR_MEMORY_RANGE  *Ranges,
  IN UINTN                    RangeCount,
  IN VOID                     *Scratch,
  IN OUT UINTN                *ScratchSize,
  IN OUT MTRR_MEMORY_RANGE    *Mtrrs,
  IN UINT32                   MtrrCapacity,
  IN OUT UINT32               *MtrrCount
  )
{
  UINT64            Base0;
  UINT64            Base1;
  UINTN             Index;
  UINT64            Base;
  UINT64            Length;
  UINT64            Alignment;
  UINT64            SubLength;
  MTRR_LIB_ADDRESS  *Vertices;
  UINT8             *Weight;
  UINT32            VertexIndex;
  UINT32            VertexCount;
  UINTN             RequiredScratchSize;
  UINT8             TypeCount;
  UINT16            Start;
  UINT16            Stop;
  UINT8             Type;
  RETURN_STATUS     Status;

  Base0 = Ranges[0].BaseAddress;
  Base1 = Ranges[RangeCount - 1].BaseAddress + Ranges[RangeCount - 1].Length;
  MTRR_LIB_ASSERT_ALIGNED (Base0, Base1 - Base0);

  //
  // Count the number of vertices.
  // Each range is split into MTRR-compliant pieces: the largest alignment
  // allowed at the current base, clipped to a power of two not exceeding
  // the remaining length. Vertices are only stored while they fit in the
  // scratch buffer; counting continues regardless so the required size
  // can be reported.
  //
  Vertices = (MTRR_LIB_ADDRESS *)Scratch;
  for (VertexIndex = 0, Index = 0; Index < RangeCount; Index++) {
    Base   = Ranges[Index].BaseAddress;
    Length = Ranges[Index].Length;
    while (Length != 0) {
      Alignment = MtrrLibBiggestAlignment (Base, A0);
      SubLength = Alignment;
      if (SubLength > Length) {
        SubLength = GetPowerOfTwo64 (Length);
      }

      if (VertexIndex < *ScratchSize / sizeof (*Vertices)) {
        Vertices[VertexIndex].Address   = Base;
        Vertices[VertexIndex].Alignment = Alignment;
        Vertices[VertexIndex].Type      = Ranges[Index].Type;
        Vertices[VertexIndex].Length    = SubLength;
      }

      Base        += SubLength;
      Length      -= SubLength;
      VertexIndex++;
    }
  }

  //
  // Vertices[VertexIndex] = Base1, so whole vertex count is (VertexIndex + 1).
  //
  VertexCount = VertexIndex + 1;
  DEBUG ((
    DEBUG_CACHE,
    "  Count of vertices (%016llx - %016llx) = %d\n",
    Ranges[0].BaseAddress,
    Ranges[RangeCount - 1].BaseAddress + Ranges[RangeCount - 1].Length,
    VertexCount
    ));
  ASSERT (VertexCount < MAX_UINT16);

  RequiredScratchSize = VertexCount * sizeof (*Vertices) + VertexCount * VertexCount * sizeof (*Weight);
  if (*ScratchSize < RequiredScratchSize) {
    *ScratchSize = RequiredScratchSize;
    return RETURN_BUFFER_TOO_SMALL;
  }

  Vertices[VertexCount - 1].Address = Base1;

  //
  // The weight matrix lives in scratch immediately after the vertices.
  //
  Weight = (UINT8 *)&Vertices[VertexCount];
  for (VertexIndex = 0; VertexIndex < VertexCount; VertexIndex++) {
    //
    // Set optional weight between vertices and self->self to 0
    //
    SetMem (&Weight[M (VertexIndex, 0)], VertexIndex + 1, 0);
    //
    // Set mandatory weight between vertices to MAX_WEIGHT
    //
    SetMem (&Weight[M (VertexIndex, VertexIndex + 1)], VertexCount - VertexIndex - 1, MAX_WEIGHT);

    // Final result looks like:
    //   00 FF FF FF
    //   00 00 FF FF
    //   00 00 00 FF
    //   00 00 00 00
  }

  //
  // Set mandatory weight and optional weight for adjacent vertices
  //
  for (VertexIndex = 0; VertexIndex < VertexCount - 1; VertexIndex++) {
    if (Vertices[VertexIndex].Type != DefaultType) {
      Weight[M (VertexIndex, VertexIndex + 1)] = 1;
      Weight[O (VertexIndex, VertexIndex + 1)] = 0;
    } else {
      Weight[M (VertexIndex, VertexIndex + 1)] = 0;
      Weight[O (VertexIndex, VertexIndex + 1)] = 1;
    }
  }

  //
  // Fill in non-adjacent edges: try covering [Start, Stop) subtractively,
  // first for 2-type spans, then 3-type spans.
  //
  for (TypeCount = 2; TypeCount <= 3; TypeCount++) {
    for (Start = 0; Start < VertexCount; Start++) {
      for (Stop = Start + 2; Stop < VertexCount; Stop++) {
        ASSERT (Vertices[Stop].Address > Vertices[Start].Address);
        Length = Vertices[Stop].Address - Vertices[Start].Address;
        if (Length > Vertices[Start].Alignment) {
          //
          // Pickup a new Start when [Start, Stop) cannot be described by one MTRR.
          //
          break;
        }

        if ((Weight[M (Start, Stop)] == MAX_WEIGHT) && MtrrLibIsPowerOfTwo (Length)) {
          if (MtrrLibGetNumberOfTypes (
                Ranges,
                RangeCount,
                Vertices[Start].Address,
                Vertices[Stop].Address - Vertices[Start].Address,
                &Type
                ) == TypeCount)
          {
            //
            // Update the Weight[Start, Stop] using subtractive path.
            //
            MtrrLibCalculateSubtractivePath (
              DefaultType,
              A0,
              Ranges,
              RangeCount,
              (UINT16)VertexCount,
              Vertices,
              Weight,
              Start,
              Stop,
              Type,
              TypeCount,
              NULL,
              0,
              NULL
              );
          } else if (TypeCount == 2) {
            //
            // Pick up a new Start when we expect 2-type range, but 3-type range is met.
            // Because no matter how Stop is increased, we always meet 3-type range.
            //
            break;
          }
        }
      }
    }
  }

  //
  // Solve the overall shortest path (mandatory weights only), then walk
  // Previous links back from the last vertex emitting MTRRs.
  //
  Status = RETURN_SUCCESS;
  MtrrLibCalculateLeastMtrrs ((UINT16)VertexCount, Vertices, Weight, 0, (UINT16)VertexCount - 1, FALSE);
  Stop = (UINT16)VertexCount - 1;
  while (Stop != 0) {
    Start     = Vertices[Stop].Previous;
    TypeCount = MAX_UINT8;
    Type      = 0;
    if (Weight[M (Start, Stop)] != 0) {
      TypeCount = MtrrLibGetNumberOfTypes (Ranges, RangeCount, Vertices[Start].Address, Vertices[Stop].Address - Vertices[Start].Address, &Type);
      Status    = MtrrLibAppendVariableMtrr (
                    Mtrrs,
                    MtrrCapacity,
                    MtrrCount,
                    Vertices[Start].Address,
                    Vertices[Stop].Address - Vertices[Start].Address,
                    MtrrLibLowestType (Type)
                    );
      if (RETURN_ERROR (Status)) {
        break;
      }
    }

    if (Start != Stop - 1) {
      //
      // substractive path
      //
      if (TypeCount == MAX_UINT8) {
        // Not computed above (zero-weight hop); compute it now.
        TypeCount = MtrrLibGetNumberOfTypes (
                      Ranges,
                      RangeCount,
                      Vertices[Start].Address,
                      Vertices[Stop].Address - Vertices[Start].Address,
                      &Type
                      );
      }

      Status = MtrrLibCalculateSubtractivePath (
                 DefaultType,
                 A0,
                 Ranges,
                 RangeCount,
                 (UINT16)VertexCount,
                 Vertices,
                 Weight,
                 Start,
                 Stop,
                 Type,
                 TypeCount,
                 Mtrrs,
                 MtrrCapacity,
                 MtrrCount
                 );
      if (RETURN_ERROR (Status)) {
        break;
      }
    }

    Stop = Start;
  }

  return Status;
}
1743 | \r | |
1744 | /**\r | |
1745 | Apply the fixed MTRR settings to memory range array.\r | |
1746 | \r | |
1747 | @param Fixed The fixed MTRR settings.\r | |
1748 | @param Ranges Return the memory range array holding memory type\r | |
1749 | settings for all memory address.\r | |
1750 | @param RangeCapacity The capacity of memory range array.\r | |
1751 | @param RangeCount Return the count of memory range.\r | |
1752 | \r | |
1753 | @retval RETURN_SUCCESS The memory range array is returned successfully.\r | |
1754 | @retval RETURN_OUT_OF_RESOURCES The count of memory ranges exceeds capacity.\r | |
1755 | **/\r | |
1756 | RETURN_STATUS\r | |
1757 | MtrrLibApplyFixedMtrrs (\r | |
1758 | IN MTRR_FIXED_SETTINGS *Fixed,\r | |
1759 | IN OUT MTRR_MEMORY_RANGE *Ranges,\r | |
1760 | IN UINTN RangeCapacity,\r | |
1761 | IN OUT UINTN *RangeCount\r | |
1762 | )\r | |
1763 | {\r | |
1764 | RETURN_STATUS Status;\r | |
1765 | UINTN MsrIndex;\r | |
1766 | UINTN Index;\r | |
1767 | MTRR_MEMORY_CACHE_TYPE MemoryType;\r | |
1768 | UINT64 Base;\r | |
1769 | \r | |
1770 | Base = 0;\r | |
1771 | for (MsrIndex = 0; MsrIndex < ARRAY_SIZE (mMtrrLibFixedMtrrTable); MsrIndex++) {\r | |
1772 | ASSERT (Base == mMtrrLibFixedMtrrTable[MsrIndex].BaseAddress);\r | |
1773 | for (Index = 0; Index < sizeof (UINT64); Index++) {\r | |
1774 | MemoryType = (MTRR_MEMORY_CACHE_TYPE)((UINT8 *)(&Fixed->Mtrr[MsrIndex]))[Index];\r | |
1775 | Status = MtrrLibSetMemoryType (\r | |
1776 | Ranges,\r | |
1777 | RangeCapacity,\r | |
1778 | RangeCount,\r | |
1779 | Base,\r | |
1780 | mMtrrLibFixedMtrrTable[MsrIndex].Length,\r | |
1781 | MemoryType\r | |
1782 | );\r | |
1783 | if (Status == RETURN_OUT_OF_RESOURCES) {\r | |
1784 | return Status;\r | |
1785 | }\r | |
1786 | \r | |
1787 | Base += mMtrrLibFixedMtrrTable[MsrIndex].Length;\r | |
1788 | }\r | |
1789 | }\r | |
1790 | \r | |
1791 | ASSERT (Base == BASE_1MB);\r | |
1792 | return RETURN_SUCCESS;\r | |
1793 | }\r | |
1794 | \r | |
1795 | /**\r | |
1796 | Apply the variable MTRR settings to memory range array.\r | |
1797 | \r | |
1798 | @param VariableMtrr The variable MTRR array.\r | |
1799 | @param VariableMtrrCount The count of variable MTRRs.\r | |
1800 | @param Ranges Return the memory range array with new MTRR settings applied.\r | |
1801 | @param RangeCapacity The capacity of memory range array.\r | |
1802 | @param RangeCount Return the count of memory range.\r | |
1803 | \r | |
1804 | @retval RETURN_SUCCESS The memory range array is returned successfully.\r | |
1805 | @retval RETURN_OUT_OF_RESOURCES The count of memory ranges exceeds capacity.\r | |
1806 | **/\r | |
1807 | RETURN_STATUS\r | |
1808 | MtrrLibApplyVariableMtrrs (\r | |
1809 | IN CONST MTRR_MEMORY_RANGE *VariableMtrr,\r | |
1810 | IN UINT32 VariableMtrrCount,\r | |
1811 | IN OUT MTRR_MEMORY_RANGE *Ranges,\r | |
1812 | IN UINTN RangeCapacity,\r | |
1813 | IN OUT UINTN *RangeCount\r | |
1814 | )\r | |
1815 | {\r | |
1816 | RETURN_STATUS Status;\r | |
1817 | UINTN Index;\r | |
1818 | \r | |
1819 | //\r | |
1820 | // WT > WB\r | |
1821 | // UC > *\r | |
1822 | // UC > * (except WB, UC) > WB\r | |
1823 | //\r | |
1824 | \r | |
1825 | //\r | |
1826 | // 1. Set WB\r | |
1827 | //\r | |
1828 | for (Index = 0; Index < VariableMtrrCount; Index++) {\r | |
1829 | if ((VariableMtrr[Index].Length != 0) && (VariableMtrr[Index].Type == CacheWriteBack)) {\r | |
1830 | Status = MtrrLibSetMemoryType (\r | |
1831 | Ranges,\r | |
1832 | RangeCapacity,\r | |
1833 | RangeCount,\r | |
1834 | VariableMtrr[Index].BaseAddress,\r | |
1835 | VariableMtrr[Index].Length,\r | |
1836 | VariableMtrr[Index].Type\r | |
1837 | );\r | |
1838 | if (Status == RETURN_OUT_OF_RESOURCES) {\r | |
1839 | return Status;\r | |
1840 | }\r | |
1841 | }\r | |
1842 | }\r | |
1843 | \r | |
1844 | //\r | |
1845 | // 2. Set other types than WB or UC\r | |
1846 | //\r | |
1847 | for (Index = 0; Index < VariableMtrrCount; Index++) {\r | |
1848 | if ((VariableMtrr[Index].Length != 0) &&\r | |
1849 | (VariableMtrr[Index].Type != CacheWriteBack) && (VariableMtrr[Index].Type != CacheUncacheable))\r | |
1850 | {\r | |
1851 | Status = MtrrLibSetMemoryType (\r | |
1852 | Ranges,\r | |
1853 | RangeCapacity,\r | |
1854 | RangeCount,\r | |
1855 | VariableMtrr[Index].BaseAddress,\r | |
1856 | VariableMtrr[Index].Length,\r | |
1857 | VariableMtrr[Index].Type\r | |
1858 | );\r | |
1859 | if (Status == RETURN_OUT_OF_RESOURCES) {\r | |
1860 | return Status;\r | |
1861 | }\r | |
1862 | }\r | |
1863 | }\r | |
1864 | \r | |
1865 | //\r | |
1866 | // 3. Set UC\r | |
1867 | //\r | |
1868 | for (Index = 0; Index < VariableMtrrCount; Index++) {\r | |
1869 | if ((VariableMtrr[Index].Length != 0) && (VariableMtrr[Index].Type == CacheUncacheable)) {\r | |
1870 | Status = MtrrLibSetMemoryType (\r | |
1871 | Ranges,\r | |
1872 | RangeCapacity,\r | |
1873 | RangeCount,\r | |
1874 | VariableMtrr[Index].BaseAddress,\r | |
1875 | VariableMtrr[Index].Length,\r | |
1876 | VariableMtrr[Index].Type\r | |
1877 | );\r | |
1878 | if (Status == RETURN_OUT_OF_RESOURCES) {\r | |
1879 | return Status;\r | |
1880 | }\r | |
1881 | }\r | |
1882 | }\r | |
1883 | \r | |
1884 | return RETURN_SUCCESS;\r | |
1885 | }\r | |
1886 | \r | |
1887 | /**\r | |
1888 | Return the memory type bit mask that's compatible to first type in the Ranges.\r | |
1889 | \r | |
1890 | @param Ranges Memory range array holding the memory type\r | |
1891 | settings for all memory address.\r | |
1892 | @param RangeCount Count of memory ranges.\r | |
1893 | \r | |
1894 | @return Compatible memory type bit mask.\r | |
1895 | **/\r | |
1896 | UINT8\r | |
1897 | MtrrLibGetCompatibleTypes (\r | |
1898 | IN CONST MTRR_MEMORY_RANGE *Ranges,\r | |
1899 | IN UINTN RangeCount\r | |
1900 | )\r | |
1901 | {\r | |
1902 | ASSERT (RangeCount != 0);\r | |
1903 | \r | |
1904 | switch (Ranges[0].Type) {\r | |
1905 | case CacheWriteBack:\r | |
1906 | case CacheWriteThrough:\r | |
1907 | return (1 << CacheWriteBack) | (1 << CacheWriteThrough) | (1 << CacheUncacheable);\r | |
1908 | break;\r | |
1909 | \r | |
1910 | case CacheWriteCombining:\r | |
1911 | case CacheWriteProtected:\r | |
1912 | return (1 << Ranges[0].Type) | (1 << CacheUncacheable);\r | |
1913 | break;\r | |
1914 | \r | |
1915 | case CacheUncacheable:\r | |
1916 | if (RangeCount == 1) {\r | |
1917 | return (1 << CacheUncacheable);\r | |
1918 | }\r | |
1919 | \r | |
1920 | return MtrrLibGetCompatibleTypes (&Ranges[1], RangeCount - 1);\r | |
1921 | break;\r | |
1922 | \r | |
1923 | case CacheInvalid:\r | |
1924 | default:\r | |
1925 | ASSERT (FALSE);\r | |
1926 | break;\r | |
1927 | }\r | |
1928 | \r | |
1929 | return 0;\r | |
1930 | }\r | |
1931 | \r | |
1932 | /**\r | |
1933 | Overwrite the destination MTRR settings with the source MTRR settings.\r | |
1934 | This routine is to make sure the modification to destination MTRR settings\r | |
1935 | is as small as possible.\r | |
1936 | \r | |
1937 | @param DstMtrrs Destination MTRR settings.\r | |
1938 | @param DstMtrrCount Count of destination MTRR settings.\r | |
1939 | @param SrcMtrrs Source MTRR settings.\r | |
1940 | @param SrcMtrrCount Count of source MTRR settings.\r | |
1941 | @param Modified Flag array to indicate which destination MTRR setting is modified.\r | |
1942 | **/\r | |
1943 | VOID\r | |
1944 | MtrrLibMergeVariableMtrr (\r | |
1945 | MTRR_MEMORY_RANGE *DstMtrrs,\r | |
1946 | UINT32 DstMtrrCount,\r | |
1947 | MTRR_MEMORY_RANGE *SrcMtrrs,\r | |
1948 | UINT32 SrcMtrrCount,\r | |
1949 | BOOLEAN *Modified\r | |
1950 | )\r | |
1951 | {\r | |
1952 | UINT32 DstIndex;\r | |
1953 | UINT32 SrcIndex;\r | |
1954 | \r | |
1955 | ASSERT (SrcMtrrCount <= DstMtrrCount);\r | |
1956 | \r | |
1957 | for (DstIndex = 0; DstIndex < DstMtrrCount; DstIndex++) {\r | |
1958 | Modified[DstIndex] = FALSE;\r | |
1959 | \r | |
1960 | if (DstMtrrs[DstIndex].Length == 0) {\r | |
1961 | continue;\r | |
1962 | }\r | |
1963 | \r | |
1964 | for (SrcIndex = 0; SrcIndex < SrcMtrrCount; SrcIndex++) {\r | |
1965 | if ((DstMtrrs[DstIndex].BaseAddress == SrcMtrrs[SrcIndex].BaseAddress) &&\r | |
1966 | (DstMtrrs[DstIndex].Length == SrcMtrrs[SrcIndex].Length) &&\r | |
1967 | (DstMtrrs[DstIndex].Type == SrcMtrrs[SrcIndex].Type))\r | |
1968 | {\r | |
1969 | break;\r | |
1970 | }\r | |
1971 | }\r | |
1972 | \r | |
1973 | if (SrcIndex == SrcMtrrCount) {\r | |
1974 | //\r | |
1975 | // Remove the one from DstMtrrs which is not in SrcMtrrs\r | |
1976 | //\r | |
1977 | DstMtrrs[DstIndex].Length = 0;\r | |
1978 | Modified[DstIndex] = TRUE;\r | |
1979 | } else {\r | |
1980 | //\r | |
1981 | // Remove the one from SrcMtrrs which is also in DstMtrrs\r | |
1982 | //\r | |
1983 | SrcMtrrs[SrcIndex].Length = 0;\r | |
1984 | }\r | |
1985 | }\r | |
1986 | \r | |
1987 | //\r | |
1988 | // Now valid MTRR only exists in either DstMtrrs or SrcMtrrs.\r | |
1989 | // Merge MTRRs from SrcMtrrs to DstMtrrs\r | |
1990 | //\r | |
1991 | DstIndex = 0;\r | |
1992 | for (SrcIndex = 0; SrcIndex < SrcMtrrCount; SrcIndex++) {\r | |
1993 | if (SrcMtrrs[SrcIndex].Length != 0) {\r | |
1994 | //\r | |
1995 | // Find the empty slot in DstMtrrs\r | |
1996 | //\r | |
1997 | while (DstIndex < DstMtrrCount) {\r | |
1998 | if (DstMtrrs[DstIndex].Length == 0) {\r | |
1999 | break;\r | |
2000 | }\r | |
2001 | \r | |
2002 | DstIndex++;\r | |
2003 | }\r | |
2004 | \r | |
2005 | ASSERT (DstIndex < DstMtrrCount);\r | |
2006 | CopyMem (&DstMtrrs[DstIndex], &SrcMtrrs[SrcIndex], sizeof (SrcMtrrs[0]));\r | |
2007 | Modified[DstIndex] = TRUE;\r | |
2008 | }\r | |
2009 | }\r | |
2010 | }\r | |
2011 | \r | |
/**
  Calculate the variable MTRR settings for all memory ranges.

  @param DefaultType          Default memory type.
  @param A0                   Alignment to use when base address is 0.
  @param Ranges               Memory range array holding the memory type
                              settings for all memory address.
                              NOTE: the array is consumed in place - entries are
                              shrunk/advanced as they are converted to MTRRs.
  @param RangeCount           Count of memory ranges.
  @param Scratch              Scratch buffer to be used in MTRR calculation.
  @param ScratchSize          Pointer to the size of scratch buffer.
  @param VariableMtrr         Array holding all MTRR settings.
  @param VariableMtrrCapacity Capacity of the MTRR array.
  @param VariableMtrrCount    The count of MTRR settings in array.

  @retval RETURN_SUCCESS          Variable MTRRs are allocated successfully.
  @retval RETURN_OUT_OF_RESOURCES Count of variable MTRRs exceeds capacity.
  @retval RETURN_BUFFER_TOO_SMALL The scratch buffer is too small for MTRR calculation.
                                  The required scratch buffer size is returned through ScratchSize.
**/
RETURN_STATUS
MtrrLibSetMemoryRanges (
  IN MTRR_MEMORY_CACHE_TYPE  DefaultType,
  IN UINT64                  A0,
  IN MTRR_MEMORY_RANGE       *Ranges,
  IN UINTN                   RangeCount,
  IN VOID                    *Scratch,
  IN OUT UINTN               *ScratchSize,
  OUT MTRR_MEMORY_RANGE      *VariableMtrr,
  IN UINT32                  VariableMtrrCapacity,
  OUT UINT32                 *VariableMtrrCount
  )
{
  RETURN_STATUS  Status;
  UINT32         Index;
  UINT64         Base0;
  UINT64         Base1;
  UINT64         Alignment;
  UINT8          CompatibleTypes;
  UINT64         Length;
  UINT32         End;
  UINTN          ActualScratchSize;
  UINTN          BiggestScratchSize;

  *VariableMtrrCount = 0;

  //
  // The whole Ranges may need multiple calls of MtrrLibCalculateMtrrs(),
  // and each call may need a different scratch buffer size.
  // When the provided scratch buffer is too small for some call, remember the
  // biggest requested size (BiggestScratchSize) and keep iterating, so the
  // total required size - not just the first failing size - can be reported
  // to the caller at the end.
  //
  BiggestScratchSize = 0;

  for (Index = 0; Index < RangeCount;) {
    Base0 = Ranges[Index].BaseAddress;

    //
    // Full step is optimal: peel off maximal naturally-aligned chunks that
    // each map to exactly one MTRR (no subtraction needed).
    //
    while (Index < RangeCount) {
      ASSERT (Ranges[Index].BaseAddress == Base0);
      Alignment = MtrrLibBiggestAlignment (Base0, A0);
      while (Base0 + Alignment <= Ranges[Index].BaseAddress + Ranges[Index].Length) {
        //
        // Once BiggestScratchSize exceeds *ScratchSize, MTRR emission is
        // skipped: the remaining iterations only compute the required size.
        //
        if ((BiggestScratchSize <= *ScratchSize) && (Ranges[Index].Type != DefaultType)) {
          Status = MtrrLibAppendVariableMtrr (
                     VariableMtrr,
                     VariableMtrrCapacity,
                     VariableMtrrCount,
                     Base0,
                     Alignment,
                     Ranges[Index].Type
                     );
          if (RETURN_ERROR (Status)) {
            return Status;
          }
        }

        Base0    += Alignment;
        Alignment = MtrrLibBiggestAlignment (Base0, A0);
      }

      //
      // Remove the above range from Ranges[Index]
      //
      Ranges[Index].Length     -= Base0 - Ranges[Index].BaseAddress;
      Ranges[Index].BaseAddress = Base0;
      if (Ranges[Index].Length != 0) {
        break;
      } else {
        Index++;
      }
    }

    if (Index == RangeCount) {
      break;
    }

    //
    // Find continuous ranges [Base0, Base1) which could be combined by MTRR.
    // Per SDM, the compatible types between [B0, B1) are:
    //   UC, *
    //   WB, WT
    //   UC, WB, WT
    //
    CompatibleTypes = MtrrLibGetCompatibleTypes (&Ranges[Index], RangeCount - Index);

    End = Index; // End points to last one that matches the CompatibleTypes.
    while (End + 1 < RangeCount) {
      if (((1 << Ranges[End + 1].Type) & CompatibleTypes) == 0) {
        break;
      }

      End++;
    }

    //
    // Clamp the window end (Base1) to both the alignment of Base0 and the
    // largest power-of-two length that fits in the compatible span.
    //
    Alignment = MtrrLibBiggestAlignment (Base0, A0);
    Length    = GetPowerOfTwo64 (Ranges[End].BaseAddress + Ranges[End].Length - Base0);
    Base1     = Base0 + MIN (Alignment, Length);

    //
    // Base1 may not be in Ranges[End]. Update End to the range Base1 belongs to.
    //
    End = Index;
    while (End + 1 < RangeCount) {
      if (Base1 <= Ranges[End + 1].BaseAddress) {
        break;
      }

      End++;
    }

    //
    // Temporarily truncate Ranges[End] at Base1 so MtrrLibCalculateMtrrs()
    // only sees the [Base0, Base1) window.  The original length is saved in
    // Length and the tail part is restored after the call.
    //
    Length             = Ranges[End].Length;
    Ranges[End].Length = Base1 - Ranges[End].BaseAddress;
    ActualScratchSize  = *ScratchSize;
    Status             = MtrrLibCalculateMtrrs (
                           DefaultType,
                           A0,
                           &Ranges[Index],
                           End + 1 - Index,
                           Scratch,
                           &ActualScratchSize,
                           VariableMtrr,
                           VariableMtrrCapacity,
                           VariableMtrrCount
                           );
    if (Status == RETURN_BUFFER_TOO_SMALL) {
      BiggestScratchSize = MAX (BiggestScratchSize, ActualScratchSize);
      //
      // Ignore this error, because we need to calculate the biggest
      // scratch buffer size.
      //
      Status = RETURN_SUCCESS;
    }

    if (RETURN_ERROR (Status)) {
      return Status;
    }

    if (Length != Ranges[End].Length) {
      //
      // Restore the portion of Ranges[End] beyond Base1 and continue
      // processing from it.
      //
      Ranges[End].BaseAddress = Base1;
      Ranges[End].Length      = Length - Ranges[End].Length;
      Index                   = End;
    } else {
      Index = End + 1;
    }
  }

  if (*ScratchSize < BiggestScratchSize) {
    *ScratchSize = BiggestScratchSize;
    return RETURN_BUFFER_TOO_SMALL;
  }

  return RETURN_SUCCESS;
}
2187 | \r | |
2188 | /**\r | |
2189 | Set the below-1MB memory attribute to fixed MTRR buffer.\r | |
2190 | Modified flag array indicates which fixed MTRR is modified.\r | |
2191 | \r | |
2192 | @param [in, out] ClearMasks The bits (when set) to clear in the fixed MTRR MSR.\r | |
2193 | @param [in, out] OrMasks The bits to set in the fixed MTRR MSR.\r | |
2194 | @param [in] BaseAddress Base address.\r | |
2195 | @param [in] Length Length.\r | |
2196 | @param [in] Type Memory type.\r | |
2197 | \r | |
2198 | @retval RETURN_SUCCESS The memory attribute is set successfully.\r | |
2199 | @retval RETURN_UNSUPPORTED The requested range or cache type was invalid\r | |
2200 | for the fixed MTRRs.\r | |
2201 | **/\r | |
2202 | RETURN_STATUS\r | |
2203 | MtrrLibSetBelow1MBMemoryAttribute (\r | |
2204 | IN OUT UINT64 *ClearMasks,\r | |
2205 | IN OUT UINT64 *OrMasks,\r | |
2206 | IN PHYSICAL_ADDRESS BaseAddress,\r | |
2207 | IN UINT64 Length,\r | |
2208 | IN MTRR_MEMORY_CACHE_TYPE Type\r | |
2209 | )\r | |
2210 | {\r | |
2211 | RETURN_STATUS Status;\r | |
2212 | UINT32 MsrIndex;\r | |
2213 | UINT64 ClearMask;\r | |
2214 | UINT64 OrMask;\r | |
2215 | \r | |
2216 | ASSERT (BaseAddress < BASE_1MB);\r | |
2217 | \r | |
2218 | MsrIndex = (UINT32)-1;\r | |
2219 | while ((BaseAddress < BASE_1MB) && (Length != 0)) {\r | |
2220 | Status = MtrrLibProgramFixedMtrr (Type, &BaseAddress, &Length, &MsrIndex, &ClearMask, &OrMask);\r | |
2221 | if (RETURN_ERROR (Status)) {\r | |
2222 | return Status;\r | |
2223 | }\r | |
2224 | \r | |
2225 | ClearMasks[MsrIndex] = ClearMasks[MsrIndex] | ClearMask;\r | |
2226 | OrMasks[MsrIndex] = (OrMasks[MsrIndex] & ~ClearMask) | OrMask;\r | |
2227 | }\r | |
2228 | \r | |
2229 | return RETURN_SUCCESS;\r | |
2230 | }\r | |
2231 | \r | |
/**
  This function attempts to set the attributes into MTRR setting buffer for multiple memory ranges.

  @param[in, out]  MtrrSetting  MTRR setting buffer to be set.
                                When NULL, the attributes are applied directly to
                                the hardware MSRs instead of a buffer.
  @param[in]       Scratch      A temporary scratch buffer that is used to perform the calculation.
  @param[in, out]  ScratchSize  Pointer to the size in bytes of the scratch buffer.
                                It may be updated to the actual required size when the calculation
                                needs more scratch buffer.
  @param[in]       Ranges       Pointer to an array of MTRR_MEMORY_RANGE.
                                When range overlap happens, the last one takes higher priority.
                                When the function returns, either all the attributes are set successfully,
                                or none of them is set.
  @param[in]       RangeCount   Count of MTRR_MEMORY_RANGE.

  @retval RETURN_SUCCESS            The attributes were set for all the memory ranges.
  @retval RETURN_INVALID_PARAMETER  Length in any range is zero.
  @retval RETURN_UNSUPPORTED        The processor does not support one or more bytes of the
                                    memory resource range specified by BaseAddress and Length in any range.
  @retval RETURN_UNSUPPORTED        The bit mask of attributes is not supported for the memory resource
                                    range specified by BaseAddress and Length in any range.
  @retval RETURN_OUT_OF_RESOURCES   There are not enough system resources to modify the attributes of
                                    the memory resource ranges.
  @retval RETURN_ACCESS_DENIED      The attributes for the memory resource range specified by
                                    BaseAddress and Length cannot be modified.
  @retval RETURN_BUFFER_TOO_SMALL   The scratch buffer is too small for MTRR calculation.
**/
RETURN_STATUS
EFIAPI
MtrrSetMemoryAttributesInMtrrSettings (
  IN OUT MTRR_SETTINGS            *MtrrSetting,
  IN VOID                         *Scratch,
  IN OUT UINTN                    *ScratchSize,
  IN CONST MTRR_MEMORY_RANGE      *Ranges,
  IN UINTN                        RangeCount
  )
{
  RETURN_STATUS           Status;
  UINT32                  Index;
  UINT64                  BaseAddress;
  UINT64                  Length;
  BOOLEAN                 Above1MbExist;

  UINT64                  MtrrValidBitsMask;
  UINT64                  MtrrValidAddressMask;
  MTRR_MEMORY_CACHE_TYPE  DefaultType;
  MTRR_VARIABLE_SETTINGS  VariableSettings;
  MTRR_MEMORY_RANGE       WorkingRanges[2 * ARRAY_SIZE (MtrrSetting->Variables.Mtrr) + 2];
  UINTN                   WorkingRangeCount;
  BOOLEAN                 Modified;
  MTRR_VARIABLE_SETTING   VariableSetting;
  UINT32                  OriginalVariableMtrrCount;
  UINT32                  FirmwareVariableMtrrCount;
  UINT32                  WorkingVariableMtrrCount;
  MTRR_MEMORY_RANGE       OriginalVariableMtrr[ARRAY_SIZE (MtrrSetting->Variables.Mtrr)];
  MTRR_MEMORY_RANGE       WorkingVariableMtrr[ARRAY_SIZE (MtrrSetting->Variables.Mtrr)];
  BOOLEAN                 VariableSettingModified[ARRAY_SIZE (MtrrSetting->Variables.Mtrr)];

  UINT64                  ClearMasks[ARRAY_SIZE (mMtrrLibFixedMtrrTable)];
  UINT64                  OrMasks[ARRAY_SIZE (mMtrrLibFixedMtrrTable)];

  MTRR_CONTEXT            MtrrContext;
  BOOLEAN                 MtrrContextValid;

  Status = RETURN_SUCCESS;
  MtrrLibInitializeMtrrMask (&MtrrValidBitsMask, &MtrrValidAddressMask);

  //
  // TRUE indicating the corresponding Variable setting needs modification in OriginalVariableMtrr.
  //
  SetMem (VariableSettingModified, ARRAY_SIZE (VariableSettingModified), FALSE);

  //
  // TRUE indicating the caller requests to set variable MTRRs
  // (some range extends above 1MB).
  //
  Above1MbExist             = FALSE;
  OriginalVariableMtrrCount = 0;

  //
  // 0. Dump the requests.
  //
  DEBUG_CODE_BEGIN ();
  DEBUG ((
    DEBUG_CACHE,
    "Mtrr: Set Mem Attribute to %a, ScratchSize = %x%a",
    (MtrrSetting == NULL) ? "Hardware" : "Buffer",
    *ScratchSize,
    (RangeCount <= 1) ? "," : "\n"
    ));
  for (Index = 0; Index < RangeCount; Index++) {
    DEBUG ((
      DEBUG_CACHE,
      " %a: [%016lx, %016lx)\n",
      mMtrrMemoryCacheTypeShortName[MIN (Ranges[Index].Type, CacheInvalid)],
      Ranges[Index].BaseAddress,
      Ranges[Index].BaseAddress + Ranges[Index].Length
      ));
  }

  DEBUG_CODE_END ();

  //
  // 1. Validate the parameters.
  //
  if (!IsMtrrSupported ()) {
    Status = RETURN_UNSUPPORTED;
    goto Exit;
  }

  for (Index = 0; Index < RangeCount; Index++) {
    if (Ranges[Index].Length == 0) {
      Status = RETURN_INVALID_PARAMETER;
      goto Exit;
    }

    if (((Ranges[Index].BaseAddress & ~MtrrValidAddressMask) != 0) ||
        ((((Ranges[Index].BaseAddress + Ranges[Index].Length) & ~MtrrValidAddressMask) != 0) &&
         ((Ranges[Index].BaseAddress + Ranges[Index].Length) != MtrrValidBitsMask + 1))
        )
    {
      //
      // Either the BaseAddress or the Limit doesn't follow the alignment requirement.
      // Note: It's still valid if Limit doesn't follow the alignment requirement but equals to MAX Address.
      //
      Status = RETURN_UNSUPPORTED;
      goto Exit;
    }

    if ((Ranges[Index].Type != CacheUncacheable) &&
        (Ranges[Index].Type != CacheWriteCombining) &&
        (Ranges[Index].Type != CacheWriteThrough) &&
        (Ranges[Index].Type != CacheWriteProtected) &&
        (Ranges[Index].Type != CacheWriteBack))
    {
      Status = RETURN_INVALID_PARAMETER;
      goto Exit;
    }

    if (Ranges[Index].BaseAddress + Ranges[Index].Length > BASE_1MB) {
      Above1MbExist = TRUE;
    }
  }

  //
  // 2. Apply the above-1MB memory attribute settings (variable MTRRs).
  //
  if (Above1MbExist) {
    //
    // 2.1. Read all variable MTRRs and convert to Ranges.
    //
    OriginalVariableMtrrCount = GetVariableMtrrCountWorker ();
    MtrrGetVariableMtrrWorker (MtrrSetting, OriginalVariableMtrrCount, &VariableSettings);
    MtrrLibGetRawVariableRanges (
      &VariableSettings,
      OriginalVariableMtrrCount,
      MtrrValidBitsMask,
      MtrrValidAddressMask,
      OriginalVariableMtrr
      );

    //
    // Start from a single range covering the whole address space with the
    // default type, then layer the existing variable MTRRs on top.
    //
    DefaultType                  = MtrrGetDefaultMemoryTypeWorker (MtrrSetting);
    WorkingRangeCount            = 1;
    WorkingRanges[0].BaseAddress = 0;
    WorkingRanges[0].Length      = MtrrValidBitsMask + 1;
    WorkingRanges[0].Type        = DefaultType;

    Status = MtrrLibApplyVariableMtrrs (
               OriginalVariableMtrr,
               OriginalVariableMtrrCount,
               WorkingRanges,
               ARRAY_SIZE (WorkingRanges),
               &WorkingRangeCount
               );
    ASSERT_RETURN_ERROR (Status);

    //
    // Reserve the PCD-configured number of variable MTRRs for OS/other use.
    //
    ASSERT (OriginalVariableMtrrCount >= PcdGet32 (PcdCpuNumberOfReservedVariableMtrrs));
    FirmwareVariableMtrrCount = OriginalVariableMtrrCount - PcdGet32 (PcdCpuNumberOfReservedVariableMtrrs);
    ASSERT (WorkingRangeCount <= 2 * FirmwareVariableMtrrCount + 1);

    //
    // 2.2. Force [0, 1M) to UC, so that it doesn't impact subtraction algorithm.
    //      (Below-1MB attributes are handled by the fixed MTRRs in step 3.)
    //
    Status = MtrrLibSetMemoryType (
               WorkingRanges,
               ARRAY_SIZE (WorkingRanges),
               &WorkingRangeCount,
               0,
               SIZE_1MB,
               CacheUncacheable
               );
    ASSERT (Status != RETURN_OUT_OF_RESOURCES);

    //
    // 2.3. Apply the new memory attribute settings to Ranges.
    //
    Modified = FALSE;
    for (Index = 0; Index < RangeCount; Index++) {
      BaseAddress = Ranges[Index].BaseAddress;
      Length      = Ranges[Index].Length;
      //
      // Clip each request to the above-1MB portion only.
      //
      if (BaseAddress < BASE_1MB) {
        if (Length <= BASE_1MB - BaseAddress) {
          continue;
        }

        Length     -= BASE_1MB - BaseAddress;
        BaseAddress = BASE_1MB;
      }

      Status = MtrrLibSetMemoryType (
                 WorkingRanges,
                 ARRAY_SIZE (WorkingRanges),
                 &WorkingRangeCount,
                 BaseAddress,
                 Length,
                 Ranges[Index].Type
                 );
      if (Status == RETURN_ALREADY_STARTED) {
        //
        // The requested type is already in effect for this range; nothing to change.
        //
        Status = RETURN_SUCCESS;
      } else if (Status == RETURN_OUT_OF_RESOURCES) {
        goto Exit;
      } else {
        ASSERT_RETURN_ERROR (Status);
        Modified = TRUE;
      }
    }

    if (Modified) {
      //
      // 2.4. Calculate the Variable MTRR settings based on the Ranges.
      //      Buffer Too Small may be returned if the scratch buffer size is insufficient.
      //
      Status = MtrrLibSetMemoryRanges (
                 DefaultType,
                 LShiftU64 (1, (UINTN)HighBitSet64 (MtrrValidBitsMask)),
                 WorkingRanges,
                 WorkingRangeCount,
                 Scratch,
                 ScratchSize,
                 WorkingVariableMtrr,
                 FirmwareVariableMtrrCount + 1,
                 &WorkingVariableMtrrCount
                 );
      if (RETURN_ERROR (Status)) {
        goto Exit;
      }

      //
      // 2.5. Remove the [0, 1MB) MTRR if it still exists (not merged with other range)
      //
      for (Index = 0; Index < WorkingVariableMtrrCount; Index++) {
        if ((WorkingVariableMtrr[Index].BaseAddress == 0) && (WorkingVariableMtrr[Index].Length == SIZE_1MB)) {
          ASSERT (WorkingVariableMtrr[Index].Type == CacheUncacheable);
          WorkingVariableMtrrCount--;
          CopyMem (
            &WorkingVariableMtrr[Index],
            &WorkingVariableMtrr[Index + 1],
            (WorkingVariableMtrrCount - Index) * sizeof (WorkingVariableMtrr[0])
            );
          break;
        }
      }

      if (WorkingVariableMtrrCount > FirmwareVariableMtrrCount) {
        Status = RETURN_OUT_OF_RESOURCES;
        goto Exit;
      }

      //
      // 2.6. Merge the WorkingVariableMtrr to OriginalVariableMtrr
      //      Make sure least modification is made to OriginalVariableMtrr.
      //
      MtrrLibMergeVariableMtrr (
        OriginalVariableMtrr,
        OriginalVariableMtrrCount,
        WorkingVariableMtrr,
        WorkingVariableMtrrCount,
        VariableSettingModified
        );
    }
  }

  //
  // 3. Apply the below-1MB memory attribute settings (fixed MTRRs).
  //
  // (Value & ~0 | 0) still equals to (Value), so zeroed masks are harmless no-ops.
  //
  ZeroMem (ClearMasks, sizeof (ClearMasks));
  ZeroMem (OrMasks, sizeof (OrMasks));
  for (Index = 0; Index < RangeCount; Index++) {
    if (Ranges[Index].BaseAddress >= BASE_1MB) {
      continue;
    }

    Status = MtrrLibSetBelow1MBMemoryAttribute (
               ClearMasks,
               OrMasks,
               Ranges[Index].BaseAddress,
               Ranges[Index].Length,
               Ranges[Index].Type
               );
    if (RETURN_ERROR (Status)) {
      goto Exit;
    }
  }

  //
  // MtrrContext is lazily captured: MTRRs are only disabled (and caches
  // flushed) if at least one MSR actually needs to be written.
  //
  MtrrContextValid = FALSE;
  //
  // 4. Write fixed MTRRs that have been modified
  //
  for (Index = 0; Index < ARRAY_SIZE (ClearMasks); Index++) {
    if (ClearMasks[Index] != 0) {
      if (MtrrSetting != NULL) {
        MtrrSetting->Fixed.Mtrr[Index] = (MtrrSetting->Fixed.Mtrr[Index] & ~ClearMasks[Index]) | OrMasks[Index];
      } else {
        if (!MtrrContextValid) {
          MtrrLibPreMtrrChange (&MtrrContext);
          MtrrContextValid = TRUE;
        }

        AsmMsrAndThenOr64 (mMtrrLibFixedMtrrTable[Index].Msr, ~ClearMasks[Index], OrMasks[Index]);
      }
    }
  }

  //
  // 5. Write variable MTRRs that have been modified
  //
  for (Index = 0; Index < OriginalVariableMtrrCount; Index++) {
    if (VariableSettingModified[Index]) {
      if (OriginalVariableMtrr[Index].Length != 0) {
        //
        // BIT11 in the mask MSR is the Valid bit.
        //
        VariableSetting.Base = (OriginalVariableMtrr[Index].BaseAddress & MtrrValidAddressMask)
                               | (UINT8)OriginalVariableMtrr[Index].Type;
        VariableSetting.Mask = ((~(OriginalVariableMtrr[Index].Length - 1)) & MtrrValidAddressMask) | BIT11;
      } else {
        //
        // Zero Base/Mask invalidates the MTRR.
        //
        VariableSetting.Base = 0;
        VariableSetting.Mask = 0;
      }

      if (MtrrSetting != NULL) {
        CopyMem (&MtrrSetting->Variables.Mtrr[Index], &VariableSetting, sizeof (VariableSetting));
      } else {
        if (!MtrrContextValid) {
          MtrrLibPreMtrrChange (&MtrrContext);
          MtrrContextValid = TRUE;
        }

        AsmWriteMsr64 (
          MSR_IA32_MTRR_PHYSBASE0 + (Index << 1),
          VariableSetting.Base
          );
        AsmWriteMsr64 (
          MSR_IA32_MTRR_PHYSMASK0 + (Index << 1),
          VariableSetting.Mask
          );
      }
    }
  }

  if (MtrrSetting != NULL) {
    //
    // When writing to a buffer, record that MTRRs (E) and fixed MTRRs (FE)
    // are enabled in the default-type register image.
    //
    ((MSR_IA32_MTRR_DEF_TYPE_REGISTER *)&MtrrSetting->MtrrDefType)->Bits.E  = 1;
    ((MSR_IA32_MTRR_DEF_TYPE_REGISTER *)&MtrrSetting->MtrrDefType)->Bits.FE = 1;
  } else {
    if (MtrrContextValid) {
      MtrrLibPostMtrrChange (&MtrrContext);
    }
  }

Exit:
  DEBUG ((DEBUG_CACHE, "  Result = %r\n", Status));
  if (!RETURN_ERROR (Status)) {
    MtrrDebugPrintAllMtrrsWorker (MtrrSetting);
  }

  return Status;
}
2606 | \r | |
2607 | /**\r | |
2608 | This function attempts to set the attributes into MTRR setting buffer for a memory range.\r | |
2609 | \r | |
2610 | @param[in, out] MtrrSetting MTRR setting buffer to be set.\r | |
2611 | @param[in] BaseAddress The physical address that is the start address\r | |
2612 | of a memory range.\r | |
2613 | @param[in] Length The size in bytes of the memory range.\r | |
2614 | @param[in] Attribute The bit mask of attributes to set for the\r | |
2615 | memory range.\r | |
2616 | \r | |
2617 | @retval RETURN_SUCCESS The attributes were set for the memory range.\r | |
2618 | @retval RETURN_INVALID_PARAMETER Length is zero.\r | |
2619 | @retval RETURN_UNSUPPORTED The processor does not support one or more bytes of the\r | |
2620 | memory resource range specified by BaseAddress and Length.\r | |
2621 | @retval RETURN_UNSUPPORTED The bit mask of attributes is not support for the memory resource\r | |
2622 | range specified by BaseAddress and Length.\r | |
2623 | @retval RETURN_ACCESS_DENIED The attributes for the memory resource range specified by\r | |
2624 | BaseAddress and Length cannot be modified.\r | |
2625 | @retval RETURN_OUT_OF_RESOURCES There are not enough system resources to modify the attributes of\r | |
2626 | the memory resource range.\r | |
2627 | Multiple memory range attributes setting by calling this API multiple\r | |
2628 | times may fail with status RETURN_OUT_OF_RESOURCES. It may not mean\r | |
2629 | the number of CPU MTRRs are too small to set such memory attributes.\r | |
2630 | Pass the multiple memory range attributes to one call of\r | |
2631 | MtrrSetMemoryAttributesInMtrrSettings() may succeed.\r | |
2632 | @retval RETURN_BUFFER_TOO_SMALL The fixed internal scratch buffer is too small for MTRR calculation.\r | |
2633 | Caller should use MtrrSetMemoryAttributesInMtrrSettings() to specify\r | |
2634 | external scratch buffer.\r | |
2635 | **/\r | |
2636 | RETURN_STATUS\r | |
2637 | EFIAPI\r | |
2638 | MtrrSetMemoryAttributeInMtrrSettings (\r | |
2639 | IN OUT MTRR_SETTINGS *MtrrSetting,\r | |
2640 | IN PHYSICAL_ADDRESS BaseAddress,\r | |
2641 | IN UINT64 Length,\r | |
2642 | IN MTRR_MEMORY_CACHE_TYPE Attribute\r | |
2643 | )\r | |
2644 | {\r | |
2645 | UINT8 Scratch[SCRATCH_BUFFER_SIZE];\r | |
2646 | UINTN ScratchSize;\r | |
2647 | MTRR_MEMORY_RANGE Range;\r | |
2648 | \r | |
2649 | Range.BaseAddress = BaseAddress;\r | |
2650 | Range.Length = Length;\r | |
2651 | Range.Type = Attribute;\r | |
2652 | ScratchSize = sizeof (Scratch);\r | |
2653 | return MtrrSetMemoryAttributesInMtrrSettings (MtrrSetting, Scratch, &ScratchSize, &Range, 1);\r | |
2654 | }\r | |
2655 | \r | |
2656 | /**\r | |
2657 | This function attempts to set the attributes for a memory range.\r | |
2658 | \r | |
2659 | @param[in] BaseAddress The physical address that is the start\r | |
2660 | address of a memory range.\r | |
2661 | @param[in] Length The size in bytes of the memory range.\r | |
2662 | @param[in] Attributes The bit mask of attributes to set for the\r | |
2663 | memory range.\r | |
2664 | \r | |
2665 | @retval RETURN_SUCCESS The attributes were set for the memory\r | |
2666 | range.\r | |
2667 | @retval RETURN_INVALID_PARAMETER Length is zero.\r | |
2668 | @retval RETURN_UNSUPPORTED The processor does not support one or\r | |
2669 | more bytes of the memory resource range\r | |
2670 | specified by BaseAddress and Length.\r | |
2671 | @retval RETURN_UNSUPPORTED The bit mask of attributes is not support\r | |
2672 | for the memory resource range specified\r | |
2673 | by BaseAddress and Length.\r | |
2674 | @retval RETURN_ACCESS_DENIED The attributes for the memory resource\r | |
2675 | range specified by BaseAddress and Length\r | |
2676 | cannot be modified.\r | |
2677 | @retval RETURN_OUT_OF_RESOURCES There are not enough system resources to\r | |
2678 | modify the attributes of the memory\r | |
2679 | resource range.\r | |
2680 | Multiple memory range attributes setting by calling this API multiple\r | |
2681 | times may fail with status RETURN_OUT_OF_RESOURCES. It may not mean\r | |
2682 | the number of CPU MTRRs are too small to set such memory attributes.\r | |
2683 | Pass the multiple memory range attributes to one call of\r | |
2684 | MtrrSetMemoryAttributesInMtrrSettings() may succeed.\r | |
2685 | @retval RETURN_BUFFER_TOO_SMALL The fixed internal scratch buffer is too small for MTRR calculation.\r | |
2686 | Caller should use MtrrSetMemoryAttributesInMtrrSettings() to specify\r | |
2687 | external scratch buffer.\r | |
2688 | **/\r | |
2689 | RETURN_STATUS\r | |
2690 | EFIAPI\r | |
2691 | MtrrSetMemoryAttribute (\r | |
2692 | IN PHYSICAL_ADDRESS BaseAddress,\r | |
2693 | IN UINT64 Length,\r | |
2694 | IN MTRR_MEMORY_CACHE_TYPE Attribute\r | |
2695 | )\r | |
2696 | {\r | |
2697 | return MtrrSetMemoryAttributeInMtrrSettings (NULL, BaseAddress, Length, Attribute);\r | |
2698 | }\r | |
2699 | \r | |
2700 | /**\r | |
2701 | Worker function setting variable MTRRs\r | |
2702 | \r | |
2703 | @param[in] VariableSettings A buffer to hold variable MTRRs content.\r | |
2704 | \r | |
2705 | **/\r | |
2706 | VOID\r | |
2707 | MtrrSetVariableMtrrWorker (\r | |
2708 | IN MTRR_VARIABLE_SETTINGS *VariableSettings\r | |
2709 | )\r | |
2710 | {\r | |
2711 | UINT32 Index;\r | |
2712 | UINT32 VariableMtrrCount;\r | |
2713 | \r | |
2714 | VariableMtrrCount = GetVariableMtrrCountWorker ();\r | |
2715 | ASSERT (VariableMtrrCount <= ARRAY_SIZE (VariableSettings->Mtrr));\r | |
2716 | \r | |
2717 | for (Index = 0; Index < VariableMtrrCount; Index++) {\r | |
2718 | AsmWriteMsr64 (\r | |
2719 | MSR_IA32_MTRR_PHYSBASE0 + (Index << 1),\r | |
2720 | VariableSettings->Mtrr[Index].Base\r | |
2721 | );\r | |
2722 | AsmWriteMsr64 (\r | |
2723 | MSR_IA32_MTRR_PHYSMASK0 + (Index << 1),\r | |
2724 | VariableSettings->Mtrr[Index].Mask\r | |
2725 | );\r | |
2726 | }\r | |
2727 | }\r | |
2728 | \r | |
2729 | /**\r | |
2730 | Worker function setting fixed MTRRs\r | |
2731 | \r | |
2732 | @param[in] FixedSettings A buffer to hold fixed MTRRs content.\r | |
2733 | \r | |
2734 | **/\r | |
2735 | VOID\r | |
2736 | MtrrSetFixedMtrrWorker (\r | |
2737 | IN MTRR_FIXED_SETTINGS *FixedSettings\r | |
2738 | )\r | |
2739 | {\r | |
2740 | UINT32 Index;\r | |
2741 | \r | |
2742 | for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {\r | |
2743 | AsmWriteMsr64 (\r | |
2744 | mMtrrLibFixedMtrrTable[Index].Msr,\r | |
2745 | FixedSettings->Mtrr[Index]\r | |
2746 | );\r | |
2747 | }\r | |
2748 | }\r | |
2749 | \r | |
2750 | /**\r | |
2751 | This function gets the content in all MTRRs (variable and fixed)\r | |
2752 | \r | |
2753 | @param[out] MtrrSetting A buffer to hold all MTRRs content.\r | |
2754 | \r | |
2755 | @retval the pointer of MtrrSetting\r | |
2756 | \r | |
2757 | **/\r | |
2758 | MTRR_SETTINGS *\r | |
2759 | EFIAPI\r | |
2760 | MtrrGetAllMtrrs (\r | |
2761 | OUT MTRR_SETTINGS *MtrrSetting\r | |
2762 | )\r | |
2763 | {\r | |
2764 | if (!IsMtrrSupported ()) {\r | |
2765 | return MtrrSetting;\r | |
2766 | }\r | |
2767 | \r | |
2768 | //\r | |
2769 | // Get fixed MTRRs\r | |
2770 | //\r | |
2771 | MtrrGetFixedMtrrWorker (&MtrrSetting->Fixed);\r | |
2772 | \r | |
2773 | //\r | |
2774 | // Get variable MTRRs\r | |
2775 | //\r | |
2776 | MtrrGetVariableMtrrWorker (\r | |
2777 | NULL,\r | |
2778 | GetVariableMtrrCountWorker (),\r | |
2779 | &MtrrSetting->Variables\r | |
2780 | );\r | |
2781 | \r | |
2782 | //\r | |
2783 | // Get MTRR_DEF_TYPE value\r | |
2784 | //\r | |
2785 | MtrrSetting->MtrrDefType = AsmReadMsr64 (MSR_IA32_MTRR_DEF_TYPE);\r | |
2786 | \r | |
2787 | return MtrrSetting;\r | |
2788 | }\r | |
2789 | \r | |
/**
  This function sets all MTRRs (variable and fixed)

  @par Note:
  Per the file header, this is the one service intended to also be invoked
  on APs, to sync the BSP's MTRR settings to them.

  The statement order below is deliberate: MtrrLibPreMtrrChange() must run
  first and MtrrLibPostMtrrChangeEnableCache() last, with all MSR writes in
  between — do not reorder.

  @param[in]  MtrrSetting  A buffer holding all MTRRs content.

  @retval The pointer of MtrrSetting

**/
MTRR_SETTINGS *
EFIAPI
MtrrSetAllMtrrs (
  IN MTRR_SETTINGS  *MtrrSetting
  )
{
  MTRR_CONTEXT  MtrrContext;

  // No-op on processors without MTRR support; the input is returned untouched.
  if (!IsMtrrSupported ()) {
    return MtrrSetting;
  }

  // Save CR4/interrupt state and prepare the processor for MTRR modification.
  MtrrLibPreMtrrChange (&MtrrContext);

  //
  // Set fixed MTRRs
  //
  MtrrSetFixedMtrrWorker (&MtrrSetting->Fixed);

  //
  // Set variable MTRRs
  //
  MtrrSetVariableMtrrWorker (&MtrrSetting->Variables);

  //
  // Set MTRR_DEF_TYPE value
  //
  AsmWriteMsr64 (MSR_IA32_MTRR_DEF_TYPE, MtrrSetting->MtrrDefType);

  // Restore the saved context; this variant also re-enables the cache.
  MtrrLibPostMtrrChangeEnableCache (&MtrrContext);

  return MtrrSetting;
}
2831 | \r | |
2832 | /**\r | |
2833 | Checks if MTRR is supported.\r | |
2834 | \r | |
2835 | @retval TRUE MTRR is supported.\r | |
2836 | @retval FALSE MTRR is not supported.\r | |
2837 | \r | |
2838 | **/\r | |
2839 | BOOLEAN\r | |
2840 | EFIAPI\r | |
2841 | IsMtrrSupported (\r | |
2842 | VOID\r | |
2843 | )\r | |
2844 | {\r | |
2845 | CPUID_VERSION_INFO_EDX Edx;\r | |
2846 | MSR_IA32_MTRRCAP_REGISTER MtrrCap;\r | |
2847 | \r | |
2848 | //\r | |
2849 | // Check CPUID(1).EDX[12] for MTRR capability\r | |
2850 | //\r | |
2851 | AsmCpuid (CPUID_VERSION_INFO, NULL, NULL, NULL, &Edx.Uint32);\r | |
2852 | if (Edx.Bits.MTRR == 0) {\r | |
2853 | return FALSE;\r | |
2854 | }\r | |
2855 | \r | |
2856 | //\r | |
2857 | // Check number of variable MTRRs and fixed MTRRs existence.\r | |
2858 | // If number of variable MTRRs is zero, or fixed MTRRs do not\r | |
2859 | // exist, return false.\r | |
2860 | //\r | |
2861 | MtrrCap.Uint64 = AsmReadMsr64 (MSR_IA32_MTRRCAP);\r | |
2862 | if ((MtrrCap.Bits.VCNT == 0) || (MtrrCap.Bits.FIX == 0)) {\r | |
2863 | return FALSE;\r | |
2864 | }\r | |
2865 | \r | |
2866 | return TRUE;\r | |
2867 | }\r | |
2868 | \r | |
/**
  Worker function prints all MTRRs for debugging.

  If MtrrSetting is not NULL, print MTRR settings from input MTRR
  settings buffer.
  If MtrrSetting is NULL, print MTRR settings from MTRRs.

  The entire body is wrapped in DEBUG_CODE_BEGIN/END, so it compiles to
  nothing in non-debug builds.

  @param  MtrrSetting    A buffer holding all MTRRs content.
**/
VOID
MtrrDebugPrintAllMtrrsWorker (
  IN MTRR_SETTINGS          *MtrrSetting
  )
{
  DEBUG_CODE_BEGIN ();
  MTRR_SETTINGS     LocalMtrrs;
  MTRR_SETTINGS     *Mtrrs;
  UINTN             Index;
  UINTN             RangeCount;
  UINT64            MtrrValidBitsMask;
  UINT64            MtrrValidAddressMask;
  UINT32            VariableMtrrCount;
  BOOLEAN           ContainVariableMtrr;
  //
  // Worst-case range count: every 64-bit fixed MTRR can split into 8
  // one-byte-typed sub-ranges (sizeof (UINT64) entries per MSR), each
  // variable MTRR can split an existing range into at most 2 extra
  // pieces, plus 1 for the initial default-type range.
  //
  MTRR_MEMORY_RANGE Ranges[
                           ARRAY_SIZE (mMtrrLibFixedMtrrTable) * sizeof (UINT64) + 2 * ARRAY_SIZE (Mtrrs->Variables.Mtrr) + 1
  ];
  MTRR_MEMORY_RANGE RawVariableRanges[ARRAY_SIZE (Mtrrs->Variables.Mtrr)];

  if (!IsMtrrSupported ()) {
    return;
  }

  VariableMtrrCount = GetVariableMtrrCountWorker ();

  //
  // Choose the data source: caller-supplied buffer, or a fresh snapshot of
  // the hardware MSRs captured into LocalMtrrs.
  //
  if (MtrrSetting != NULL) {
    Mtrrs = MtrrSetting;
  } else {
    MtrrGetAllMtrrs (&LocalMtrrs);
    Mtrrs = &LocalMtrrs;
  }

  //
  // Dump RAW MTRR contents
  //
  DEBUG ((DEBUG_CACHE, "MTRR Settings:\n"));
  DEBUG ((DEBUG_CACHE, "=============\n"));
  DEBUG ((DEBUG_CACHE, "MTRR Default Type: %016lx\n", Mtrrs->MtrrDefType));
  for (Index = 0; Index < ARRAY_SIZE (mMtrrLibFixedMtrrTable); Index++) {
    DEBUG ((DEBUG_CACHE, "Fixed MTRR[%02d]   : %016lx\n", Index, Mtrrs->Fixed.Mtrr[Index]));
  }

  ContainVariableMtrr = FALSE;
  for (Index = 0; Index < VariableMtrrCount; Index++) {
    //
    // BIT11 of the mask MSR is the Valid (V) bit; an entry with V clear is
    // disabled and not worth printing.
    //
    if ((Mtrrs->Variables.Mtrr[Index].Mask & BIT11) == 0) {
      //
      // If mask is not valid, then do not display range
      //
      continue;
    }

    ContainVariableMtrr = TRUE;
    DEBUG ((
      DEBUG_CACHE,
      "Variable MTRR[%02d]: Base=%016lx Mask=%016lx\n",
      Index,
      Mtrrs->Variables.Mtrr[Index].Base,
      Mtrrs->Variables.Mtrr[Index].Mask
      ));
  }

  if (!ContainVariableMtrr) {
    DEBUG ((DEBUG_CACHE, "Variable MTRR    : None.\n"));
  }

  DEBUG ((DEBUG_CACHE, "\n"));

  //
  // Dump MTRR setting in ranges
  //
  // Start from a single range covering the full physical address space at
  // the default memory type, then layer variable MTRRs and finally fixed
  // MTRRs (which take precedence below 1MB) on top of it.
  //
  DEBUG ((DEBUG_CACHE, "Memory Ranges:\n"));
  DEBUG ((DEBUG_CACHE, "====================================\n"));
  MtrrLibInitializeMtrrMask (&MtrrValidBitsMask, &MtrrValidAddressMask);
  Ranges[0].BaseAddress = 0;
  Ranges[0].Length      = MtrrValidBitsMask + 1;
  Ranges[0].Type        = MtrrGetDefaultMemoryTypeWorker (Mtrrs);
  RangeCount            = 1;

  MtrrLibGetRawVariableRanges (
    &Mtrrs->Variables,
    VariableMtrrCount,
    MtrrValidBitsMask,
    MtrrValidAddressMask,
    RawVariableRanges
    );
  MtrrLibApplyVariableMtrrs (
    RawVariableRanges,
    VariableMtrrCount,
    Ranges,
    ARRAY_SIZE (Ranges),
    &RangeCount
    );

  MtrrLibApplyFixedMtrrs (&Mtrrs->Fixed, Ranges, ARRAY_SIZE (Ranges), &RangeCount);

  //
  // Print each resolved range as "<type>:<first byte>-<last byte>".
  //
  for (Index = 0; Index < RangeCount; Index++) {
    DEBUG ((
      DEBUG_CACHE,
      "%a:%016lx-%016lx\n",
      mMtrrMemoryCacheTypeShortName[Ranges[Index].Type],
      Ranges[Index].BaseAddress,
      Ranges[Index].BaseAddress + Ranges[Index].Length - 1
      ));
  }

  DEBUG_CODE_END ();
}
2985 | \r | |
2986 | /**\r | |
2987 | This function prints all MTRRs for debugging.\r | |
2988 | **/\r | |
2989 | VOID\r | |
2990 | EFIAPI\r | |
2991 | MtrrDebugPrintAllMtrrs (\r | |
2992 | VOID\r | |
2993 | )\r | |
2994 | {\r | |
2995 | MtrrDebugPrintAllMtrrsWorker (NULL);\r | |
2996 | }\r |