]>
Commit | Line | Data |
---|---|---|
c8ec22a2 JY |
1 | /** @file\r |
2 | \r | |
c8c48cbb | 3 | Copyright (c) 2014 - 2015, Intel Corporation. All rights reserved.<BR>\r |
16a16ea6 | 4 | SPDX-License-Identifier: BSD-2-Clause-Patent\r |
c8ec22a2 JY |
5 | \r |
6 | **/\r | |
7 | \r | |
8 | #include <Uefi.h>\r | |
9 | #include <Library/BaseLib.h>\r | |
10 | #include <Library/CacheLib.h>\r | |
11 | #include <Library/CacheAsRamLib.h>\r | |
12 | #include "CacheLibInternal.h"\r | |
13 | \r | |
c8ec22a2 JY |
14 | /**\r |
15 | Search the memory cache type for specific memory from MTRR.\r | |
16 | \r | |
17 | @param[in] MemoryAddress the address of target memory\r | |
18 | @param[in] MemoryLength the length of target memory\r | |
19 | @param[in] ValidMtrrAddressMask the MTRR address mask\r | |
20 | @param[out] UsedMsrNum the used MSR number\r | |
21 | @param[out] UsedMemoryCacheType the cache type for the target memory\r | |
22 | \r | |
23 | @retval EFI_SUCCESS The memory is found in MTRR and cache type is returned\r | |
24 | @retval EFI_NOT_FOUND The memory is not found in MTRR\r | |
25 | \r | |
26 | **/\r | |
27 | EFI_STATUS\r | |
28 | SearchForExactMtrr (\r | |
29 | IN EFI_PHYSICAL_ADDRESS MemoryAddress,\r | |
30 | IN UINT64 MemoryLength,\r | |
31 | IN UINT64 ValidMtrrAddressMask,\r | |
32 | OUT UINT32 *UsedMsrNum,\r | |
33 | OUT EFI_MEMORY_CACHE_TYPE *MemoryCacheType\r | |
34 | );\r | |
35 | \r | |
/**
  Check if CacheType match current default setting.

  @param[in] MemoryCacheType  input cache type to be checked.

  @retval TRUE   MemoryCacheType is default MTRR setting.
  @retval FALSE  MemoryCacheType is NOT default MTRR setting.
**/
BOOLEAN
IsDefaultType (
  IN EFI_MEMORY_CACHE_TYPE MemoryCacheType
  );
48 | \r | |
/**
  Return MTRR alignment requirement for base address and size.

  @param[in] BaseAddress  Base address.
  @param[in] Size         Size.

  @retval Zero      Aligned.
  @retval Non-Zero  Not aligned.

**/
UINT32
CheckMtrrAlignment (
  IN UINT64 BaseAddress,
  IN UINT64 Size
  );
64 | \r | |
//
// Describes one fixed-range MTRR: the MSR that holds it, the physical base
// address of its first field, and the size in bytes covered by each of its
// eight 8-bit type fields.
//
typedef struct {
  UINT32 Msr;
  UINT32 BaseAddress;
  UINT32 Length;
} EFI_FIXED_MTRR;

//
// Fixed-range MTRR map in ascending physical address order: one 64KB-field
// MTRR below 0x80000, two 16KB-field MTRRs up to 0xC0000, then 4KB-field
// MTRRs up to 1MB. ProgramFixedMtrr indexes and searches this table
// linearly, so the ordering is load-bearing — do not reorder.
//
EFI_FIXED_MTRR mFixedMtrrTable[] = {
  { EFI_MSR_IA32_MTRR_FIX64K_00000, 0, 0x10000},
  { EFI_MSR_IA32_MTRR_FIX16K_80000, 0x80000, 0x4000},
  { EFI_MSR_IA32_MTRR_FIX16K_A0000, 0xA0000, 0x4000},
  { EFI_MSR_IA32_MTRR_FIX4K_C0000, 0xC0000, 0x1000},
  { EFI_MSR_IA32_MTRR_FIX4K_C8000, 0xC8000, 0x1000},
  { EFI_MSR_IA32_MTRR_FIX4K_D0000, 0xD0000, 0x1000},
  { EFI_MSR_IA32_MTRR_FIX4K_D8000, 0xD8000, 0x1000},
  { EFI_MSR_IA32_MTRR_FIX4K_E0000, 0xE0000, 0x1000},
  { EFI_MSR_IA32_MTRR_FIX4K_E8000, 0xE8000, 0x1000},
  { EFI_MSR_IA32_MTRR_FIX4K_F0000, 0xF0000, 0x1000},
  { EFI_MSR_IA32_MTRR_FIX4K_F8000, 0xF8000, 0x1000}
};
84 | \r | |
/**
  Decide whether a memory range should be programmed with positive
  (additive) or subtractive variable-MTRR arithmetic.

  NOTE(review): this is a stub that always selects the positive direction.
  No caller is visible in this file; SetCacheAttributes explicitly states
  that direction cannot be checked for its fill method and always programs
  additive power-of-2 regions.

  @param[in] Input  Length of Memory to program MTRR.

  @retval Zero      do positive.
  @retval Non-Zero  do subtractive.

**/
INT8
CheckDirection (
  IN UINT64 Input
  )
{
  // Always positive; subtractive (UC carve-out) programming is not implemented.
  return 0;
}
102 | \r | |
103 | /**\r | |
104 | Disable cache and its mtrr.\r | |
105 | \r | |
106 | @param[out] OldMtrr To return the Old MTRR value\r | |
107 | \r | |
108 | **/\r | |
109 | VOID\r | |
110 | EfiDisableCacheMtrr (\r | |
111 | OUT UINT64 *OldMtrr\r | |
112 | )\r | |
113 | {\r | |
114 | UINT64 TempQword;\r | |
115 | \r | |
116 | //\r | |
117 | // Disable Cache MTRR\r | |
118 | //\r | |
119 | *OldMtrr = AsmReadMsr64(EFI_MSR_CACHE_IA32_MTRR_DEF_TYPE);\r | |
120 | TempQword = (*OldMtrr) & ~B_EFI_MSR_GLOBAL_MTRR_ENABLE & ~B_EFI_MSR_FIXED_MTRR_ENABLE;\r | |
121 | AsmWriteMsr64(EFI_MSR_CACHE_IA32_MTRR_DEF_TYPE, TempQword);\r | |
122 | AsmDisableCache ();\r | |
123 | }\r | |
124 | \r | |
125 | /**\r | |
126 | Recover cache MTRR.\r | |
127 | \r | |
128 | @param[in] EnableMtrr Whether to enable the MTRR\r | |
129 | @param[in] OldMtrr The saved old MTRR value to restore when not to enable the MTRR\r | |
130 | \r | |
131 | **/\r | |
132 | VOID\r | |
133 | EfiRecoverCacheMtrr (\r | |
134 | IN BOOLEAN EnableMtrr,\r | |
135 | IN UINT64 OldMtrr\r | |
136 | )\r | |
137 | {\r | |
138 | UINT64 TempQword;\r | |
139 | \r | |
140 | //\r | |
141 | // Enable Cache MTRR\r | |
142 | //\r | |
143 | if (EnableMtrr) {\r | |
144 | TempQword = AsmReadMsr64(EFI_MSR_CACHE_IA32_MTRR_DEF_TYPE);\r | |
a81fcd30 | 145 | TempQword |= (UINT64)(B_EFI_MSR_GLOBAL_MTRR_ENABLE | B_EFI_MSR_FIXED_MTRR_ENABLE);\r |
c8ec22a2 JY |
146 | } else {\r |
147 | TempQword = OldMtrr;\r | |
148 | }\r | |
149 | \r | |
150 | AsmWriteMsr64 (EFI_MSR_CACHE_IA32_MTRR_DEF_TYPE, TempQword);\r | |
151 | \r | |
152 | AsmEnableCache ();\r | |
153 | }\r | |
154 | \r | |
155 | /**\r | |
156 | Programming MTRR according to Memory address, length, and type.\r | |
157 | \r | |
158 | @param[in] MtrrNumber the variable MTRR index number\r | |
159 | @param[in] MemoryAddress the address of target memory\r | |
160 | @param[in] MemoryLength the length of target memory\r | |
161 | @param[in] MemoryCacheType the cache type of target memory\r | |
162 | @param[in] ValidMtrrAddressMask the MTRR address mask\r | |
163 | \r | |
164 | **/\r | |
165 | VOID\r | |
166 | EfiProgramMtrr (\r | |
167 | IN UINTN MtrrNumber,\r | |
168 | IN EFI_PHYSICAL_ADDRESS MemoryAddress,\r | |
169 | IN UINT64 MemoryLength,\r | |
170 | IN EFI_MEMORY_CACHE_TYPE MemoryCacheType,\r | |
171 | IN UINT64 ValidMtrrAddressMask\r | |
172 | )\r | |
173 | {\r | |
174 | UINT64 TempQword;\r | |
175 | UINT64 OldMtrr;\r | |
176 | \r | |
177 | if (MemoryLength == 0) {\r | |
178 | return;\r | |
179 | }\r | |
180 | \r | |
181 | EfiDisableCacheMtrr (&OldMtrr);\r | |
182 | \r | |
183 | //\r | |
184 | // MTRR Physical Base\r | |
185 | //\r | |
186 | TempQword = (MemoryAddress & ValidMtrrAddressMask) | MemoryCacheType;\r | |
187 | AsmWriteMsr64 (MtrrNumber, TempQword);\r | |
188 | \r | |
189 | //\r | |
190 | // MTRR Physical Mask\r | |
191 | //\r | |
192 | TempQword = ~(MemoryLength - 1);\r | |
193 | AsmWriteMsr64 (MtrrNumber + 1, (TempQword & ValidMtrrAddressMask) | B_EFI_MSR_CACHE_MTRR_VALID);\r | |
194 | \r | |
195 | EfiRecoverCacheMtrr (TRUE, OldMtrr);\r | |
196 | }\r | |
197 | \r | |
198 | /**\r | |
199 | Calculate the maximum value which is a power of 2, but less the MemoryLength.\r | |
200 | \r | |
201 | @param[in] MemoryAddress Memory address.\r | |
202 | @param[in] MemoryLength The number to pass in.\r | |
203 | \r | |
204 | @return The maximum value which is align to power of 2 and less the MemoryLength\r | |
205 | \r | |
206 | **/\r | |
207 | UINT64\r | |
208 | Power2MaxMemory (\r | |
209 | IN UINT64 MemoryAddress,\r | |
210 | IN UINT64 MemoryLength\r | |
211 | )\r | |
212 | {\r | |
213 | UINT64 Result;\r | |
214 | \r | |
215 | if (MemoryLength == 0) {\r | |
216 | return EFI_INVALID_PARAMETER;\r | |
217 | }\r | |
218 | \r | |
219 | //\r | |
220 | // Compute inital power of 2 size to return\r | |
221 | //\r | |
c8c48cbb | 222 | Result = GetPowerOfTwo64(MemoryLength);\r |
c8ec22a2 JY |
223 | \r |
224 | //\r | |
225 | // Special case base of 0 as all ranges are valid\r | |
226 | //\r | |
227 | if (MemoryAddress == 0) {\r | |
228 | return Result;\r | |
229 | }\r | |
230 | \r | |
231 | //\r | |
232 | // Loop till a value that can be mapped to this base address is found\r | |
233 | //\r | |
234 | while (CheckMtrrAlignment (MemoryAddress, Result) != 0) {\r | |
235 | //\r | |
236 | // Need to try the next smaller power of 2\r | |
237 | //\r | |
238 | Result = RShiftU64 (Result, 1);\r | |
239 | }\r | |
240 | \r | |
241 | return Result;\r | |
242 | }\r | |
243 | \r | |
244 | /**\r | |
245 | Return MTRR alignment requirement for base address and size.\r | |
246 | \r | |
247 | @param[in] BaseAddress Base address.\r | |
248 | @param[in] Size Size.\r | |
249 | \r | |
250 | @retval Zero Alligned.\r | |
251 | @retval Non-Zero Not alligned.\r | |
252 | \r | |
253 | **/\r | |
254 | UINT32\r | |
255 | CheckMtrrAlignment (\r | |
256 | IN UINT64 BaseAddress,\r | |
257 | IN UINT64 Size\r | |
258 | )\r | |
259 | {\r | |
260 | UINT32 ShiftedBase;\r | |
261 | UINT32 ShiftedSize;\r | |
262 | \r | |
263 | //\r | |
264 | // Shift base and size right 12 bits to allow for larger memory sizes. The\r | |
265 | // MTRRs do not use the first 12 bits so this is safe for now. Only supports\r | |
266 | // up to 52 bits of physical address space.\r | |
267 | //\r | |
268 | ShiftedBase = (UINT32) RShiftU64 (BaseAddress, 12);\r | |
269 | ShiftedSize = (UINT32) RShiftU64 (Size, 12);\r | |
270 | \r | |
271 | //\r | |
272 | // Return the results to the caller of the MOD\r | |
273 | //\r | |
274 | return ShiftedBase % ShiftedSize;\r | |
275 | }\r | |
276 | \r | |
/**
  Programs fixed MTRRs registers.

  Programs as many whole fields of one fixed-range MTRR as the range covers,
  advancing *Base and reducing *Len accordingly. Callers loop until *Len
  reaches zero or an error is returned.

  @param[in]      MemoryCacheType  The memory type to set.
  @param[in, out] Base             The base address of memory range; advanced
                                   past the bytes programmed by this call.
  @param[in, out] Len              The length of memory range; reduced by the
                                   bytes programmed by this call.

  @retval EFI_SUCCESS       The cache type was updated successfully.
  @retval EFI_DEVICE_ERROR  *Base is above the fixed-MTRR region (1MB) or is
                            not aligned to a field boundary of the covering
                            fixed MTRR.

**/
EFI_STATUS
ProgramFixedMtrr (
  IN EFI_MEMORY_CACHE_TYPE  MemoryCacheType,
  IN UINT64                 *Base,
  IN UINT64                 *Len
  )
{
  UINT32 MsrNum;
  UINT32 ByteShift;
  UINT64 TempQword;
  UINT64 OrMask;
  UINT64 ClearMask;

  TempQword = 0;
  OrMask = 0;
  ClearMask = 0;

  //
  // Locate the fixed MTRR whose eight-field window covers *Base.
  //
  for (MsrNum = 0; MsrNum < V_EFI_FIXED_MTRR_NUMBER; MsrNum++) {
    if ((*Base >= mFixedMtrrTable[MsrNum].BaseAddress) &&
        (*Base < (mFixedMtrrTable[MsrNum].BaseAddress + 8 * mFixedMtrrTable[MsrNum].Length))) {
      break;
    }
  }
  if (MsrNum == V_EFI_FIXED_MTRR_NUMBER ) {
    return EFI_DEVICE_ERROR;
  }
  //
  // We found the fixed MTRR to be programmed
  //
  for (ByteShift=0; ByteShift < 8; ByteShift++) {
    if ( *Base == (mFixedMtrrTable[MsrNum].BaseAddress + ByteShift * mFixedMtrrTable[MsrNum].Length)) {
      break;
    }
  }
  if (ByteShift == 8 ) {
    // *Base is inside the window but not on a field boundary.
    return EFI_DEVICE_ERROR;
  }
  //
  // Build an OR mask (new type) and a clear mask (old type) for every whole
  // field the remaining range covers, consuming *Len and advancing *Base.
  // Each field is an 8-bit type code, hence the ByteShift * 8 positions.
  //
  for (; ((ByteShift<8) && (*Len >= mFixedMtrrTable[MsrNum].Length));ByteShift++) {
    OrMask |= LShiftU64((UINT64) MemoryCacheType, (UINT32) (ByteShift* 8));
    ClearMask |= LShiftU64((UINT64) 0xFF, (UINT32) (ByteShift * 8));
    *Len -= mFixedMtrrTable[MsrNum].Length;
    *Base += mFixedMtrrTable[MsrNum].Length;
  }
  //
  // Read-modify-write the MSR: clear the touched fields, OR in the new type.
  //
  TempQword = (AsmReadMsr64 (mFixedMtrrTable[MsrNum].Msr) & (~ClearMask)) | OrMask;
  AsmWriteMsr64 (mFixedMtrrTable[MsrNum].Msr, TempQword);

  return EFI_SUCCESS;
}
337 | \r | |
/**
  Check if there is a valid variable MTRR that overlaps the given range.

  NOTE(review): stub — always reports no overlap. Its only caller visible in
  this file sits inside an "#if 0" region of SetCacheAttributes.

  @param[in] Start  Base Address of the range to check.
  @param[in] End    End address of the range to check.

  @retval TRUE   Mtrr overlap.
  @retval FALSE  Mtrr not overlap.
**/
BOOLEAN
CheckMtrrOverlap (
  IN EFI_PHYSICAL_ADDRESS Start,
  IN EFI_PHYSICAL_ADDRESS End
  )
{
  // Overlap detection is not implemented.
  return FALSE;
}
355 | \r | |
/**
  Given the memory range and cache type, programs the MTRRs.

  Ranges entirely below 1MB are mapped with the fixed MTRRs; everything else
  uses variable MTRRs, reusing an existing exact-match entry when possible
  and otherwise splitting the range into power-of-2 chunks.

  @param[in] MemoryAddress    Base Address of Memory to program MTRR.
  @param[in] MemoryLength     Length of Memory to program MTRR.
  @param[in] MemoryCacheType  Cache Type.

  @retval EFI_SUCCESS            Mtrr are set successfully.
  @retval EFI_LOAD_ERROR         No empty MTRRs to use.
  @retval EFI_INVALID_PARAMETER  The input parameter is not valid.
  @retval others                 An error occurs when setting MTTR.

**/
EFI_STATUS
EFIAPI
SetCacheAttributes (
  IN EFI_PHYSICAL_ADDRESS MemoryAddress,
  IN UINT64 MemoryLength,
  IN EFI_MEMORY_CACHE_TYPE MemoryCacheType
  )
{
  EFI_STATUS Status;
  UINT32 MsrNum, MsrNumEnd;
  UINT64 TempQword;
  UINT32 LastVariableMtrrForBios;
  UINT64 OldMtrr;
  UINT32 UsedMsrNum;
  EFI_MEMORY_CACHE_TYPE UsedMemoryCacheType;
  UINT64 ValidMtrrAddressMask;
  UINT32 Cpuid_RegEax;

  //
  // Build the valid MTRR address mask from the CPU's physical address width
  // (CPUID leaf 0x80000008 when supported, otherwise assume 36 bits).
  // Bits 0-11 are always excluded: MTRR granularity is 4KB.
  //
  AsmCpuid (CPUID_EXTENDED_FUNCTION, &Cpuid_RegEax, NULL, NULL, NULL);
  if (Cpuid_RegEax >= CPUID_VIR_PHY_ADDRESS_SIZE) {
    AsmCpuid (CPUID_VIR_PHY_ADDRESS_SIZE, &Cpuid_RegEax, NULL, NULL, NULL);
    ValidMtrrAddressMask = (LShiftU64((UINT64) 1, (Cpuid_RegEax & 0xFF)) - 1) & (~(UINT64)0x0FFF);
  } else {
    ValidMtrrAddressMask = (LShiftU64((UINT64) 1, 36) - 1) & (~(UINT64)0x0FFF);
  }

  //
  // Check for invalid parameter: address and length must be 4KB-aligned and
  // within the physical address width.
  //
  if ((MemoryAddress & ~ValidMtrrAddressMask) != 0 || (MemoryLength & ~ValidMtrrAddressMask) != 0) {
    return EFI_INVALID_PARAMETER;
  }

  if (MemoryLength == 0) {
    return EFI_INVALID_PARAMETER;
  }

  //
  // Only the architectural memory types are accepted.
  //
  switch (MemoryCacheType) {
    case EFI_CACHE_UNCACHEABLE:
    case EFI_CACHE_WRITECOMBINING:
    case EFI_CACHE_WRITETHROUGH:
    case EFI_CACHE_WRITEPROTECTED:
    case EFI_CACHE_WRITEBACK:
      break;

    default:
      return EFI_INVALID_PARAMETER;
  }

  //
  // Check if Fixed MTRR: ranges entirely below 1MB are handled by the
  // fixed-range MTRRs, programmed field-by-field until the range is consumed.
  //
  if ((MemoryAddress + MemoryLength) <= (1 << 20)) {
    Status = EFI_SUCCESS;
    EfiDisableCacheMtrr (&OldMtrr);
    while ((MemoryLength > 0) && (Status == EFI_SUCCESS)) {
      Status = ProgramFixedMtrr (MemoryCacheType, &MemoryAddress, &MemoryLength);
    }
    EfiRecoverCacheMtrr (TRUE, OldMtrr);
    return Status;
  }

  //
  // Search if the range attribute has been set before
  //
  Status = SearchForExactMtrr(
             MemoryAddress,
             MemoryLength,
             ValidMtrrAddressMask,
             &UsedMsrNum,
             &UsedMemoryCacheType
             );

  if (!EFI_ERROR(Status)) {
    //
    // Compare if it has the same type as current setting
    //
    if (UsedMemoryCacheType == MemoryCacheType) {
      return EFI_SUCCESS;
    } else {
      //
      // Different type
      //

      //
      // Check if the set type is the same as Default Type
      //
      if (IsDefaultType(MemoryCacheType)) {
        //
        // Clear the MTRR pair (base and mask); the default type then applies.
        //
        AsmWriteMsr64(UsedMsrNum, 0);
        AsmWriteMsr64(UsedMsrNum + 1, 0);

        return EFI_SUCCESS;
      } else {
        //
        // Modify the MTRR type in place, reusing the matched register pair.
        //
        EfiProgramMtrr(UsedMsrNum,
          MemoryAddress,
          MemoryLength,
          MemoryCacheType,
          ValidMtrrAddressMask
          );
        return EFI_SUCCESS;
      }
    }
  }

#if 0
  //
  // @bug - Need to create memory map so that when checking for overlap we
  //        can determine if an overlap exists based on all caching requests.
  //
  // Don't waste a variable MTRR if the caching attrib is same as default in MTRR_DEF_TYPE
  //
  if (MemoryCacheType == (AsmReadMsr64(EFI_MSR_CACHE_IA32_MTRR_DEF_TYPE) & B_EFI_MSR_CACHE_MEMORY_TYPE)) {
    if (!CheckMtrrOverlap (MemoryAddress, MemoryAddress+MemoryLength-1)) {
      return EFI_SUCCESS;
    }
  }
#endif

  //
  // Find first unused MTRR: scan base/mask pairs for one whose mask lacks
  // the valid bit. VCNT comes from IA32_MTRRCAP; two MSRs per pair.
  //
  MsrNumEnd = EFI_MSR_CACHE_VARIABLE_MTRR_BASE + (2 * (UINT32)(AsmReadMsr64(EFI_MSR_IA32_MTRR_CAP) & B_EFI_MSR_IA32_MTRR_CAP_VARIABLE_SUPPORT));
  for (MsrNum = EFI_MSR_CACHE_VARIABLE_MTRR_BASE; MsrNum < MsrNumEnd; MsrNum +=2) {
    if ((AsmReadMsr64(MsrNum+1) & B_EFI_MSR_CACHE_MTRR_VALID) == 0 ) {
      break;
    }
  }

  //
  // Reserve 1 MTRR pair for OS.
  //
  LastVariableMtrrForBios = MsrNumEnd - 1 - (EFI_CACHE_NUM_VAR_MTRR_PAIRS_FOR_OS * 2);
  if (MsrNum > LastVariableMtrrForBios) {
    return EFI_LOAD_ERROR;
  }

  //
  // Special case for 1 MB base address: treated as base 0 so the mask math
  // below produces a mappable power-of-2 region.
  //
  if (MemoryAddress == BASE_1MB) {
    MemoryAddress = 0;
  }

  //
  // Program MTRRs
  //
  TempQword = MemoryLength;

  if (TempQword == Power2MaxMemory(MemoryAddress, TempQword)) {
    // The whole range fits one MTRR pair (power-of-2 size, aligned base).
    EfiProgramMtrr(MsrNum,
      MemoryAddress,
      MemoryLength,
      MemoryCacheType,
      ValidMtrrAddressMask
      );

  } else {
    //
    // Fill in MTRRs with values. Direction can not be checked for this method
    // as we are using WB as the default cache type and only setting areas to UC.
    //
    do {
      //
      // Do boundary check so we don't go past last MTRR register
      // for BIOS use. Leave one MTRR pair for OS use.
      //
      if (MsrNum > LastVariableMtrrForBios) {
        return EFI_LOAD_ERROR;
      }

      //
      // Set next power of 2 region
      //
      MemoryLength = Power2MaxMemory(MemoryAddress, TempQword);
      EfiProgramMtrr(MsrNum,
        MemoryAddress,
        MemoryLength,
        MemoryCacheType,
        ValidMtrrAddressMask
        );
      MemoryAddress += MemoryLength;
      TempQword -= MemoryLength;
      MsrNum += 2;
    } while (TempQword != 0);
  }

  return EFI_SUCCESS;
}
563 | \r | |
564 | /**\r | |
565 | Reset all the MTRRs to a known state.\r | |
566 | \r | |
567 | @retval EFI_SUCCESS All MTRRs have been reset successfully.\r | |
568 | \r | |
569 | **/\r | |
570 | EFI_STATUS\r | |
571 | EFIAPI\r | |
572 | ResetCacheAttributes (\r | |
573 | VOID\r | |
574 | )\r | |
575 | {\r | |
576 | UINT32 MsrNum, MsrNumEnd;\r | |
577 | UINT16 Index;\r | |
578 | UINT64 OldMtrr;\r | |
579 | UINT64 CacheType;\r | |
580 | BOOLEAN DisableCar;\r | |
581 | Index = 0;\r | |
582 | DisableCar = TRUE;\r | |
583 | \r | |
584 | //\r | |
585 | // Determine default cache type\r | |
586 | //\r | |
587 | CacheType = EFI_CACHE_UNCACHEABLE;\r | |
588 | \r | |
589 | //\r | |
590 | // Set default cache type\r | |
591 | //\r | |
592 | AsmWriteMsr64(EFI_MSR_CACHE_IA32_MTRR_DEF_TYPE, CacheType);\r | |
593 | \r | |
594 | //\r | |
595 | // Disable CAR\r | |
596 | //\r | |
597 | DisableCacheAsRam (DisableCar);\r | |
598 | \r | |
599 | EfiDisableCacheMtrr (&OldMtrr);\r | |
600 | \r | |
601 | //\r | |
602 | // Reset Fixed MTRRs\r | |
603 | //\r | |
604 | for (Index = 0; Index < V_EFI_FIXED_MTRR_NUMBER; Index++) {\r | |
605 | AsmWriteMsr64 (mFixedMtrrTable[Index].Msr, 0);\r | |
606 | }\r | |
607 | \r | |
608 | //\r | |
609 | // Reset Variable MTRRs\r | |
610 | //\r | |
611 | MsrNumEnd = EFI_MSR_CACHE_VARIABLE_MTRR_BASE + (2 * (UINT32)(AsmReadMsr64(EFI_MSR_IA32_MTRR_CAP) & B_EFI_MSR_IA32_MTRR_CAP_VARIABLE_SUPPORT));\r | |
612 | for (MsrNum = EFI_MSR_CACHE_VARIABLE_MTRR_BASE; MsrNum < MsrNumEnd; MsrNum++) {\r | |
613 | AsmWriteMsr64 (MsrNum, 0);\r | |
614 | }\r | |
615 | \r | |
616 | //\r | |
617 | // Enable Fixed and Variable MTRRs\r | |
618 | //\r | |
619 | EfiRecoverCacheMtrr (TRUE, OldMtrr);\r | |
620 | \r | |
621 | return EFI_SUCCESS;\r | |
622 | }\r | |
623 | \r | |
624 | /**\r | |
625 | Search the memory cache type for specific memory from MTRR.\r | |
626 | \r | |
627 | @param[in] MemoryAddress the address of target memory\r | |
628 | @param[in] MemoryLength the length of target memory\r | |
629 | @param[in] ValidMtrrAddressMask the MTRR address mask\r | |
630 | @param[out] UsedMsrNum the used MSR number\r | |
631 | @param[out] UsedMemoryCacheType the cache type for the target memory\r | |
632 | \r | |
633 | @retval EFI_SUCCESS The memory is found in MTRR and cache type is returned\r | |
634 | @retval EFI_NOT_FOUND The memory is not found in MTRR\r | |
635 | \r | |
636 | **/\r | |
637 | EFI_STATUS\r | |
638 | SearchForExactMtrr (\r | |
639 | IN EFI_PHYSICAL_ADDRESS MemoryAddress,\r | |
640 | IN UINT64 MemoryLength,\r | |
641 | IN UINT64 ValidMtrrAddressMask,\r | |
642 | OUT UINT32 *UsedMsrNum,\r | |
643 | OUT EFI_MEMORY_CACHE_TYPE *UsedMemoryCacheType\r | |
644 | )\r | |
645 | {\r | |
646 | UINT32 MsrNum, MsrNumEnd;\r | |
647 | UINT64 TempQword;\r | |
648 | \r | |
649 | if (MemoryLength == 0) {\r | |
650 | return EFI_INVALID_PARAMETER;\r | |
651 | }\r | |
652 | \r | |
653 | MsrNumEnd = EFI_MSR_CACHE_VARIABLE_MTRR_BASE + (2 * (UINT32)(AsmReadMsr64(EFI_MSR_IA32_MTRR_CAP) & B_EFI_MSR_IA32_MTRR_CAP_VARIABLE_SUPPORT));\r | |
654 | for (MsrNum = EFI_MSR_CACHE_VARIABLE_MTRR_BASE; MsrNum < MsrNumEnd; MsrNum +=2) {\r | |
655 | TempQword = AsmReadMsr64(MsrNum+1);\r | |
656 | if ((TempQword & B_EFI_MSR_CACHE_MTRR_VALID) == 0) {\r | |
657 | continue;\r | |
658 | }\r | |
659 | \r | |
660 | if ((TempQword & ValidMtrrAddressMask) != ((~(MemoryLength - 1)) & ValidMtrrAddressMask)) {\r | |
661 | continue;\r | |
662 | }\r | |
663 | \r | |
664 | TempQword = AsmReadMsr64 (MsrNum);\r | |
665 | if ((TempQword & ValidMtrrAddressMask) != (MemoryAddress & ValidMtrrAddressMask)) {\r | |
666 | continue;\r | |
667 | }\r | |
668 | \r | |
669 | *UsedMemoryCacheType = (EFI_MEMORY_CACHE_TYPE)(TempQword & B_EFI_MSR_CACHE_MEMORY_TYPE);\r | |
670 | *UsedMsrNum = MsrNum;\r | |
671 | \r | |
672 | return EFI_SUCCESS;\r | |
673 | }\r | |
674 | \r | |
675 | return EFI_NOT_FOUND;\r | |
676 | }\r | |
677 | \r | |
678 | /**\r | |
679 | Check if CacheType match current default setting.\r | |
680 | \r | |
681 | @param[in] MemoryCacheType input cache type to be checked.\r | |
682 | \r | |
683 | @retval TRUE MemoryCacheType is default MTRR setting.\r | |
684 | @retval TRUE MemoryCacheType is NOT default MTRR setting.\r | |
685 | **/\r | |
686 | BOOLEAN\r | |
687 | IsDefaultType (\r | |
688 | IN EFI_MEMORY_CACHE_TYPE MemoryCacheType\r | |
689 | )\r | |
690 | {\r | |
691 | if ((AsmReadMsr64(EFI_MSR_CACHE_IA32_MTRR_DEF_TYPE) & B_EFI_MSR_CACHE_MEMORY_TYPE) != MemoryCacheType) {\r | |
692 | return FALSE;\r | |
693 | }\r | |
694 | \r | |
695 | return TRUE;\r | |
696 | }\r | |
697 | \r |