ArmPkg/Library/ArmLib/AArch64/AArch64Support.S
#------------------------------------------------------------------------------
#
# Copyright (c) 2008 - 2010, Apple Inc. All rights reserved.<BR>
# Copyright (c) 2011 - 2014, ARM Limited. All rights reserved.
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
#------------------------------------------------------------------------------

#include <Chipset/AArch64.h>
#include <AsmMacroIoLibV8.h>

.text
.align 3

GCC_ASM_EXPORT (ArmInvalidateInstructionCache)
GCC_ASM_EXPORT (ArmInvalidateDataCacheEntryByMVA)
GCC_ASM_EXPORT (ArmCleanDataCacheEntryByMVA)
GCC_ASM_EXPORT (ArmCleanInvalidateDataCacheEntryByMVA)
GCC_ASM_EXPORT (ArmInvalidateDataCacheEntryBySetWay)
GCC_ASM_EXPORT (ArmCleanDataCacheEntryBySetWay)
GCC_ASM_EXPORT (ArmCleanInvalidateDataCacheEntryBySetWay)
GCC_ASM_EXPORT (ArmDrainWriteBuffer)
GCC_ASM_EXPORT (ArmEnableMmu)
GCC_ASM_EXPORT (ArmDisableMmu)
GCC_ASM_EXPORT (ArmDisableCachesAndMmu)
GCC_ASM_EXPORT (ArmMmuEnabled)
GCC_ASM_EXPORT (ArmEnableDataCache)
GCC_ASM_EXPORT (ArmDisableDataCache)
GCC_ASM_EXPORT (ArmEnableInstructionCache)
GCC_ASM_EXPORT (ArmDisableInstructionCache)
GCC_ASM_EXPORT (ArmDisableAlignmentCheck)
GCC_ASM_EXPORT (ArmEnableAlignmentCheck)
GCC_ASM_EXPORT (ArmEnableBranchPrediction)
GCC_ASM_EXPORT (ArmDisableBranchPrediction)
GCC_ASM_EXPORT (AArch64AllDataCachesOperation)
GCC_ASM_EXPORT (AArch64PerformPoUDataCacheOperation)
GCC_ASM_EXPORT (ArmDataMemoryBarrier)
GCC_ASM_EXPORT (ArmDataSyncronizationBarrier)
GCC_ASM_EXPORT (ArmInstructionSynchronizationBarrier)
GCC_ASM_EXPORT (ArmWriteVBar)
GCC_ASM_EXPORT (ArmVFPImplemented)
GCC_ASM_EXPORT (ArmEnableVFP)
GCC_ASM_EXPORT (ArmCallWFI)
GCC_ASM_EXPORT (ArmInvalidateInstructionAndDataTlb)
GCC_ASM_EXPORT (ArmReadMpidr)
GCC_ASM_EXPORT (ArmReadMidr)
GCC_ASM_EXPORT (ArmReadTpidrurw)
GCC_ASM_EXPORT (ArmWriteTpidrurw)
GCC_ASM_EXPORT (ArmIsArchTimerImplemented)
GCC_ASM_EXPORT (ArmReadIdPfr0)
GCC_ASM_EXPORT (ArmReadIdPfr1)
GCC_ASM_EXPORT (ArmWriteHcr)
GCC_ASM_EXPORT (ArmReadCurrentEL)

.set CTRL_M_BIT,      (1 << 0)
.set CTRL_A_BIT,      (1 << 1)
.set CTRL_C_BIT,      (1 << 2)
.set CTRL_I_BIT,      (1 << 12)
.set CTRL_V_BIT,      (1 << 12)
.set CPACR_VFP_BITS,  (3 << 20)

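// Note: the EL1_OR_EL2_OR_EL3(reg) and EL1_OR_EL2(reg) macros used below come from
// AsmMacroIoLibV8.h. They read CurrentEL into the given scratch register and branch
// to the local label 1:, 2: or 3: that matches the exception level, so each numbered
// block selects the system register for the exception level we are running at.
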
ASM_PFX(ArmInvalidateDataCacheEntryByMVA):
  dc      ivac, x0              // Invalidate single data cache line
  dsb     sy
  isb
  ret


ASM_PFX(ArmCleanDataCacheEntryByMVA):
  dc      cvac, x0              // Clean single data cache line
  dsb     sy
  isb
  ret


ASM_PFX(ArmCleanInvalidateDataCacheEntryByMVA):
  dc      civac, x0             // Clean and invalidate single data cache line
  dsb     sy
  isb
  ret


ASM_PFX(ArmInvalidateDataCacheEntryBySetWay):
  dc      isw, x0               // Invalidate this line
  dsb     sy
  isb
  ret


ASM_PFX(ArmCleanInvalidateDataCacheEntryBySetWay):
  dc      cisw, x0              // Clean and Invalidate this line
  dsb     sy
  isb
  ret


ASM_PFX(ArmCleanDataCacheEntryBySetWay):
  dc      csw, x0               // Clean this line
  dsb     sy
  isb
  ret


ASM_PFX(ArmInvalidateInstructionCache):
  ic      iallu                 // Invalidate entire instruction cache
  dsb     sy
  isb
  ret


ASM_PFX(ArmEnableMmu):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Read System control register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Read System control register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Read System control register EL3
4: orr     x0, x0, #CTRL_M_BIT  // Set MMU enable bit
   EL1_OR_EL2_OR_EL3(x1)
1: tlbi    vmalle1
   isb
   msr     sctlr_el1, x0        // Write back
   b       4f
2: tlbi    alle2
   isb
   msr     sctlr_el2, x0        // Write back
   b       4f
3: tlbi    alle3
   isb
   msr     sctlr_el3, x0        // Write back
4: dsb     sy
   isb
   ret


ASM_PFX(ArmDisableMmu):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Read System Control Register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Read System Control Register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Read System Control Register EL3
4: bic     x0, x0, #CTRL_M_BIT  // Clear MMU enable bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr     sctlr_el1, x0        // Write back
   tlbi    vmalle1
   b       4f
2: msr     sctlr_el2, x0        // Write back
   tlbi    alle2
   b       4f
3: msr     sctlr_el3, x0        // Write back
   tlbi    alle3
4: dsb     sy
   isb
   ret


ASM_PFX(ArmDisableCachesAndMmu):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Get control register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Get control register EL3
4: bic     x0, x0, #CTRL_M_BIT  // Disable MMU
   bic     x0, x0, #CTRL_C_BIT  // Disable D Cache
   bic     x0, x0, #CTRL_I_BIT  // Disable I Cache
   EL1_OR_EL2_OR_EL3(x1)
1: msr     sctlr_el1, x0        // Write back control register
   b       4f
2: msr     sctlr_el2, x0        // Write back control register
   b       4f
3: msr     sctlr_el3, x0        // Write back control register
4: dsb     sy
   isb
   ret


ASM_PFX(ArmMmuEnabled):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Get control register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Get control register EL3
4: and     x0, x0, #CTRL_M_BIT
   ret


ASM_PFX(ArmEnableDataCache):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Get control register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Get control register EL3
4: orr     x0, x0, #CTRL_C_BIT  // Set C bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr     sctlr_el1, x0        // Write back control register
   b       4f
2: msr     sctlr_el2, x0        // Write back control register
   b       4f
3: msr     sctlr_el3, x0        // Write back control register
4: dsb     sy
   isb
   ret


ASM_PFX(ArmDisableDataCache):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Get control register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Get control register EL3
4: bic     x0, x0, #CTRL_C_BIT  // Clear C bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr     sctlr_el1, x0        // Write back control register
   b       4f
2: msr     sctlr_el2, x0        // Write back control register
   b       4f
3: msr     sctlr_el3, x0        // Write back control register
4: dsb     sy
   isb
   ret


ASM_PFX(ArmEnableInstructionCache):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Get control register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Get control register EL3
4: orr     x0, x0, #CTRL_I_BIT  // Set I bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr     sctlr_el1, x0        // Write back control register
   b       4f
2: msr     sctlr_el2, x0        // Write back control register
   b       4f
3: msr     sctlr_el3, x0        // Write back control register
4: dsb     sy
   isb
   ret


ASM_PFX(ArmDisableInstructionCache):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Get control register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Get control register EL3
4: bic     x0, x0, #CTRL_I_BIT  // Clear I bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr     sctlr_el1, x0        // Write back control register
   b       4f
2: msr     sctlr_el2, x0        // Write back control register
   b       4f
3: msr     sctlr_el3, x0        // Write back control register
4: dsb     sy
   isb
   ret


ASM_PFX(ArmEnableAlignmentCheck):
   EL1_OR_EL2(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       3f
2: mrs     x0, sctlr_el2        // Get control register EL2
3: orr     x0, x0, #CTRL_A_BIT  // Set A (alignment check) bit
   EL1_OR_EL2(x1)
1: msr     sctlr_el1, x0        // Write back control register
   b       3f
2: msr     sctlr_el2, x0        // Write back control register
3: dsb     sy
   isb
   ret


ASM_PFX(ArmDisableAlignmentCheck):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Get control register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Get control register EL3
4: bic     x0, x0, #CTRL_A_BIT  // Clear A (alignment check) bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr     sctlr_el1, x0        // Write back control register
   b       4f
2: msr     sctlr_el2, x0        // Write back control register
   b       4f
3: msr     sctlr_el3, x0        // Write back control register
4: dsb     sy
   isb
   ret


// Branch prediction is always on in AArch64; anything beyond that is
// implementation specific. Kept as a no-op for C compatibility for now.
ASM_PFX(ArmEnableBranchPrediction):
  ret


// Branch prediction is always on in AArch64; anything beyond that is
// implementation specific. Kept as a no-op for C compatibility for now.
ASM_PFX(ArmDisableBranchPrediction):
  ret

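// The set/way data cache operation to apply (one of the *BySetWay helpers above)
// is passed in x0; it is kept in x1 and invoked for every set/way encoding of
// every data cache level up to the Level of Coherency.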
ASM_PFX(AArch64AllDataCachesOperation):
// We can use regs 0-7 and 9-15 without having to save/restore.
// Save our link register on the stack.
  str   x30, [sp, #-0x10]!
  mov   x1, x0                  // Save Function call in x1
  mrs   x6, clidr_el1           // Read EL1 CLIDR
  and   x3, x6, #0x7000000      // Mask out all but Level of Coherency (LoC)
  lsr   x3, x3, #23             // Left align cache level value - the level is shifted by 1 to the
                                // right to ease the access to CSSELR and the Set/Way operation.
  cbz   x3, L_Finished          // No need to clean if LoC is 0
  mov   x10, #0                 // Start clean at cache level 0
  b     Loop1

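// Same walk as above, but the level limit comes from the LoUU field of CLIDR_EL1,
// so the operation only covers data cache levels below the Point of Unification.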
ASM_PFX(AArch64PerformPoUDataCacheOperation):
// We can use regs 0-7 and 9-15 without having to save/restore.
// Save our link register on the stack.
  str   x30, [sp, #-0x10]!
  mov   x1, x0                  // Save Function call in x1
  mrs   x6, clidr_el1           // Read EL1 CLIDR
  and   x3, x6, #0x38000000     // Mask out all but the Point of Unification (PoU) level
  lsr   x3, x3, #26             // Left align cache level value - the level is shifted by 1 to the
                                // right to ease the access to CSSELR and the Set/Way operation.
  cbz   x3, L_Finished          // No need to clean if the PoU level is 0
  mov   x10, #0                 // Start clean at cache level 0

Loop1:
  add   x2, x10, x10, lsr #1    // Work out 3x cachelevel for cache info
  lsr   x12, x6, x2             // bottom 3 bits are the Cache type for this level
  and   x12, x12, #7            // get those 3 bits alone
  cmp   x12, #2                 // what cache at this level?
  b.lt  L_Skip                  // no cache or only instruction cache at this level
  msr   csselr_el1, x10         // write the Cache Size selection register with current level (CSSELR)
  isb                           // isb to sync the change to the CacheSizeID reg
  mrs   x12, ccsidr_el1         // reads current Cache Size ID register (CCSIDR)
  and   x2, x12, #0x7           // extract the line length field
  add   x2, x2, #4              // add 4 for the line length offset (log2 16 bytes)
  mov   x4, #0x400
  sub   x4, x4, #1
  and   x4, x4, x12, lsr #3     // x4 is the max number of the way size (right aligned)
  clz   w5, w4                  // w5 is the bit position of the way size increment
  mov   x7, #0x00008000
  sub   x7, x7, #1
  and   x7, x7, x12, lsr #13    // x7 is the max number of the index size (right aligned)

Loop2:
  mov   x9, x4                  // x9 working copy of the max way size (right aligned)

Loop3:
  lsl   x11, x9, x5
  orr   x0, x10, x11            // factor in the way number and cache number
  lsl   x11, x7, x2
  orr   x0, x0, x11             // factor in the index number

  blr   x1                      // Jump to the requested cache operation

  subs  x9, x9, #1              // decrement the way number
  b.ge  Loop3
  subs  x7, x7, #1              // decrement the index
  b.ge  Loop2
L_Skip:
  add   x10, x10, #2            // increment the cache number
  cmp   x3, x10
  b.gt  Loop1

L_Finished:
  dsb   sy
  isb
  ldr   x30, [sp], #0x10
  ret


ASM_PFX(ArmDataMemoryBarrier):
  dmb   sy
  ret


ASM_PFX(ArmDataSyncronizationBarrier):
ASM_PFX(ArmDrainWriteBuffer):
  dsb   sy
  ret


ASM_PFX(ArmInstructionSynchronizationBarrier):
  isb
  ret


ASM_PFX(ArmWriteVBar):
   EL1_OR_EL2_OR_EL3(x1)
1: msr     vbar_el1, x0         // Set the Address of the EL1 Vector Table in the VBAR register
   b       4f
2: msr     vbar_el2, x0         // Set the Address of the EL2 Vector Table in the VBAR register
   b       4f
3: msr     vbar_el3, x0         // Set the Address of the EL3 Vector Table in the VBAR register
4: isb
   ret

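// ArmEnableVFP: if the FP field of ID_AA64PFR0_EL1 reports floating point as
// implemented, set CPACR_EL1.FPEN (and clear CPTR_ELx.TFP when running at
// EL2/EL3) so that FP/SIMD instructions are not trapped.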
ASM_PFX(ArmEnableVFP):
  // Check whether floating-point is implemented in the processor.
  mov   x1, x30                 // Save LR
  bl    ArmReadIdPfr0           // Read EL1 Processor Feature Register (PFR0)
  mov   x30, x1                 // Restore LR
  ands  x0, x0, #AARCH64_PFR0_FP // Extract bits indicating VFP implementation
  cmp   x0, #0                  // VFP is implemented if the field reads '0'
  b.ne  4f                      // Exit if VFP is not implemented
  // VFP is implemented.
  // Make sure VFP exceptions are not trapped (to any exception level).
  mrs   x0, cpacr_el1           // Read EL1 Coprocessor Access Control Register (CPACR)
  orr   x0, x0, #CPACR_VFP_BITS // Disable VFP traps to EL1
  msr   cpacr_el1, x0           // Write back EL1 Coprocessor Access Control Register (CPACR)
  mov   x1, #AARCH64_CPTR_TFP   // TFP Bit for trapping VFP Exceptions
  EL1_OR_EL2_OR_EL3(x2)
1: ret                          // Not configurable in EL1
2: mrs   x0, cptr_el2           // Disable VFP traps to EL2
   bic   x0, x0, x1
   msr   cptr_el2, x0
   ret
3: mrs   x0, cptr_el3           // Disable VFP traps to EL3
   bic   x0, x0, x1
   msr   cptr_el3, x0
4: ret


ASM_PFX(ArmCallWFI):
  wfi
  ret


ASM_PFX(ArmInvalidateInstructionAndDataTlb):
   EL1_OR_EL2_OR_EL3(x0)
1: tlbi  vmalle1
   b     4f
2: tlbi  alle2
   b     4f
3: tlbi  alle3
4: dsb   sy
   isb
   ret


ASM_PFX(ArmReadMpidr):
  mrs   x0, mpidr_el1           // read EL1 MPIDR
  ret

ASM_PFX(ArmReadMidr):
  mrs   x0, midr_el1            // Read Main ID Register
  ret

// Keep old function names for C compatibility for now. Change later?
ASM_PFX(ArmReadTpidrurw):
  mrs   x0, tpidr_el0           // read tpidr_el0 (v7 TPIDRURW) -> (v8 TPIDR_EL0)
  ret


// Keep old function names for C compatibility for now. Change later?
ASM_PFX(ArmWriteTpidrurw):
  msr   tpidr_el0, x0           // write tpidr_el0 (v7 TPIDRURW) -> (v8 TPIDR_EL0)
  ret


// Arch timers are mandatory on AArch64
ASM_PFX(ArmIsArchTimerImplemented):
  mov   x0, #1
  ret


ASM_PFX(ArmReadIdPfr0):
  mrs   x0, id_aa64pfr0_el1     // Read ID_AA64PFR0 Register
  ret

// Q: id_aa64pfr1_el1 not defined yet. What does this function want to access?
// A: used to set up the arch timer. Check if we have security extensions and
//    permission to set things up.
//    See: ArmPkg/Library/ArmArchTimerLib/AArch64/ArmArchTimerLib.c
//    Not defined yet, but stick it in here for now; it should read all zeros.
ASM_PFX(ArmReadIdPfr1):
  mrs   x0, id_aa64pfr1_el1     // Read ID_AA64PFR1 Register
  ret

// VOID ArmWriteHcr(UINTN Hcr)
ASM_PFX(ArmWriteHcr):
  msr   hcr_el2, x0             // Write the passed HCR value
  ret

// UINTN ArmReadCurrentEL(VOID)
ASM_PFX(ArmReadCurrentEL):
  mrs   x0, CurrentEL
  ret

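// Not referenced in this file; an infinite loop kept as a halt/debug trap.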
dead:
  b     dead

ASM_FUNCTION_REMOVE_IF_UNREFERENCED