#------------------------------------------------------------------------------
#
# Copyright (c) 2008 - 2010, Apple Inc. All rights reserved.<BR>
# Copyright (c) 2011 - 2014, ARM Limited. All rights reserved.
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution.  The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
#------------------------------------------------------------------------------

#include <Chipset/AArch64.h>
#include <AsmMacroIoLibV8.h>

.text
.align 3

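// Note: on AArch64 the GNU assembler treats .align as a power of two, so
// ".align 3" requests 8-byte alignment. The GCC_ASM_EXPORT macro (an EDK II
// assembler helper) makes each listed symbol global under its ASM_PFX name.
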
GCC_ASM_EXPORT (ArmInvalidateInstructionCache)
GCC_ASM_EXPORT (ArmInvalidateDataCacheEntryByMVA)
GCC_ASM_EXPORT (ArmCleanDataCacheEntryByMVA)
GCC_ASM_EXPORT (ArmCleanInvalidateDataCacheEntryByMVA)
GCC_ASM_EXPORT (ArmInvalidateDataCacheEntryBySetWay)
GCC_ASM_EXPORT (ArmCleanDataCacheEntryBySetWay)
GCC_ASM_EXPORT (ArmCleanInvalidateDataCacheEntryBySetWay)
GCC_ASM_EXPORT (ArmDrainWriteBuffer)
GCC_ASM_EXPORT (ArmEnableMmu)
GCC_ASM_EXPORT (ArmDisableMmu)
GCC_ASM_EXPORT (ArmDisableCachesAndMmu)
GCC_ASM_EXPORT (ArmMmuEnabled)
GCC_ASM_EXPORT (ArmEnableDataCache)
GCC_ASM_EXPORT (ArmDisableDataCache)
GCC_ASM_EXPORT (ArmEnableInstructionCache)
GCC_ASM_EXPORT (ArmDisableInstructionCache)
GCC_ASM_EXPORT (ArmDisableAlignmentCheck)
GCC_ASM_EXPORT (ArmEnableAlignmentCheck)
GCC_ASM_EXPORT (ArmEnableBranchPrediction)
GCC_ASM_EXPORT (ArmDisableBranchPrediction)
GCC_ASM_EXPORT (AArch64AllDataCachesOperation)
GCC_ASM_EXPORT (ArmDataMemoryBarrier)
GCC_ASM_EXPORT (ArmDataSynchronizationBarrier)
GCC_ASM_EXPORT (ArmInstructionSynchronizationBarrier)
GCC_ASM_EXPORT (ArmWriteVBar)
GCC_ASM_EXPORT (ArmReadVBar)
GCC_ASM_EXPORT (ArmEnableVFP)
GCC_ASM_EXPORT (ArmCallWFI)
GCC_ASM_EXPORT (ArmReadMpidr)
GCC_ASM_EXPORT (ArmReadTpidrurw)
GCC_ASM_EXPORT (ArmWriteTpidrurw)
GCC_ASM_EXPORT (ArmIsArchTimerImplemented)
GCC_ASM_EXPORT (ArmReadIdPfr0)
GCC_ASM_EXPORT (ArmReadIdPfr1)
GCC_ASM_EXPORT (ArmWriteHcr)
GCC_ASM_EXPORT (ArmReadCurrentEL)

.set CTRL_M_BIT,      (1 << 0)
.set CTRL_A_BIT,      (1 << 1)
.set CTRL_C_BIT,      (1 << 2)
.set CTRL_I_BIT,      (1 << 12)
.set CTRL_V_BIT,      (1 << 12)
.set CPACR_VFP_BITS,  (3 << 20)

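// These .set values mirror SCTLR_ELx bit positions: M (MMU enable) is bit 0,
// A (alignment check) bit 1, C (data cache) bit 2 and I (instruction cache)
// bit 12. CTRL_V_BIT is unused in this file; AArch64's SCTLR has no V
// (vectors) bit, so its value here appears to be a carry-over from AArch32.
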
ASM_PFX(ArmInvalidateDataCacheEntryByMVA):
  dc ivac, x0                 // Invalidate single data cache line
  dsb sy
  isb
  ret


ASM_PFX(ArmCleanDataCacheEntryByMVA):
  dc cvac, x0                 // Clean single data cache line
  dsb sy
  isb
  ret


ASM_PFX(ArmCleanInvalidateDataCacheEntryByMVA):
  dc civac, x0                // Clean and invalidate single data cache line
  dsb sy
  isb
  ret


ASM_PFX(ArmInvalidateDataCacheEntryBySetWay):
  dc isw, x0                  // Invalidate this line
  dsb sy
  isb
  ret


ASM_PFX(ArmCleanInvalidateDataCacheEntryBySetWay):
  dc cisw, x0                 // Clean and invalidate this line
  dsb sy
  isb
  ret


ASM_PFX(ArmCleanDataCacheEntryBySetWay):
  dc csw, x0                  // Clean this line
  dsb sy
  isb
  ret


ASM_PFX(ArmInvalidateInstructionCache):
  ic iallu                    // Invalidate entire instruction cache
  dsb sy
  isb
  ret


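// The EL1_OR_EL2_OR_EL3(reg) and EL1_OR_EL2(reg) macros used below come from
// AsmMacroIoLibV8.h: they read CurrentEL into the scratch register and branch
// to the numeric local label 1:, 2: or 3: for EL1, EL2 or EL3 respectively,
// so each function operates on the system registers of the running exception
// level.
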
ASM_PFX(ArmEnableMmu):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs x0, sctlr_el1          // Read System control register EL1
   b 4f
2: mrs x0, sctlr_el2          // Read System control register EL2
   b 4f
3: mrs x0, sctlr_el3          // Read System control register EL3
4: orr x0, x0, #CTRL_M_BIT    // Set MMU enable bit
   EL1_OR_EL2_OR_EL3(x1)
1: tlbi vmalle1
   dsb nsh
   isb
   msr sctlr_el1, x0          // Write back
   b 4f
2: tlbi alle2
   dsb nsh
   isb
   msr sctlr_el2, x0          // Write back
   b 4f
3: tlbi alle3
   dsb nsh
   isb
   msr sctlr_el3, x0          // Write back
4: isb
   ret


ASM_PFX(ArmDisableMmu):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs x0, sctlr_el1          // Read System Control Register EL1
   b 4f
2: mrs x0, sctlr_el2          // Read System Control Register EL2
   b 4f
3: mrs x0, sctlr_el3          // Read System Control Register EL3
4: and x0, x0, #~CTRL_M_BIT   // Clear MMU enable bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr sctlr_el1, x0          // Write back
   tlbi vmalle1
   b 4f
2: msr sctlr_el2, x0          // Write back
   tlbi alle2
   b 4f
3: msr sctlr_el3, x0          // Write back
   tlbi alle3
4: dsb sy
   isb
   ret


ASM_PFX(ArmDisableCachesAndMmu):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs x0, sctlr_el1          // Get control register EL1
   b 4f
2: mrs x0, sctlr_el2          // Get control register EL2
   b 4f
3: mrs x0, sctlr_el3          // Get control register EL3
4: mov x1, #~(CTRL_M_BIT | CTRL_C_BIT | CTRL_I_BIT)  // Disable MMU, D & I caches
   and x0, x0, x1
   EL1_OR_EL2_OR_EL3(x1)
1: msr sctlr_el1, x0          // Write back control register
   b 4f
2: msr sctlr_el2, x0          // Write back control register
   b 4f
3: msr sctlr_el3, x0          // Write back control register
4: dsb sy
   isb
   ret


ASM_PFX(ArmMmuEnabled):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs x0, sctlr_el1          // Get control register EL1
   b 4f
2: mrs x0, sctlr_el2          // Get control register EL2
   b 4f
3: mrs x0, sctlr_el3          // Get control register EL3
4: and x0, x0, #CTRL_M_BIT
   ret


ASM_PFX(ArmEnableDataCache):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs x0, sctlr_el1          // Get control register EL1
   b 4f
2: mrs x0, sctlr_el2          // Get control register EL2
   b 4f
3: mrs x0, sctlr_el3          // Get control register EL3
4: orr x0, x0, #CTRL_C_BIT    // Set C bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr sctlr_el1, x0          // Write back control register
   b 4f
2: msr sctlr_el2, x0          // Write back control register
   b 4f
3: msr sctlr_el3, x0          // Write back control register
4: dsb sy
   isb
   ret


ASM_PFX(ArmDisableDataCache):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs x0, sctlr_el1          // Get control register EL1
   b 4f
2: mrs x0, sctlr_el2          // Get control register EL2
   b 4f
3: mrs x0, sctlr_el3          // Get control register EL3
4: and x0, x0, #~CTRL_C_BIT   // Clear C bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr sctlr_el1, x0          // Write back control register
   b 4f
2: msr sctlr_el2, x0          // Write back control register
   b 4f
3: msr sctlr_el3, x0          // Write back control register
4: dsb sy
   isb
   ret


ASM_PFX(ArmEnableInstructionCache):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs x0, sctlr_el1          // Get control register EL1
   b 4f
2: mrs x0, sctlr_el2          // Get control register EL2
   b 4f
3: mrs x0, sctlr_el3          // Get control register EL3
4: orr x0, x0, #CTRL_I_BIT    // Set I bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr sctlr_el1, x0          // Write back control register
   b 4f
2: msr sctlr_el2, x0          // Write back control register
   b 4f
3: msr sctlr_el3, x0          // Write back control register
4: dsb sy
   isb
   ret


ASM_PFX(ArmDisableInstructionCache):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs x0, sctlr_el1          // Get control register EL1
   b 4f
2: mrs x0, sctlr_el2          // Get control register EL2
   b 4f
3: mrs x0, sctlr_el3          // Get control register EL3
4: and x0, x0, #~CTRL_I_BIT   // Clear I bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr sctlr_el1, x0          // Write back control register
   b 4f
2: msr sctlr_el2, x0          // Write back control register
   b 4f
3: msr sctlr_el3, x0          // Write back control register
4: dsb sy
   isb
   ret


ASM_PFX(ArmEnableAlignmentCheck):
   EL1_OR_EL2(x1)
1: mrs x0, sctlr_el1          // Get control register EL1
   b 3f
2: mrs x0, sctlr_el2          // Get control register EL2
3: orr x0, x0, #CTRL_A_BIT    // Set A (alignment check) bit
   EL1_OR_EL2(x1)
1: msr sctlr_el1, x0          // Write back control register
   b 3f
2: msr sctlr_el2, x0          // Write back control register
3: dsb sy
   isb
   ret


ASM_PFX(ArmDisableAlignmentCheck):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs x0, sctlr_el1          // Get control register EL1
   b 4f
2: mrs x0, sctlr_el2          // Get control register EL2
   b 4f
3: mrs x0, sctlr_el3          // Get control register EL3
4: and x0, x0, #~CTRL_A_BIT   // Clear A (alignment check) bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr sctlr_el1, x0          // Write back control register
   b 4f
2: msr sctlr_el2, x0          // Write back control register
   b 4f
3: msr sctlr_el3, x0          // Write back control register
4: dsb sy
   isb
   ret


// Branch prediction is always enabled on AArch64 (anything beyond that is
// implementation defined). Kept as a no-op for C API compatibility for now.
ASM_PFX(ArmEnableBranchPrediction):
  ret


// Branch prediction cannot be architecturally disabled on AArch64. Kept as a
// no-op for C API compatibility for now.
ASM_PFX(ArmDisableBranchPrediction):
  ret


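// A note on the Set/Way loop below: the operand handed to the DC CSW/ISW/CISW
// instructions packs the cache level into bits [3:1], the set index starting
// at bit log2(line length in bytes), and the way index left-aligned so its
// top bit lands at bit 31 (hence the clz on the maximum way number). The loop
// walks every set and way of every data or unified cache level up to the
// Level of Coherency reported by CLIDR_EL1.
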
ASM_PFX(AArch64AllDataCachesOperation):
// We can use regs 0-7 and 9-15 without having to save/restore.
// Save our link register on the stack; the stack must stay quad-word aligned.
  str x30, [sp, #-16]!
  mov x1, x0                  // Save the operation's function pointer in x1
  mrs x6, clidr_el1           // Read EL1 CLIDR
  and x3, x6, #0x7000000      // Mask out all but Level of Coherency (LoC)
  lsr x3, x3, #23             // Shift LoC down to bits [3:1]; the level stays
                              // shifted left by 1 to match CSSELR and the
                              // Set/Way operand format.
  cbz x3, L_Finished          // No need to clean if LoC is 0
  mov x10, #0                 // Start clean at cache level 0

Loop1:
  add x2, x10, x10, lsr #1    // Work out 3x cache level for the CLIDR field
  lsr x12, x6, x2             // bottom 3 bits are the cache type for this level
  and x12, x12, #7            // isolate those 3 bits
  cmp x12, #2                 // what cache is at this level?
  b.lt L_Skip                 // no cache or instruction cache only: skip
  msr csselr_el1, x10         // select the current level in CSSELR
  isb                         // sync the change to the Cache Size ID register
  mrs x12, ccsidr_el1         // read the current Cache Size ID register (CCSIDR)
  and x2, x12, #0x7           // extract the line length field
  add x2, x2, #4              // add 4 for the line length offset (log2 of the
                              // 16-byte minimum line size)
  mov x4, #0x400
  sub x4, x4, #1
  and x4, x4, x12, lsr #3     // x4 is the maximum way number (right aligned)
  clz w5, w4                  // w5 is the bit position of the way size increment
  mov x7, #0x00008000
  sub x7, x7, #1
  and x7, x7, x12, lsr #13    // x7 is the maximum set number (right aligned)

Loop2:
  mov x9, x4                  // x9 is a working copy of the max way number

Loop3:
  lsl x11, x9, x5
  orr x0, x10, x11            // factor in the way number and cache level
  lsl x11, x7, x2
  orr x0, x0, x11             // factor in the set number

  blr x1                      // jump to the requested cache operation

  subs x9, x9, #1             // decrement the way number
  b.ge Loop3
  subs x7, x7, #1             // decrement the set number
  b.ge Loop2
L_Skip:
  add x10, x10, #2            // increment the cache level (shifted left by 1)
  cmp x3, x10
  b.gt Loop1

L_Finished:
  dsb sy
  isb
  ldr x30, [sp], #0x10
  ret

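// Typical usage, for illustration: callers in ArmLib pass one of the Set/Way
// helpers above, e.g.
//   AArch64AllDataCachesOperation (ArmCleanInvalidateDataCacheEntryBySetWay);
// to clean and invalidate every level of data cache up to the LoC.
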

ASM_PFX(ArmDataMemoryBarrier):
  dmb sy
  ret


ASM_PFX(ArmDataSynchronizationBarrier):
ASM_PFX(ArmDrainWriteBuffer):
  dsb sy
  ret


ASM_PFX(ArmInstructionSynchronizationBarrier):
  isb
  ret


ASM_PFX(ArmWriteVBar):
   EL1_OR_EL2_OR_EL3(x1)
1: msr vbar_el1, x0           // Set the Address of the EL1 Vector Table in the VBAR register
   b 4f
2: msr vbar_el2, x0           // Set the Address of the EL2 Vector Table in the VBAR register
   b 4f
3: msr vbar_el3, x0           // Set the Address of the EL3 Vector Table in the VBAR register
4: isb
   ret

ASM_PFX(ArmReadVBar):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs x0, vbar_el1           // Get the Address of the EL1 Vector Table from the VBAR register
   ret
2: mrs x0, vbar_el2           // Get the Address of the EL2 Vector Table from the VBAR register
   ret
3: mrs x0, vbar_el3           // Get the Address of the EL3 Vector Table from the VBAR register
   ret


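// A note on the check below: ID_AA64PFR0_EL1 bits [19:16] describe floating
// point support; 0x0 means FP is implemented and 0xF means it is not, which
// is why a non-zero masked value exits early. AARCH64_PFR0_FP and
// AARCH64_CPTR_TFP are provided via Chipset/AArch64.h, included above.
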
ASM_PFX(ArmEnableVFP):
   // Check whether floating-point is implemented in the processor.
   mov x1, x30                 // Save LR
   bl ArmReadIdPfr0            // Read EL1 Processor Feature Register (PFR0)
   mov x30, x1                 // Restore LR
   ands x0, x0, #AARCH64_PFR0_FP // Extract bits indicating VFP implementation
   cmp x0, #0                  // VFP is implemented if '0'.
   b.ne 4f                     // Exit if VFP not implemented.
   // VFP is implemented.
   // Make sure VFP exceptions are not trapped (to any exception level).
   mrs x0, cpacr_el1           // Read EL1 Coprocessor Access Control Register (CPACR)
   orr x0, x0, #CPACR_VFP_BITS // Disable VFP traps to EL1
   msr cpacr_el1, x0           // Write back EL1 Coprocessor Access Control Register (CPACR)
   mov x1, #AARCH64_CPTR_TFP   // TFP bit for trapping VFP exceptions
   EL1_OR_EL2_OR_EL3(x2)
1: ret                         // Not configurable in EL1
2: mrs x0, cptr_el2            // Disable VFP traps to EL2
   bic x0, x0, x1
   msr cptr_el2, x0
   ret
3: mrs x0, cptr_el3            // Disable VFP traps to EL3
   bic x0, x0, x1
   msr cptr_el3, x0
4: ret


ASM_PFX(ArmCallWFI):
  wfi
  ret


ASM_PFX(ArmReadMpidr):
  mrs x0, mpidr_el1           // read EL1 MPIDR
  ret


// Keep old function names for C compatibility for now. Change later?
ASM_PFX(ArmReadTpidrurw):
  mrs x0, tpidr_el0           // read tpidr_el0 (v7 TPIDRURW) -> (v8 TPIDR_EL0)
  ret


// Keep old function names for C compatibility for now. Change later?
ASM_PFX(ArmWriteTpidrurw):
  msr tpidr_el0, x0           // write tpidr_el0 (v7 TPIDRURW) -> (v8 TPIDR_EL0)
  ret


// Arch timers are mandatory on AArch64
ASM_PFX(ArmIsArchTimerImplemented):
  mov x0, #1
  ret


ASM_PFX(ArmReadIdPfr0):
  mrs x0, id_aa64pfr0_el1     // Read ID_AA64PFR0 Register
  ret


// Q: id_aa64pfr1_el1 is not defined yet. What does this function want to access?
// A: It is used to set up the arch timer: check whether we have security
//    extensions and permission to configure things.
//    See: ArmPkg/Library/ArmArchTimerLib/AArch64/ArmArchTimerLib.c
// Not defined yet, but keep it here for now; it should read as all zeros.
ASM_PFX(ArmReadIdPfr1):
  mrs x0, id_aa64pfr1_el1     // Read ID_AA64PFR1 Register
  ret

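// Note, for context: HCR_EL2 is only accessible at EL2 or above, so the
// caller of ArmWriteHcr is expected to have checked the current exception
// level first.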
// VOID ArmWriteHcr(UINTN Hcr)
ASM_PFX(ArmWriteHcr):
  msr hcr_el2, x0             // Write the passed HCR value
  ret

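// Note: CurrentEL holds the exception level in bits [3:2] (0x4 = EL1,
// 0x8 = EL2, 0xC = EL3); the raw register value is returned, so callers
// must decode those bits themselves.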
// UINTN ArmReadCurrentEL(VOID)
ASM_PFX(ArmReadCurrentEL):
  mrs x0, CurrentEL
  ret

ASM_FUNCTION_REMOVE_IF_UNREFERENCED