]> git.proxmox.com Git - mirror_edk2.git/blame - ArmPkg/Library/ArmLib/ArmV7/ArmV7Support.S
ARM Packages: Corrected non-DOS line endings
[mirror_edk2.git] / ArmPkg / Library / ArmLib / ArmV7 / ArmV7Support.S
CommitLineData
bd6b9799 1#------------------------------------------------------------------------------ \r
2#\r
3# Copyright (c) 2008 - 2010, Apple Inc. All rights reserved.<BR>\r
9401d6f4 4# Copyright (c) 2011 - 2014, ARM Limited. All rights reserved.\r
bd6b9799 5#\r
6# This program and the accompanying materials\r
7# are licensed and made available under the terms and conditions of the BSD License\r
8# which accompanies this distribution. The full text of the license may be found at\r
9# http://opensource.org/licenses/bsd-license.php\r
10#\r
11# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
12# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
13#\r
14#------------------------------------------------------------------------------\r
15\r
16.text\r
17.align 2\r
18\r
19GCC_ASM_EXPORT (ArmInvalidateInstructionCache)\r
20GCC_ASM_EXPORT (ArmInvalidateDataCacheEntryByMVA)\r
21GCC_ASM_EXPORT (ArmCleanDataCacheEntryByMVA)\r
22GCC_ASM_EXPORT (ArmCleanInvalidateDataCacheEntryByMVA)\r
23GCC_ASM_EXPORT (ArmInvalidateDataCacheEntryBySetWay)\r
24GCC_ASM_EXPORT (ArmCleanDataCacheEntryBySetWay)\r
25GCC_ASM_EXPORT (ArmCleanInvalidateDataCacheEntryBySetWay)\r
26GCC_ASM_EXPORT (ArmDrainWriteBuffer)\r
27GCC_ASM_EXPORT (ArmEnableMmu)\r
28GCC_ASM_EXPORT (ArmDisableMmu)\r
29GCC_ASM_EXPORT (ArmDisableCachesAndMmu)\r
30GCC_ASM_EXPORT (ArmMmuEnabled)\r
31GCC_ASM_EXPORT (ArmEnableDataCache)\r
32GCC_ASM_EXPORT (ArmDisableDataCache)\r
33GCC_ASM_EXPORT (ArmEnableInstructionCache)\r
34GCC_ASM_EXPORT (ArmDisableInstructionCache)\r
35GCC_ASM_EXPORT (ArmEnableSWPInstruction)\r
36GCC_ASM_EXPORT (ArmEnableBranchPrediction)\r
37GCC_ASM_EXPORT (ArmDisableBranchPrediction)\r
38GCC_ASM_EXPORT (ArmSetLowVectors)\r
39GCC_ASM_EXPORT (ArmSetHighVectors)\r
40GCC_ASM_EXPORT (ArmV7AllDataCachesOperation)\r
d60f6af4 41GCC_ASM_EXPORT (ArmV7PerformPoUDataCacheOperation)\r
bd6b9799 42GCC_ASM_EXPORT (ArmDataMemoryBarrier)\r
43GCC_ASM_EXPORT (ArmDataSyncronizationBarrier)\r
44GCC_ASM_EXPORT (ArmInstructionSynchronizationBarrier)\r
836c3500 45GCC_ASM_EXPORT (ArmReadVBar)\r
bd6b9799 46GCC_ASM_EXPORT (ArmWriteVBar)\r
47GCC_ASM_EXPORT (ArmEnableVFP)\r
48GCC_ASM_EXPORT (ArmCallWFI)\r
49GCC_ASM_EXPORT (ArmReadCbar)\r
50GCC_ASM_EXPORT (ArmInvalidateInstructionAndDataTlb)\r
51GCC_ASM_EXPORT (ArmReadMpidr)\r
52GCC_ASM_EXPORT (ArmReadTpidrurw)\r
53GCC_ASM_EXPORT (ArmWriteTpidrurw)\r
54GCC_ASM_EXPORT (ArmIsArchTimerImplemented)\r
55GCC_ASM_EXPORT (ArmReadIdPfr1)\r
56\r
# Bit masks for SCTLR (CP15 c1, System Control Register) used throughout
# this file.  DC_ON/CTRL_C_BIT and IC_ON/CTRL_I_BIT name the same bits.
.set DC_ON, (0x1<<2)        @ SCTLR.C  - data/unified cache enable
.set IC_ON, (0x1<<12)       @ SCTLR.I  - instruction cache enable
.set CTRL_M_BIT,  (1 << 0)  @ SCTLR.M  - MMU enable
.set CTRL_C_BIT,  (1 << 2)  @ SCTLR.C  - data cache enable
.set CTRL_B_BIT,  (1 << 7)  @ SCTLR.B  - legacy endianness bit (not used below)
.set CTRL_I_BIT,  (1 << 12) @ SCTLR.I  - instruction cache enable
63\r
64\r
# VOID ArmInvalidateDataCacheEntryByMVA (UINTN Address)
# In:  r0 = virtual address inside the data cache line to invalidate.
# Invalidates one data cache line by MVA (DCIMVAC encoding), then
# dsb/isb so the maintenance completes before returning.
ASM_PFX(ArmInvalidateDataCacheEntryByMVA):
  mcr     p15, 0, r0, c7, c6, 1   @ DCIMVAC: invalidate single data cache line by MVA
  dsb
  isb
  bx      lr
70\r
# VOID ArmCleanDataCacheEntryByMVA (UINTN Address)
# In:  r0 = virtual address inside the data cache line to clean.
# Cleans (writes back) one data cache line by MVA (DCCMVAC encoding).
ASM_PFX(ArmCleanDataCacheEntryByMVA):
  mcr     p15, 0, r0, c7, c10, 1  @ DCCMVAC: clean single data cache line by MVA
  dsb
  isb
  bx      lr
76\r
77\r
# VOID ArmCleanInvalidateDataCacheEntryByMVA (UINTN Address)
# In:  r0 = virtual address inside the data cache line.
# Cleans then invalidates one data cache line by MVA (DCCIMVAC encoding).
ASM_PFX(ArmCleanInvalidateDataCacheEntryByMVA):
  mcr     p15, 0, r0, c7, c14, 1  @ DCCIMVAC: clean and invalidate single line by MVA
  dsb
  isb
  bx      lr
83\r
84\r
# VOID ArmInvalidateDataCacheEntryBySetWay (UINTN SetWayFormat)
# In:  r0 = set/way/level word in DCISW format (as built by the
#      ArmV7*DataCache*Operation walkers below).
ASM_PFX(ArmInvalidateDataCacheEntryBySetWay):
  mcr     p15, 0, r0, c7, c6, 2   @ DCISW: invalidate this line by set/way
  dsb
  isb
  bx      lr
90\r
91\r
# VOID ArmCleanInvalidateDataCacheEntryBySetWay (UINTN SetWayFormat)
# In:  r0 = set/way/level word in DCCISW format.
ASM_PFX(ArmCleanInvalidateDataCacheEntryBySetWay):
  mcr     p15, 0, r0, c7, c14, 2  @ DCCISW: clean and invalidate this line by set/way
  dsb
  isb
  bx      lr
97\r
98\r
# VOID ArmCleanDataCacheEntryBySetWay (UINTN SetWayFormat)
# In:  r0 = set/way/level word in DCCSW format.
ASM_PFX(ArmCleanDataCacheEntryBySetWay):
  mcr     p15, 0, r0, c7, c10, 2  @ DCCSW: clean this line by set/way
  dsb
  isb
  bx      lr
104\r
# VOID ArmInvalidateInstructionCache (VOID)
# Invalidates the entire instruction cache (ICIALLU).  The r0 value is
# ignored by the operation (register field is Should-Be-Zero for ICIALLU).
# NOTE(review): branch predictor invalidation (BPIALL) is not performed
# here; ArmDisableMmu below does it explicitly where required.
ASM_PFX(ArmInvalidateInstructionCache):
  mcr     p15,0,R0,c7,c5,0        @ ICIALLU: invalidate entire instruction cache
  dsb
  isb
  bx      LR
110\r
# VOID ArmEnableMmu (VOID)
# Sets SCTLR.M (bit 0).  Caller is responsible for having valid
# translation tables installed (TTBR/TTBCR/DACR) before calling.
ASM_PFX(ArmEnableMmu):
  mrc     p15,0,R0,c1,c0,0        @ read SCTLR
  orr     R0,R0,#1                @ set M bit (MMU enable)
  mcr     p15,0,R0,c1,c0,0        @ write SCTLR
  dsb
  isb
  bx      LR
118\r
119\r
# VOID ArmDisableMmu (VOID)
# Clears SCTLR.M, then invalidates the unified TLB and the branch
# predictor so no stale translations/predictions survive the switch.
# The r0 value passed to the TLBIALL/BPIALL writes is ignored (SBZ field).
ASM_PFX(ArmDisableMmu):
  mrc     p15,0,R0,c1,c0,0        @ read SCTLR
  bic     R0,R0,#1                @ clear M bit
  mcr     p15,0,R0,c1,c0,0        @ write SCTLR: MMU disabled

  mcr     p15,0,R0,c8,c7,0        @ TLBIALL: invalidate entire unified TLB
  mcr     p15,0,R0,c7,c5,6        @ BPIALL: invalidate branch predictor array
  dsb
  isb
  bx      LR
130\r
# VOID ArmDisableCachesAndMmu (VOID)
# Atomically (single SCTLR write) disables the MMU, the data cache and
# the instruction cache.  Does NOT clean or invalidate any cache: the
# caller must handle cache maintenance around this call.
ASM_PFX(ArmDisableCachesAndMmu):
  mrc     p15, 0, r0, c1, c0, 0   @ read SCTLR
  bic     r0, r0, #CTRL_M_BIT     @ clear M: disable MMU
  bic     r0, r0, #CTRL_C_BIT     @ clear C: disable D cache
  bic     r0, r0, #CTRL_I_BIT     @ clear I: disable I cache
  mcr     p15, 0, r0, c1, c0, 0   @ write SCTLR
  dsb
  isb
  bx      LR
140\r
# BOOLEAN ArmMmuEnabled (VOID)
# Out: r0 = SCTLR.M, i.e. 1 if the MMU is enabled, 0 otherwise.
ASM_PFX(ArmMmuEnabled):
  mrc     p15,0,R0,c1,c0,0        @ read SCTLR
  and     R0,R0,#1                @ isolate M bit
  bx      LR
145\r
# VOID ArmEnableDataCache (VOID)
# Sets SCTLR.C (data/unified cache enable).
ASM_PFX(ArmEnableDataCache):
  ldr     R1,=DC_ON               @ R1 = C-bit mask
  mrc     p15,0,R0,c1,c0,0        @ read SCTLR
  orr     R0,R0,R1                @ set C bit
  mcr     p15,0,r0,c1,c0,0        @ write SCTLR
  dsb
  isb
  bx      LR
154 \r
# VOID ArmDisableDataCache (VOID)
# Clears SCTLR.C.  Does not clean/invalidate the cache; callers that
# need coherency must do that separately.
ASM_PFX(ArmDisableDataCache):
  ldr     R1,=DC_ON               @ R1 = C-bit mask
  mrc     p15,0,R0,c1,c0,0        @ read SCTLR
  bic     R0,R0,R1                @ clear C bit
  mcr     p15,0,r0,c1,c0,0        @ write SCTLR
  dsb
  isb
  bx      LR
163\r
# VOID ArmEnableInstructionCache (VOID)
# Sets SCTLR.I (instruction cache enable).
ASM_PFX(ArmEnableInstructionCache):
  ldr     R1,=IC_ON               @ R1 = I-bit mask
  mrc     p15,0,R0,c1,c0,0        @ read SCTLR
  orr     R0,R0,R1                @ set I bit
  mcr     p15,0,r0,c1,c0,0        @ write SCTLR
  dsb
  isb
  bx      LR
172 \r
# VOID ArmDisableInstructionCache (VOID)
# Clears SCTLR.I.  Does not invalidate the I-cache itself.
ASM_PFX(ArmDisableInstructionCache):
  ldr     R1,=IC_ON               @ R1 = I-bit mask
  mrc     p15,0,R0,c1,c0,0        @ read SCTLR
  bic     R0,R0,R1                @ clear I bit
  mcr     p15,0,r0,c1,c0,0        @ write SCTLR
  dsb
  isb
  bx      LR
181\r
# VOID ArmEnableSWPInstruction (VOID)
# Sets SCTLR bit 10 (0x400, the SW bit) so the deprecated SWP/SWPB
# instructions are usable on ARMv7.
ASM_PFX(ArmEnableSWPInstruction):
  mrc     p15, 0, r0, c1, c0, 0   @ read SCTLR
  orr     r0, r0, #0x00000400     @ set SW bit (enable SWP/SWPB)
  mcr     p15, 0, r0, c1, c0, 0   @ write SCTLR
  isb
  bx      LR
188\r
# VOID ArmEnableBranchPrediction (VOID)
# Sets SCTLR bit 11 (0x800, the Z bit: program-flow prediction enable).
ASM_PFX(ArmEnableBranchPrediction):
  mrc     p15, 0, r0, c1, c0, 0   @ read SCTLR
  orr     r0, r0, #0x00000800     @ set Z bit
  mcr     p15, 0, r0, c1, c0, 0   @ write SCTLR
  dsb
  isb
  bx      LR
196\r
# VOID ArmDisableBranchPrediction (VOID)
# Clears SCTLR bit 11 (Z bit: program-flow prediction).
ASM_PFX(ArmDisableBranchPrediction):
  mrc     p15, 0, r0, c1, c0, 0   @ read SCTLR
  bic     r0, r0, #0x00000800     @ clear Z bit
  mcr     p15, 0, r0, c1, c0, 0   @ write SCTLR
  dsb
  isb
  bx      LR
204\r
# VOID ArmSetLowVectors (VOID)
# Clears SCTLR.V (bit 13): exception vectors at 0x00000000
# (or VBAR, if Security Extensions are implemented).
ASM_PFX(ArmSetLowVectors):
  mrc     p15, 0, r0, c1, c0, 0   @ read SCTLR
  bic     r0, r0, #0x00002000     @ clear V bit -> low vectors
  mcr     p15, 0, r0, c1, c0, 0   @ write SCTLR
  isb
  bx      LR
211\r
# VOID ArmSetHighVectors (VOID)
# Sets SCTLR.V (bit 13): exception vectors at 0xFFFF0000.
ASM_PFX(ArmSetHighVectors):
  mrc     p15, 0, r0, c1, c0, 0   @ read SCTLR
  orr     r0, r0, #0x00002000     @ set V bit -> high vectors
  mcr     p15, 0, r0, c1, c0, 0   @ write SCTLR
  isb
  bx      LR
218\r
# VOID ArmV7AllDataCachesOperation (ARM_V7_CACHE_OPERATION DataCacheOperation)
# In:  r0 = function pointer taking a set/way/level word in r0
#      (e.g. ArmCleanDataCacheEntryBySetWay above).
# Walks every set and way of every data/unified cache level up to the
# Level of Coherency (CLIDR.LoC), calling the supplied operation once
# per line.  Standard ARMv7 set/way walk: for each level, read CCSIDR
# to get line size / ways / sets, then iterate ways x sets.
# Clobbers r0-r3, r12 (AAPCS caller-saved); r4-r12 are saved/restored.
ASM_PFX(ArmV7AllDataCachesOperation):
  stmfd   SP!,{r4-r12, LR}
  mov     R1, R0                  @ R1 = operation callback (r0 is reused per line)
  mrc     p15, 1, R6, c0, c0, 1   @ read CLIDR
  ands    R3, R6, #0x7000000      @ mask out all but Level of Coherency (LoC, bits 26:24)
  mov     R3, R3, LSR #23         @ R3 = LoC * 2 (matches cache-number step of 2 below)
  beq     L_Finished              @ LoC == 0: nothing to do (flags from ands, mov is flag-safe)
  mov     R10, #0                 @ R10 = cache number * 2 (CSSELR level field)

Loop1:
  add     R2, R10, R10, LSR #1    @ R2 = 3 * cache level (CLIDR has 3 bits per level)
  mov     R12, R6, LSR R2         @ shift cache-type field for this level to bottom
  and     R12, R12, #7            @ isolate the 3-bit cache type
  cmp     R12, #2
  blt     L_Skip                  @ no cache or instruction-only cache at this level
  mcr     p15, 2, R10, c0, c0, 0  @ write CSSELR (select this level, data/unified)
  isb                             @ sync CSSELR change before reading CCSIDR
  mrc     p15, 1, R12, c0, c0, 0  @ read CCSIDR for the selected cache
  and     R2, R12, #0x7           @ extract log2(line length) - 4
  add     R2, R2, #4              @ R2 = log2(line length in bytes); set-index shift
@ ldr R4, =0x3FF                  @ (equivalent constant, kept as two instructions below)
  mov     R4, #0x400
  sub     R4, R4, #1              @ R4 = 0x3FF: max associativity mask
  ands    R4, R4, R12, LSR #3     @ R4 = number of ways - 1 (right aligned)
  clz     R5, R4                  @ R5 = bit position for the way field (32 - way bits)
@ ldr R7, =0x00007FFF             @ (equivalent constant, kept as two instructions below)
  mov     R7, #0x00008000
  sub     R7, R7, #1              @ R7 = 0x7FFF: max set-count mask
  ands    R7, R7, R12, LSR #13    @ R7 = number of sets - 1 (right aligned)

Loop2:
  mov     R9, R4                  @ R9 = working copy of max way index

Loop3:
  orr     R0, R10, R9, LSL R5     @ R0 = level | (way << R5)
  orr     R0, R0, R7, LSL R2      @ R0 |= (set << R2): full set/way word

  blx     R1                      @ apply the cache operation to this line

  subs    R9, R9, #1              @ next way
  bge     Loop3
  subs    R7, R7, #1              @ next set
  bge     Loop2
L_Skip:
  add     R10, R10, #2            @ next cache level (CSSELR level is bits 3:1)
  cmp     R3, R10
  bgt     Loop1

L_Finished:
  dsb                             @ ensure all maintenance completes
  ldmfd   SP!, {r4-r12, lr}
  bx      LR
271\r
# VOID ArmV7PerformPoUDataCacheOperation (ARM_V7_CACHE_OPERATION DataCacheOperation)
# In:  r0 = function pointer taking a set/way/level word in r0.
# Same set/way walk as ArmV7AllDataCachesOperation, but only over cache
# levels below the Level of Unification (CLIDR LoU, bits 29:27), i.e.
# the levels relevant for making data visible to instruction fetch.
# Clobbers r0-r3, r12; r4-r12 are saved/restored.
ASM_PFX(ArmV7PerformPoUDataCacheOperation):
  stmfd   SP!,{r4-r12, LR}
  mov     R1, R0                  @ R1 = operation callback (r0 is reused per line)
  mrc     p15, 1, R6, c0, c0, 1   @ read CLIDR
  ands    R3, R6, #0x38000000     @ mask out all but Level of Unification (LoU, bits 29:27)
  mov     R3, R3, LSR #26         @ R3 = LoU * 2 (matches cache-number step of 2 below)
  beq     Finished2               @ LoU == 0: nothing to do
  mov     R10, #0                 @ R10 = cache number * 2 (CSSELR level field)

Loop4:
  add     R2, R10, R10, LSR #1    @ R2 = 3 * cache level (CLIDR has 3 bits per level)
  mov     R12, R6, LSR R2         @ shift cache-type field for this level to bottom
  and     R12, R12, #7            @ isolate the 3-bit cache type
  cmp     R12, #2
  blt     Skip2                   @ no cache or instruction-only cache at this level
  mcr     p15, 2, R10, c0, c0, 0  @ write CSSELR (select this level, data/unified)
  isb                             @ sync CSSELR change before reading CCSIDR
  mrc     p15, 1, R12, c0, c0, 0  @ read CCSIDR for the selected cache
  and     R2, R12, #0x7           @ extract log2(line length) - 4
  add     R2, R2, #4              @ R2 = log2(line length in bytes); set-index shift
  ldr     R4, =0x3FF              @ max associativity mask
  ands    R4, R4, R12, LSR #3     @ R4 = number of ways - 1 (right aligned)
  clz     R5, R4                  @ R5 = bit position for the way field
  ldr     R7, =0x00007FFF         @ max set-count mask
  ands    R7, R7, R12, LSR #13    @ R7 = number of sets - 1 (right aligned)

Loop5:
  mov     R9, R4                  @ R9 = working copy of max way index

Loop6:
  orr     R0, R10, R9, LSL R5     @ R0 = level | (way << R5)
  orr     R0, R0, R7, LSL R2      @ R0 |= (set << R2): full set/way word

  blx     R1                      @ apply the cache operation to this line

  subs    R9, R9, #1              @ next way
  bge     Loop6
  subs    R7, R7, #1              @ next set
  bge     Loop5
Skip2:
  add     R10, R10, #2            @ next cache level (CSSELR level is bits 3:1)
  cmp     R3, R10
  bgt     Loop4

Finished2:
  dsb                             @ ensure all maintenance completes
  ldmfd   SP!, {r4-r12, lr}
  bx      LR
320\r
# VOID ArmDataMemoryBarrier (VOID)
# Full-system Data Memory Barrier.
ASM_PFX(ArmDataMemoryBarrier):
  dmb
  bx      LR
324 \r
# VOID ArmDataSyncronizationBarrier (VOID)   [sic: exported API name]
# VOID ArmDrainWriteBuffer (VOID)
# Both entry points execute a full-system Data Synchronization Barrier
# ("drain write buffer" is the legacy ARMv6 name for the same operation).
ASM_PFX(ArmDataSyncronizationBarrier):
ASM_PFX(ArmDrainWriteBuffer):
  dsb
  bx      LR
329 \r
# VOID ArmInstructionSynchronizationBarrier (VOID)
# Flushes the pipeline so later instructions see prior context changes.
ASM_PFX(ArmInstructionSynchronizationBarrier):
  isb
  bx      LR
333\r
# UINTN ArmReadVBar (VOID)
# Out: r0 = VBAR (Vector Base Address Register).
ASM_PFX(ArmReadVBar):
  # Get the address of the vector table from the VBAR register
  mrc     p15, 0, r0, c12, c0, 0
  bx      lr
338\r
# VOID ArmWriteVBar (UINTN VectorBase)
# In:  r0 = new vector table base address.
# Writes VBAR, then clears SCTLR.V so the core uses VBAR-relative
# (low) vectors rather than the fixed 0xFFFF0000 high vectors.
ASM_PFX(ArmWriteVBar):
  # Set the Address of the Vector Table in the VBAR register
  mcr     p15, 0, r0, c12, c0, 0
  # Ensure the SCTLR.V bit is clear so VBAR is actually used
  mrc     p15, 0, r0, c1, c0, 0   @ read SCTLR (clobbers r0; argument no longer needed)
  bic     r0, r0, #0x00002000     @ clear V bit
  mcr     p15, 0, r0, c1, c0, 0   @ write SCTLR
  isb
  bx      lr
348\r
# VOID ArmEnableVFP (VOID)
# Grants full (PL0+PL1) access to coprocessors CP10/CP11 in CPACR,
# then sets FPEXC.EN so the VFP/Advanced SIMD unit operates normally.
ASM_PFX(ArmEnableVFP):
  # Read CPACR (Coprocessor Access Control Register)
  mrc     p15, 0, r0, c1, c0, 2
  # Enable VFP access (full access to CP10, CP11: cp10/cp11 fields = 0b11)
  orr     r0, r0, #0x00f00000
  # Write back CPACR (Coprocessor Access Control Register)
  mcr     p15, 0, r0, c1, c0, 2
  isb                             @ CPACR change must be visible before touching FPEXC
  # Set EN bit in FPEXC: Advanced SIMD and VFP extensions enabled
  mov     r0, #0x40000000
  mcr     p10,#0x7,r0,c8,c0,#0    @ VMSR FPEXC, r0 (encoded as CP10 access)
  bx      lr
361\r
# VOID ArmCallWFI (VOID)
# Enters low-power state until the next interrupt/wakeup event.
ASM_PFX(ArmCallWFI):
  wfi
  bx      lr
365\r
# UINTN ArmReadCbar (VOID)
# Out: r0 = Configuration Base Address Register (CBAR).
# Note: returns 0 on uniprocessor implementations.
ASM_PFX(ArmReadCbar):
  mrc     p15, 4, r0, c15, c0, 0  @ read Configuration Base Address Register
  bx      lr
370\r
# VOID ArmInvalidateInstructionAndDataTlb (VOID)
# Invalidates the entire unified TLB (TLBIALL covers both instruction
# and data sides).  The r0 value is ignored (SBZ field).
ASM_PFX(ArmInvalidateInstructionAndDataTlb):
  mcr     p15, 0, r0, c8, c7, 0   @ TLBIALL: invalidate inst and data TLB
  dsb
  bx      lr
375\r
# UINTN ArmReadMpidr (VOID)
# Out: r0 = MPIDR (Multiprocessor Affinity Register).
ASM_PFX(ArmReadMpidr):
  mrc     p15, 0, r0, c0, c0, 5   @ read MPIDR
  bx      lr
f6c5a29b 379 \r
# UINTN ArmReadTpidrurw (VOID)
# Out: r0 = TPIDRURW (user read/write software thread ID register).
ASM_PFX(ArmReadTpidrurw):
  mrc     p15, 0, r0, c13, c0, 2  @ read TPIDRURW
  bx      lr
383\r
# VOID ArmWriteTpidrurw (UINTN Value)
# In:  r0 = value to store in TPIDRURW.
ASM_PFX(ArmWriteTpidrurw):
  mcr     p15, 0, r0, c13, c0, 2  @ write TPIDRURW
  bx      lr
387\r
# UINTN ArmIsArchTimerImplemented (VOID)
# Out: r0 = ID_PFR1 GenTimer field (bits 19:16), i.e. nonzero when the
#      Generic Timer extension is implemented, zero otherwise.
ASM_PFX(ArmIsArchTimerImplemented):
  mrc     p15, 0, r0, c0, c1, 1   @ read ID_PFR1
  and     r0, r0, #0x000F0000     @ isolate Generic Timer field
  bx      lr
392\r
# UINTN ArmReadIdPfr1 (VOID)
# Out: r0 = raw ID_PFR1 (Processor Feature Register 1).
ASM_PFX(ArmReadIdPfr1):
  mrc     p15, 0, r0, c0, c1, 1   @ read ID_PFR1 register
  bx      lr
396\r
397ASM_FUNCTION_REMOVE_IF_UNREFERENCED\r