#------------------------------------------------------------------------------
#
# Copyright (c) 2008 - 2010, Apple Inc. All rights reserved.<BR>
# Copyright (c) 2011 - 2014, ARM Limited. All rights reserved.
# Copyright (c) 2016, Linaro Limited. All rights reserved.
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution.  The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
#------------------------------------------------------------------------------

#include <AsmMacroIoLib.h>

.set DC_ON, (0x1<<2)
.set IC_ON, (0x1<<12)
.set CTRL_M_BIT, (1 << 0)
.set CTRL_C_BIT, (1 << 2)
.set CTRL_B_BIT, (1 << 7)
.set CTRL_I_BIT, (1 << 12)

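# The single-line cache maintenance functions below operate on the cache line
# containing the virtual address (MVA) passed in r0.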
ASM_FUNC(ArmInvalidateDataCacheEntryByMVA)
  mcr     p15, 0, r0, c7, c6, 1   @invalidate single data cache line
  bx      lr

ASM_FUNC(ArmCleanDataCacheEntryByMVA)
  mcr     p15, 0, r0, c7, c10, 1  @clean single data cache line
  bx      lr


ASM_FUNC(ArmCleanDataCacheEntryToPoUByMVA)
  mcr     p15, 0, r0, c7, c11, 1  @clean single data cache line to PoU
  bx      lr

ASM_FUNC(ArmInvalidateInstructionCacheEntryToPoUByMVA)
  mcr     p15, 0, r0, c7, c5, 1   @Invalidate single instruction cache line to PoU
  mcr     p15, 0, r0, c7, c5, 7   @Invalidate branch predictor
  bx      lr

ASM_FUNC(ArmCleanInvalidateDataCacheEntryByMVA)
  mcr     p15, 0, r0, c7, c14, 1  @clean and invalidate single data cache line
  bx      lr

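# The *BySetWay functions below expect r0 to hold a set/way/level operand in
# the format required by the data cache maintenance by set/way operations,
# as constructed by ArmV7AllDataCachesOperation further down.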
ASM_FUNC(ArmInvalidateDataCacheEntryBySetWay)
  mcr     p15, 0, r0, c7, c6, 2        @ Invalidate this line
  bx      lr


ASM_FUNC(ArmCleanInvalidateDataCacheEntryBySetWay)
  mcr     p15, 0, r0, c7, c14, 2       @ Clean and Invalidate this line
  bx      lr


ASM_FUNC(ArmCleanDataCacheEntryBySetWay)
  mcr     p15, 0, r0, c7, c10, 2       @ Clean this line
  bx      lr

ASM_FUNC(ArmInvalidateInstructionCache)
  mcr     p15,0,R0,c7,c5,0      @Invalidate entire instruction cache
  dsb
  isb
  bx      LR

ASM_FUNC(ArmEnableMmu)
  mrc     p15,0,R0,c1,c0,0      @Read SCTLR
  orr     R0,R0,#1              @Set M bit (enable MMU)
  mcr     p15,0,R0,c1,c0,0      @Write SCTLR
  dsb
  isb
  bx      LR


ASM_FUNC(ArmDisableMmu)
  mrc     p15,0,R0,c1,c0,0
  bic     R0,R0,#1
  mcr     p15,0,R0,c1,c0,0      @Disable MMU

  mcr     p15,0,R0,c8,c7,0      @Invalidate TLB
  mcr     p15,0,R0,c7,c5,6      @Invalidate Branch predictor array
  dsb
  isb
  bx      LR

ASM_FUNC(ArmDisableCachesAndMmu)
  mrc   p15, 0, r0, c1, c0, 0  @ Get control register
  bic   r0, r0, #CTRL_M_BIT    @ Disable MMU
  bic   r0, r0, #CTRL_C_BIT    @ Disable D Cache
  bic   r0, r0, #CTRL_I_BIT    @ Disable I Cache
  mcr   p15, 0, r0, c1, c0, 0  @ Write control register
  dsb
  isb
  bx    LR

ASM_FUNC(ArmMmuEnabled)
  mrc     p15,0,R0,c1,c0,0
  and     R0,R0,#1
  bx      LR

ASM_FUNC(ArmEnableDataCache)
  ldr     R1,=DC_ON
  mrc     p15,0,R0,c1,c0,0      @Read control register configuration data
  orr     R0,R0,R1              @Set C bit
  mcr     p15,0,r0,c1,c0,0      @Write control register configuration data
  dsb
  isb
  bx      LR

ASM_FUNC(ArmDisableDataCache)
  ldr     R1,=DC_ON
  mrc     p15,0,R0,c1,c0,0      @Read control register configuration data
  bic     R0,R0,R1              @Clear C bit
  mcr     p15,0,r0,c1,c0,0      @Write control register configuration data
  dsb
  isb
  bx      LR

ASM_FUNC(ArmEnableInstructionCache)
  ldr     R1,=IC_ON
  mrc     p15,0,R0,c1,c0,0      @Read control register configuration data
  orr     R0,R0,R1              @Set I bit
  mcr     p15,0,r0,c1,c0,0      @Write control register configuration data
  dsb
  isb
  bx      LR

ASM_FUNC(ArmDisableInstructionCache)
  ldr     R1,=IC_ON
  mrc     p15,0,R0,c1,c0,0      @Read control register configuration data
  bic     R0,R0,R1              @Clear I bit
  mcr     p15,0,r0,c1,c0,0      @Write control register configuration data
  dsb
  isb
  bx      LR

ASM_FUNC(ArmEnableSWPInstruction)
  mrc     p15, 0, r0, c1, c0, 0
  orr     r0, r0, #0x00000400   @ set SCTLR.SW (enable SWP/SWPB)
  mcr     p15, 0, r0, c1, c0, 0
  isb
  bx      LR

ASM_FUNC(ArmEnableBranchPrediction)
  mrc     p15, 0, r0, c1, c0, 0
  orr     r0, r0, #0x00000800   @ set SCTLR.Z (enable branch prediction)
  mcr     p15, 0, r0, c1, c0, 0
  dsb
  isb
  bx      LR

ASM_FUNC(ArmDisableBranchPrediction)
  mrc     p15, 0, r0, c1, c0, 0
  bic     r0, r0, #0x00000800   @ clear SCTLR.Z (disable branch prediction)
  mcr     p15, 0, r0, c1, c0, 0
  dsb
  isb
  bx      LR

ASM_FUNC(ArmSetLowVectors)
  mrc     p15, 0, r0, c1, c0, 0 @ Read SCTLR into R0 (Read control register configuration data)
  bic     r0, r0, #0x00002000   @ clear V bit
  mcr     p15, 0, r0, c1, c0, 0 @ Write R0 into SCTLR (Write control register configuration data)
  isb
  bx      LR

ASM_FUNC(ArmSetHighVectors)
  mrc     p15, 0, r0, c1, c0, 0 @ Read SCTLR into R0 (Read control register configuration data)
  orr     r0, r0, #0x00002000   @ Set V bit
  mcr     p15, 0, r0, c1, c0, 0 @ Write R0 into SCTLR (Write control register configuration data)
  isb
  bx      LR

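# Walk every level of data/unified cache described in CLIDR, up to the Level
# of Coherency, and call the maintenance routine passed in r0 (for example
# ArmCleanDataCacheEntryBySetWay) once for every set/way combination at each
# level.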
ASM_FUNC(ArmV7AllDataCachesOperation)
  stmfd SP!,{r4-r12, LR}
  mov   R1, R0                @ Save the function pointer in R1
  mrc   p15, 1, R6, c0, c0, 1 @ Read CLIDR
  ands  R3, R6, #0x7000000    @ Mask out all but Level of Coherency (LoC)
  mov   R3, R3, LSR #23       @ Cache level value (naturally aligned)
  beq   L_Finished
  mov   R10, #0

Loop1:
  add   R2, R10, R10, LSR #1  @ Work out 3xcachelevel
  mov   R12, R6, LSR R2       @ bottom 3 bits are the Cache type for this level
  and   R12, R12, #7          @ get those 3 bits alone
  cmp   R12, #2
  blt   L_Skip                @ no cache or only instruction cache at this level
  mcr   p15, 2, R10, c0, c0, 0 @ write the Cache Size selection register (CSSELR); set bit 0 to select the instruction cache instead
  isb                         @ isb to sync the change to the CacheSizeID reg
  mrc   p15, 1, R12, c0, c0, 0 @ reads current Cache Size ID register (CCSIDR)
  and   R2, R12, #0x7         @ extract the line length field
  add   R2, R2, #4            @ add 4 for the line length offset (log2 16 bytes)
@ ldr   R4, =0x3FF
  mov   R4, #0x400
  sub   R4, R4, #1
  ands  R4, R4, R12, LSR #3   @ R4 is the max number of the way size (right aligned)
  clz   R5, R4                @ R5 is the bit position of the way size increment
@ ldr   R7, =0x00007FFF
  mov   R7, #0x00008000
  sub   R7, R7, #1
  ands  R7, R7, R12, LSR #13  @ R7 is the max number of the index size (right aligned)

Loop2:
  mov   R9, R4                @ R9 working copy of the max way size (right aligned)

Loop3:
  orr   R0, R10, R9, LSL R5   @ factor in the way number and cache number into R0
  orr   R0, R0, R7, LSL R2    @ factor in the index number

  blx   R1

  subs  R9, R9, #1            @ decrement the way number
  bge   Loop3
  subs  R7, R7, #1            @ decrement the index
  bge   Loop2
L_Skip:
  add   R10, R10, #2          @ increment the cache number (CSSELR level field is bits [3:1])
  cmp   R3, R10
  bgt   Loop1

L_Finished:
  dsb
  ldmfd SP!, {r4-r12, lr}
  bx    LR

ASM_FUNC(ArmDataMemoryBarrier)
  dmb
  bx      LR

ASM_FUNC(ArmDataSynchronizationBarrier)
  dsb
  bx      LR

ASM_FUNC(ArmInstructionSynchronizationBarrier)
  isb
  bx      LR

ASM_FUNC(ArmReadVBar)
  # Return the address of the Vector Table from the VBAR register
  mrc     p15, 0, r0, c12, c0, 0
  bx      lr

ASM_FUNC(ArmWriteVBar)
  # Set the address of the Vector Table in the VBAR register
  mcr     p15, 0, r0, c12, c0, 0
  # Ensure the SCTLR.V bit is clear
  mrc     p15, 0, r0, c1, c0, 0 @ Read SCTLR into R0 (Read control register configuration data)
  bic     r0, r0, #0x00002000   @ clear V bit
  mcr     p15, 0, r0, c1, c0, 0 @ Write R0 into SCTLR (Write control register configuration data)
  isb
  bx      lr

ASM_FUNC(ArmEnableVFP)
  # Read CPACR (Coprocessor Access Control Register)
  mrc     p15, 0, r0, c1, c0, 2
  # Enable VFP access (Full Access to CP10, CP11) (V* instructions)
  orr     r0, r0, #0x00f00000
  # Write back CPACR (Coprocessor Access Control Register)
  mcr     p15, 0, r0, c1, c0, 2
  isb
  # Set EN bit in FPEXC. The Advanced SIMD and VFP extensions are enabled and operate normally.
  mov     r0, #0x40000000
#ifndef __clang__
  mcr     p10,#0x7,r0,c8,c0,#0
#else
  vmsr    fpexc, r0
#endif
  bx      lr

ASM_FUNC(ArmCallWFI)
  wfi
  bx      lr

# Note: returns 0 on a uniprocessor implementation
ASM_FUNC(ArmReadCbar)
  mrc     p15, 4, r0, c15, c0, 0  @ Read Configuration Base Address Register
  bx      lr

ASM_FUNC(ArmReadMpidr)
  mrc     p15, 0, r0, c0, c0, 5   @ read MPIDR
  bx      lr

ASM_FUNC(ArmReadTpidrurw)
  mrc     p15, 0, r0, c13, c0, 2  @ read TPIDRURW
  bx      lr

ASM_FUNC(ArmWriteTpidrurw)
  mcr     p15, 0, r0, c13, c0, 2  @ write TPIDRURW
  bx      lr

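# ID_PFR1 bits [19:16] describe the Generic Timer extension; a non-zero value
# means the architected timer is implemented.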
ASM_FUNC(ArmIsArchTimerImplemented)
  mrc     p15, 0, r0, c0, c1, 1   @ Read ID_PFR1
  and     r0, r0, #0x000F0000
  bx      lr

ASM_FUNC(ArmReadIdPfr1)
  mrc     p15, 0, r0, c0, c1, 1   @ Read ID_PFR1 Register
  bx      lr

ASM_FUNCTION_REMOVE_IF_UNREFERENCED