1 | #------------------------------------------------------------------------------\r | |
2 | #\r | |
3 | # Copyright (c) 2008 - 2010, Apple Inc. All rights reserved.<BR>\r | |
4 | # Copyright (c) 2011 - 2014, ARM Limited. All rights reserved.\r | |
5 | #\r | |
6 | # This program and the accompanying materials\r | |
7 | # are licensed and made available under the terms and conditions of the BSD License\r | |
8 | # which accompanies this distribution. The full text of the license may be found at\r | |
9 | # http://opensource.org/licenses/bsd-license.php\r | |
10 | #\r | |
11 | # THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r | |
12 | # WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r | |
13 | #\r | |
14 | #------------------------------------------------------------------------------\r | |
15 | \r | |
.text
.align 2

# Exported entry points (GCC_ASM_EXPORT makes the symbol global with the
# toolchain's ASM_PFX symbol decoration applied).
GCC_ASM_EXPORT (ArmInvalidateInstructionCache)
GCC_ASM_EXPORT (ArmInvalidateDataCacheEntryByMVA)
GCC_ASM_EXPORT (ArmInvalidateInstructionCacheEntryToPoUByMVA)
GCC_ASM_EXPORT (ArmCleanDataCacheEntryByMVA)
GCC_ASM_EXPORT (ArmCleanDataCacheEntryToPoUByMVA)
GCC_ASM_EXPORT (ArmCleanInvalidateDataCacheEntryByMVA)
GCC_ASM_EXPORT (ArmInvalidateDataCacheEntryBySetWay)
GCC_ASM_EXPORT (ArmCleanDataCacheEntryBySetWay)
GCC_ASM_EXPORT (ArmCleanInvalidateDataCacheEntryBySetWay)
GCC_ASM_EXPORT (ArmEnableMmu)
GCC_ASM_EXPORT (ArmDisableMmu)
GCC_ASM_EXPORT (ArmDisableCachesAndMmu)
GCC_ASM_EXPORT (ArmMmuEnabled)
GCC_ASM_EXPORT (ArmEnableDataCache)
GCC_ASM_EXPORT (ArmDisableDataCache)
GCC_ASM_EXPORT (ArmEnableInstructionCache)
GCC_ASM_EXPORT (ArmDisableInstructionCache)
GCC_ASM_EXPORT (ArmEnableSWPInstruction)
GCC_ASM_EXPORT (ArmEnableBranchPrediction)
GCC_ASM_EXPORT (ArmDisableBranchPrediction)
GCC_ASM_EXPORT (ArmSetLowVectors)
GCC_ASM_EXPORT (ArmSetHighVectors)
GCC_ASM_EXPORT (ArmV7AllDataCachesOperation)
GCC_ASM_EXPORT (ArmDataMemoryBarrier)
GCC_ASM_EXPORT (ArmDataSynchronizationBarrier)
GCC_ASM_EXPORT (ArmInstructionSynchronizationBarrier)
GCC_ASM_EXPORT (ArmReadVBar)
GCC_ASM_EXPORT (ArmWriteVBar)
GCC_ASM_EXPORT (ArmEnableVFP)
GCC_ASM_EXPORT (ArmCallWFI)
GCC_ASM_EXPORT (ArmReadCbar)
GCC_ASM_EXPORT (ArmReadMpidr)
GCC_ASM_EXPORT (ArmReadTpidrurw)
GCC_ASM_EXPORT (ArmWriteTpidrurw)
GCC_ASM_EXPORT (ArmIsArchTimerImplemented)
GCC_ASM_EXPORT (ArmReadIdPfr1)
GCC_ASM_EXPORT (ArmReadIdMmfr0)

# SCTLR (System Control Register, cp15 c1,c0,0) bit masks.
# DC_ON/IC_ON duplicate CTRL_C_BIT/CTRL_I_BIT; kept for the ldr-based users below.
.set DC_ON, (0x1<<2)            @ SCTLR.C  - data/unified cache enable
.set IC_ON, (0x1<<12)           @ SCTLR.I  - instruction cache enable
.set CTRL_M_BIT, (1 << 0)       @ SCTLR.M  - MMU enable
.set CTRL_C_BIT, (1 << 2)       @ SCTLR.C  - data/unified cache enable
.set CTRL_B_BIT, (1 << 7)       @ SCTLR.B  - legacy big-endian (unused here)
.set CTRL_I_BIT, (1 << 12)      @ SCTLR.I  - instruction cache enable
64 | \r | |
# VOID ArmInvalidateDataCacheEntryByMVA (UINTN Address)
# In: r0 = virtual address inside the line to invalidate.
ASM_PFX(ArmInvalidateDataCacheEntryByMVA):
  mcr     p15, 0, r0, c7, c6, 1   @ DCIMVAC: invalidate single data cache line by MVA
  bx      lr
68 | \r | |
# VOID ArmCleanDataCacheEntryByMVA (UINTN Address)
# In: r0 = virtual address inside the line to clean (write back).
ASM_PFX(ArmCleanDataCacheEntryByMVA):
  mcr     p15, 0, r0, c7, c10, 1  @ DCCMVAC: clean single data cache line by MVA
  bx      lr
72 | \r | |
73 | \r | |
# VOID ArmCleanDataCacheEntryToPoUByMVA (UINTN Address)
# In: r0 = virtual address; cleans only as far as the Point of Unification.
ASM_PFX(ArmCleanDataCacheEntryToPoUByMVA):
  mcr     p15, 0, r0, c7, c11, 1  @ DCCMVAU: clean single data cache line by MVA to PoU
  bx      lr
77 | \r | |
# VOID ArmInvalidateInstructionCacheEntryToPoUByMVA (UINTN Address)
# In: r0 = virtual address; invalidates the I-cache line and the
# branch-predictor entry for that address.
ASM_PFX(ArmInvalidateInstructionCacheEntryToPoUByMVA):
  mcr     p15, 0, r0, c7, c5, 1   @ ICIMVAU: invalidate instruction cache line by MVA to PoU
  mcr     p15, 0, r0, c7, c5, 7   @ BPIMVA: invalidate branch predictor entry by MVA
  bx      lr
82 | \r | |
# VOID ArmCleanInvalidateDataCacheEntryByMVA (UINTN Address)
# In: r0 = virtual address inside the line to clean and invalidate.
ASM_PFX(ArmCleanInvalidateDataCacheEntryByMVA):
  mcr     p15, 0, r0, c7, c14, 1  @ DCCIMVAC: clean and invalidate single data cache line by MVA
  bx      lr
86 | \r | |
87 | \r | |
# VOID ArmInvalidateDataCacheEntryBySetWay (UINTN SetWayFormat)
# In: r0 = set/way/level operand as built by ArmV7AllDataCachesOperation.
ASM_PFX(ArmInvalidateDataCacheEntryBySetWay):
  mcr     p15, 0, r0, c7, c6, 2   @ DCISW: invalidate this line by set/way
  bx      lr
91 | \r | |
92 | \r | |
# VOID ArmCleanInvalidateDataCacheEntryBySetWay (UINTN SetWayFormat)
# In: r0 = set/way/level operand as built by ArmV7AllDataCachesOperation.
ASM_PFX(ArmCleanInvalidateDataCacheEntryBySetWay):
  mcr     p15, 0, r0, c7, c14, 2  @ DCCISW: clean and invalidate this line by set/way
  bx      lr
96 | \r | |
97 | \r | |
# VOID ArmCleanDataCacheEntryBySetWay (UINTN SetWayFormat)
# In: r0 = set/way/level operand as built by ArmV7AllDataCachesOperation.
ASM_PFX(ArmCleanDataCacheEntryBySetWay):
  mcr     p15, 0, r0, c7, c10, 2  @ DCCSW: clean this line by set/way
  bx      lr
101 | \r | |
# VOID ArmInvalidateInstructionCache (VOID)
# Invalidate the entire instruction cache, then synchronize so the
# invalidation is visible to subsequent instruction fetches.
ASM_PFX(ArmInvalidateInstructionCache):
  mcr     p15,0,R0,c7,c5,0        @ ICIALLU: invalidate entire instruction cache (R0 value ignored)
  dsb
  isb
  bx      LR
107 | \r | |
# VOID ArmEnableMmu (VOID)
# Set SCTLR.M (read-modify-write) and synchronize so translation takes
# effect before returning.
ASM_PFX(ArmEnableMmu):
  mrc     p15,0,R0,c1,c0,0        @ Read SCTLR
  orr     R0,R0,#1                @ Set M bit (MMU enable)
  mcr     p15,0,R0,c1,c0,0        @ Write SCTLR
  dsb
  isb
  bx      LR
115 | \r | |
116 | \r | |
# VOID ArmDisableMmu (VOID)
# Clear SCTLR.M, then discard stale translations and branch-predictor
# state that were built while the MMU was on.
ASM_PFX(ArmDisableMmu):
  mrc     p15,0,R0,c1,c0,0        @ Read SCTLR
  bic     R0,R0,#1                @ Clear M bit
  mcr     p15,0,R0,c1,c0,0        @ Write SCTLR: MMU disabled

  mcr     p15,0,R0,c8,c7,0        @ TLBIALL: invalidate entire unified TLB (R0 value ignored)
  mcr     p15,0,R0,c7,c5,6        @ BPIALL: invalidate entire branch predictor array
  dsb
  isb
  bx      LR
127 | \r | |
# VOID ArmDisableCachesAndMmu (VOID)
# Clear SCTLR.M, SCTLR.C and SCTLR.I in a single read-modify-write.
# NOTE(review): does not clean/invalidate the caches or TLB itself;
# callers are expected to do any required maintenance separately.
ASM_PFX(ArmDisableCachesAndMmu):
  mrc     p15, 0, r0, c1, c0, 0   @ Read SCTLR
  bic     r0, r0, #CTRL_M_BIT     @ Disable MMU
  bic     r0, r0, #CTRL_C_BIT     @ Disable D Cache
  bic     r0, r0, #CTRL_I_BIT     @ Disable I Cache
  mcr     p15, 0, r0, c1, c0, 0   @ Write SCTLR
  dsb
  isb
  bx      LR
137 | \r | |
# BOOLEAN ArmMmuEnabled (VOID)
# Out: r0 = SCTLR.M (1 if the MMU is enabled, 0 otherwise).
ASM_PFX(ArmMmuEnabled):
  mrc     p15,0,R0,c1,c0,0        @ Read SCTLR
  and     R0,R0,#1                @ Isolate M bit
  bx      LR
142 | \r | |
# VOID ArmEnableDataCache (VOID)
# Set SCTLR.C to enable the data/unified caches.
ASM_PFX(ArmEnableDataCache):
  ldr     R1,=DC_ON               @ R1 = SCTLR.C mask
  mrc     p15,0,R0,c1,c0,0        @ Read SCTLR
  orr     R0,R0,R1                @ Set C bit
  mcr     p15,0,r0,c1,c0,0        @ Write SCTLR
  dsb
  isb
  bx      LR
151 | \r | |
# VOID ArmDisableDataCache (VOID)
# Clear SCTLR.C. Does not clean the cache; dirty lines remain until the
# caller performs explicit maintenance.
ASM_PFX(ArmDisableDataCache):
  ldr     R1,=DC_ON               @ R1 = SCTLR.C mask
  mrc     p15,0,R0,c1,c0,0        @ Read SCTLR
  bic     R0,R0,R1                @ Clear C bit
  mcr     p15,0,r0,c1,c0,0        @ Write SCTLR
  dsb
  isb
  bx      LR
160 | \r | |
# VOID ArmEnableInstructionCache (VOID)
# Set SCTLR.I to enable the instruction cache.
ASM_PFX(ArmEnableInstructionCache):
  ldr     R1,=IC_ON               @ R1 = SCTLR.I mask
  mrc     p15,0,R0,c1,c0,0        @ Read SCTLR
  orr     R0,R0,R1                @ Set I bit
  mcr     p15,0,r0,c1,c0,0        @ Write SCTLR
  dsb
  isb
  bx      LR
169 | \r | |
# VOID ArmDisableInstructionCache (VOID)
# Clear SCTLR.I to disable the instruction cache.
ASM_PFX(ArmDisableInstructionCache):
  ldr     R1,=IC_ON               @ R1 = SCTLR.I mask
  mrc     p15,0,R0,c1,c0,0        @ Read SCTLR
  bic     R0,R0,R1                @ Clear I bit
  mcr     p15,0,r0,c1,c0,0        @ Write SCTLR
  dsb
  isb
  bx      LR
178 | \r | |
# VOID ArmEnableSWPInstruction (VOID)
# Set SCTLR.SW (bit 10) so the deprecated SWP/SWPB instructions execute
# instead of undefining.
ASM_PFX(ArmEnableSWPInstruction):
  mrc     p15, 0, r0, c1, c0, 0   @ Read SCTLR
  orr     r0, r0, #0x00000400     @ Set SW bit
  mcr     p15, 0, r0, c1, c0, 0   @ Write SCTLR
  isb
  bx      LR
185 | \r | |
# VOID ArmEnableBranchPrediction (VOID)
# Set SCTLR.Z (bit 11) to enable program-flow prediction.
ASM_PFX(ArmEnableBranchPrediction):
  mrc     p15, 0, r0, c1, c0, 0   @ Read SCTLR
  orr     r0, r0, #0x00000800     @ Set Z bit
  mcr     p15, 0, r0, c1, c0, 0   @ Write SCTLR
  dsb
  isb
  bx      LR
193 | \r | |
# VOID ArmDisableBranchPrediction (VOID)
# Clear SCTLR.Z (bit 11) to disable program-flow prediction.
ASM_PFX(ArmDisableBranchPrediction):
  mrc     p15, 0, r0, c1, c0, 0   @ Read SCTLR
  bic     r0, r0, #0x00000800     @ Clear Z bit
  mcr     p15, 0, r0, c1, c0, 0   @ Write SCTLR
  dsb
  isb
  bx      LR
201 | \r | |
# VOID ArmSetLowVectors (VOID)
# Clear SCTLR.V (bit 13): exception vectors at 0x00000000 (or VBAR on
# cores with Security Extensions).
ASM_PFX(ArmSetLowVectors):
  mrc     p15, 0, r0, c1, c0, 0   @ Read SCTLR into R0
  bic     r0, r0, #0x00002000     @ Clear V bit
  mcr     p15, 0, r0, c1, c0, 0   @ Write R0 back to SCTLR
  isb
  bx      LR
208 | \r | |
# VOID ArmSetHighVectors (VOID)
# Set SCTLR.V (bit 13): exception vectors at 0xFFFF0000 (Hivecs).
ASM_PFX(ArmSetHighVectors):
  mrc     p15, 0, r0, c1, c0, 0   @ Read SCTLR into R0
  orr     r0, r0, #0x00002000     @ Set V bit
  mcr     p15, 0, r0, c1, c0, 0   @ Write R0 back to SCTLR
  isb
  bx      LR
215 | \r | |
# VOID ArmV7AllDataCachesOperation (ARM_V7_CACHE_OPERATION DataCacheOperation)
# In: r0 = function pointer taking a set/way operand (one of the
#          *BySetWay primitives above).
# Walks every data/unified cache level reported by CLIDR, up to the
# Level of Coherency, calling the function once per (set, way, level)
# operand. This is the standard ARMv7 set/way traversal: line-size and
# way/set counts come from CCSIDR after selecting the level in CSSELR.
# Register roles: R1=callback, R2=scratch/line-len shift, R3=LoC*2,
# R4=max way, R5=way shift, R6=CLIDR, R7=set index, R9=way index,
# R10=current level*2, R12=scratch/CCSIDR.
ASM_PFX(ArmV7AllDataCachesOperation):
  stmfd SP!,{r4-r12, LR}
  mov R1, R0                @ R1 = callback (R0 is reused for operands)
  mrc p15, 1, R6, c0, c0, 1 @ Read CLIDR
  ands R3, R6, #0x7000000   @ Mask out all but Level of Coherency (LoC), bits [26:24]
  mov R3, R3, LSR #23       @ R3 = LoC * 2 (level value in CSSELR alignment)
  beq L_Finished            @ LoC == 0: no cache maintenance needed
  mov R10, #0               @ R10 = current cache level * 2

Loop1:
  add R2, R10, R10, LSR #1  @ R2 = 3 x cache level (CLIDR field offset)
  mov R12, R6, LSR R2       @ Bottom 3 bits are the Cache type for this level
  and R12, R12, #7          @ Isolate those 3 bits
  cmp R12, #2
  blt L_Skip                @ No cache or only instruction cache at this level
  mcr p15, 2, R10, c0, c0, 0 @ Write CSSELR (level, data/unified cache)
  isb                       @ ISB to sync the change before reading CCSIDR
  mrc p15, 1, R12, c0, c0, 0 @ Read CCSIDR for the selected level
  and R2, R12, #0x7         @ Extract the line length field
  add R2, R2, #4            @ +4: field encodes log2(words per line) - 2, so R2 = log2(bytes per line)
  @ ldr R4, =0x3FF          (mov+sub form avoids a literal pool entry)
  mov R4, #0x400
  sub R4, R4, #1
  ands R4, R4, R12, LSR #3  @ R4 = max way number (right aligned, 10-bit field)
  clz R5, R4                @ R5 = bit position for the way field in the operand
  @ ldr R7, =0x00007FFF     (mov+sub form avoids a literal pool entry)
  mov R7, #0x00008000
  sub R7, R7, #1
  ands R7, R7, R12, LSR #13 @ R7 = max set index (right aligned, 15-bit field)

Loop2:
  mov R9, R4                @ R9 = working copy of the max way number

Loop3:
  orr R0, R10, R9, LSL R5   @ R0 = way number | cache level
  orr R0, R0, R7, LSL R2    @ R0 |= set index: full set/way operand

  blx R1                    @ Invoke the cache operation on this line

  subs R9, R9, #1           @ Decrement the way number
  bge Loop3
  subs R7, R7, #1           @ Decrement the set index
  bge Loop2
L_Skip:
  add R10, R10, #2          @ Advance to the next cache level
  cmp R3, R10
  bgt Loop1

L_Finished:
  dsb
  ldmfd SP!, {r4-r12, lr}
  bx LR
268 | \r | |
# VOID ArmDataMemoryBarrier (VOID)
# Full-system data memory barrier (orders memory accesses only).
ASM_PFX(ArmDataMemoryBarrier):
  dmb
  bx      LR
272 | \r | |
# VOID ArmDataSynchronizationBarrier (VOID)
# Full-system data synchronization barrier (completes outstanding accesses).
ASM_PFX(ArmDataSynchronizationBarrier):
  dsb
  bx      LR
276 | \r | |
# VOID ArmInstructionSynchronizationBarrier (VOID)
# Flush the pipeline so prior context changes affect subsequent fetches.
ASM_PFX(ArmInstructionSynchronizationBarrier):
  isb
  bx      LR
280 | \r | |
# UINTN ArmReadVBar (VOID)
# Out: r0 = vector table base address read from VBAR.
# (Original comment said "Set"; this function only reads.)
ASM_PFX(ArmReadVBar):
  # Read the address of the Vector Table from the VBAR register
  mrc     p15, 0, r0, c12, c0, 0
  bx      lr
285 | \r | |
# VOID ArmWriteVBar (UINTN VectorBase)
# In: r0 = new vector table base address. Also clears SCTLR.V so the
# core uses VBAR rather than the Hivecs address.
ASM_PFX(ArmWriteVBar):
  # Set the Address of the Vector Table in the VBAR register
  mcr     p15, 0, r0, c12, c0, 0
  # Ensure the SCTLR.V bit is clear so VBAR is actually used
  mrc     p15, 0, r0, c1, c0, 0   @ Read SCTLR into R0
  bic     r0, r0, #0x00002000     @ Clear V bit
  mcr     p15, 0, r0, c1, c0, 0   @ Write R0 back to SCTLR
  isb
  bx      lr
295 | \r | |
# VOID ArmEnableVFP (VOID)
# Grant full CP10/CP11 access in CPACR, then set FPEXC.EN to enable the
# VFP / Advanced SIMD unit.
ASM_PFX(ArmEnableVFP):
  # Read CPACR (Coprocessor Access Control Register)
  mrc     p15, 0, r0, c1, c0, 2
  # Enable VFP access (Full Access to CP10, CP11) (V* instructions)
  orr     r0, r0, #0x00f00000
  # Write back CPACR (Coprocessor Access Control Register)
  mcr     p15, 0, r0, c1, c0, 2
  isb                             @ CPACR change must be visible before touching FPEXC
  # Set EN bit in FPEXC. The Advanced SIMD and VFP extensions are enabled and operate normally.
  mov     r0, #0x40000000
#ifndef __clang__
  mcr     p10,#0x7,r0,c8,c0,#0    @ raw encoding of vmsr fpexc, r0 (GAS without VFP directives)
#else
  vmsr    fpexc, r0
#endif
  bx      lr
312 | \r | |
# VOID ArmCallWFI (VOID)
# Enter standby with Wait For Interrupt; returns when the core wakes.
ASM_PFX(ArmCallWFI):
  wfi
  bx      lr
316 | \r | |
# UINTN ArmReadCbar (VOID)
# Out: r0 = CBAR (Configuration Base Address Register).
# Note: returns 0 on a uniprocessor implementation.
ASM_PFX(ArmReadCbar):
  mrc     p15, 4, r0, c15, c0, 0  @ Read Configuration Base Address Register
  bx      lr
321 | \r | |
# UINTN ArmReadMpidr (VOID)
# Out: r0 = MPIDR (Multiprocessor Affinity Register).
ASM_PFX(ArmReadMpidr):
  mrc     p15, 0, r0, c0, c0, 5   @ Read MPIDR
  bx      lr
325 | \r | |
# UINTN ArmReadTpidrurw (VOID)
# Out: r0 = TPIDRURW (user read/write software thread ID register).
ASM_PFX(ArmReadTpidrurw):
  mrc     p15, 0, r0, c13, c0, 2  @ Read TPIDRURW
  bx      lr
329 | \r | |
# VOID ArmWriteTpidrurw (UINTN Value)
# In: r0 = value stored to TPIDRURW (user read/write thread ID register).
ASM_PFX(ArmWriteTpidrurw):
  mcr     p15, 0, r0, c13, c0, 2  @ Write TPIDRURW
  bx      lr
333 | \r | |
# UINTN ArmIsArchTimerImplemented (VOID)
# Out: r0 = ID_PFR1 GenTimer field (bits [19:16]); non-zero means the
# Generic Timer extension is implemented.
ASM_PFX(ArmIsArchTimerImplemented):
  mrc     p15, 0, r0, c0, c1, 1   @ Read ID_PFR1
  and     r0, r0, #0x000F0000     @ Isolate the Generic Timer field
  bx      lr
338 | \r | |
# UINTN ArmReadIdPfr1 (VOID)
# Out: r0 = ID_PFR1 (Processor Feature Register 1).
ASM_PFX(ArmReadIdPfr1):
  mrc     p15, 0, r0, c0, c1, 1   @ Read ID_PFR1 Register
  bx      lr
342 | \r | |
# UINTN ArmReadIdMmfr0 (VOID)
# Out: r0 = ID_MMFR0 (Memory Model Feature Register 0).
ASM_PFX(ArmReadIdMmfr0):
  mrc     p15, 0, r0, c0, c1, 4   @ Read ID_MMFR0 Register
  bx      lr

ASM_FUNCTION_REMOVE_IF_UNREFERENCED