#------------------------------------------------------------------------------
#
# Copyright (c) 2008 - 2010, Apple Inc. All rights reserved.<BR>
# Copyright (c) 2011, ARM Limited. All rights reserved.
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
#------------------------------------------------------------------------------

.text
.align 2

GCC_ASM_EXPORT (ArmInvalidateInstructionCache)
GCC_ASM_EXPORT (ArmInvalidateDataCacheEntryByMVA)
GCC_ASM_EXPORT (ArmCleanDataCacheEntryByMVA)
GCC_ASM_EXPORT (ArmCleanInvalidateDataCacheEntryByMVA)
GCC_ASM_EXPORT (ArmInvalidateDataCacheEntryBySetWay)
GCC_ASM_EXPORT (ArmCleanDataCacheEntryBySetWay)
GCC_ASM_EXPORT (ArmCleanInvalidateDataCacheEntryBySetWay)
GCC_ASM_EXPORT (ArmDrainWriteBuffer)
GCC_ASM_EXPORT (ArmEnableMmu)
GCC_ASM_EXPORT (ArmDisableMmu)
GCC_ASM_EXPORT (ArmDisableCachesAndMmu)
GCC_ASM_EXPORT (ArmMmuEnabled)
GCC_ASM_EXPORT (ArmEnableDataCache)
GCC_ASM_EXPORT (ArmDisableDataCache)
GCC_ASM_EXPORT (ArmEnableInstructionCache)
GCC_ASM_EXPORT (ArmDisableInstructionCache)
GCC_ASM_EXPORT (ArmEnableSWPInstruction)
GCC_ASM_EXPORT (ArmEnableBranchPrediction)
GCC_ASM_EXPORT (ArmDisableBranchPrediction)
GCC_ASM_EXPORT (ArmSetLowVectors)
GCC_ASM_EXPORT (ArmSetHighVectors)
GCC_ASM_EXPORT (ArmV7AllDataCachesOperation)
GCC_ASM_EXPORT (ArmDataMemoryBarrier)
GCC_ASM_EXPORT (ArmDataSyncronizationBarrier)
GCC_ASM_EXPORT (ArmInstructionSynchronizationBarrier)
GCC_ASM_EXPORT (ArmWriteVBar)
GCC_ASM_EXPORT (ArmEnableVFP)
GCC_ASM_EXPORT (ArmCallWFI)
GCC_ASM_EXPORT (ArmReadCbar)
GCC_ASM_EXPORT (ArmInvalidateInstructionAndDataTlb)
GCC_ASM_EXPORT (ArmReadMpidr)
GCC_ASM_EXPORT (ArmReadTpidrurw)
GCC_ASM_EXPORT (ArmWriteTpidrurw)
GCC_ASM_EXPORT (ArmIsArchTimerImplemented)
GCC_ASM_EXPORT (ArmReadIdPfr1)

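# SCTLR (System Control Register, CP15 c1) bit masks used by the routines below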
.set DC_ON,       (0x1 << 2)
.set IC_ON,       (0x1 << 12)
.set CTRL_M_BIT,  (1 << 0)
.set CTRL_C_BIT,  (1 << 2)
.set CTRL_B_BIT,  (1 << 7)
.set CTRL_I_BIT,  (1 << 12)

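#
# Cache maintenance by MVA: each routine below operates on the single data
# cache line containing the virtual address passed in r0.
#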
ASM_PFX(ArmInvalidateDataCacheEntryByMVA):
  mcr     p15, 0, r0, c7, c6, 1   @ invalidate single data cache line
  dsb
  isb
  bx      lr

ASM_PFX(ArmCleanDataCacheEntryByMVA):
  mcr     p15, 0, r0, c7, c10, 1  @ clean single data cache line
  dsb
  isb
  bx      lr

ASM_PFX(ArmCleanInvalidateDataCacheEntryByMVA):
  mcr     p15, 0, r0, c7, c14, 1  @ clean and invalidate single data cache line
  dsb
  isb
  bx      lr

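#
# Cache maintenance by set/way: each routine below expects r0 to hold the
# set/way/level value written to the corresponding CP15 c7 operation.
#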
ASM_PFX(ArmInvalidateDataCacheEntryBySetWay):
  mcr     p15, 0, r0, c7, c6, 2   @ Invalidate this line
  dsb
  isb
  bx      lr

ASM_PFX(ArmCleanInvalidateDataCacheEntryBySetWay):
  mcr     p15, 0, r0, c7, c14, 2  @ Clean and Invalidate this line
  dsb
  isb
  bx      lr

ASM_PFX(ArmCleanDataCacheEntryBySetWay):
  mcr     p15, 0, r0, c7, c10, 2  @ Clean this line
  dsb
  isb
  bx      lr

ASM_PFX(ArmInvalidateInstructionCache):
  mcr     p15, 0, R0, c7, c5, 0   @ Invalidate entire instruction cache
  dsb
  isb
  bx      LR

ASM_PFX(ArmEnableMmu):
  mrc     p15, 0, R0, c1, c0, 0
  orr     R0, R0, #1
  mcr     p15, 0, R0, c1, c0, 0
  dsb
  isb
  bx      LR

ASM_PFX(ArmDisableMmu):
  mrc     p15, 0, R0, c1, c0, 0
  bic     R0, R0, #1
  mcr     p15, 0, R0, c1, c0, 0   @ Disable MMU

  mcr     p15, 0, R0, c8, c7, 0   @ Invalidate TLB
  mcr     p15, 0, R0, c7, c5, 6   @ Invalidate Branch predictor array
  dsb
  isb
  bx      LR

ASM_PFX(ArmDisableCachesAndMmu):
  mrc     p15, 0, r0, c1, c0, 0   @ Get control register
  bic     r0, r0, #CTRL_M_BIT     @ Disable MMU
  bic     r0, r0, #CTRL_C_BIT     @ Disable D Cache
  bic     r0, r0, #CTRL_I_BIT     @ Disable I Cache
  mcr     p15, 0, r0, c1, c0, 0   @ Write control register
  dsb
  isb
  bx      LR

ASM_PFX(ArmMmuEnabled):
  mrc     p15, 0, R0, c1, c0, 0
  and     R0, R0, #1
  bx      LR

ASM_PFX(ArmEnableDataCache):
  ldr     R1, =DC_ON
  mrc     p15, 0, R0, c1, c0, 0   @ Read control register configuration data
  orr     R0, R0, R1              @ Set C bit
  mcr     p15, 0, R0, c1, c0, 0   @ Write control register configuration data
  dsb
  isb
  bx      LR

ASM_PFX(ArmDisableDataCache):
  ldr     R1, =DC_ON
  mrc     p15, 0, R0, c1, c0, 0   @ Read control register configuration data
  bic     R0, R0, R1              @ Clear C bit
  mcr     p15, 0, R0, c1, c0, 0   @ Write control register configuration data
  dsb
  isb
  bx      LR

ASM_PFX(ArmEnableInstructionCache):
  ldr     R1, =IC_ON
  mrc     p15, 0, R0, c1, c0, 0   @ Read control register configuration data
  orr     R0, R0, R1              @ Set I bit
  mcr     p15, 0, R0, c1, c0, 0   @ Write control register configuration data
  dsb
  isb
  bx      LR

ASM_PFX(ArmDisableInstructionCache):
  ldr     R1, =IC_ON
  mrc     p15, 0, R0, c1, c0, 0   @ Read control register configuration data
  bic     R0, R0, R1              @ Clear I bit
  mcr     p15, 0, R0, c1, c0, 0   @ Write control register configuration data
  dsb
  isb
  bx      LR

ASM_PFX(ArmEnableSWPInstruction):
  mrc     p15, 0, r0, c1, c0, 0
  orr     r0, r0, #0x00000400
  mcr     p15, 0, r0, c1, c0, 0
  isb
  bx      LR

ASM_PFX(ArmEnableBranchPrediction):
  mrc     p15, 0, r0, c1, c0, 0
  orr     r0, r0, #0x00000800
  mcr     p15, 0, r0, c1, c0, 0
  dsb
  isb
  bx      LR

ASM_PFX(ArmDisableBranchPrediction):
  mrc     p15, 0, r0, c1, c0, 0
  bic     r0, r0, #0x00000800
  mcr     p15, 0, r0, c1, c0, 0
  dsb
  isb
  bx      LR

ASM_PFX(ArmSetLowVectors):
  mrc     p15, 0, r0, c1, c0, 0   @ Read SCTLR into R0 (Read control register configuration data)
  bic     r0, r0, #0x00002000     @ Clear V bit
  mcr     p15, 0, r0, c1, c0, 0   @ Write R0 into SCTLR (Write control register configuration data)
  isb
  bx      LR

ASM_PFX(ArmSetHighVectors):
  mrc     p15, 0, r0, c1, c0, 0   @ Read SCTLR into R0 (Read control register configuration data)
  orr     r0, r0, #0x00002000     @ Set V bit
  mcr     p15, 0, r0, c1, c0, 0   @ Write R0 into SCTLR (Write control register configuration data)
  isb
  bx      LR

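#
# ArmV7AllDataCachesOperation: R0 holds a pointer to a set/way cache
# maintenance routine (e.g. one of the *BySetWay functions above). Walk every
# data or unified cache level reported by CLIDR up to the Level of Coherency
# and invoke that routine for each set/way combination.
#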
ASM_PFX(ArmV7AllDataCachesOperation):
  stmfd   SP!, {r4-r12, LR}
  mov     R1, R0                  @ Save Function call in R1
  mrc     p15, 1, R6, c0, c0, 1   @ Read CLIDR
  ands    R3, R6, #0x7000000      @ Mask out all but Level of Coherency (LoC)
  mov     R3, R3, LSR #23         @ Cache level value (naturally aligned)
  beq     L_Finished
  mov     R10, #0

Loop1:
  add     R2, R10, R10, LSR #1    @ Work out 3xcachelevel
  mov     R12, R6, LSR R2         @ bottom 3 bits are the Cache type for this level
  and     R12, R12, #7            @ get those 3 bits alone
  cmp     R12, #2
  blt     L_Skip                  @ no cache or only instruction cache at this level
  mcr     p15, 2, R10, c0, c0, 0  @ write the Cache Size selection register (CSSELR) // OR in 1 for Instruction
  isb                             @ isb to sync the change to the CacheSizeID reg
  mrc     p15, 1, R12, c0, c0, 0  @ reads current Cache Size ID register (CCSIDR)
  and     R2, R12, #0x7           @ extract the line length field
  add     R2, R2, #4              @ add 4 for the line length offset (log2 16 bytes)
@ ldr     R4, =0x3FF
  mov     R4, #0x400
  sub     R4, R4, #1
  ands    R4, R4, R12, LSR #3     @ R4 is the max number on the way size (right aligned)
  clz     R5, R4                  @ R5 is the bit position of the way size increment
@ ldr     R7, =0x00007FFF
  mov     R7, #0x00008000
  sub     R7, R7, #1
  ands    R7, R7, R12, LSR #13    @ R7 is the max number of the index size (right aligned)

Loop2:
  mov     R9, R4                  @ R9 working copy of the max way size (right aligned)

Loop3:
  orr     R0, R10, R9, LSL R5     @ factor in the way number and cache number into R0
  orr     R0, R0, R7, LSL R2      @ factor in the index number

  blx     R1

  subs    R9, R9, #1              @ decrement the way number
  bge     Loop3
  subs    R7, R7, #1              @ decrement the index
  bge     Loop2
L_Skip:
  add     R10, R10, #2            @ increment the cache number
  cmp     R3, R10
  bgt     Loop1

L_Finished:
  dsb
  ldmfd   SP!, {r4-r12, lr}
  bx      LR

ASM_PFX(ArmDataMemoryBarrier):
  dmb
  bx      LR

ASM_PFX(ArmDataSyncronizationBarrier):
ASM_PFX(ArmDrainWriteBuffer):
  dsb
  bx      LR

ASM_PFX(ArmInstructionSynchronizationBarrier):
  isb
  bx      LR

ASM_PFX(ArmWriteVBar):
  # Set the Address of the Vector Table in the VBAR register
  mcr     p15, 0, r0, c12, c0, 0
  # Ensure the SCTLR.V bit is clear
  mrc     p15, 0, r0, c1, c0, 0   @ Read SCTLR into R0 (Read control register configuration data)
  bic     r0, r0, #0x00002000     @ Clear V bit
  mcr     p15, 0, r0, c1, c0, 0   @ Write R0 into SCTLR (Write control register configuration data)
  isb
  bx      lr

ASM_PFX(ArmEnableVFP):
  # Read CPACR (Coprocessor Access Control Register)
  mrc     p15, 0, r0, c1, c0, 2
  # Enable VFP access (Full Access to CP10, CP11) (V* instructions)
  orr     r0, r0, #0x00f00000
  # Write back CPACR (Coprocessor Access Control Register)
  mcr     p15, 0, r0, c1, c0, 2
  # Set EN bit in FPEXC. The Advanced SIMD and VFP extensions are enabled and operate normally.
  mov     r0, #0x40000000
  mcr     p10, #0x7, r0, c8, c0, #0
  bx      lr

ASM_PFX(ArmCallWFI):
  wfi
  bx      lr

# Note: Returns 0 in a Uniprocessor implementation
ASM_PFX(ArmReadCbar):
  mrc     p15, 4, r0, c15, c0, 0  @ Read Configuration Base Address Register
  bx      lr

ASM_PFX(ArmInvalidateInstructionAndDataTlb):
  mcr     p15, 0, r0, c8, c7, 0   @ Invalidate Inst TLB and Data TLB
  dsb
  bx      lr

ASM_PFX(ArmReadMpidr):
  mrc     p15, 0, r0, c0, c0, 5   @ read MPIDR
  bx      lr

ASM_PFX(ArmReadTpidrurw):
  mrc     p15, 0, r0, c13, c0, 2  @ read TPIDRURW
  bx      lr

ASM_PFX(ArmWriteTpidrurw):
  mcr     p15, 0, r0, c13, c0, 2  @ write TPIDRURW
  bx      lr

ASM_PFX(ArmIsArchTimerImplemented):
  mrc     p15, 0, r0, c0, c1, 1   @ Read ID_PFR1
  and     r0, r0, #0x000F0000     @ Mask the Generic Timer field (bits [19:16])
  bx      lr

ASM_PFX(ArmReadIdPfr1):
  mrc     p15, 0, r0, c0, c1, 1   @ Read ID_PFR1 Register
  bx      lr

ASM_FUNCTION_REMOVE_IF_UNREFERENCED