#------------------------------------------------------------------------------
#
# Copyright (c) 2008 - 2010, Apple Inc. All rights reserved.<BR>
# Copyright (c) 2011 - 2014, ARM Limited. All rights reserved.
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
#------------------------------------------------------------------------------

.text
.align 2

GCC_ASM_EXPORT (ArmInvalidateInstructionCache)
GCC_ASM_EXPORT (ArmInvalidateDataCacheEntryByMVA)
GCC_ASM_EXPORT (ArmCleanDataCacheEntryByMVA)
GCC_ASM_EXPORT (ArmCleanInvalidateDataCacheEntryByMVA)
GCC_ASM_EXPORT (ArmInvalidateDataCacheEntryBySetWay)
GCC_ASM_EXPORT (ArmCleanDataCacheEntryBySetWay)
GCC_ASM_EXPORT (ArmCleanInvalidateDataCacheEntryBySetWay)
GCC_ASM_EXPORT (ArmEnableMmu)
GCC_ASM_EXPORT (ArmDisableMmu)
GCC_ASM_EXPORT (ArmDisableCachesAndMmu)
GCC_ASM_EXPORT (ArmMmuEnabled)
GCC_ASM_EXPORT (ArmEnableDataCache)
GCC_ASM_EXPORT (ArmDisableDataCache)
GCC_ASM_EXPORT (ArmEnableInstructionCache)
GCC_ASM_EXPORT (ArmDisableInstructionCache)
GCC_ASM_EXPORT (ArmEnableSWPInstruction)
GCC_ASM_EXPORT (ArmEnableBranchPrediction)
GCC_ASM_EXPORT (ArmDisableBranchPrediction)
GCC_ASM_EXPORT (ArmSetLowVectors)
GCC_ASM_EXPORT (ArmSetHighVectors)
GCC_ASM_EXPORT (ArmV7AllDataCachesOperation)
GCC_ASM_EXPORT (ArmDataMemoryBarrier)
GCC_ASM_EXPORT (ArmDataSynchronizationBarrier)
GCC_ASM_EXPORT (ArmInstructionSynchronizationBarrier)
GCC_ASM_EXPORT (ArmReadVBar)
GCC_ASM_EXPORT (ArmWriteVBar)
GCC_ASM_EXPORT (ArmEnableVFP)
GCC_ASM_EXPORT (ArmCallWFI)
GCC_ASM_EXPORT (ArmReadCbar)
GCC_ASM_EXPORT (ArmReadMpidr)
GCC_ASM_EXPORT (ArmReadTpidrurw)
GCC_ASM_EXPORT (ArmWriteTpidrurw)
GCC_ASM_EXPORT (ArmIsArchTimerImplemented)
GCC_ASM_EXPORT (ArmReadIdPfr1)
GCC_ASM_EXPORT (ArmReadIdMmfr0)

.set DC_ON, (0x1<<2)
.set IC_ON, (0x1<<12)
.set CTRL_M_BIT, (1 << 0)
.set CTRL_C_BIT, (1 << 2)
.set CTRL_B_BIT, (1 << 7)
.set CTRL_I_BIT, (1 << 12)


ASM_PFX(ArmInvalidateDataCacheEntryByMVA):
  mcr     p15, 0, r0, c7, c6, 1   @invalidate single data cache line
  bx      lr

ASM_PFX(ArmCleanDataCacheEntryByMVA):
  mcr     p15, 0, r0, c7, c10, 1  @clean single data cache line
  bx      lr


ASM_PFX(ArmCleanInvalidateDataCacheEntryByMVA):
  mcr     p15, 0, r0, c7, c14, 1  @clean and invalidate single data cache line
  bx      lr

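@ Illustrative sketch only (not part of the exported ArmLib interface): the
@ by-MVA primitives above take a virtual address in r0 and act on the single
@ cache line containing it, so cleaning a whole buffer means stepping through
@ it one line at a time. The routine name and the fixed 32-byte stride below
@ are assumptions for the example; production code should derive the stride
@ from CTR.DminLine.
ASM_PFX(ExampleCleanDataCacheRange):      @ hypothetical: r0 = start, r1 = length
  add     r1, r0, r1                      @ r1 = end of the buffer
  bic     r0, r0, #31                     @ align start down to a line boundary
0:
  mcr     p15, 0, r0, c7, c10, 1          @ clean the line holding r0 (DCCMVAC)
  add     r0, r0, #32                     @ advance to the next line
  cmp     r0, r1
  blo     0b                              @ loop until the end address is reached
  dsb                                     @ make the cleans visible before returning
  bx      lr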

ASM_PFX(ArmInvalidateDataCacheEntryBySetWay):
  mcr     p15, 0, r0, c7, c6, 2   @ Invalidate this line
  bx      lr


ASM_PFX(ArmCleanInvalidateDataCacheEntryBySetWay):
  mcr     p15, 0, r0, c7, c14, 2  @ Clean and Invalidate this line
  bx      lr


ASM_PFX(ArmCleanDataCacheEntryBySetWay):
  mcr     p15, 0, r0, c7, c10, 2  @ Clean this line
  bx      lr

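@ Note on the set/way primitives above: r0 must already hold the operand format
@ defined by the ARMv7-A ARM for DC ISW/CSW/CISW, i.e. the cache level in bits
@ [3:1], the set index shifted left by log2(line length in bytes), and the way
@ index left-aligned in the top bits. ArmV7AllDataCachesOperation further down
@ builds exactly this encoding for every line of every data cache level.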
ASM_PFX(ArmInvalidateInstructionCache):
  mcr     p15,0,R0,c7,c5,0        @Invalidate entire instruction cache
  dsb
  isb
  bx      LR

ASM_PFX(ArmEnableMmu):
  mrc     p15,0,R0,c1,c0,0
  orr     R0,R0,#1
  mcr     p15,0,R0,c1,c0,0
  dsb
  isb
  bx      LR


ASM_PFX(ArmDisableMmu):
  mrc     p15,0,R0,c1,c0,0
  bic     R0,R0,#1
  mcr     p15,0,R0,c1,c0,0        @Disable MMU

  mcr     p15,0,R0,c8,c7,0        @Invalidate TLB
  mcr     p15,0,R0,c7,c5,6        @Invalidate Branch predictor array
  dsb
  isb
  bx      LR

ASM_PFX(ArmDisableCachesAndMmu):
  mrc     p15, 0, r0, c1, c0, 0   @ Get control register
  bic     r0, r0, #CTRL_M_BIT     @ Disable MMU
  bic     r0, r0, #CTRL_C_BIT     @ Disable D Cache
  bic     r0, r0, #CTRL_I_BIT     @ Disable I Cache
  mcr     p15, 0, r0, c1, c0, 0   @ Write control register
  dsb
  isb
  bx      LR

ASM_PFX(ArmMmuEnabled):
  mrc     p15,0,R0,c1,c0,0
  and     R0,R0,#1
  bx      LR

ASM_PFX(ArmEnableDataCache):
  ldr     R1,=DC_ON
  mrc     p15,0,R0,c1,c0,0        @Read control register configuration data
  orr     R0,R0,R1                @Set C bit
  mcr     p15,0,r0,c1,c0,0        @Write control register configuration data
  dsb
  isb
  bx      LR

ASM_PFX(ArmDisableDataCache):
  ldr     R1,=DC_ON
  mrc     p15,0,R0,c1,c0,0        @Read control register configuration data
  bic     R0,R0,R1                @Clear C bit
  mcr     p15,0,r0,c1,c0,0        @Write control register configuration data
  dsb
  isb
  bx      LR

ASM_PFX(ArmEnableInstructionCache):
  ldr     R1,=IC_ON
  mrc     p15,0,R0,c1,c0,0        @Read control register configuration data
  orr     R0,R0,R1                @Set I bit
  mcr     p15,0,r0,c1,c0,0        @Write control register configuration data
  dsb
  isb
  bx      LR

ASM_PFX(ArmDisableInstructionCache):
  ldr     R1,=IC_ON
  mrc     p15,0,R0,c1,c0,0        @Read control register configuration data
  bic     R0,R0,R1                @Clear I bit
  mcr     p15,0,r0,c1,c0,0        @Write control register configuration data
  dsb
  isb
  bx      LR

ASM_PFX(ArmEnableSWPInstruction):
  mrc     p15, 0, r0, c1, c0, 0
  orr     r0, r0, #0x00000400
  mcr     p15, 0, r0, c1, c0, 0
  isb
  bx      LR

ASM_PFX(ArmEnableBranchPrediction):
  mrc     p15, 0, r0, c1, c0, 0
  orr     r0, r0, #0x00000800
  mcr     p15, 0, r0, c1, c0, 0
  dsb
  isb
  bx      LR

ASM_PFX(ArmDisableBranchPrediction):
  mrc     p15, 0, r0, c1, c0, 0
  bic     r0, r0, #0x00000800
  mcr     p15, 0, r0, c1, c0, 0
  dsb
  isb
  bx      LR

ASM_PFX(ArmSetLowVectors):
  mrc     p15, 0, r0, c1, c0, 0   @ Read SCTLR into R0 (Read control register configuration data)
  bic     r0, r0, #0x00002000     @ clear V bit
  mcr     p15, 0, r0, c1, c0, 0   @ Write R0 into SCTLR (Write control register configuration data)
  isb
  bx      LR

ASM_PFX(ArmSetHighVectors):
  mrc     p15, 0, r0, c1, c0, 0   @ Read SCTLR into R0 (Read control register configuration data)
  orr     r0, r0, #0x00002000     @ Set V bit
  mcr     p15, 0, r0, c1, c0, 0   @ Write R0 into SCTLR (Write control register configuration data)
  isb
  bx      LR

ASM_PFX(ArmV7AllDataCachesOperation):
  stmfd   SP!,{r4-r12, LR}
  mov     R1, R0                  @ Save the function pointer in R1
  mrc     p15, 1, R6, c0, c0, 1   @ Read CLIDR
  ands    R3, R6, #0x7000000      @ Mask out all but Level of Coherency (LoC)
  mov     R3, R3, LSR #23         @ Cache level value (naturally aligned)
  beq     L_Finished
  mov     R10, #0

Loop1:
  add     R2, R10, R10, LSR #1    @ Work out 3 x cache level
  mov     R12, R6, LSR R2         @ bottom 3 bits are the Cache type for this level
  and     R12, R12, #7            @ get those 3 bits alone
  cmp     R12, #2
  blt     L_Skip                  @ no cache or only instruction cache at this level
  mcr     p15, 2, R10, c0, c0, 0  @ write the Cache Size selection register (CSSELR); OR in 1 to select the instruction cache instead
  isb                             @ isb to sync the change to the CacheSizeID reg
  mrc     p15, 1, R12, c0, c0, 0  @ read the current Cache Size ID register (CCSIDR)
  and     R2, R12, #0x7           @ extract the line length field
  add     R2, R2, #4              @ add 4 for the line length offset (log2 16 bytes)
@ ldr     R4, =0x3FF
  mov     R4, #0x400
  sub     R4, R4, #1
  ands    R4, R4, R12, LSR #3     @ R4 is the max number of the way size (right aligned)
  clz     R5, R4                  @ R5 is the bit position of the way size increment
@ ldr     R7, =0x00007FFF
  mov     R7, #0x00008000
  sub     R7, R7, #1
  ands    R7, R7, R12, LSR #13    @ R7 is the max number of the index size (right aligned)

Loop2:
  mov     R9, R4                  @ R9 working copy of the max way size (right aligned)

Loop3:
  orr     R0, R10, R9, LSL R5     @ factor in the way number and cache number into R0
  orr     R0, R0, R7, LSL R2      @ factor in the index number

  blx     R1                      @ call the set/way operation passed by the caller

  subs    R9, R9, #1              @ decrement the way number
  bge     Loop3
  subs    R7, R7, #1              @ decrement the index
  bge     Loop2
L_Skip:
  add     R10, R10, #2            @ increment the cache number
  cmp     R3, R10
  bgt     Loop1

L_Finished:
  dsb
  ldmfd   SP!, {r4-r12, lr}
  bx      LR

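@ Illustrative sketch only (hedged, not part of the original source): the walker
@ above receives the address of one of the set/way primitives in r0 and calls it
@ via blx for every set/way/level combination up to the Level of Coherency. A
@ hypothetical "clean and invalidate everything" wrapper could therefore look
@ like this; the wrappers EDK2 actually uses live elsewhere in ArmLib.
ASM_PFX(ExampleCleanInvalidateAllDataCaches):   @ hypothetical wrapper name
  ldr     r0, =ASM_PFX(ArmCleanInvalidateDataCacheEntryBySetWay)
  b       ASM_PFX(ArmV7AllDataCachesOperation)  @ tail-call; it returns to our caller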
ASM_PFX(ArmDataMemoryBarrier):
  dmb
  bx      LR

ASM_PFX(ArmDataSynchronizationBarrier):
  dsb
  bx      LR

ASM_PFX(ArmInstructionSynchronizationBarrier):
  isb
  bx      LR

ASM_PFX(ArmReadVBar):
  # Read the Address of the Vector Table from the VBAR register
  mrc     p15, 0, r0, c12, c0, 0
  bx      lr

ASM_PFX(ArmWriteVBar):
  # Set the Address of the Vector Table in the VBAR register
  mcr     p15, 0, r0, c12, c0, 0
  # Ensure the SCTLR.V bit is clear
  mrc     p15, 0, r0, c1, c0, 0   @ Read SCTLR into R0 (Read control register configuration data)
  bic     r0, r0, #0x00002000     @ clear V bit
  mcr     p15, 0, r0, c1, c0, 0   @ Write R0 into SCTLR (Write control register configuration data)
  isb
  bx      lr

ASM_PFX(ArmEnableVFP):
  # Read CPACR (Coprocessor Access Control Register)
  mrc     p15, 0, r0, c1, c0, 2
  # Enable VFP access (Full Access to CP10, CP11) (V* instructions)
  orr     r0, r0, #0x00f00000
  # Write back CPACR (Coprocessor Access Control Register)
  mcr     p15, 0, r0, c1, c0, 2
  isb
  # Set EN bit in FPEXC. The Advanced SIMD and VFP extensions are enabled and operate normally.
  mov     r0, #0x40000000
  mcr     p10,#0x7,r0,c8,c0,#0
  bx      lr

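@ Illustrative note (assumption, not in the original file): with CPACR cp10/cp11
@ granted full access and FPEXC.EN set as above, VFP/NEON register accesses
@ become legal, so a caller could for example sanity-check the unit by reading
@ its ID register:
@
@   vmrs    r0, fpsid             @ read the Floating-Point System ID register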
ASM_PFX(ArmCallWFI):
  wfi
  bx      lr

# Note: Returns 0 on uniprocessor implementations
ASM_PFX(ArmReadCbar):
  mrc     p15, 4, r0, c15, c0, 0  @ Read Configuration Base Address Register
  bx      lr

ASM_PFX(ArmReadMpidr):
  mrc     p15, 0, r0, c0, c0, 5   @ read MPIDR
  bx      lr

ASM_PFX(ArmReadTpidrurw):
  mrc     p15, 0, r0, c13, c0, 2  @ read TPIDRURW
  bx      lr

ASM_PFX(ArmWriteTpidrurw):
  mcr     p15, 0, r0, c13, c0, 2  @ write TPIDRURW
  bx      lr

ASM_PFX(ArmIsArchTimerImplemented):
  mrc     p15, 0, r0, c0, c1, 1   @ Read ID_PFR1
  and     r0, r0, #0x000F0000
  bx      lr
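@ Illustrative note (hedged): the value returned above is the Generic Timer
@ field of ID_PFR1 (bits [19:16]); any non-zero value means the architected
@ timer is implemented. A hypothetical caller could test it like this:
@
@   bl      ASM_PFX(ArmIsArchTimerImplemented)
@   cmp     r0, #0
@   bne     timer_present          @ hypothetical label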

ASM_PFX(ArmReadIdPfr1):
  mrc     p15, 0, r0, c0, c1, 1   @ Read ID_PFR1 Register
  bx      lr

ASM_PFX(ArmReadIdMmfr0):
  mrc     p15, 0, r0, c0, c1, 4   @ Read ID_MMFR0 Register
  bx      lr

ASM_FUNCTION_REMOVE_IF_UNREFERENCED