#------------------------------------------------------------------------------
#
# Copyright (c) 2008-2009 Apple Inc. All rights reserved.
#
# All rights reserved. This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
#------------------------------------------------------------------------------
.globl ASM_PFX(ArmInvalidateInstructionCache)
.globl ASM_PFX(ArmInvalidateDataCacheEntryByMVA)
.globl ASM_PFX(ArmCleanDataCacheEntryByMVA)
.globl ASM_PFX(ArmCleanInvalidateDataCacheEntryByMVA)
.globl ASM_PFX(ArmInvalidateDataCacheEntryBySetWay)
.globl ASM_PFX(ArmCleanDataCacheEntryBySetWay)
.globl ASM_PFX(ArmCleanInvalidateDataCacheEntryBySetWay)
.globl ASM_PFX(ArmDrainWriteBuffer)
.globl ASM_PFX(ArmEnableMmu)
.globl ASM_PFX(ArmDisableMmu)
.globl ASM_PFX(ArmMmuEnabled)
.globl ASM_PFX(ArmEnableDataCache)
.globl ASM_PFX(ArmDisableDataCache)
.globl ASM_PFX(ArmEnableInstructionCache)
.globl ASM_PFX(ArmDisableInstructionCache)
.globl ASM_PFX(ArmEnableBranchPrediction)
.globl ASM_PFX(ArmDisableBranchPrediction)
.globl ASM_PFX(ArmV7AllDataCachesOperation)
.globl ASM_PFX(ArmDataMemoryBarrier)
.globl ASM_PFX(ArmDataSyncronizationBarrier)
.globl ASM_PFX(ArmInstructionSynchronizationBarrier)

.text
.align 2

@ SCTLR (CP15 c1, c0, 0) control-register bit masks used below.
.set DC_ON, (0x1<<2)        @ C bit  - data cache enable
.set IC_ON, (0x1<<12)       @ I bit  - instruction cache enable

@ ArmInvalidateDataCacheEntryByMVA
@ In:  r0 = modified virtual address of the data cache line to invalidate.
ASM_PFX(ArmInvalidateDataCacheEntryByMVA):
  mcr     p15, 0, r0, c7, c6, 1   @ invalidate single data cache line (DCIMVAC)
  dsb
  isb
  bx      lr

@ ArmCleanDataCacheEntryByMVA
@ In:  r0 = modified virtual address of the data cache line to clean.
ASM_PFX(ArmCleanDataCacheEntryByMVA):
  mcr     p15, 0, r0, c7, c10, 1  @ clean single data cache line (DCCMVAC)
  dsb
  isb
  bx      lr

@ ArmCleanInvalidateDataCacheEntryByMVA
@ In:  r0 = modified virtual address of the data cache line to clean+invalidate.
ASM_PFX(ArmCleanInvalidateDataCacheEntryByMVA):
  mcr     p15, 0, r0, c7, c14, 1  @ clean and invalidate single data cache line (DCCIMVAC)
  dsb
  isb
  bx      lr

@ ArmInvalidateDataCacheEntryBySetWay
@ In:  r0 = set/way/level value in the CP15 c7 set/way format.
ASM_PFX(ArmInvalidateDataCacheEntryBySetWay):
  mcr     p15, 0, r0, c7, c6, 2   @ Invalidate this line (DCISW)
  dsb
  isb
  bx      lr

@ ArmCleanInvalidateDataCacheEntryBySetWay
@ In:  r0 = set/way/level value in the CP15 c7 set/way format.
ASM_PFX(ArmCleanInvalidateDataCacheEntryBySetWay):
  mcr     p15, 0, r0, c7, c14, 2  @ Clean and Invalidate this line (DCCISW)
  dsb
  isb
  bx      lr

@ ArmCleanDataCacheEntryBySetWay
@ In:  r0 = set/way/level value in the CP15 c7 set/way format.
ASM_PFX(ArmCleanDataCacheEntryBySetWay):
  mcr     p15, 0, r0, c7, c10, 2  @ Clean this line (DCCSW)
  dsb
  isb
  bx      lr

@ ArmDrainWriteBuffer
@ Drains the write buffer (legacy CP15 DSB encoding) then issues dsb/isb.
@ NOTE(review): the register operand of this CP15 operation should be zero
@ (SBZ); r0 is passed through uncleared here — confirm callers do not rely
@ on a specific value being written.
ASM_PFX(ArmDrainWriteBuffer):
  mcr     p15, 0, r0, c7, c10, 4  @ Drain write buffer for sync
  dsb
  isb
  bx      lr

@ ArmInvalidateInstructionCache
@ Invalidates the entire instruction cache, then synchronizes.
@ (A redundant second `mov R0,#0` present in the original has been removed;
@ R0 is already zero at that point.)
ASM_PFX(ArmInvalidateInstructionCache):
  mov     R0, #0
  mcr     p15, 0, R0, c7, c5, 0   @ Invalidate entire instruction cache (ICIALLU)
  dsb
  isb
  bx      LR

@ ArmEnableMmu
@ Sets the M bit (bit 0) of SCTLR to turn the MMU on.
ASM_PFX(ArmEnableMmu):
  mrc     p15, 0, R0, c1, c0, 0   @ read SCTLR
  orr     R0, R0, #1              @ set M bit
  mcr     p15, 0, R0, c1, c0, 0   @ write SCTLR
  dsb
  isb
  bx      LR

@ ArmMmuEnabled
@ Out: r0 = SCTLR.M bit — 1 if the MMU is enabled, 0 otherwise.
ASM_PFX(ArmMmuEnabled):
  mrc     p15, 0, R0, c1, c0, 0   @ read SCTLR
  and     R0, R0, #1              @ isolate M bit
  isb
  bx      LR

@ ArmDisableMmu
@ Clears the FCSE PID register, then clears the M bit of SCTLR to turn the
@ MMU off.
ASM_PFX(ArmDisableMmu):
  mov     R0, #0
  mcr     p15, 0, R0, c13, c0, 0  @ FCSE PID register must be cleared before disabling MMU
  mrc     p15, 0, R0, c1, c0, 0   @ read SCTLR
  bic     R0, R0, #1              @ clear M bit
  mcr     p15, 0, R0, c1, c0, 0   @ Disable MMU
  dsb
  isb
  bx      LR

@ ArmEnableDataCache
@ Sets the C bit of SCTLR to enable the data cache.
ASM_PFX(ArmEnableDataCache):
  ldr     R1, =DC_ON
  mrc     p15, 0, R0, c1, c0, 0   @ Read control register configuration data
  orr     R0, R0, R1              @ Set C bit
  mcr     p15, 0, r0, c1, c0, 0   @ Write control register configuration data
  dsb
  isb
  bx      LR

@ ArmDisableDataCache
@ Clears the C bit of SCTLR to disable the data cache.
ASM_PFX(ArmDisableDataCache):
  ldr     R1, =DC_ON
  mrc     p15, 0, R0, c1, c0, 0   @ Read control register configuration data
  bic     R0, R0, R1              @ Clear C bit
  mcr     p15, 0, r0, c1, c0, 0   @ Write control register configuration data
  dsb
  isb
  bx      LR

@ ArmEnableInstructionCache
@ Sets the I bit of SCTLR to enable the instruction cache.
ASM_PFX(ArmEnableInstructionCache):
  ldr     R1, =IC_ON
  mrc     p15, 0, R0, c1, c0, 0   @ Read control register configuration data
  orr     R0, R0, R1              @ Set I bit
  mcr     p15, 0, r0, c1, c0, 0   @ Write control register configuration data
  dsb
  isb
  bx      LR

@ ArmDisableInstructionCache
@ Clears the I bit of SCTLR to disable the instruction cache.
ASM_PFX(ArmDisableInstructionCache):
  ldr     R1, =IC_ON
  mrc     p15, 0, R0, c1, c0, 0   @ Read control register configuration data
  bic     R0, R0, R1              @ Clear I bit
  mcr     p15, 0, r0, c1, c0, 0   @ Write control register configuration data
  dsb
  isb
  bx      LR

@ ArmEnableBranchPrediction
@ Sets bit 11 (Z bit) of SCTLR to enable branch prediction.
ASM_PFX(ArmEnableBranchPrediction):
  mrc     p15, 0, r0, c1, c0, 0   @ read SCTLR
  orr     r0, r0, #0x00000800     @ set Z bit
  mcr     p15, 0, r0, c1, c0, 0   @ write SCTLR
  dsb
  isb
  bx      LR

@ ArmDisableBranchPrediction
@ Clears bit 11 (Z bit) of SCTLR to disable branch prediction.
ASM_PFX(ArmDisableBranchPrediction):
  mrc     p15, 0, r0, c1, c0, 0   @ read SCTLR
  bic     r0, r0, #0x00000800     @ clear Z bit
  mcr     p15, 0, r0, c1, c0, 0   @ write SCTLR
  dsb
  isb
  bx      LR

@ ArmV7AllDataCachesOperation
@ In:  r0 = pointer to a set/way cache operation (e.g. one of the
@           Arm*DataCacheEntryBySetWay functions above); it is called via blx
@           with the set/way/level value in r0.
@ Walks every data/unified cache level up to the Level of Coherency reported
@ by CLIDR and applies the operation to every set and way of each level.
@ Register roles: R1=operation, R2=line-length shift, R3=LoC*2, R4=max way,
@ R5=way shift, R6=CLIDR, R7=index counter, R9=way counter, R10=level*2.
ASM_PFX(ArmV7AllDataCachesOperation):
  stmfd   SP!, {r4-r12, LR}
  mov     R1, R0                  @ Save Function call in R1
  mrc     p15, 1, R6, c0, c0, 1   @ Read CLIDR
  ands    R3, R6, #0x7000000      @ Mask out all but Level of Coherency (LoC)
  mov     R3, R3, LSR #23         @ Cache level value (naturally aligned)
  beq     L_Finished              @ LoC == 0: no cache levels to process
  mov     R10, #0

Loop1:
  add     R2, R10, R10, LSR #1    @ Work out 3xcachelevel
  mov     R12, R6, LSR R2         @ bottom 3 bits are the Cache type for this level
  and     R12, R12, #7            @ get those 3 bits alone
  cmp     R12, #2
  blt     L_Skip                  @ no cache or only instruction cache at this level
  mcr     p15, 2, R10, c0, c0, 0  @ write the Cache Size selection register (CSSELR)
  isb                             @ isb to sync the change to the CacheSizeID reg
  mrc     p15, 1, R12, c0, c0, 0  @ reads current Cache Size ID register (CCSIDR)
  and     R2, R12, #0x7           @ extract the line length field
  add     R2, R2, #4              @ add 4 for the line length offset (log2 16 bytes)
  mov     R4, #0x400              @ build 0x3FF in two instructions: it is not
  sub     R4, R4, #1              @ an encodable ARM immediate (was: ldr R4,=0x3FF)
  ands    R4, R4, R12, LSR #3     @ R4 is the max number on the way size (right aligned)
  clz     R5, R4                  @ R5 is the bit position of the way size increment
  mov     R7, #0x00008000         @ likewise build 0x7FFF in two instructions
  sub     R7, R7, #1              @ (was: ldr R7,=0x00007FFF)
  ands    R7, R7, R12, LSR #13    @ R7 is the max number of the index size (right aligned)

Loop2:
  mov     R9, R4                  @ R9 working copy of the max way size (right aligned)

Loop3:
  orr     R0, R10, R9, LSL R5     @ factor in the way number and cache number
  orr     R0, R0, R7, LSL R2      @ factor in the index number
  blx     R1                      @ apply the requested operation to this set/way
  subs    R9, R9, #1              @ decrement the way number
  bge     Loop3
  subs    R7, R7, #1              @ decrement the index
  bge     Loop2
L_Skip:
  add     R10, R10, #2            @ increment the cache number
  cmp     R3, R10
  bgt     Loop1

L_Finished:
  ldmfd   SP!, {r4-r12, lr}
  bx      LR

@ ArmDataMemoryBarrier — issue a DMB and return.
ASM_PFX(ArmDataMemoryBarrier):
  dmb
  bx      LR

@ ArmDataSyncronizationBarrier — issue a DSB and return.
@ (Exported name keeps the historical misspelling; callers depend on it.)
ASM_PFX(ArmDataSyncronizationBarrier):
  dsb
  bx      LR

@ ArmInstructionSynchronizationBarrier — issue an ISB and return.
ASM_PFX(ArmInstructionSynchronizationBarrier):
  isb
  bx      LR

ASM_FUNCTION_REMOVE_IF_UNREFERENCED