]> git.proxmox.com Git - mirror_ubuntu-bionic-kernel.git/blob - arch/ppc/platforms/pmac_cache.S
Linux-2.6.12-rc2
[mirror_ubuntu-bionic-kernel.git] / arch / ppc / platforms / pmac_cache.S
1 /*
2 * This file contains low-level cache management functions
3 * used for sleep and CPU speed changes on Apple machines.
4 * (In fact the only thing that is Apple-specific is that we assume
5 * that we can read from ROM at physical address 0xfff00000.)
6 *
7 * Copyright (C) 2004 Paul Mackerras (paulus@samba.org) and
8 * Benjamin Herrenschmidt (benh@kernel.crashing.org)
9 *
10 * This program is free software; you can redistribute it and/or
11 * modify it under the terms of the GNU General Public License
12 * as published by the Free Software Foundation; either version
13 * 2 of the License, or (at your option) any later version.
14 *
15 */
16
17 #include <linux/config.h>
18 #include <asm/processor.h>
19 #include <asm/ppc_asm.h>
20 #include <asm/cputable.h>
21
22 /*
23 * Flush and disable all data caches (dL1, L2, L3). This is used
24 * when going to sleep, when doing a PMU based cpufreq transition,
25 * or when "offlining" a CPU on SMP machines. This code is over
26 * paranoid, but I've had enough issues with various CPU revs and
27  * bugs that I decided it was worth being over cautious
28 */
29
/*
 * flush_disable_caches()
 *
 * Dispatch to the flush/disable routine matching the running CPU.
 * The BEGIN/END_FTR_SECTION pairs are patched at boot from the CPU
 * feature bits: 745x-class parts take flush_disable_745x, other
 * parts with an L2CR (750/74[01]0) take flush_disable_75x, and
 * anything else falls through to __flush_disable_L1 (defined
 * elsewhere — only the L1 data cache is handled there).
 */
_GLOBAL(flush_disable_caches)
BEGIN_FTR_SECTION
	b	flush_disable_745x
END_FTR_SECTION_IFSET(CPU_FTR_SPEC7450)
BEGIN_FTR_SECTION
	b	flush_disable_75x
END_FTR_SECTION_IFSET(CPU_FTR_L2CR)
	b	__flush_disable_L1
38
/* This is the code for G3 and 74[01]0 */
/*
 * flush_disable_75x()
 *
 * Flush and disable the L1 data cache and the (pre-745x style) L2,
 * displacement-flushing by reading from ROM space at physical
 * 0xfff00000.  Runs with MSR[EE] and MSR[DR] cleared; both are
 * restored before returning.  Clobbers r0, r3, r4, r5, r8, r10,
 * r11, ctr.
 *
 * Fix: the "disable / invalidate / enable L1" step used to do
 * "rlwinm r0,r0,0,~HID0_DCE" — computing into r0 (a dead value)
 * and then writing the *unmodified* HID0 back from r3, so the L1
 * was never actually disabled before the DCI toggle.  The mask is
 * now applied to r3, which is what gets written to HID0.
 */
flush_disable_75x:
	mflr	r10

	/* Turn off EE and DR in MSR */
	mfmsr	r11
	rlwinm	r0,r11,0,~MSR_EE
	rlwinm	r0,r0,0,~MSR_DR
	sync
	mtmsr	r0
	isync

	/* Stop DST streams */
BEGIN_FTR_SECTION
	DSSALL
	sync
END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)

	/* Stop DPM */
	mfspr	r8,SPRN_HID0		/* Save SPRN_HID0 in r8 */
	rlwinm	r4,r8,0,12,10		/* Turn off HID0[DPM] */
	sync
	mtspr	SPRN_HID0,r4		/* Disable DPM */
	sync

	/* disp-flush L1: read 0x4000 lines of 32 bytes from ROM space */
	li	r4,0x4000
	mtctr	r4
	lis	r4,0xfff0
1:	lwzx	r0,r0,r4
	addi	r4,r4,32
	bdnz	1b
	sync
	isync

	/* disable / invalidate / enable L1 data */
	mfspr	r3,SPRN_HID0
	rlwinm	r3,r3,0,~HID0_DCE	/* clear DCE in the value we write
					 * back (was rlwinm r0,r0,... — bug) */
	mtspr	SPRN_HID0,r3
	sync
	isync
	ori	r3,r3,HID0_DCE|HID0_DCI
	sync
	isync
	mtspr	SPRN_HID0,r3
	xori	r3,r3,HID0_DCI
	mtspr	SPRN_HID0,r3
	sync

	/* Read the current L2CR setting into r5 */
	mfspr	r5,SPRN_L2CR
	/* Set to data-only (pre-745x bit) */
	oris	r3,r5,L2CR_L2DO@h
	b	2f
	/* When disabling L2, code must be in L1 */
	.balign 32
1:	mtspr	SPRN_L2CR,r3
3:	sync
	isync
	b	1f
2:	b	3f
3:	sync
	isync
	b	1b
1:	/* disp-flush L2. The interesting thing here is that the L2 can be
	 * up to 2Mb ... so using the ROM, we'll end up wrapping back to memory
	 * but that is probably fine. We disp-flush over 4Mb to be safe
	 */
	lis	r4,2
	mtctr	r4
	lis	r4,0xfff0
1:	lwzx	r0,r0,r4
	addi	r4,r4,32
	bdnz	1b
	sync
	isync
	/* now disable L2 */
	rlwinm	r5,r5,0,~L2CR_L2E
	b	2f
	/* When disabling L2, code must be in L1 */
	.balign 32
1:	mtspr	SPRN_L2CR,r5
3:	sync
	isync
	b	1f
2:	b	3f
3:	sync
	isync
	b	1b
1:	sync
	isync
	/* Invalidate L2. This is pre-745x, we clear the L2I bit ourselves */
	oris	r4,r5,L2CR_L2I@h
	mtspr	SPRN_L2CR,r4
	sync
	isync
	xoris	r4,r4,L2CR_L2I@h
	sync
	mtspr	SPRN_L2CR,r4
	sync

	/* now disable the L1 data cache */
	mfspr	r0,SPRN_HID0
	rlwinm	r0,r0,0,~HID0_DCE
	mtspr	SPRN_HID0,r0
	sync
	isync

	/* Restore HID0[DPM] to whatever it was before */
	sync
	mtspr	SPRN_HID0,r8
	sync

	/* restore DR and EE */
	sync
	mtmsr	r11
	isync

	mtlr	r10
	blr
159
/* This code is for 745x processors */
/*
 * flush_disable_745x()
 *
 * Flush and disable all data caches (L1, L2 and — when the CPU has
 * one — L3) on 745x-family processors, using the hardware flush
 * assist bits (L2HWF/L3HWF).  Runs with MSR[EE] and MSR[DR]
 * cleared; restores MSR before returning.  Clobbers r0, r3, r4,
 * r5, r6, r11, ctr and cr0.
 */
flush_disable_745x:
	/* Turn off EE and DR in MSR */
	mfmsr	r11
	rlwinm	r0,r11,0,~MSR_EE
	rlwinm	r0,r0,0,~MSR_DR
	sync
	mtmsr	r0
	isync

	/* Stop prefetch streams */
	DSSALL
	sync

	/* Disable L2 prefetching: keep MSSCR0 bits 0-29, clearing the
	 * low two bits */
	mfspr	r0,SPRN_MSSCR0
	rlwinm	r0,r0,0,0,29
	mtspr	SPRN_MSSCR0,r0
	sync
	isync
	/* repeated dcbf on line 0 after the MSSCR0 update — presumably a
	 * settle/workaround sequence; exact rationale not visible here */
	lis	r4,0
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4

	/* Due to a bug with the HW flush on some CPU revs, we occasionally
	 * experience data corruption. I'm adding a displacement flush along
	 * with a dcbf loop over a few Mb to "help". The problem isn't totally
	 * fixed by this in theory, but at least, in practice, I couldn't reproduce
	 * it even with a big hammer...
	 */

	/* displacement flush: touch 0x20000 lines of 32 bytes = 4MB */
	lis	r4,0x0002
	mtctr	r4
	li	r4,0
1:
	lwzx	r0,r0,r4
	addi	r4,r4,32		/* Go to start of next cache line */
	bdnz	1b
	isync

	/* Now, flush the first 4MB of memory */
	lis	r4,0x0002
	mtctr	r4
	li	r4,0
	sync
1:
	dcbf	0,r4
	addi	r4,r4,32		/* Go to start of next cache line */
	bdnz	1b

	/* Flush and disable the L1 data cache: lock all ways but one via
	 * LDSTCR, displacement-flush that way from ROM, then rotate the
	 * unlocked way until every way has been flushed */
	mfspr	r6,SPRN_LDSTCR
	lis	r3,0xfff0		/* read from ROM for displacement flush */
	li	r4,0xfe			/* start with only way 0 unlocked */
	li	r5,128			/* 128 lines in each way */
1:	mtctr	r5
	rlwimi	r6,r4,0,24,31		/* insert lock mask into LDSTCR byte */
	mtspr	SPRN_LDSTCR,r6
	sync
	isync
2:	lwz	r0,0(r3)		/* touch each cache line */
	addi	r3,r3,32
	bdnz	2b
	rlwinm	r4,r4,1,24,30		/* move on to the next way */
	ori	r4,r4,1
	cmpwi	r4,0xff			/* all done? */
	bne	1b
	/* now unlock the L1 data cache */
	li	r4,0
	rlwimi	r6,r4,0,24,31
	sync
	mtspr	SPRN_LDSTCR,r6
	sync
	isync

	/* Flush the L2 cache using the hardware assist */
	mfspr	r3,SPRN_L2CR
	cmpwi	r3,0			/* check if it is enabled first */
	bge	4f			/* L2E is the sign bit, so "enabled"
					 * reads as negative; skip L2 if not */
	oris	r0,r3,(L2CR_L2IO_745x|L2CR_L2DO_745x)@h
	b	2f
	/* When disabling/locking L2, code must be in L1 */
	.balign 32
1:	mtspr	SPRN_L2CR,r0		/* lock the L2 cache */
3:	sync
	isync
	b	1f
2:	b	3f
3:	sync
	isync
	b	1b			/* bounce pattern prefetches this
					 * sequence into the icache first */
1:	sync
	isync
	ori	r0,r3,L2CR_L2HWF_745x
	sync
	mtspr	SPRN_L2CR,r0		/* set the hardware flush bit */
3:	mfspr	r0,SPRN_L2CR		/* wait for it to go to 0 */
	andi.	r0,r0,L2CR_L2HWF_745x
	bne	3b
	sync
	rlwinm	r3,r3,0,~L2CR_L2E	/* clear the enable bit */
	b	2f
	/* When disabling L2, code must be in L1 */
	.balign 32
1:	mtspr	SPRN_L2CR,r3		/* disable the L2 cache */
3:	sync
	isync
	b	1f
2:	b	3f
3:	sync
	isync
	b	1b
1:	sync
	isync
	oris	r4,r3,L2CR_L2I@h	/* global invalidate */
	mtspr	SPRN_L2CR,r4
	sync
	isync
1:	mfspr	r4,SPRN_L2CR		/* wait for invalidate to complete */
	andis.	r0,r4,L2CR_L2I@h
	bne	1b
	sync

BEGIN_FTR_SECTION
	/* Flush the L3 cache using the hardware assist.  If the CPU has
	 * no L3CR feature this whole section is patched to nops and we
	 * fall through to 6: below */
4:	mfspr	r3,SPRN_L3CR
	cmpwi	r3,0			/* check if it is enabled */
	bge	6f			/* L3E is the sign bit, as for L2E */
	oris	r0,r3,L3CR_L3IO@h
	ori	r0,r0,L3CR_L3DO
	sync
	mtspr	SPRN_L3CR,r0		/* lock the L3 cache */
	sync
	isync
	ori	r0,r0,L3CR_L3HWF
	sync
	mtspr	SPRN_L3CR,r0		/* set the hardware flush bit */
5:	mfspr	r0,SPRN_L3CR		/* wait for it to go to zero */
	andi.	r0,r0,L3CR_L3HWF
	bne	5b
	rlwinm	r3,r3,0,~L3CR_L3E	/* clear the enable bit */
	sync
	mtspr	SPRN_L3CR,r3		/* disable the L3 cache */
	sync
	ori	r4,r3,L3CR_L3I		/* global invalidate */
	mtspr	SPRN_L3CR,r4
1:	mfspr	r4,SPRN_L3CR		/* wait for invalidate to complete */
	andi.	r0,r4,L3CR_L3I
	bne	1b
	sync
END_FTR_SECTION_IFSET(CPU_FTR_L3CR)

6:	mfspr	r0,SPRN_HID0		/* now disable the L1 data cache */
	rlwinm	r0,r0,0,~HID0_DCE
	mtspr	SPRN_HID0,r0
	sync
	isync
	mtmsr	r11			/* restore DR and EE */
	isync
	blr