/*
 * Useful bootup functions, which are more easily done in asm than C.
 *
 * NOTE: Be very very careful about the registers you use here.
 *	We don't follow any ABI calling convention among the
 *	assembler functions that call each other, especially early
 *	in the initialization. Please preserve at least r3 and r4
 *	for these early functions, as they often contain information
 *	passed from boot roms into the C decompress function.
 *
 * Author: Tom Rini
 *	   trini@mvista.com
 * Derived from arch/ppc/boot/prep/head.S (Cort Dougan, many others).
 *
 * 2001-2004 (c) MontaVista Software, Inc. This file is licensed under
 * the terms of the GNU General Public License version 2. This program
 * is licensed "as is" without any warranty of any kind, whether express
 * or implied.
 */

#include <asm/processor.h>
#include <asm/cache.h>
#include <asm/ppc_asm.h>


	.text

#ifdef CONFIG_6xx
	.globl	disable_6xx_mmu
disable_6xx_mmu:
	/* Establish default MSR value, exception prefix 0xFFF.
	 * If necessary, this function must fix up the LR if we
	 * return to a different address space once the MMU is
	 * disabled.
	 */
	li	r8,MSR_IP|MSR_FP
	mtmsr	r8
	isync
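	/* MSR_IP|MSR_FP leaves MSR_IR and MSR_DR clear, so once the
	 * mtmsr/isync pair completes, both instruction and data
	 * translation are off.
	 */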

	/* Test for a 601 */
	li	r8,0		/* BAT clear value, needed on both paths */
	mfpvr	r10
	srwi	r10,r10,16
	cmpwi	0,r10,1		/* 601 ? */
	beq	.clearbats_601
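	/* The 601 has four unified BATs (addressed as the IBATs) and no
	 * separate DBAT registers, so skip straight to the IBAT clears.
	 */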

	/* Clear BATs */
	mtspr	SPRN_DBAT0U,r8
	mtspr	SPRN_DBAT0L,r8
	mtspr	SPRN_DBAT1U,r8
	mtspr	SPRN_DBAT1L,r8
	mtspr	SPRN_DBAT2U,r8
	mtspr	SPRN_DBAT2L,r8
	mtspr	SPRN_DBAT3U,r8
	mtspr	SPRN_DBAT3L,r8
.clearbats_601:
	mtspr	SPRN_IBAT0U,r8
	mtspr	SPRN_IBAT0L,r8
	mtspr	SPRN_IBAT1U,r8
	mtspr	SPRN_IBAT1L,r8
	mtspr	SPRN_IBAT2U,r8
	mtspr	SPRN_IBAT2L,r8
	mtspr	SPRN_IBAT3U,r8
	mtspr	SPRN_IBAT3L,r8
	isync
	sync
	sync

	/* Set segment registers */
	li	r8,16			/* load up segment register values */
	mtctr	r8			/* for context 0 */
	lis	r8,0x2000		/* Ku = 1, VSID = 0 */
	li	r10,0
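	/* Each mtsrin fills one of the 16 segment registers: r10 steps
	 * through the effective address space in 256MB (0x10000000)
	 * chunks while the VSID in r8 is bumped by 0x111 so that every
	 * segment gets a distinct VSID.
	 */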
3:	mtsrin	r8,r10
	addi	r8,r8,0x111		/* increment VSID */
	addis	r10,r10,0x1000		/* address of next segment */
	bdnz	3b
	blr

	.globl	disable_6xx_l1cache
disable_6xx_l1cache:
	/* Enable, invalidate and then disable the L1 icache/dcache. */
	li	r8,0
	ori	r8,r8,(HID0_ICE|HID0_DCE|HID0_ICFI|HID0_DCI)
	mfspr	r11,SPRN_HID0
	or	r11,r11,r8
	andc	r10,r11,r8		/* r10 = HID0 with cache bits clear */
	isync
	mtspr	SPRN_HID0,r8		/* enable and invalidate both caches */
	sync
	isync
	mtspr	SPRN_HID0,r10		/* disable both caches again */
	sync
	isync
	blr
#endif

	.globl	_setup_L2CR
_setup_L2CR:
/*
 * We should be skipping this section on CPUs where this results in an
 * illegal instruction. If not, please send trini@kernel.crashing.org
 * the PVR of your CPU.
 */
	/* Invalidate/disable L2 cache */
	sync
	isync
	mfspr	r8,SPRN_L2CR
	rlwinm	r8,r8,0,1,31		/* clear L2E (bit 0) to disable the L2 */
	oris	r8,r8,L2CR_L2I@h	/* set the global invalidate bit */
	sync
	isync
	mtspr	SPRN_L2CR,r8
	sync
	isync

	/* Wait for the invalidation to complete */
	mfspr	r8,SPRN_PVR
	srwi	r8,r8,16
	cmplwi	cr0,r8,0x8000			/* 7450 */
	cmplwi	cr1,r8,0x8001			/* 7455 */
	cmplwi	cr2,r8,0x8002			/* 7457 */
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq	/* cr0.eq = 7450 || 7455 */
	cror	4*cr0+eq,4*cr0+eq,4*cr2+eq	/* cr0.eq |= 7457 */
	bne	2f				/* not a 745x; poll L2IP instead */

1:	mfspr	r8,SPRN_L2CR	/* On 745x, poll L2I bit (bit 10) */
	rlwinm.	r9,r8,0,10,10
	bne	1b
	b	3f

2:	mfspr	r8,SPRN_L2CR	/* On 75x & 74[01]0, poll L2IP bit (bit 31) */
	rlwinm.	r9,r8,0,31,31
	bne	2b

3:	rlwinm	r8,r8,0,11,9	/* Turn off L2I bit */
	sync
	isync
	mtspr	SPRN_L2CR,r8
	sync
	isync
	blr

	.globl	_setup_L3CR
_setup_L3CR:
	/* Invalidate/disable L3 cache */
	sync
	isync
	mfspr	r8,SPRN_L3CR
	rlwinm	r8,r8,0,1,31		/* clear L3E (bit 0) to disable the L3 */
	ori	r8,r8,L3CR_L3I@l	/* set the global invalidate bit */
	sync
	isync
	mtspr	SPRN_L3CR,r8
	sync
	isync

	/* Wait for the invalidation to complete */
1:	mfspr	r8,SPRN_L3CR
	rlwinm.	r9,r8,0,21,21
	bne	1b

	rlwinm	r8,r8,0,22,20	/* Turn off L3I bit */
	sync
	isync
	mtspr	SPRN_L3CR,r8
	sync
	isync
	blr


/* udelay (on non-601 processors) needs to know the period of the
 * timebase in nanoseconds. This used to be hardcoded to be 60ns
 * (period of 66MHz/4). Now a variable is used that is initialized to
 * 60 for backward compatibility, but it can be overridden as necessary
 * with code something like this:
 *	extern unsigned long timebase_period_ns;
 *	timebase_period_ns = 1000000000 / bd->bi_tbfreq;
 */
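/* For example, a board with a 25MHz timebase would set
 * timebase_period_ns = 1000000000 / 25000000 = 40 (ns per tick);
 * the default of 60 corresponds to the original 16.6MHz case.
 */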
	.data
	.globl	timebase_period_ns
timebase_period_ns:
	.long	60

	.text
/*
 * Delay for a number of microseconds
 */
	.globl	udelay
udelay:
	mfspr	r4,SPRN_PVR
	srwi	r4,r4,16
	cmpwi	0,r4,1		/* 601 ? */
	bne	.udelay_not_601
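	/* The 601 has no timebase (it has an RTC instead), so fall back
	 * to a calibrated instruction loop.
	 */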
00:	li	r0,86	/* Instructions / microsecond? */
	mtctr	r0
10:	addi	r0,r0,0	/* NOP */
	bdnz	10b
	subic.	r3,r3,1
	bne	00b
	blr

.udelay_not_601:
	mulli	r4,r3,1000	/* nanoseconds */
	/* Change r4 to be the number of ticks using:
	 * (nanoseconds + (timebase_period_ns - 1)) / timebase_period_ns
	 * timebase_period_ns defaults to 60 (16.6MHz) */
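	/* e.g. a 5us delay with the default 60ns period gives
	 * (5000 + 59) / 60 = 84 ticks; rounding up ensures we never
	 * wait for less than the requested time.
	 */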
	lis	r5,timebase_period_ns@ha
	lwz	r5,timebase_period_ns@l(r5)
	add	r4,r4,r5
	addi	r4,r4,-1
	divw	r4,r4,r5	/* BUS ticks */
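	/* Read the 64-bit timebase without a carry race: sample TBU,
	 * then TBL, then TBU again, and retry if the upper half changed
	 * in between.
	 */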
1:	mftbu	r5
	mftb	r6
	mftbu	r7
	cmpw	0,r5,r7
	bne	1b		/* Get [synced] base time */
	addc	r9,r6,r4	/* Compute end time */
	addze	r8,r5
2:	mftbu	r5
	cmpw	0,r5,r8
	blt	2b
	bgt	3f
	mftb	r6
	cmpw	0,r6,r9
	blt	2b
3:	blr

	.section ".relocate_code","xa"
/*
 * Flush and enable instruction cache
 * First, flush the data cache in case it was enabled and may be
 * holding instructions for copy back.
 */
	.globl	flush_instruction_cache
flush_instruction_cache:
	mflr	r6
	bl	flush_data_cache

#ifdef CONFIG_8xx
	lis	r3, IDC_INVALL@h
	mtspr	SPRN_IC_CST, r3
	lis	r3, IDC_ENABLE@h
	mtspr	SPRN_IC_CST, r3
	lis	r3, IDC_DISABLE@h
	mtspr	SPRN_DC_CST, r3
#elif defined(CONFIG_4xx)
	lis	r3,start@h		# r3 = &start
	lis	r4,_etext@ha
	addi	r4,r4,_etext@l		# r4 = &_etext
1:	dcbf	r0,r3			# Flush the data cache
	icbi	r0,r3			# Invalidate the instruction cache
	addi	r3,r3,0x10		# Increment by one cache line
	cmplw	cr0,r3,r4		# Are we at the end yet?
	blt	1b			# No, keep flushing and invalidating
#else
	/* Enable, invalidate and then disable the L1 icache/dcache. */
	li	r3,0
	ori	r3,r3,(HID0_ICE|HID0_DCE|HID0_ICFI|HID0_DCI)
	mfspr	r4,SPRN_HID0
	or	r5,r4,r3
	isync
	mtspr	SPRN_HID0,r5	/* enable and invalidate both caches */
	sync
	isync
	ori	r5,r4,HID0_ICE	/* Enable cache */
	mtspr	SPRN_HID0,r5
	sync
	isync
#endif
	mtlr	r6
	blr

#define NUM_CACHE_LINES 128*8
#define cache_flush_buffer 0x1000

/*
 * Flush data cache
 * Do this by just reading lots of stuff into the cache.
 */
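/* Loading NUM_CACHE_LINES sequential lines starting at
 * cache_flush_buffer displaces whatever the cache currently holds,
 * forcing any dirty lines to be written back; this assumes the cache
 * holds no more than NUM_CACHE_LINES lines.
 */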
	.globl	flush_data_cache
flush_data_cache:
	lis	r3,cache_flush_buffer@h
	ori	r3,r3,cache_flush_buffer@l
	li	r4,NUM_CACHE_LINES
	mtctr	r4
00:	lwz	r4,0(r3)
	addi	r3,r3,L1_CACHE_BYTES	/* Next line, please */
	bdnz	00b
10:	blr

	.previous