]>
Commit | Line | Data |
---|---|---|
2874c5fd | 1 | /* SPDX-License-Identifier: GPL-2.0-or-later */ |
9d02a428 JB |
2 | /* |
3 | * OpenRISC head.S | |
4 | * | |
5 | * Linux architectural port borrowing liberally from similar works of | |
6 | * others. All original copyrights apply as per the original source | |
7 | * declaration. | |
8 | * | |
9 | * Modifications for the OpenRISC architecture: | |
10 | * Copyright (C) 2003 Matjaz Breskvar <phoenix@bsemi.com> | |
11 | * Copyright (C) 2010-2011 Jonas Bonn <jonas@southpole.se> | |
9d02a428 JB |
12 | */ |
13 | ||
14 | #include <linux/linkage.h> | |
15 | #include <linux/threads.h> | |
16 | #include <linux/errno.h> | |
17 | #include <linux/init.h> | |
160d8378 | 18 | #include <linux/serial_reg.h> |
9d02a428 JB |
19 | #include <asm/processor.h> |
20 | #include <asm/page.h> | |
21 | #include <asm/mmu.h> | |
22 | #include <asm/pgtable.h> | |
c2dc7243 | 23 | #include <asm/thread_info.h> |
9d02a428 JB |
24 | #include <asm/cache.h> |
25 | #include <asm/spr_defs.h> | |
26 | #include <asm/asm-offsets.h> | |
dec83018 | 27 | #include <linux/of_fdt.h> |
9d02a428 JB |
28 | |
29 | #define tophys(rd,rs) \ | |
30 | l.movhi rd,hi(-KERNELBASE) ;\ | |
31 | l.add rd,rd,rs | |
32 | ||
33 | #define CLEAR_GPR(gpr) \ | |
a4d44266 | 34 | l.movhi gpr,0x0 |
9d02a428 JB |
35 | |
36 | #define LOAD_SYMBOL_2_GPR(gpr,symbol) \ | |
37 | l.movhi gpr,hi(symbol) ;\ | |
38 | l.ori gpr,gpr,lo(symbol) | |
39 | ||
40 | ||
41 | #define UART_BASE_ADD 0x90000000 | |
42 | ||
43 | #define EXCEPTION_SR (SPR_SR_DME | SPR_SR_IME | SPR_SR_DCE | SPR_SR_ICE | SPR_SR_SM) | |
44 | #define SYSCALL_SR (SPR_SR_DME | SPR_SR_IME | SPR_SR_DCE | SPR_SR_ICE | SPR_SR_IEE | SPR_SR_TEE | SPR_SR_SM) | |
45 | ||
46 | /* ============================================[ tmp store locations ]=== */ | |
47 | ||
91993c8c SK |
48 | #define SPR_SHADOW_GPR(x) ((x) + SPR_GPR_BASE + 32) |
49 | ||
9d02a428 JB |
50 | /* |
51 | * emergency_print temporary stores | |
52 | */ | |
91993c8c SK |
53 | #ifdef CONFIG_OPENRISC_HAVE_SHADOW_GPRS |
54 | #define EMERGENCY_PRINT_STORE_GPR4 l.mtspr r0,r4,SPR_SHADOW_GPR(14) | |
55 | #define EMERGENCY_PRINT_LOAD_GPR4 l.mfspr r4,r0,SPR_SHADOW_GPR(14) | |
56 | ||
57 | #define EMERGENCY_PRINT_STORE_GPR5 l.mtspr r0,r5,SPR_SHADOW_GPR(15) | |
58 | #define EMERGENCY_PRINT_LOAD_GPR5 l.mfspr r5,r0,SPR_SHADOW_GPR(15) | |
59 | ||
60 | #define EMERGENCY_PRINT_STORE_GPR6 l.mtspr r0,r6,SPR_SHADOW_GPR(16) | |
61 | #define EMERGENCY_PRINT_LOAD_GPR6 l.mfspr r6,r0,SPR_SHADOW_GPR(16) | |
62 | ||
63 | #define EMERGENCY_PRINT_STORE_GPR7 l.mtspr r0,r7,SPR_SHADOW_GPR(7) | |
64 | #define EMERGENCY_PRINT_LOAD_GPR7 l.mfspr r7,r0,SPR_SHADOW_GPR(7) | |
65 | ||
66 | #define EMERGENCY_PRINT_STORE_GPR8 l.mtspr r0,r8,SPR_SHADOW_GPR(8) | |
67 | #define EMERGENCY_PRINT_LOAD_GPR8 l.mfspr r8,r0,SPR_SHADOW_GPR(8) | |
68 | ||
69 | #define EMERGENCY_PRINT_STORE_GPR9 l.mtspr r0,r9,SPR_SHADOW_GPR(9) | |
70 | #define EMERGENCY_PRINT_LOAD_GPR9 l.mfspr r9,r0,SPR_SHADOW_GPR(9) | |
71 | ||
72 | #else /* !CONFIG_OPENRISC_HAVE_SHADOW_GPRS */ | |
9d02a428 JB |
73 | #define EMERGENCY_PRINT_STORE_GPR4 l.sw 0x20(r0),r4 |
74 | #define EMERGENCY_PRINT_LOAD_GPR4 l.lwz r4,0x20(r0) | |
75 | ||
76 | #define EMERGENCY_PRINT_STORE_GPR5 l.sw 0x24(r0),r5 | |
77 | #define EMERGENCY_PRINT_LOAD_GPR5 l.lwz r5,0x24(r0) | |
78 | ||
79 | #define EMERGENCY_PRINT_STORE_GPR6 l.sw 0x28(r0),r6 | |
80 | #define EMERGENCY_PRINT_LOAD_GPR6 l.lwz r6,0x28(r0) | |
81 | ||
82 | #define EMERGENCY_PRINT_STORE_GPR7 l.sw 0x2c(r0),r7 | |
83 | #define EMERGENCY_PRINT_LOAD_GPR7 l.lwz r7,0x2c(r0) | |
84 | ||
85 | #define EMERGENCY_PRINT_STORE_GPR8 l.sw 0x30(r0),r8 | |
86 | #define EMERGENCY_PRINT_LOAD_GPR8 l.lwz r8,0x30(r0) | |
87 | ||
88 | #define EMERGENCY_PRINT_STORE_GPR9 l.sw 0x34(r0),r9 | |
89 | #define EMERGENCY_PRINT_LOAD_GPR9 l.lwz r9,0x34(r0) | |
90 | ||
91993c8c | 91 | #endif |
9d02a428 JB |
92 | |
93 | /* | |
94 | * TLB miss handlers temporary stores | |
95 | */ | |
91993c8c SK |
96 | #ifdef CONFIG_OPENRISC_HAVE_SHADOW_GPRS |
97 | #define EXCEPTION_STORE_GPR2 l.mtspr r0,r2,SPR_SHADOW_GPR(2) | |
98 | #define EXCEPTION_LOAD_GPR2 l.mfspr r2,r0,SPR_SHADOW_GPR(2) | |
99 | ||
100 | #define EXCEPTION_STORE_GPR3 l.mtspr r0,r3,SPR_SHADOW_GPR(3) | |
101 | #define EXCEPTION_LOAD_GPR3 l.mfspr r3,r0,SPR_SHADOW_GPR(3) | |
9d02a428 | 102 | |
91993c8c SK |
103 | #define EXCEPTION_STORE_GPR4 l.mtspr r0,r4,SPR_SHADOW_GPR(4) |
104 | #define EXCEPTION_LOAD_GPR4 l.mfspr r4,r0,SPR_SHADOW_GPR(4) | |
105 | ||
106 | #define EXCEPTION_STORE_GPR5 l.mtspr r0,r5,SPR_SHADOW_GPR(5) | |
107 | #define EXCEPTION_LOAD_GPR5 l.mfspr r5,r0,SPR_SHADOW_GPR(5) | |
108 | ||
109 | #define EXCEPTION_STORE_GPR6 l.mtspr r0,r6,SPR_SHADOW_GPR(6) | |
110 | #define EXCEPTION_LOAD_GPR6 l.mfspr r6,r0,SPR_SHADOW_GPR(6) | |
111 | ||
112 | #else /* !CONFIG_OPENRISC_HAVE_SHADOW_GPRS */ | |
9d02a428 JB |
113 | #define EXCEPTION_STORE_GPR2 l.sw 0x64(r0),r2 |
114 | #define EXCEPTION_LOAD_GPR2 l.lwz r2,0x64(r0) | |
115 | ||
116 | #define EXCEPTION_STORE_GPR3 l.sw 0x68(r0),r3 | |
117 | #define EXCEPTION_LOAD_GPR3 l.lwz r3,0x68(r0) | |
118 | ||
119 | #define EXCEPTION_STORE_GPR4 l.sw 0x6c(r0),r4 | |
120 | #define EXCEPTION_LOAD_GPR4 l.lwz r4,0x6c(r0) | |
121 | ||
122 | #define EXCEPTION_STORE_GPR5 l.sw 0x70(r0),r5 | |
123 | #define EXCEPTION_LOAD_GPR5 l.lwz r5,0x70(r0) | |
124 | ||
125 | #define EXCEPTION_STORE_GPR6 l.sw 0x74(r0),r6 | |
126 | #define EXCEPTION_LOAD_GPR6 l.lwz r6,0x74(r0) | |
127 | ||
91993c8c | 128 | #endif |
9d02a428 JB |
129 | |
130 | /* | |
131 | * EXCEPTION_HANDLE temporary stores | |
132 | */ | |
133 | ||
91993c8c SK |
134 | #ifdef CONFIG_OPENRISC_HAVE_SHADOW_GPRS |
135 | #define EXCEPTION_T_STORE_GPR30 l.mtspr r0,r30,SPR_SHADOW_GPR(30) | |
136 | #define EXCEPTION_T_LOAD_GPR30(reg) l.mfspr reg,r0,SPR_SHADOW_GPR(30) | |
137 | ||
138 | #define EXCEPTION_T_STORE_GPR10 l.mtspr r0,r10,SPR_SHADOW_GPR(10) | |
139 | #define EXCEPTION_T_LOAD_GPR10(reg) l.mfspr reg,r0,SPR_SHADOW_GPR(10) | |
140 | ||
141 | #define EXCEPTION_T_STORE_SP l.mtspr r0,r1,SPR_SHADOW_GPR(1) | |
142 | #define EXCEPTION_T_LOAD_SP(reg) l.mfspr reg,r0,SPR_SHADOW_GPR(1) | |
143 | ||
144 | #else /* !CONFIG_OPENRISC_HAVE_SHADOW_GPRS */ | |
9d02a428 JB |
145 | #define EXCEPTION_T_STORE_GPR30 l.sw 0x78(r0),r30 |
146 | #define EXCEPTION_T_LOAD_GPR30(reg) l.lwz reg,0x78(r0) | |
147 | ||
148 | #define EXCEPTION_T_STORE_GPR10 l.sw 0x7c(r0),r10 | |
149 | #define EXCEPTION_T_LOAD_GPR10(reg) l.lwz reg,0x7c(r0) | |
150 | ||
91993c8c | 151 | #define EXCEPTION_T_STORE_SP l.sw 0x80(r0),r1 |
9d02a428 | 152 | #define EXCEPTION_T_LOAD_SP(reg) l.lwz reg,0x80(r0) |
91993c8c | 153 | #endif |
9d02a428 JB |
154 | |
155 | /* =========================================================[ macros ]=== */ | |
156 | ||
8e6d08e0 | 157 | #ifdef CONFIG_SMP |
9d02a428 JB |
158 | #define GET_CURRENT_PGD(reg,t1) \ |
159 | LOAD_SYMBOL_2_GPR(reg,current_pgd) ;\ | |
8e6d08e0 SK |
160 | l.mfspr t1,r0,SPR_COREID ;\ |
161 | l.slli t1,t1,2 ;\ | |
162 | l.add reg,reg,t1 ;\ | |
9d02a428 JB |
163 | tophys (t1,reg) ;\ |
164 | l.lwz reg,0(t1) | |
8e6d08e0 SK |
165 | #else |
166 | #define GET_CURRENT_PGD(reg,t1) \ | |
167 | LOAD_SYMBOL_2_GPR(reg,current_pgd) ;\ | |
168 | tophys (t1,reg) ;\ | |
169 | l.lwz reg,0(t1) | |
170 | #endif | |
9d02a428 | 171 | |
8e6d08e0 SK |
172 | /* Load r10 from current_thread_info_set - clobbers r1 and r30 */ |
173 | #ifdef CONFIG_SMP | |
174 | #define GET_CURRENT_THREAD_INFO \ | |
175 | LOAD_SYMBOL_2_GPR(r1,current_thread_info_set) ;\ | |
176 | tophys (r30,r1) ;\ | |
177 | l.mfspr r10,r0,SPR_COREID ;\ | |
178 | l.slli r10,r10,2 ;\ | |
179 | l.add r30,r30,r10 ;\ | |
180 | /* r10: current_thread_info */ ;\ | |
181 | l.lwz r10,0(r30) | |
182 | #else | |
183 | #define GET_CURRENT_THREAD_INFO \ | |
184 | LOAD_SYMBOL_2_GPR(r1,current_thread_info_set) ;\ | |
185 | tophys (r30,r1) ;\ | |
186 | /* r10: current_thread_info */ ;\ | |
187 | l.lwz r10,0(r30) | |
188 | #endif | |
9d02a428 JB |
189 | |
190 | /* | |
191 | * DSCR: this is a common hook for handling exceptions. it will save | |
192 | * the needed registers, set up stack and pointer to current | |
193 | * then jump to the handler while enabling MMU | |
194 | * | |
195 | * PRMS: handler - a function to jump to. it has to save the | |
196 | * remaining registers to kernel stack, call | |
197 | * appropriate arch-independent exception handler | |
198 | * and finally jump to ret_from_except | |
199 | * | |
200 | * PREQ: unchanged state from the time exception happened | |
201 | * | |
202 | * POST: SAVED the following registers original value | |
203 | * to the new created exception frame pointed to by r1 | |
204 | * | |
205 | * r1 - ksp pointing to the new (exception) frame | |
206 | * r4 - EEAR exception EA | |
207 | * r10 - current pointing to current_thread_info struct | |
208 | * r12 - syscall 0, since we didn't come from syscall | |
ae15a41a | 209 | * r30 - handler address of the handler we'll jump to |
9d02a428 JB |
210 | * |
211 | * handler has to save remaining registers to the exception | |
212 | * ksp frame *before* tainting them! | |
213 | * | |
214 | * NOTE: this function is not reentrant per se. reentrancy is guaranteed | |
215 | * by processor disabling all exceptions/interrupts when exception | |
216 | * occurs. | |
217 | * | |
218 | * OPTM: no need to make it so wasteful to extract ksp when in user mode | |
219 | */ | |
220 | ||
221 | #define EXCEPTION_HANDLE(handler) \ | |
222 | EXCEPTION_T_STORE_GPR30 ;\ | |
223 | l.mfspr r30,r0,SPR_ESR_BASE ;\ | |
224 | l.andi r30,r30,SPR_SR_SM ;\ | |
225 | l.sfeqi r30,0 ;\ | |
226 | EXCEPTION_T_STORE_GPR10 ;\ | |
227 | l.bnf 2f /* kernel_mode */ ;\ | |
228 | EXCEPTION_T_STORE_SP /* delay slot */ ;\ | |
229 | 1: /* user_mode: */ ;\ | |
8e6d08e0 | 230 | GET_CURRENT_THREAD_INFO ;\ |
9d02a428 JB |
231 | tophys (r30,r10) ;\ |
232 | l.lwz r1,(TI_KSP)(r30) ;\ | |
233 | /* fall through */ ;\ | |
234 | 2: /* kernel_mode: */ ;\ | |
235 | /* create new stack frame, save only needed gprs */ ;\ | |
236 | /* r1: KSP, r10: current, r4: EEAR, r31: __pa(KSP) */ ;\ | |
237 | /* r12: temp, syscall indicator */ ;\ | |
238 | l.addi r1,r1,-(INT_FRAME_SIZE) ;\ | |
239 | /* r1 is KSP, r30 is __pa(KSP) */ ;\ | |
240 | tophys (r30,r1) ;\ | |
241 | l.sw PT_GPR12(r30),r12 ;\ | |
ae15a41a | 242 | /* r4 use for tmp before EA */ ;\ |
9d02a428 JB |
243 | l.mfspr r12,r0,SPR_EPCR_BASE ;\ |
244 | l.sw PT_PC(r30),r12 ;\ | |
245 | l.mfspr r12,r0,SPR_ESR_BASE ;\ | |
246 | l.sw PT_SR(r30),r12 ;\ | |
247 | /* save r30 */ ;\ | |
248 | EXCEPTION_T_LOAD_GPR30(r12) ;\ | |
249 | l.sw PT_GPR30(r30),r12 ;\ | |
250 | /* save r10 as was prior to exception */ ;\ | |
251 | EXCEPTION_T_LOAD_GPR10(r12) ;\ | |
252 | l.sw PT_GPR10(r30),r12 ;\ | |
253 | /* save PT_SP as was prior to exception */ ;\ | |
254 | EXCEPTION_T_LOAD_SP(r12) ;\ | |
255 | l.sw PT_SP(r30),r12 ;\ | |
256 | /* save exception r4, set r4 = EA */ ;\ | |
257 | l.sw PT_GPR4(r30),r4 ;\ | |
258 | l.mfspr r4,r0,SPR_EEAR_BASE ;\ | |
259 | /* r12 == 1 if we come from syscall */ ;\ | |
260 | CLEAR_GPR(r12) ;\ | |
261 | /* ----- turn on MMU ----- */ ;\ | |
ae15a41a SH |
262 | /* Carry DSX into exception SR */ ;\ |
263 | l.mfspr r30,r0,SPR_SR ;\ | |
264 | l.andi r30,r30,SPR_SR_DSX ;\ | |
265 | l.ori r30,r30,(EXCEPTION_SR) ;\ | |
9d02a428 JB |
266 | l.mtspr r0,r30,SPR_ESR_BASE ;\ |
267 | /* r30: EA address of handler */ ;\ | |
268 | LOAD_SYMBOL_2_GPR(r30,handler) ;\ | |
269 | l.mtspr r0,r30,SPR_EPCR_BASE ;\ | |
270 | l.rfe | |
271 | ||
272 | /* | |
273 | * this doesn't work | |
274 | * | |
275 | * | |
276 | * #ifdef CONFIG_JUMP_UPON_UNHANDLED_EXCEPTION | |
277 | * #define UNHANDLED_EXCEPTION(handler) \ | |
278 | * l.ori r3,r0,0x1 ;\ | |
279 | * l.mtspr r0,r3,SPR_SR ;\ | |
280 | * l.movhi r3,hi(0xf0000100) ;\ | |
281 | * l.ori r3,r3,lo(0xf0000100) ;\ | |
282 | * l.jr r3 ;\ | |
283 | * l.nop 1 | |
284 | * | |
285 | * #endif | |
286 | */ | |
287 | ||
288 | /* DSCR: this is the same as EXCEPTION_HANDLE(), we are just | |
289 | * a bit more careful (if we have a PT_SP or current pointer | |
290 | * corruption) and set them up from 'current_set' | |
291 | * | |
292 | */ | |
293 | #define UNHANDLED_EXCEPTION(handler) \ | |
91993c8c | 294 | EXCEPTION_T_STORE_GPR30 ;\ |
9d02a428 JB |
295 | EXCEPTION_T_STORE_GPR10 ;\ |
296 | EXCEPTION_T_STORE_SP ;\ | |
297 | /* temporary store r3, r9 into r1, r10 */ ;\ | |
298 | l.addi r1,r3,0x0 ;\ | |
299 | l.addi r10,r9,0x0 ;\ | |
300 | /* the string referenced by r3 must be low enough */ ;\ | |
301 | l.jal _emergency_print ;\ | |
302 | l.ori r3,r0,lo(_string_unhandled_exception) ;\ | |
303 | l.mfspr r3,r0,SPR_NPC ;\ | |
304 | l.jal _emergency_print_nr ;\ | |
305 | l.andi r3,r3,0x1f00 ;\ | |
306 | /* the string referenced by r3 must be low enough */ ;\ | |
307 | l.jal _emergency_print ;\ | |
308 | l.ori r3,r0,lo(_string_epc_prefix) ;\ | |
309 | l.jal _emergency_print_nr ;\ | |
310 | l.mfspr r3,r0,SPR_EPCR_BASE ;\ | |
311 | l.jal _emergency_print ;\ | |
312 | l.ori r3,r0,lo(_string_nl) ;\ | |
313 | /* end of printing */ ;\ | |
314 | l.addi r3,r1,0x0 ;\ | |
315 | l.addi r9,r10,0x0 ;\ | |
316 | /* extract current, ksp from current_set */ ;\ | |
317 | LOAD_SYMBOL_2_GPR(r1,_unhandled_stack_top) ;\ | |
318 | LOAD_SYMBOL_2_GPR(r10,init_thread_union) ;\ | |
319 | /* create new stack frame, save only needed gprs */ ;\ | |
320 | /* r1: KSP, r10: current, r31: __pa(KSP) */ ;\ | |
321 | /* r12: temp, syscall indicator, r13 temp */ ;\ | |
322 | l.addi r1,r1,-(INT_FRAME_SIZE) ;\ | |
91993c8c SK |
323 | /* r1 is KSP, r30 is __pa(KSP) */ ;\ |
324 | tophys (r30,r1) ;\ | |
325 | l.sw PT_GPR12(r30),r12 ;\ | |
9d02a428 | 326 | l.mfspr r12,r0,SPR_EPCR_BASE ;\ |
91993c8c | 327 | l.sw PT_PC(r30),r12 ;\ |
9d02a428 | 328 | l.mfspr r12,r0,SPR_ESR_BASE ;\ |
91993c8c | 329 | l.sw PT_SR(r30),r12 ;\ |
9d02a428 | 330 | /* save r31 */ ;\ |
91993c8c SK |
331 | EXCEPTION_T_LOAD_GPR30(r12) ;\ |
332 | l.sw PT_GPR30(r30),r12 ;\ | |
9d02a428 JB |
333 | /* save r10 as was prior to exception */ ;\ |
334 | EXCEPTION_T_LOAD_GPR10(r12) ;\ | |
91993c8c | 335 | l.sw PT_GPR10(r30),r12 ;\ |
9d02a428 JB |
336 | /* save PT_SP as was prior to exception */ ;\ |
337 | EXCEPTION_T_LOAD_SP(r12) ;\ | |
91993c8c SK |
338 | l.sw PT_SP(r30),r12 ;\ |
339 | l.sw PT_GPR13(r30),r13 ;\ | |
9d02a428 JB |
340 | /* --> */ ;\ |
341 | /* save exception r4, set r4 = EA */ ;\ | |
91993c8c | 342 | l.sw PT_GPR4(r30),r4 ;\ |
9d02a428 JB |
343 | l.mfspr r4,r0,SPR_EEAR_BASE ;\ |
344 | /* r12 == 1 if we come from syscall */ ;\ | |
345 | CLEAR_GPR(r12) ;\ | |
346 | /* ----- play a MMU trick ----- */ ;\ | |
91993c8c SK |
347 | l.ori r30,r0,(EXCEPTION_SR) ;\ |
348 | l.mtspr r0,r30,SPR_ESR_BASE ;\ | |
9d02a428 | 349 | /* r31: EA address of handler */ ;\ |
91993c8c SK |
350 | LOAD_SYMBOL_2_GPR(r30,handler) ;\ |
351 | l.mtspr r0,r30,SPR_EPCR_BASE ;\ | |
9d02a428 JB |
352 | l.rfe |
353 | ||
354 | /* =====================================================[ exceptions] === */ | |
355 | ||
356 | /* ---[ 0x100: RESET exception ]----------------------------------------- */ | |
357 | .org 0x100 | |
358 | /* Jump to .init code at _start which lives in the .head section | |
359 | * and will be discarded after boot. | |
360 | */ | |
54bd7c51 SK |
361 | LOAD_SYMBOL_2_GPR(r15, _start) |
362 | tophys (r13,r15) /* MMU disabled */ | |
363 | l.jr r13 | |
9d02a428 JB |
364 | l.nop |
365 | ||
366 | /* ---[ 0x200: BUS exception ]------------------------------------------- */ | |
367 | .org 0x200 | |
368 | _dispatch_bus_fault: | |
369 | EXCEPTION_HANDLE(_bus_fault_handler) | |
370 | ||
371 | /* ---[ 0x300: Data Page Fault exception ]------------------------------- */ | |
372 | .org 0x300 | |
373 | _dispatch_do_dpage_fault: | |
374 | // totally disable timer interrupt | |
375 | // l.mtspr r0,r0,SPR_TTMR | |
376 | // DEBUG_TLB_PROBE(0x300) | |
377 | // EXCEPTION_DEBUG_VALUE_ER_ENABLED(0x300) | |
378 | EXCEPTION_HANDLE(_data_page_fault_handler) | |
379 | ||
380 | /* ---[ 0x400: Insn Page Fault exception ]------------------------------- */ | |
381 | .org 0x400 | |
382 | _dispatch_do_ipage_fault: | |
384 | // totally disable timer interrupt | |
384 | // l.mtspr r0,r0,SPR_TTMR | |
385 | // DEBUG_TLB_PROBE(0x400) | |
386 | // EXCEPTION_DEBUG_VALUE_ER_ENABLED(0x400) | |
387 | EXCEPTION_HANDLE(_insn_page_fault_handler) | |
388 | ||
389 | /* ---[ 0x500: Timer exception ]----------------------------------------- */ | |
390 | .org 0x500 | |
391 | EXCEPTION_HANDLE(_timer_handler) | |
392 | ||
550116d2 | 393 | /* ---[ 0x600: Alignment exception ]-------------------------------------- */ |
9d02a428 JB |
394 | .org 0x600 |
395 | EXCEPTION_HANDLE(_alignment_handler) | |
396 | ||
397 | /* ---[ 0x700: Illegal insn exception ]---------------------------------- */ | |
398 | .org 0x700 | |
399 | EXCEPTION_HANDLE(_illegal_instruction_handler) | |
400 | ||
401 | /* ---[ 0x800: External interrupt exception ]---------------------------- */ | |
402 | .org 0x800 | |
403 | EXCEPTION_HANDLE(_external_irq_handler) | |
404 | ||
405 | /* ---[ 0x900: DTLB miss exception ]------------------------------------- */ | |
406 | .org 0x900 | |
407 | l.j boot_dtlb_miss_handler | |
408 | l.nop | |
409 | ||
410 | /* ---[ 0xa00: ITLB miss exception ]------------------------------------- */ | |
411 | .org 0xa00 | |
412 | l.j boot_itlb_miss_handler | |
413 | l.nop | |
414 | ||
415 | /* ---[ 0xb00: Range exception ]----------------------------------------- */ | |
416 | .org 0xb00 | |
417 | UNHANDLED_EXCEPTION(_vector_0xb00) | |
418 | ||
419 | /* ---[ 0xc00: Syscall exception ]--------------------------------------- */ | |
420 | .org 0xc00 | |
421 | EXCEPTION_HANDLE(_sys_call_handler) | |
422 | ||
423 | /* ---[ 0xd00: Trap exception ]------------------------------------------ */ | |
424 | .org 0xd00 | |
425 | UNHANDLED_EXCEPTION(_vector_0xd00) | |
426 | ||
427 | /* ---[ 0xe00: Trap exception ]------------------------------------------ */ | |
428 | .org 0xe00 | |
429 | // UNHANDLED_EXCEPTION(_vector_0xe00) | |
430 | EXCEPTION_HANDLE(_trap_handler) | |
431 | ||
432 | /* ---[ 0xf00: Reserved exception ]-------------------------------------- */ | |
433 | .org 0xf00 | |
434 | UNHANDLED_EXCEPTION(_vector_0xf00) | |
435 | ||
436 | /* ---[ 0x1000: Reserved exception ]------------------------------------- */ | |
437 | .org 0x1000 | |
438 | UNHANDLED_EXCEPTION(_vector_0x1000) | |
439 | ||
440 | /* ---[ 0x1100: Reserved exception ]------------------------------------- */ | |
441 | .org 0x1100 | |
442 | UNHANDLED_EXCEPTION(_vector_0x1100) | |
443 | ||
444 | /* ---[ 0x1200: Reserved exception ]------------------------------------- */ | |
445 | .org 0x1200 | |
446 | UNHANDLED_EXCEPTION(_vector_0x1200) | |
447 | ||
448 | /* ---[ 0x1300: Reserved exception ]------------------------------------- */ | |
449 | .org 0x1300 | |
450 | UNHANDLED_EXCEPTION(_vector_0x1300) | |
451 | ||
452 | /* ---[ 0x1400: Reserved exception ]------------------------------------- */ | |
453 | .org 0x1400 | |
454 | UNHANDLED_EXCEPTION(_vector_0x1400) | |
455 | ||
456 | /* ---[ 0x1500: Reserved exception ]------------------------------------- */ | |
457 | .org 0x1500 | |
458 | UNHANDLED_EXCEPTION(_vector_0x1500) | |
459 | ||
460 | /* ---[ 0x1600: Reserved exception ]------------------------------------- */ | |
461 | .org 0x1600 | |
462 | UNHANDLED_EXCEPTION(_vector_0x1600) | |
463 | ||
464 | /* ---[ 0x1700: Reserved exception ]------------------------------------- */ | |
465 | .org 0x1700 | |
466 | UNHANDLED_EXCEPTION(_vector_0x1700) | |
467 | ||
468 | /* ---[ 0x1800: Reserved exception ]------------------------------------- */ | |
469 | .org 0x1800 | |
470 | UNHANDLED_EXCEPTION(_vector_0x1800) | |
471 | ||
472 | /* ---[ 0x1900: Reserved exception ]------------------------------------- */ | |
473 | .org 0x1900 | |
474 | UNHANDLED_EXCEPTION(_vector_0x1900) | |
475 | ||
476 | /* ---[ 0x1a00: Reserved exception ]------------------------------------- */ | |
477 | .org 0x1a00 | |
478 | UNHANDLED_EXCEPTION(_vector_0x1a00) | |
479 | ||
480 | /* ---[ 0x1b00: Reserved exception ]------------------------------------- */ | |
481 | .org 0x1b00 | |
482 | UNHANDLED_EXCEPTION(_vector_0x1b00) | |
483 | ||
484 | /* ---[ 0x1c00: Reserved exception ]------------------------------------- */ | |
485 | .org 0x1c00 | |
486 | UNHANDLED_EXCEPTION(_vector_0x1c00) | |
487 | ||
488 | /* ---[ 0x1d00: Reserved exception ]------------------------------------- */ | |
489 | .org 0x1d00 | |
490 | UNHANDLED_EXCEPTION(_vector_0x1d00) | |
491 | ||
492 | /* ---[ 0x1e00: Reserved exception ]------------------------------------- */ | |
493 | .org 0x1e00 | |
494 | UNHANDLED_EXCEPTION(_vector_0x1e00) | |
495 | ||
496 | /* ---[ 0x1f00: Reserved exception ]------------------------------------- */ | |
497 | .org 0x1f00 | |
498 | UNHANDLED_EXCEPTION(_vector_0x1f00) | |
499 | ||
500 | .org 0x2000 | |
501 | /* ===================================================[ kernel start ]=== */ | |
502 | ||
503 | /* .text*/ | |
504 | ||
505 | /* This early stuff belongs in HEAD, but some of the functions below definitely | |
506 | * don't... */ | |
507 | ||
508 | __HEAD | |
509 | .global _start | |
510 | _start: | |
a4d44266 SH |
511 | /* Init r0 to zero as per spec */ |
512 | CLEAR_GPR(r0) | |
513 | ||
dec83018 SK |
514 | /* save kernel parameters */ |
515 | l.or r25,r0,r3 /* pointer to fdt */ | |
516 | ||
9d02a428 JB |
517 | /* |
518 | * ensure a deterministic start | |
519 | */ | |
520 | ||
521 | l.ori r3,r0,0x1 | |
522 | l.mtspr r0,r3,SPR_SR | |
523 | ||
524 | CLEAR_GPR(r1) | |
525 | CLEAR_GPR(r2) | |
526 | CLEAR_GPR(r3) | |
527 | CLEAR_GPR(r4) | |
528 | CLEAR_GPR(r5) | |
529 | CLEAR_GPR(r6) | |
530 | CLEAR_GPR(r7) | |
531 | CLEAR_GPR(r8) | |
532 | CLEAR_GPR(r9) | |
533 | CLEAR_GPR(r10) | |
534 | CLEAR_GPR(r11) | |
535 | CLEAR_GPR(r12) | |
536 | CLEAR_GPR(r13) | |
537 | CLEAR_GPR(r14) | |
538 | CLEAR_GPR(r15) | |
539 | CLEAR_GPR(r16) | |
540 | CLEAR_GPR(r17) | |
541 | CLEAR_GPR(r18) | |
542 | CLEAR_GPR(r19) | |
543 | CLEAR_GPR(r20) | |
544 | CLEAR_GPR(r21) | |
545 | CLEAR_GPR(r22) | |
546 | CLEAR_GPR(r23) | |
547 | CLEAR_GPR(r24) | |
9d02a428 JB |
548 | CLEAR_GPR(r26) |
549 | CLEAR_GPR(r27) | |
550 | CLEAR_GPR(r28) | |
551 | CLEAR_GPR(r29) | |
552 | CLEAR_GPR(r30) | |
553 | CLEAR_GPR(r31) | |
554 | ||
8e6d08e0 SK |
555 | #ifdef CONFIG_SMP |
556 | l.mfspr r26,r0,SPR_COREID | |
557 | l.sfeq r26,r0 | |
558 | l.bnf secondary_wait | |
559 | l.nop | |
560 | #endif | |
9d02a428 JB |
561 | /* |
562 | * set up initial ksp and current | |
563 | */ | |
c2dc7243 SK |
564 | /* setup kernel stack */ |
565 | LOAD_SYMBOL_2_GPR(r1,init_thread_union + THREAD_SIZE) | |
9d02a428 JB |
566 | LOAD_SYMBOL_2_GPR(r10,init_thread_union) // setup current |
567 | tophys (r31,r10) | |
568 | l.sw TI_KSP(r31), r1 | |
569 | ||
570 | l.ori r4,r0,0x0 | |
571 | ||
572 | ||
573 | /* | |
574 | * .data contains initialized data, | |
575 | * .bss contains uninitialized data - clear it up | |
576 | */ | |
577 | clear_bss: | |
578 | LOAD_SYMBOL_2_GPR(r24, __bss_start) | |
579 | LOAD_SYMBOL_2_GPR(r26, _end) | |
580 | tophys(r28,r24) | |
581 | tophys(r30,r26) | |
582 | CLEAR_GPR(r24) | |
583 | CLEAR_GPR(r26) | |
584 | 1: | |
585 | l.sw (0)(r28),r0 | |
586 | l.sfltu r28,r30 | |
587 | l.bf 1b | |
588 | l.addi r28,r28,4 | |
589 | ||
590 | enable_ic: | |
591 | l.jal _ic_enable | |
592 | l.nop | |
593 | ||
594 | enable_dc: | |
595 | l.jal _dc_enable | |
596 | l.nop | |
597 | ||
598 | flush_tlb: | |
8c9b7db0 SK |
599 | l.jal _flush_tlb |
600 | l.nop | |
9d02a428 JB |
601 | |
602 | /* The MMU needs to be enabled before or32_early_setup is called */ | |
603 | ||
604 | enable_mmu: | |
605 | /* | |
606 | * enable dmmu & immu | |
607 | * SR[5] = 0, SR[6] = 0, 6th and 7th bit of SR set to 0 | |
608 | */ | |
609 | l.mfspr r30,r0,SPR_SR | |
610 | l.movhi r28,hi(SPR_SR_DME | SPR_SR_IME) | |
611 | l.ori r28,r28,lo(SPR_SR_DME | SPR_SR_IME) | |
612 | l.or r30,r30,r28 | |
613 | l.mtspr r0,r30,SPR_SR | |
614 | l.nop | |
615 | l.nop | |
616 | l.nop | |
617 | l.nop | |
618 | l.nop | |
619 | l.nop | |
620 | l.nop | |
621 | l.nop | |
622 | l.nop | |
623 | l.nop | |
624 | l.nop | |
625 | l.nop | |
626 | l.nop | |
627 | l.nop | |
628 | l.nop | |
629 | l.nop | |
630 | ||
631 | // reset the simulation counters | |
632 | l.nop 5 | |
633 | ||
dec83018 SK |
634 | /* check fdt header magic word */ |
635 | l.lwz r3,0(r25) /* load magic from fdt into r3 */ | |
636 | l.movhi r4,hi(OF_DT_HEADER) | |
637 | l.ori r4,r4,lo(OF_DT_HEADER) | |
638 | l.sfeq r3,r4 | |
639 | l.bf _fdt_found | |
640 | l.nop | |
641 | /* magic number mismatch, set fdt pointer to null */ | |
642 | l.or r25,r0,r0 | |
643 | _fdt_found: | |
644 | /* pass fdt pointer to or32_early_setup in r3 */ | |
645 | l.or r3,r0,r25 | |
9d02a428 JB |
646 | LOAD_SYMBOL_2_GPR(r24, or32_early_setup) |
647 | l.jalr r24 | |
648 | l.nop | |
649 | ||
650 | clear_regs: | |
651 | /* | |
652 | * clear all GPRS to increase determinism | |
653 | */ | |
654 | CLEAR_GPR(r2) | |
655 | CLEAR_GPR(r3) | |
656 | CLEAR_GPR(r4) | |
657 | CLEAR_GPR(r5) | |
658 | CLEAR_GPR(r6) | |
659 | CLEAR_GPR(r7) | |
660 | CLEAR_GPR(r8) | |
661 | CLEAR_GPR(r9) | |
662 | CLEAR_GPR(r11) | |
663 | CLEAR_GPR(r12) | |
664 | CLEAR_GPR(r13) | |
665 | CLEAR_GPR(r14) | |
666 | CLEAR_GPR(r15) | |
667 | CLEAR_GPR(r16) | |
668 | CLEAR_GPR(r17) | |
669 | CLEAR_GPR(r18) | |
670 | CLEAR_GPR(r19) | |
671 | CLEAR_GPR(r20) | |
672 | CLEAR_GPR(r21) | |
673 | CLEAR_GPR(r22) | |
674 | CLEAR_GPR(r23) | |
675 | CLEAR_GPR(r24) | |
676 | CLEAR_GPR(r25) | |
677 | CLEAR_GPR(r26) | |
678 | CLEAR_GPR(r27) | |
679 | CLEAR_GPR(r28) | |
680 | CLEAR_GPR(r29) | |
681 | CLEAR_GPR(r30) | |
682 | CLEAR_GPR(r31) | |
683 | ||
684 | jump_start_kernel: | |
685 | /* | |
686 | * jump to kernel entry (start_kernel) | |
687 | */ | |
688 | LOAD_SYMBOL_2_GPR(r30, start_kernel) | |
689 | l.jr r30 | |
690 | l.nop | |
691 | ||
8c9b7db0 SK |
692 | _flush_tlb: |
693 | /* | |
694 | * I N V A L I D A T E T L B e n t r i e s | |
695 | */ | |
696 | LOAD_SYMBOL_2_GPR(r5,SPR_DTLBMR_BASE(0)) | |
697 | LOAD_SYMBOL_2_GPR(r6,SPR_ITLBMR_BASE(0)) | |
698 | l.addi r7,r0,128 /* Maximum number of sets */ | |
699 | 1: | |
700 | l.mtspr r5,r0,0x0 | |
701 | l.mtspr r6,r0,0x0 | |
702 | ||
703 | l.addi r5,r5,1 | |
704 | l.addi r6,r6,1 | |
705 | l.sfeq r7,r0 | |
706 | l.bnf 1b | |
707 | l.addi r7,r7,-1 | |
708 | ||
709 | l.jr r9 | |
710 | l.nop | |
711 | ||
8e6d08e0 SK |
712 | #ifdef CONFIG_SMP |
713 | secondary_wait: | |
c0567184 SH |
714 | /* Doze the cpu until we are asked to run */ |
715 | /* If we don't have power management skip doze */ | |
716 | l.mfspr r25,r0,SPR_UPR | |
717 | l.andi r25,r25,SPR_UPR_PMP | |
718 | l.sfeq r25,r0 | |
719 | l.bf secondary_check_release | |
720 | l.nop | |
721 | ||
722 | /* Setup special secondary exception handler */ | |
723 | LOAD_SYMBOL_2_GPR(r3, _secondary_evbar) | |
724 | tophys(r25,r3) | |
725 | l.mtspr r0,r25,SPR_EVBAR | |
726 | ||
727 | /* Enable Interrupts */ | |
728 | l.mfspr r25,r0,SPR_SR | |
729 | l.ori r25,r25,SPR_SR_IEE | |
730 | l.mtspr r0,r25,SPR_SR | |
731 | ||
732 | /* Unmask interrupts */ | |
733 | l.mfspr r25,r0,SPR_PICMR | |
734 | l.ori r25,r25,0xffff | |
735 | l.mtspr r0,r25,SPR_PICMR | |
736 | ||
737 | /* Doze */ | |
738 | l.mfspr r25,r0,SPR_PMR | |
739 | LOAD_SYMBOL_2_GPR(r3, SPR_PMR_DME) | |
740 | l.or r25,r25,r3 | |
741 | l.mtspr r0,r25,SPR_PMR | |
742 | ||
743 | /* Wakeup - Restore exception handler */ | |
744 | l.mtspr r0,r0,SPR_EVBAR | |
745 | ||
746 | secondary_check_release: | |
747 | /* | |
748 | * Check if we actually got the release signal, if not go-back to | |
749 | * sleep. | |
750 | */ | |
8e6d08e0 | 751 | l.mfspr r25,r0,SPR_COREID |
c0567184 | 752 | LOAD_SYMBOL_2_GPR(r3, secondary_release) |
8e6d08e0 SK |
753 | tophys(r4, r3) |
754 | l.lwz r3,0(r4) | |
755 | l.sfeq r25,r3 | |
756 | l.bnf secondary_wait | |
757 | l.nop | |
758 | /* fall through to secondary_init */ | |
759 | ||
760 | secondary_init: | |
761 | /* | |
762 | * set up initial ksp and current | |
763 | */ | |
764 | LOAD_SYMBOL_2_GPR(r10, secondary_thread_info) | |
765 | tophys (r30,r10) | |
766 | l.lwz r10,0(r30) | |
767 | l.addi r1,r10,THREAD_SIZE | |
768 | tophys (r30,r10) | |
769 | l.sw TI_KSP(r30),r1 | |
770 | ||
771 | l.jal _ic_enable | |
772 | l.nop | |
773 | ||
774 | l.jal _dc_enable | |
775 | l.nop | |
776 | ||
777 | l.jal _flush_tlb | |
778 | l.nop | |
779 | ||
780 | /* | |
781 | * enable dmmu & immu | |
782 | */ | |
783 | l.mfspr r30,r0,SPR_SR | |
784 | l.movhi r28,hi(SPR_SR_DME | SPR_SR_IME) | |
785 | l.ori r28,r28,lo(SPR_SR_DME | SPR_SR_IME) | |
786 | l.or r30,r30,r28 | |
787 | /* | |
788 | * This is a bit tricky, we need to switch over from physical addresses | |
789 | * to virtual addresses on the fly. | |
790 | * To do that, we first set up ESR with the IME and DME bits set. | |
791 | * Then EPCR is set to secondary_start and then a l.rfe is issued to | |
792 | * "jump" to that. | |
793 | */ | |
794 | l.mtspr r0,r30,SPR_ESR_BASE | |
795 | LOAD_SYMBOL_2_GPR(r30, secondary_start) | |
796 | l.mtspr r0,r30,SPR_EPCR_BASE | |
797 | l.rfe | |
798 | ||
799 | secondary_start: | |
800 | LOAD_SYMBOL_2_GPR(r30, secondary_start_kernel) | |
801 | l.jr r30 | |
802 | l.nop | |
803 | ||
804 | #endif | |
805 | ||
9d02a428 JB |
806 | /* ========================================[ cache ]=== */ |
807 | ||
550116d2 MY |
808 | /* alignment here so we don't change memory offsets with |
809 | * memory controller defined | |
9d02a428 JB |
810 | */ |
811 | .align 0x2000 | |
812 | ||
813 | _ic_enable: | |
814 | /* Check if IC present and skip enabling otherwise */ | |
815 | l.mfspr r24,r0,SPR_UPR | |
816 | l.andi r26,r24,SPR_UPR_ICP | |
817 | l.sfeq r26,r0 | |
818 | l.bf 9f | |
819 | l.nop | |
820 | ||
821 | /* Disable IC */ | |
822 | l.mfspr r6,r0,SPR_SR | |
823 | l.addi r5,r0,-1 | |
824 | l.xori r5,r5,SPR_SR_ICE | |
825 | l.and r5,r6,r5 | |
826 | l.mtspr r0,r5,SPR_SR | |
827 | ||
828 | /* Establish cache block size | |
829 | If BS=0, 16; | |
830 | If BS=1, 32; | |
831 | r14 contain block size | |
832 | */ | |
833 | l.mfspr r24,r0,SPR_ICCFGR | |
834 | l.andi r26,r24,SPR_ICCFGR_CBS | |
835 | l.srli r28,r26,7 | |
836 | l.ori r30,r0,16 | |
837 | l.sll r14,r30,r28 | |
838 | ||
839 | /* Establish number of cache sets | |
840 | r16 contains number of cache sets | |
841 | r28 contains log(# of cache sets) | |
842 | */ | |
843 | l.andi r26,r24,SPR_ICCFGR_NCS | |
844 | l.srli r28,r26,3 | |
845 | l.ori r30,r0,1 | |
846 | l.sll r16,r30,r28 | |
847 | ||
848 | /* Invalidate IC */ | |
849 | l.addi r6,r0,0 | |
850 | l.sll r5,r14,r28 | |
851 | // l.mul r5,r14,r16 | |
852 | // l.trap 1 | |
853 | // l.addi r5,r0,IC_SIZE | |
854 | 1: | |
855 | l.mtspr r0,r6,SPR_ICBIR | |
856 | l.sfne r6,r5 | |
857 | l.bf 1b | |
858 | l.add r6,r6,r14 | |
859 | // l.addi r6,r6,IC_LINE | |
860 | ||
861 | /* Enable IC */ | |
862 | l.mfspr r6,r0,SPR_SR | |
863 | l.ori r6,r6,SPR_SR_ICE | |
864 | l.mtspr r0,r6,SPR_SR | |
865 | l.nop | |
866 | l.nop | |
867 | l.nop | |
868 | l.nop | |
869 | l.nop | |
870 | l.nop | |
871 | l.nop | |
872 | l.nop | |
873 | l.nop | |
874 | l.nop | |
875 | 9: | |
876 | l.jr r9 | |
877 | l.nop | |
878 | ||
/*
 * _dc_enable: probe, invalidate and enable the data cache.
 *
 * In:    nothing (reads SPR_UPR / SPR_DCCFGR / SPR_SR)
 * Out:   returns via r9; DC enabled in SR if present
 * Clobb: r5, r6, r14, r16, r24, r26, r28, r30, flags
 *
 * If UPR reports no data cache, the routine is a no-op.
 */
_dc_enable:
	/* Check if DC present and skip enabling otherwise */
	l.mfspr r24,r0,SPR_UPR
	l.andi  r26,r24,SPR_UPR_DCP
	l.sfeq  r26,r0
	l.bf	9f
	l.nop

	/* Disable DC while we invalidate it (clear SPR_SR_DCE) */
	l.mfspr r6,r0,SPR_SR
	l.addi  r5,r0,-1
	l.xori  r5,r5,SPR_SR_DCE	// r5 = ~SPR_SR_DCE
	l.and   r5,r6,r5
	l.mtspr r0,r5,SPR_SR

	/* Establish cache block size
	   If BS=0, 16;
	   If BS=1, 32;
	   r14 contain block size
	*/
	l.mfspr r24,r0,SPR_DCCFGR
	l.andi  r26,r24,SPR_DCCFGR_CBS
	l.srli  r28,r26,7		// CBS field -> 0 or 1
	l.ori   r30,r0,16
	l.sll   r14,r30,r28		// r14 = 16 << CBS

	/* Establish number of cache sets
	   r16 contains number of cache sets
	   r28 contains log(# of cache sets)
	*/
	l.andi  r26,r24,SPR_DCCFGR_NCS
	l.srli  r28,r26,3
	l.ori   r30,r0,1
	l.sll   r16,r30,r28		// r16 = 1 << NCS

	/* Invalidate DC: write every block address into DCBIR */
	l.addi  r6,r0,0
	l.sll   r5,r14,r28		// r5 = block size << log2(nsets) = cache size
1:
	l.mtspr r0,r6,SPR_DCBIR
	l.sfne  r6,r5
	l.bf    1b
	l.add   r6,r6,r14		// delay slot: advance one cache block

	/* Enable DC */
	l.mfspr r6,r0,SPR_SR
	l.ori   r6,r6,SPR_SR_DCE
	l.mtspr r0,r6,SPR_SR
9:
	l.jr    r9
	l.nop
930 | ||
931 | /* ===============================================[ page table masks ]=== */ | |
932 | ||
9d02a428 JB |
933 | #define DTLB_UP_CONVERT_MASK 0x3fa |
934 | #define ITLB_UP_CONVERT_MASK 0x3a | |
935 | ||
936 | /* for SMP we'd have (this is a bit subtle, CC must be always set | |
937 | * for SMP, but since we have _PAGE_PRESENT bit always defined | |
938 | * we can just modify the mask) | |
939 | */ | |
940 | #define DTLB_SMP_CONVERT_MASK 0x3fb | |
941 | #define ITLB_SMP_CONVERT_MASK 0x3b | |
942 | ||
943 | /* ---[ boot dtlb miss handler ]----------------------------------------- */ | |
944 | ||
/*
 * boot_dtlb_miss_handler: early DTLB refill used during boot
 * (before the page-table-walking dtlb_miss_handler takes over).
 * Installs a direct translation for the faulting EA into the DTLB:
 * EA <= 0xbfffffff is mapped 1:1, higher addresses are mapped via
 * tophys() (kernel virtual -> physical offset), then returns with l.rfe.
 */
boot_dtlb_miss_handler:

/* mask for DTLB_MR register: - (0) sets V (valid) bit,
 *                            - (31-12) sets bits belonging to VPN (31-12)
 */
#define DTLB_MR_MASK	0xfffff001

/* mask for DTLB_TR register: - (2) sets CI (cache inhibit) bit,
 *                            - (4) sets A (access) bit,
 *                            - (5) sets D (dirty) bit,
 *                            - (8) sets SRE (superuser read) bit
 *                            - (9) sets SWE (superuser write) bit
 *                            - (31-12) sets bits belonging to VPN (31-12)
 */
#define DTLB_TR_MASK	0xfffff332

/* These are for masking out the VPN/PPN value from the MR/TR registers...
 * it's not the same as the PFN */
#define VPN_MASK	0xfffff000
#define PPN_MASK	0xfffff000


	EXCEPTION_STORE_GPR6

#if 0
	l.mfspr	r6,r0,SPR_ESR_BASE	   //
	l.andi	r6,r6,SPR_SR_SM		   // are we in kernel mode ?
	l.sfeqi	r6,0			   // r6 == 0x1 --> SM
	l.bf	exit_with_no_dtranslation  //
	l.nop
#endif

	/* this could be optimized by moving storing of
	 * non r6 registers here, and jumping r6 restore
	 * if not in supervisor mode
	 */

	EXCEPTION_STORE_GPR2
	EXCEPTION_STORE_GPR3
	EXCEPTION_STORE_GPR4
	EXCEPTION_STORE_GPR5

	l.mfspr	r4,r0,SPR_EEAR_BASE	// get the offending EA

immediate_translation:
	CLEAR_GPR(r6)

	l.srli	r3,r4,0xd		// r3 <- r4 / 8192 (sets are relative to page size (8Kb) NOT VPN size (4Kb)

	/* r2 = DTLB set index = (EA >> PAGE_SHIFT) & (nsets - 1) */
	l.mfspr	r6, r0, SPR_DMMUCFGR
	l.andi	r6, r6, SPR_DMMUCFGR_NTS
	l.srli	r6, r6, SPR_DMMUCFGR_NTS_OFF
	l.ori	r5, r0, 0x1
	l.sll	r5, r5, r6		// r5 = number DMMU sets
	l.addi	r6, r5, -1		// r6 = nsets mask
	l.and	r2, r3, r6		// r2 <- r3 % NSETS_MASK

	l.or	r6,r6,r4		// r6 <- r4
	l.ori	r6,r6,~(VPN_MASK)	// r6 <- VPN :VPN .xfff - clear up lo(r6) to 0x**** *fff
	l.movhi	r5,hi(DTLB_MR_MASK)	// r5 <- ffff:0000.x000
	l.ori	r5,r5,lo(DTLB_MR_MASK)	// r5 <- ffff:1111.x001 - apply DTLB_MR_MASK
	l.and	r5,r5,r6		// r5 <- VPN :VPN .x001 - we have DTLBMR entry
	l.mtspr	r2,r5,SPR_DTLBMR_BASE(0)	// set DTLBMR

	/* set up DTLB with no translation for EA <= 0xbfffffff */
	LOAD_SYMBOL_2_GPR(r6,0xbfffffff)
	l.sfgeu	r6,r4			// flag if r6 >= r4 (if 0xbfffffff >= EA)
	l.bf	1f			// goto out
	l.and	r3,r4,r4		// delay slot :: r3 <- r4 (if flag==1)

	tophys(r3,r4)			// r3 <- PA (kernel address: apply virt->phys offset)
1:
	l.ori	r3,r3,~(PPN_MASK)	// r3 <- PPN :PPN .xfff - clear up lo(r3) to 0x**** *fff
	l.movhi	r5,hi(DTLB_TR_MASK)	// r5 <- ffff:0000.x000
	l.ori	r5,r5,lo(DTLB_TR_MASK)	// r5 <- ffff:1111.x330 - apply DTLB_TR_MASK
	l.and	r5,r5,r3		// r5 <- PPN :PPN .x330 - we have DTLBTR entry
	l.mtspr	r2,r5,SPR_DTLBTR_BASE(0)	// set DTLBTR

	EXCEPTION_LOAD_GPR6
	EXCEPTION_LOAD_GPR5
	EXCEPTION_LOAD_GPR4
	EXCEPTION_LOAD_GPR3
	EXCEPTION_LOAD_GPR2

	l.rfe				// SR <- ESR, PC <- EPC
exit_with_no_dtranslation:
	/* EA out of memory or not in supervisor mode */
	EXCEPTION_LOAD_GPR6
	EXCEPTION_LOAD_GPR4
	l.j	_dispatch_bus_fault
	l.nop				// fill branch delay slot; previously the first
					// instruction of the following ITLB handler
					// executed in the delay slot of this l.j
					// (the ITLB twin exit_with_no_itranslation
					// already carries this l.nop)
1036 | ||
1037 | /* ---[ boot itlb miss handler ]----------------------------------------- */ | |
1038 | ||
/*
 * boot_itlb_miss_handler: early ITLB refill used during boot.
 * Installs a direct translation for the faulting instruction EA into
 * the ITLB (EA <= 0x0fffffff mapped 1:1, higher addresses via tophys)
 * and returns with l.rfe. Mirrors boot_dtlb_miss_handler above.
 */
boot_itlb_miss_handler:

/* mask for ITLB_MR register: - sets V (valid) bit,
 *                            - sets bits belonging to VPN (15-12)
 */
#define ITLB_MR_MASK	0xfffff001

/* mask for ITLB_TR register: - sets A (access) bit,
 *                            - sets SXE (superuser execute) bit
 *                            - sets bits belonging to VPN (15-12)
 */
#define ITLB_TR_MASK	0xfffff050

/*
#define VPN_MASK	0xffffe000
#define PPN_MASK	0xffffe000
*/



	EXCEPTION_STORE_GPR2
	EXCEPTION_STORE_GPR3
	EXCEPTION_STORE_GPR4
	EXCEPTION_STORE_GPR5
	EXCEPTION_STORE_GPR6

#if 0
	l.mfspr	r6,r0,SPR_ESR_BASE	//
	l.andi	r6,r6,SPR_SR_SM		// are we in kernel mode ?
	l.sfeqi	r6,0			// r6 == 0x1 --> SM
	l.bf	exit_with_no_itranslation
	l.nop
#endif


	l.mfspr	r4,r0,SPR_EEAR_BASE	// get the offending EA

earlyearly:
	CLEAR_GPR(r6)

	l.srli	r3,r4,0xd		// r3 <- r4 / 8192 (sets are relative to page size (8Kb) NOT VPN size (4Kb)

	/* r2 = ITLB set index = (EA >> PAGE_SHIFT) & (nsets - 1) */
	l.mfspr	r6, r0, SPR_IMMUCFGR
	l.andi	r6, r6, SPR_IMMUCFGR_NTS
	l.srli	r6, r6, SPR_IMMUCFGR_NTS_OFF
	l.ori	r5, r0, 0x1
	l.sll	r5, r5, r6		// r5 = number IMMU sets from IMMUCFGR
	l.addi	r6, r5, -1		// r6 = nsets mask
	l.and	r2, r3, r6		// r2 <- r3 % NSETS_MASK

	l.or	r6,r6,r4		// r6 <- r4
	l.ori	r6,r6,~(VPN_MASK)	// r6 <- VPN :VPN .xfff - clear up lo(r6) to 0x**** *fff
	l.movhi	r5,hi(ITLB_MR_MASK)	// r5 <- ffff:0000.x000
	l.ori	r5,r5,lo(ITLB_MR_MASK)	// r5 <- ffff:1111.x001 - apply ITLB_MR_MASK
	l.and	r5,r5,r6		// r5 <- VPN :VPN .x001 - we have ITLBMR entry
	l.mtspr	r2,r5,SPR_ITLBMR_BASE(0)	// set ITLBMR

	/*
	 * set up ITLB with no translation for EA <= 0x0fffffff
	 *
	 * we need this for head.S mapping (EA = PA). if we move all functions
	 * which run with mmu enabled into entry.S, we might be able to eliminate this.
	 *
	 */
	LOAD_SYMBOL_2_GPR(r6,0x0fffffff)
	l.sfgeu	r6,r4			// flag if r6 >= r4 (if 0x0fffffff >= EA)
	l.bf	1f			// goto out
	l.and	r3,r4,r4		// delay slot :: r3 <- r4 (if flag==1)

	tophys(r3,r4)			// r3 <- PA
1:
	l.ori	r3,r3,~(PPN_MASK)	// r3 <- PPN :PPN .xfff - clear up lo(r3) to 0x**** *fff
	l.movhi	r5,hi(ITLB_TR_MASK)	// r5 <- ffff:0000.x000
	l.ori	r5,r5,lo(ITLB_TR_MASK)	// r5 <- ffff:1111.x050 - apply ITLB_TR_MASK
	l.and	r5,r5,r3		// r5 <- PPN :PPN .x050 - we have ITLBTR entry
	l.mtspr	r2,r5,SPR_ITLBTR_BASE(0)	// set ITLBTR

	EXCEPTION_LOAD_GPR6
	EXCEPTION_LOAD_GPR5
	EXCEPTION_LOAD_GPR4
	EXCEPTION_LOAD_GPR3
	EXCEPTION_LOAD_GPR2

	l.rfe				// SR <- ESR, PC <- EPC

exit_with_no_itranslation:
	EXCEPTION_LOAD_GPR4
	EXCEPTION_LOAD_GPR6
	l.j	_dispatch_bus_fault
	l.nop
1129 | ||
1130 | /* ====================================================================== */ | |
1131 | /* | |
1132 | * Stuff below here shouldn't go into .head section... maybe this stuff | |
1133 | * can be moved to entry.S ??? | |
1134 | */ | |
1135 | ||
1136 | /* ==============================================[ DTLB miss handler ]=== */ | |
1137 | ||
1138 | /* | |
1139 | * Comments: | |
1140 | * Exception handlers are entered with MMU off so the following handler | |
1141 | * needs to use physical addressing | |
1142 | * | |
1143 | */ | |
1144 | ||
	.text
/*
 * dtlb_miss_handler: runtime DTLB refill.
 * Walks the two-level page table of the current task (current_pgd) for
 * the faulting data EA; on a present PTE it writes the DTLB TR/MR pair
 * and l.rfe's back; on a missing pmd or non-present pte it falls through
 * to the generic page-fault path via EXCEPTION_HANDLE.
 * Runs with the MMU off, hence the tophys() physical access.
 * Clobbers r2-r4 (saved/restored via the EXCEPTION_*_GPRn scratch slots).
 */
ENTRY(dtlb_miss_handler)
	EXCEPTION_STORE_GPR2
	EXCEPTION_STORE_GPR3
	EXCEPTION_STORE_GPR4
	/*
	 * get EA of the miss
	 */
	l.mfspr	r2,r0,SPR_EEAR_BASE
	/*
	 * pmd = (pmd_t *)(current_pgd + pgd_index(daddr));
	 */
	GET_CURRENT_PGD(r3,r4)		// r3 is current_pgd, r4 is temp
	l.srli	r4,r2,0x18		// >> PAGE_SHIFT + (PAGE_SHIFT - 2)
	l.slli	r4,r4,0x2		// to get address << 2
	l.add	r3,r4,r3		// r4 is pgd_index(daddr)
	/*
	 * if (pmd_none(*pmd))
	 *	goto pmd_none:
	 */
	tophys	(r4,r3)
	l.lwz	r3,0x0(r4)		// get *pmd value
	l.sfne	r3,r0
	l.bnf	d_pmd_none
	l.addi	r3,r0,0xffffe000	// delay slot: PAGE_MASK (used by both paths)

d_pmd_good:
	/*
	 * pte = *pte_offset(pmd, daddr);
	 */
	l.lwz	r4,0x0(r4)		// get **pmd value
	l.and	r4,r4,r3		// & PAGE_MASK
	l.srli	r2,r2,0xd		// >> PAGE_SHIFT, r2 == EEAR
	l.andi	r3,r2,0x7ff		// (1UL << PAGE_SHIFT - 2) - 1
	l.slli	r3,r3,0x2		// to get address << 2
	l.add	r3,r3,r4
	l.lwz	r3,0x0(r3)		// this is pte at last
	/*
	 * if (!pte_present(pte))
	 */
	l.andi	r4,r3,0x1
	l.sfne	r4,r0			// is pte present
	l.bnf	d_pte_not_present
	l.addi	r4,r0,0xffffe3fa	// delay slot: PAGE_MASK | DTLB_UP_CONVERT_MASK
	/*
	 * fill DTLB TR register
	 */
	l.and	r4,r3,r4		// apply the mask
	// Determine number of DMMU sets
	l.mfspr	r2, r0, SPR_DMMUCFGR
	l.andi	r2, r2, SPR_DMMUCFGR_NTS
	l.srli	r2, r2, SPR_DMMUCFGR_NTS_OFF
	l.ori	r3, r0, 0x1
	l.sll	r3, r3, r2		// r3 = number DMMU sets DMMUCFGR
	l.addi	r2, r3, -1		// r2 = nsets mask
	l.mfspr	r3, r0, SPR_EEAR_BASE
	l.srli	r3, r3, 0xd		// >> PAGE_SHIFT
	l.and	r2, r3, r2		// calc offset:  & (NUM_TLB_ENTRIES-1)
	//NUM_TLB_ENTRIES
	l.mtspr	r2,r4,SPR_DTLBTR_BASE(0)
	/*
	 * fill DTLB MR register
	 */
	l.slli	r3, r3, 0xd		/* << PAGE_SHIFT => EA & PAGE_MASK */
	l.ori	r4,r3,0x1		// set hardware valid bit: DTLB_MR entry
	l.mtspr	r2,r4,SPR_DTLBMR_BASE(0)

	EXCEPTION_LOAD_GPR2
	EXCEPTION_LOAD_GPR3
	EXCEPTION_LOAD_GPR4
	l.rfe
d_pmd_none:
d_pte_not_present:
	EXCEPTION_LOAD_GPR2
	EXCEPTION_LOAD_GPR3
	EXCEPTION_LOAD_GPR4
	EXCEPTION_HANDLE(_dtlb_miss_page_fault_handler)
9d02a428 JB |
1222 | |
1223 | /* ==============================================[ ITLB miss handler ]=== */ | |
/*
 * itlb_miss_handler: runtime ITLB refill; mirrors dtlb_miss_handler.
 * Walks the current page table for the faulting instruction EA and
 * installs the ITLB TR/MR pair, or dispatches to the page-fault path.
 * Clobbers r2-r4 (saved/restored via the EXCEPTION_*_GPRn scratch slots).
 */
ENTRY(itlb_miss_handler)
	EXCEPTION_STORE_GPR2
	EXCEPTION_STORE_GPR3
	EXCEPTION_STORE_GPR4
	/*
	 * get EA of the miss
	 */
	l.mfspr	r2,r0,SPR_EEAR_BASE

	/*
	 * pmd = (pmd_t *)(current_pgd + pgd_index(daddr));
	 *
	 */
	GET_CURRENT_PGD(r3,r4)		// r3 is current_pgd, r4 is temp
	l.srli	r4,r2,0x18		// >> PAGE_SHIFT + (PAGE_SHIFT - 2)
	l.slli	r4,r4,0x2		// to get address << 2
	l.add	r3,r4,r3		// r4 is pgd_index(daddr)
	/*
	 * if (pmd_none(*pmd))
	 *	goto pmd_none:
	 */
	tophys	(r4,r3)
	l.lwz	r3,0x0(r4)		// get *pmd value
	l.sfne	r3,r0
	l.bnf	i_pmd_none
	l.addi	r3,r0,0xffffe000	// delay slot: PAGE_MASK

i_pmd_good:
	/*
	 * pte = *pte_offset(pmd, iaddr);
	 *
	 */
	l.lwz	r4,0x0(r4)		// get **pmd value
	l.and	r4,r4,r3		// & PAGE_MASK
	l.srli	r2,r2,0xd		// >> PAGE_SHIFT, r2 == EEAR
	l.andi	r3,r2,0x7ff		// (1UL << PAGE_SHIFT - 2) - 1
	l.slli	r3,r3,0x2		// to get address << 2
	l.add	r3,r3,r4
	l.lwz	r3,0x0(r3)		// this is pte at last
	/*
	 * if (!pte_present(pte))
	 *
	 */
	l.andi	r4,r3,0x1
	l.sfne	r4,r0			// is pte present
	l.bnf	i_pte_not_present
	l.addi	r4,r0,0xffffe03a	// delay slot: PAGE_MASK | ITLB_UP_CONVERT_MASK
	/*
	 * fill ITLB TR register
	 */
	l.and	r4,r3,r4		// apply the mask
	l.andi	r3,r3,0x7c0		// _PAGE_EXEC | _PAGE_SRE | _PAGE_SWE |  _PAGE_URE | _PAGE_UWE
	l.sfeq	r3,r0
	l.bf	itlb_tr_fill //_workaround
	// Determine number of IMMU sets (starts in the delay slot above)
	l.mfspr	r2, r0, SPR_IMMUCFGR
	l.andi	r2, r2, SPR_IMMUCFGR_NTS
	l.srli	r2, r2, SPR_IMMUCFGR_NTS_OFF
	l.ori	r3, r0, 0x1
	l.sll	r3, r3, r2		// r3 = number IMMU sets IMMUCFGR
	l.addi	r2, r3, -1		// r2 = nsets mask
	l.mfspr	r3, r0, SPR_EEAR_BASE
	l.srli	r3, r3, 0xd		// >> PAGE_SHIFT
	l.and	r2, r3, r2		// calc offset:  & (NUM_TLB_ENTRIES-1)

	/*
	 * __PHX__ :: fixme
	 * we should not just blindly set executable flags,
	 * but it does help with ping. the clean way would be to find out
	 * (and fix it) why stack doesn't have execution permissions
	 */

itlb_tr_fill_workaround:
	l.ori	r4,r4,0xc0		// | (SPR_ITLBTR_UXE | ITLBTR_SXE)
itlb_tr_fill:
	l.mtspr	r2,r4,SPR_ITLBTR_BASE(0)
	/*
	 * fill ITLB MR register
	 */
	l.slli	r3, r3, 0xd		/* << PAGE_SHIFT => EA & PAGE_MASK */
	l.ori	r4,r3,0x1		// set hardware valid bit: ITLB_MR entry
	l.mtspr	r2,r4,SPR_ITLBMR_BASE(0)

	EXCEPTION_LOAD_GPR2
	EXCEPTION_LOAD_GPR3
	EXCEPTION_LOAD_GPR4
	l.rfe

i_pmd_none:
i_pte_not_present:
	EXCEPTION_LOAD_GPR2
	EXCEPTION_LOAD_GPR3
	EXCEPTION_LOAD_GPR4
	EXCEPTION_HANDLE(_itlb_miss_page_fault_handler)
9d02a428 JB |
1318 | |
1319 | /* ==============================================[ boot tlb handlers ]=== */ | |
1320 | ||
1321 | ||
1322 | /* =================================================[ debugging aids ]=== */ | |
1323 | ||
	.align 64
/* Small scratch code area the workaround below writes instructions into.
 * 64 bytes, cache-line aligned (only 6 slots of it are actually used). */
_immu_trampoline:
	.space 64
_immu_trampoline_top:

/* Byte offsets of the instruction slots inside _immu_trampoline. */
#define TRAMP_SLOT_0		(0x0)
#define TRAMP_SLOT_1		(0x4)
#define TRAMP_SLOT_2		(0x8)
#define TRAMP_SLOT_3		(0xc)
#define TRAMP_SLOT_4		(0x10)
#define TRAMP_SLOT_5		(0x14)
#define TRAMP_FRAME_SIZE	(0x18)

/*
 * _immu_trampoline_workaround: copies the two instructions around the
 * faulting address (EEA-4 and EEA) into the trampoline, rewriting any
 * jump/branch among them so its target is correct when executed from the
 * trampoline's address, then points EPC at the trampoline and invalidates
 * the icache lines covering it. Returns via r9.
 * NOTE(review): self-modifying code — slot layout, delay slots and the
 * offset arithmetic below are exact; do not reorder.
 */
ENTRY(_immu_trampoline_workaround)
	// r2 EEA
	// r6 is physical EEA
	tophys(r6,r2)

	LOAD_SYMBOL_2_GPR(r5,_immu_trampoline)
	tophys	(r3,r5)			// r3 is trampoline (physical)

	// 0x15000000 == l.nop 0: pre-fill the slots we may not overwrite
	LOAD_SYMBOL_2_GPR(r4,0x15000000)
	l.sw	TRAMP_SLOT_0(r3),r4
	l.sw	TRAMP_SLOT_1(r3),r4
	l.sw	TRAMP_SLOT_4(r3),r4
	l.sw	TRAMP_SLOT_5(r3),r4

	// EPC = EEA - 0x4
	l.lwz	r4,0x0(r6)		// load op @ EEA + 0x0 (fc address)
	l.sw	TRAMP_SLOT_3(r3),r4	// store it to _immu_trampoline_data
	l.lwz	r4,-0x4(r6)		// load op @ EEA - 0x4 (f8 address)
	l.sw	TRAMP_SLOT_2(r3),r4	// store it to _immu_trampoline_data

	// dispatch on the opcode (bits 31-26) of the instruction at EEA-4
	l.srli	r5,r4,26		// check opcode for write access
	l.sfeqi	r5,0			// l.j
	l.bf	0f
	l.sfeqi	r5,0x11			// l.jr
	l.bf	1f
	l.sfeqi	r5,1			// l.jal
	l.bf	2f
	l.sfeqi	r5,0x12			// l.jalr
	l.bf	3f
	l.sfeqi	r5,3			// l.bnf
	l.bf	4f
	l.sfeqi	r5,4			// l.bf
	l.bf	5f
99:
	l.nop
	l.j	99b			// should never happen
	l.nop	1

	// r2 is EEA
	// r3 is trampoline address (physical)
	// r4 is instruction
	// r6 is physical(EEA)
	//
	// r5

2:	// l.jal

	/* 19 20 aa aa	l.movhi r9,0xaaaa
	 * a9 29 bb bb	l.ori	r9,0xbbbb
	 *
	 * where 0xaaaabbbb is EEA + 0x4 shifted right 2
	 */

	l.addi	r6,r2,0x4		// this is 0xaaaabbbb

	// l.movhi r9,0xaaaa
	l.ori	r5,r0,0x1920		// 0x1920 == l.movhi r9
	l.sh	(TRAMP_SLOT_0+0x0)(r3),r5
	l.srli	r5,r6,16
	l.sh	(TRAMP_SLOT_0+0x2)(r3),r5

	// l.ori r9,0xbbbb
	l.ori	r5,r0,0xa929		// 0xa929 == l.ori r9
	l.sh	(TRAMP_SLOT_1+0x0)(r3),r5
	l.andi	r5,r6,0xffff
	l.sh	(TRAMP_SLOT_1+0x2)(r3),r5

	/* fallthrough, need to set up new jump offset */


0:	// l.j
	l.slli	r6,r4,6		// original offset shifted left 6 - 2
//	l.srli	r6,r6,6		// original offset shifted right 2

	l.slli	r4,r2,4		// old jump position: EEA shifted left 4
//	l.srli	r4,r4,6		// old jump position: shifted right 2

	l.addi	r5,r3,0xc	// new jump position (physical)
	l.slli	r5,r5,4		// new jump position: shifted left 4

	// calculate new jump offset
	// new_off = old_off + (old_jump - new_jump)

	l.sub	r5,r4,r5	// old_jump - new_jump
	l.add	r5,r6,r5	// orig_off + (old_jump - new_jump)
	l.srli	r5,r5,6		// new offset shifted right 2

	// r5 is new jump offset
	// l.j has opcode 0x0...
	l.sw	TRAMP_SLOT_2(r3),r5	// write it back

	l.j	trampoline_out
	l.nop

	/* ----------------------------- */

3:	// l.jalr

	/* 19 20 aa aa	l.movhi r9,0xaaaa
	 * a9 29 bb bb	l.ori	r9,0xbbbb
	 *
	 * where 0xaaaabbbb is EEA + 0x4 shifted right 2
	 */

	l.addi	r6,r2,0x4		// this is 0xaaaabbbb

	// l.movhi r9,0xaaaa
	l.ori	r5,r0,0x1920		// 0x1920 == l.movhi r9
	l.sh	(TRAMP_SLOT_0+0x0)(r3),r5
	l.srli	r5,r6,16
	l.sh	(TRAMP_SLOT_0+0x2)(r3),r5

	// l.ori r9,0xbbbb
	l.ori	r5,r0,0xa929		// 0xa929 == l.ori r9
	l.sh	(TRAMP_SLOT_1+0x0)(r3),r5
	l.andi	r5,r6,0xffff
	l.sh	(TRAMP_SLOT_1+0x2)(r3),r5

	l.lhz	r5,(TRAMP_SLOT_2+0x0)(r3)	// load hi part of jump instruction
	l.andi	r5,r5,0x3ff			// clear out opcode part
	l.ori	r5,r5,0x4400			// opcode changed from l.jalr -> l.jr
	l.sh	(TRAMP_SLOT_2+0x0)(r3),r5	// write it back

	/* fallthrough */

1:	// l.jr
	l.j	trampoline_out
	l.nop

	/* ----------------------------- */

4:	// l.bnf
5:	// l.bf
	l.slli	r6,r4,6		// original offset shifted left 6 - 2
//	l.srli	r6,r6,6		// original offset shifted right 2

	l.slli	r4,r2,4		// old jump position: EEA shifted left 4
//	l.srli	r4,r4,6		// old jump position: shifted right 2

	l.addi	r5,r3,0xc	// new jump position (physical)
	l.slli	r5,r5,4		// new jump position: shifted left 4

	// calculate new jump offset
	// new_off = old_off + (old_jump - new_jump)

	l.add	r6,r6,r4	// (orig_off + old_jump)
	l.sub	r6,r6,r5	// (orig_off + old_jump) - new_jump
	l.srli	r6,r6,6		// new offset shifted right 2

	// r6 is new jump offset
	l.lwz	r4,(TRAMP_SLOT_2+0x0)(r3)	// load jump instruction
	l.srli	r4,r4,16
	l.andi	r4,r4,0xfc00	// get opcode part
	l.slli	r4,r4,16
	l.or	r6,r4,r6	// l.b(n)f new offset
	l.sw	TRAMP_SLOT_2(r3),r6	// write it back

	/* we need to add l.j to EEA + 0x8 */
	tophys	(r4,r2)			// may not be needed (due to shifts down_
	l.addi	r4,r4,(0x8 - 0x8)	// jump target = r2 + 0x8 (compensate for 0x8)
					// jump position = r5 + 0x8 (0x8 compensated)
	l.sub	r4,r4,r5		// jump offset = target - new_position + 0x8

	l.slli	r4,r4,4			// the amount of info in immediate of jump
	l.srli	r4,r4,6			// jump instruction with offset
	l.sw	TRAMP_SLOT_4(r3),r4	// write it to 4th slot

	/* fallthrough */

trampoline_out:
	// set up new EPC to point to our trampoline code
	LOAD_SYMBOL_2_GPR(r5,_immu_trampoline)
	l.mtspr	r0,r5,SPR_EPCR_BASE

	// immu_trampoline is (4x) CACHE_LINE aligned
	// and only 6 instructions long,
	// so we need to invalidate only 2 lines

	/* Establish cache block size
	   If BS=0, 16;
	   If BS=1, 32;
	   r14 contain block size
	*/
	l.mfspr	r21,r0,SPR_ICCFGR
	l.andi	r21,r21,SPR_ICCFGR_CBS
	l.srli	r21,r21,7
	l.ori	r23,r0,16
	l.sll	r14,r23,r21

	l.mtspr	r0,r5,SPR_ICBIR		// invalidate first icache line
	l.add	r5,r5,r14
	l.mtspr	r0,r5,SPR_ICBIR		// invalidate second icache line

	l.jr	r9
	l.nop
1532 | ||
1533 | ||
1534 | /* | |
1535 | * DSCR: prints a string referenced by r3. | |
1536 | * | |
1537 | * PRMS: r3 - address of the first character of null | |
1538 | * terminated string to be printed | |
1539 | * | |
1540 | * PREQ: UART at UART_BASE_ADD has to be initialized | |
1541 | * | |
1542 | * POST: caller should be aware that r3, r9 are changed | |
1543 | */ | |
/*
 * DSCR: prints a string referenced by r3.
 *
 * PRMS: r3 - address of the first character of null
 *       terminated string to be printed
 *
 * PREQ: UART at UART_BASE_ADD has to be initialized
 *
 * POST: caller should be aware that r3, r9 are changed
 *
 * Magic UART numbers replaced with the <linux/serial_reg.h> names
 * (already included by this file and used by _early_uart_init):
 * offset 5 == UART_LSR, 0x20 == UART_LSR_THRE, 0x40 == UART_LSR_TEMT,
 * offset 0 == UART_TX. Encodings are unchanged.
 */
ENTRY(_emergency_print)
	EMERGENCY_PRINT_STORE_GPR4
	EMERGENCY_PRINT_STORE_GPR5
	EMERGENCY_PRINT_STORE_GPR6
	EMERGENCY_PRINT_STORE_GPR7
2:
	l.lbz	r7,0(r3)		// r7 = *s
	l.sfeq	r7,r0			// NUL terminator -> done
	l.bf	9f
	l.nop

	// putc:
	l.movhi	r4,hi(UART_BASE_ADD)	// lo(UART_BASE_ADD) == 0, hi() is enough

	/* busy-wait until transmit holding register is empty (LSR.THRE) */
	l.addi	r6,r0,UART_LSR_THRE
1:	l.lbz	r5,UART_LSR(r4)
	l.andi	r5,r5,UART_LSR_THRE
	l.sfeq	r5,r6
	l.bnf	1b
	l.nop

	l.sb	UART_TX(r4),r7		// transmit the character

	/* busy-wait until transmitter fully idle (LSR.TEMT | LSR.THRE) */
	l.addi	r6,r0,(UART_LSR_TEMT | UART_LSR_THRE)
1:	l.lbz	r5,UART_LSR(r4)
	l.andi	r5,r5,(UART_LSR_TEMT | UART_LSR_THRE)
	l.sfeq	r5,r6
	l.bnf	1b
	l.nop

	/* next character */
	l.j	2b
	l.addi	r3,r3,0x1		// delay slot: s++

9:
	EMERGENCY_PRINT_LOAD_GPR7
	EMERGENCY_PRINT_LOAD_GPR6
	EMERGENCY_PRINT_LOAD_GPR5
	EMERGENCY_PRINT_LOAD_GPR4
	l.jr	r9
	l.nop
1585 | ||
/*
 * DSCR: prints the 32-bit value in r3 as lowercase hex on the UART,
 *       skipping leading zero nibbles (a value of 0 prints one '0').
 *
 * PREQ: UART at UART_BASE_ADD has to be initialized
 *
 * POST: caller should be aware that r3, r9 are changed
 *
 * Magic UART numbers replaced with <linux/serial_reg.h> names; see
 * _emergency_print. Encodings are unchanged.
 */
ENTRY(_emergency_print_nr)
	EMERGENCY_PRINT_STORE_GPR4
	EMERGENCY_PRINT_STORE_GPR5
	EMERGENCY_PRINT_STORE_GPR6
	EMERGENCY_PRINT_STORE_GPR7
	EMERGENCY_PRINT_STORE_GPR8

	l.addi	r8,r0,32		// shift register (bits remaining + 4)

1:	/* remove leading zeros */
	l.addi	r8,r8,-0x4
	l.srl	r7,r3,r8
	l.andi	r7,r7,0xf		// r7 = current nibble

	/* don't skip the last zero if number == 0x0 */
	l.sfeqi	r8,0x4
	l.bf	2f
	l.nop

	l.sfeq	r7,r0
	l.bf	1b
	l.nop

2:
	l.srl	r7,r3,r8

	l.andi	r7,r7,0xf
	l.sflts	r8,r0			// shifted past bit 0 -> all digits emitted
	l.bf	9f
					// NOTE: the l.sfgtui below sits in the delay
					// slot; its flag result is unused on the
					// taken path (9: only restores and returns)
	l.sfgtui r7,0x9			// nibble > 9 -> letter digit
	l.bnf	8f
	l.nop
	l.addi	r7,r7,0x27		// 'a' - '0' - 10

8:
	l.addi	r7,r7,0x30		// + '0': nibble -> ASCII
	// putc:
	l.movhi	r4,hi(UART_BASE_ADD)

	/* busy-wait until transmit holding register is empty (LSR.THRE) */
	l.addi	r6,r0,UART_LSR_THRE
1:	l.lbz	r5,UART_LSR(r4)
	l.andi	r5,r5,UART_LSR_THRE
	l.sfeq	r5,r6
	l.bnf	1b
	l.nop

	l.sb	UART_TX(r4),r7		// transmit the digit

	/* busy-wait until transmitter fully idle (LSR.TEMT | LSR.THRE) */
	l.addi	r6,r0,(UART_LSR_TEMT | UART_LSR_THRE)
1:	l.lbz	r5,UART_LSR(r4)
	l.andi	r5,r5,(UART_LSR_TEMT | UART_LSR_THRE)
	l.sfeq	r5,r6
	l.bnf	1b
	l.nop

	/* next character */
	l.j	2b
	l.addi	r8,r8,-0x4		// delay slot: next nibble

9:
	EMERGENCY_PRINT_LOAD_GPR8
	EMERGENCY_PRINT_LOAD_GPR7
	EMERGENCY_PRINT_LOAD_GPR6
	EMERGENCY_PRINT_LOAD_GPR5
	EMERGENCY_PRINT_LOAD_GPR4
	l.jr	r9
	l.nop
1654 | ||
1655 | ||
1656 | /* | |
1657 | * This should be used for debugging only. | |
1658 | * It messes up the Linux early serial output | |
1659 | * somehow, so use it sparingly and essentially | |
1660 | * only if you need to debug something that goes wrong | |
1661 | * before Linux gets the early serial going. | |
1662 | * | |
1663 | * Furthermore, you'll have to make sure you set the | |
1664 | * UART_DIVISOR correctly according to the system | |
1665 | * clock rate. | |
1666 | * | |
1667 | * | |
1668 | */ | |
1669 | ||
1670 | ||
1671 | ||
1672 | #define SYS_CLK 20000000 | |
1673 | //#define SYS_CLK 1843200 | |
1674 | #define OR32_CONSOLE_BAUD 115200 | |
1675 | #define UART_DIVISOR SYS_CLK/(16*OR32_CONSOLE_BAUD) | |
1676 | ||
/*
 * _early_uart_init: program the 16550-style UART at UART_BASE_ADD for
 * early/emergency output: enable+clear FIFOs, mask all interrupts, 8N1,
 * then program the divisor latch (UART_DIVISOR) via DLAB.
 *
 * In:    nothing
 * Out:   returns via r9
 * Clobb: r3, r4, r5
 *
 * Register offsets/bits now use the <linux/serial_reg.h> names already
 * included by this file (values identical to the old magic numbers:
 * UART_FCR==2, UART_IER==1, UART_LCR==3, 0x7==FIFO enable+clear,
 * 0x3==UART_LCR_WLEN8, 0x80==UART_LCR_DLAB).
 */
ENTRY(_early_uart_init)
	l.movhi	r3,hi(UART_BASE_ADD)

	/* FCR: enable FIFOs and clear both of them */
	l.addi	r4,r0,(UART_FCR_ENABLE_FIFO | UART_FCR_CLEAR_RCVR | UART_FCR_CLEAR_XMIT)
	l.sb	UART_FCR(r3),r4

	/* IER: mask all interrupts */
	l.addi	r4,r0,0x0
	l.sb	UART_IER(r3),r4

	/* LCR: 8 data bits, no parity, 1 stop bit */
	l.addi	r4,r0,UART_LCR_WLEN8
	l.sb	UART_LCR(r3),r4

	/* set DLAB, program divisor latch (DLM/DLL), restore LCR */
	l.lbz	r5,UART_LCR(r3)
	l.ori	r4,r5,UART_LCR_DLAB
	l.sb	UART_LCR(r3),r4
	l.addi	r4,r0,((UART_DIVISOR>>8) & 0x000000ff)
	l.sb	UART_DLM(r3),r4
	l.addi	r4,r0,((UART_DIVISOR) & 0x000000ff)
	l.sb	UART_DLL(r3),r4
	l.sb	UART_LCR(r3),r5		// clear DLAB again

	l.jr	r9
	l.nop
1700 | ||
c0567184 SH |
	.align	0x1000
/* Page-aligned exception vector area (name suggests it is installed as
 * EVBAR for secondary CPUs — TODO confirm against the SMP bringup code).
 * Only the vector at offset 0x800 is populated; it sets ESR to
 * supervisor-mode-only (interrupts disabled) and returns. */
	.global _secondary_evbar
_secondary_evbar:

	.space 0x800
	/* Just disable interrupts and Return */
	l.ori	r3,r0,SPR_SR_SM
	l.mtspr	r0,r3,SPR_ESR_BASE
	l.rfe
1710 | ||
1711 | ||
	.section .rodata
/* Message fragments emitted on the emergency UART path for unhandled
 * exceptions (printed via _emergency_print / _emergency_print_nr). */
_string_unhandled_exception:
	.string "\n\rRunarunaround: Unhandled exception 0x\0"

_string_epc_prefix:
	.string ": EPC=0x\0"

_string_nl:
	.string "\n\r\0"
1721 | ||
9d02a428 JB |
1722 | |
1723 | /* ========================================[ page aligned structures ]=== */ | |
1724 | ||
1725 | /* | |
1726 | * .data section should be page aligned | |
57ce8ba0 | 1727 | * (look into arch/openrisc/kernel/vmlinux.lds.S) |
9d02a428 JB |
1728 | */ |
	.section .data,"aw"
	.align	8192			// 8 KiB page alignment (see vmlinux.lds.S)

	/* one zeroed page, exported as the kernel's empty_zero_page */
	.global empty_zero_page
empty_zero_page:
	.space	8192

	/* initial (swapper) page global directory, one page */
	.global swapper_pg_dir
swapper_pg_dir:
	.space	8192

	/* dedicated stack used by the unhandled-exception reporter */
	.global _unhandled_stack
_unhandled_stack:
	.space	8192
_unhandled_stack_top:
1743 | ||
1744 | /* ============================================================[ EOF ]=== */ |