]>
Commit | Line | Data |
---|---|---|
1da177e4 LT |
1 | /* $Id: entry.S,v 1.144 2002/02/09 19:49:30 davem Exp $ |
2 | * arch/sparc64/kernel/entry.S: Sparc64 trap low-level entry points. | |
3 | * | |
4 | * Copyright (C) 1995,1997 David S. Miller (davem@caip.rutgers.edu) | |
5 | * Copyright (C) 1996 Eddie C. Dost (ecd@skynet.be) | |
6 | * Copyright (C) 1996 Miguel de Icaza (miguel@nuclecu.unam.mx) | |
7 | * Copyright (C) 1996,98,99 Jakub Jelinek (jj@sunsite.mff.cuni.cz) | |
8 | */ | |
9 | ||
10 | #include <linux/config.h> | |
11 | #include <linux/errno.h> | |
12 | ||
13 | #include <asm/head.h> | |
14 | #include <asm/asi.h> | |
15 | #include <asm/smp.h> | |
16 | #include <asm/ptrace.h> | |
17 | #include <asm/page.h> | |
18 | #include <asm/signal.h> | |
19 | #include <asm/pgtable.h> | |
20 | #include <asm/processor.h> | |
21 | #include <asm/visasm.h> | |
22 | #include <asm/estate.h> | |
23 | #include <asm/auxio.h> | |
24 | ||
25 | /* #define SYSCALL_TRACING 1 */ | |
26 | ||
27 | #define curptr g6 | |
28 | ||
29 | #define NR_SYSCALLS 284 /* Each OS is different... */ | |
30 | ||
31 | .text | |
32 | .align 32 | |
33 | ||
34 | .globl sparc64_vpte_patchme1 | |
35 | .globl sparc64_vpte_patchme2 | |
36 | /* | |
37 | * On a second level vpte miss, check whether the original fault is to the OBP | |
38 | * range (note that this is only possible for instruction miss, data misses to | |
39 | * obp range do not use vpte). If so, go back directly to the faulting address. | |
40 | * This is because we want to read the tpc, otherwise we have no way of knowing | |
41 | * the 8k aligned faulting address if we are using >8k kernel pagesize. This | |
42 | * also ensures no vpte range addresses are dropped into tlb while obp is | |
43 | * executing (see inherit_locked_prom_mappings() rant). | |
44 | */ | |
45 | sparc64_vpte_nucleus: | |
46 | /* Load 0xf0000000, which is LOW_OBP_ADDRESS. */ | |
47 | mov 0xf, %g5 | |
48 | sllx %g5, 28, %g5 | |
49 | ||
50 | /* Is addr >= LOW_OBP_ADDRESS? */ | |
51 | cmp %g4, %g5 | |
52 | blu,pn %xcc, sparc64_vpte_patchme1 | |
53 | mov 0x1, %g5 | |
54 | ||
55 | /* Load 0x100000000, which is HI_OBP_ADDRESS. */ | |
56 | sllx %g5, 32, %g5 | |
57 | ||
58 | /* Is addr < HI_OBP_ADDRESS? */ | |
59 | cmp %g4, %g5 | |
60 | blu,pn %xcc, obp_iaddr_patch | |
61 | nop | |
62 | ||
63 | /* These two instructions are patched by paging_init(). */ | |
64 | sparc64_vpte_patchme1: | |
65 | sethi %hi(0), %g5 | |
66 | sparc64_vpte_patchme2: | |
67 | or %g5, %lo(0), %g5 | |
68 | ||
69 | /* With kernel PGD in %g5, branch back into dtlb_backend. */ | |
70 | ba,pt %xcc, sparc64_kpte_continue | |
71 | andn %g1, 0x3, %g1 /* Finish PMD offset adjustment. */ | |
72 | ||
73 | vpte_noent: | |
74 | /* Restore previous TAG_ACCESS, %g5 is zero, and we will | |
75 | * skip over the trap instruction so that the top level | |
76 | * TLB miss handler will think this %g5 value is just an | |
77 | * invalid PTE, thus branching to full fault processing. | |
78 | */ | |
79 | mov TLB_SFSR, %g1 | |
80 | stxa %g4, [%g1 + %g1] ASI_DMMU | |
81 | done | |
82 | ||
83 | .globl obp_iaddr_patch | |
84 | obp_iaddr_patch: | |
85 | /* These two instructions patched by inherit_prom_mappings(). */ | |
86 | sethi %hi(0), %g5 | |
87 | or %g5, %lo(0), %g5 | |
88 | ||
89 | /* Behave as if we are at TL0. */ | |
90 | wrpr %g0, 1, %tl | |
91 | rdpr %tpc, %g4 /* Find original faulting iaddr */ | |
92 | srlx %g4, 13, %g4 /* Throw out context bits */ | |
93 | sllx %g4, 13, %g4 /* g4 has vpn + ctx0 now */ | |
94 | ||
95 | /* Restore previous TAG_ACCESS. */ | |
96 | mov TLB_SFSR, %g1 | |
97 | stxa %g4, [%g1 + %g1] ASI_IMMU | |
98 | ||
99 | /* Get PMD offset. */ | |
100 | srlx %g4, 23, %g6 | |
101 | and %g6, 0x7ff, %g6 | |
102 | sllx %g6, 2, %g6 | |
103 | ||
104 | /* Load PMD, is it valid? */ | |
105 | lduwa [%g5 + %g6] ASI_PHYS_USE_EC, %g5 | |
106 | brz,pn %g5, longpath | |
107 | sllx %g5, 11, %g5 | |
108 | ||
109 | /* Get PTE offset. */ | |
110 | srlx %g4, 13, %g6 | |
111 | and %g6, 0x3ff, %g6 | |
112 | sllx %g6, 3, %g6 | |
113 | ||
114 | /* Load PTE. */ | |
115 | ldxa [%g5 + %g6] ASI_PHYS_USE_EC, %g5 | |
116 | brgez,pn %g5, longpath | |
117 | nop | |
118 | ||
119 | /* TLB load and return from trap. */ | |
120 | stxa %g5, [%g0] ASI_ITLB_DATA_IN | |
121 | retry | |
122 | ||
123 | .globl obp_daddr_patch | |
124 | obp_daddr_patch: | |
125 | /* These two instructions patched by inherit_prom_mappings(). */ | |
126 | sethi %hi(0), %g5 | |
127 | or %g5, %lo(0), %g5 | |
128 | ||
129 | /* Get PMD offset. */ | |
130 | srlx %g4, 23, %g6 | |
131 | and %g6, 0x7ff, %g6 | |
132 | sllx %g6, 2, %g6 | |
133 | ||
134 | /* Load PMD, is it valid? */ | |
135 | lduwa [%g5 + %g6] ASI_PHYS_USE_EC, %g5 | |
136 | brz,pn %g5, longpath | |
137 | sllx %g5, 11, %g5 | |
138 | ||
139 | /* Get PTE offset. */ | |
140 | srlx %g4, 13, %g6 | |
141 | and %g6, 0x3ff, %g6 | |
142 | sllx %g6, 3, %g6 | |
143 | ||
144 | /* Load PTE. */ | |
145 | ldxa [%g5 + %g6] ASI_PHYS_USE_EC, %g5 | |
146 | brgez,pn %g5, longpath | |
147 | nop | |
148 | ||
149 | /* TLB load and return from trap. */ | |
150 | stxa %g5, [%g0] ASI_DTLB_DATA_IN | |
151 | retry | |
152 | ||
153 | /* | |
154 | * On a first level data miss, check whether this is to the OBP range (note | |
155 | * that such accesses can be made by prom, as well as by kernel using | |
156 | * prom_getproperty on "address"), and if so, do not use vpte access ... | |
157 | * rather, use information saved during inherit_prom_mappings() using 8k | |
158 | * pagesize. | |
159 | */ | |
160 | kvmap: | |
161 | /* Load 0xf0000000, which is LOW_OBP_ADDRESS. */ | |
162 | mov 0xf, %g5 | |
163 | sllx %g5, 28, %g5 | |
164 | ||
165 | /* Is addr >= LOW_OBP_ADDRESS? */ | |
166 | cmp %g4, %g5 | |
167 | blu,pn %xcc, vmalloc_addr | |
168 | mov 0x1, %g5 | |
169 | ||
170 | /* Load 0x100000000, which is HI_OBP_ADDRESS. */ | |
171 | sllx %g5, 32, %g5 | |
172 | ||
173 | /* Is addr < HI_OBP_ADDRESS? */ | |
174 | cmp %g4, %g5 | |
175 | blu,pn %xcc, obp_daddr_patch | |
176 | nop | |
177 | ||
178 | vmalloc_addr: | |
179 | /* If we get here, a vmalloc addr accessed, load kernel VPTE. */ | |
180 | ldxa [%g3 + %g6] ASI_N, %g5 | |
181 | brgez,pn %g5, longpath | |
182 | nop | |
183 | ||
184 | /* PTE is valid, load into TLB and return from trap. */ | |
185 | stxa %g5, [%g0] ASI_DTLB_DATA_IN ! Reload TLB | |
186 | retry | |
187 | ||
188 | /* This is trivial with the new code... */ | |
189 | .globl do_fpdis | |
190 | do_fpdis: | |
191 | sethi %hi(TSTATE_PEF), %g4 ! IEU0 | |
192 | rdpr %tstate, %g5 | |
193 | andcc %g5, %g4, %g0 | |
194 | be,pt %xcc, 1f | |
195 | nop | |
196 | rd %fprs, %g5 | |
197 | andcc %g5, FPRS_FEF, %g0 | |
198 | be,pt %xcc, 1f | |
199 | nop | |
200 | ||
201 | /* Legal state when DCR_IFPOE is set in Cheetah %dcr. */ | |
202 | sethi %hi(109f), %g7 | |
203 | ba,pt %xcc, etrap | |
204 | 109: or %g7, %lo(109b), %g7 | |
205 | add %g0, %g0, %g0 | |
206 | ba,a,pt %xcc, rtrap_clr_l6 | |
207 | ||
208 | 1: ldub [%g6 + TI_FPSAVED], %g5 ! Load Group | |
209 | wr %g0, FPRS_FEF, %fprs ! LSU Group+4bubbles | |
210 | andcc %g5, FPRS_FEF, %g0 ! IEU1 Group | |
211 | be,a,pt %icc, 1f ! CTI | |
212 | clr %g7 ! IEU0 | |
213 | ldx [%g6 + TI_GSR], %g7 ! Load Group | |
214 | 1: andcc %g5, FPRS_DL, %g0 ! IEU1 | |
215 | bne,pn %icc, 2f ! CTI | |
216 | fzero %f0 ! FPA | |
217 | andcc %g5, FPRS_DU, %g0 ! IEU1 Group | |
218 | bne,pn %icc, 1f ! CTI | |
219 | fzero %f2 ! FPA | |
220 | faddd %f0, %f2, %f4 | |
221 | fmuld %f0, %f2, %f6 | |
222 | faddd %f0, %f2, %f8 | |
223 | fmuld %f0, %f2, %f10 | |
224 | faddd %f0, %f2, %f12 | |
225 | fmuld %f0, %f2, %f14 | |
226 | faddd %f0, %f2, %f16 | |
227 | fmuld %f0, %f2, %f18 | |
228 | faddd %f0, %f2, %f20 | |
229 | fmuld %f0, %f2, %f22 | |
230 | faddd %f0, %f2, %f24 | |
231 | fmuld %f0, %f2, %f26 | |
232 | faddd %f0, %f2, %f28 | |
233 | fmuld %f0, %f2, %f30 | |
234 | faddd %f0, %f2, %f32 | |
235 | fmuld %f0, %f2, %f34 | |
236 | faddd %f0, %f2, %f36 | |
237 | fmuld %f0, %f2, %f38 | |
238 | faddd %f0, %f2, %f40 | |
239 | fmuld %f0, %f2, %f42 | |
240 | faddd %f0, %f2, %f44 | |
241 | fmuld %f0, %f2, %f46 | |
242 | faddd %f0, %f2, %f48 | |
243 | fmuld %f0, %f2, %f50 | |
244 | faddd %f0, %f2, %f52 | |
245 | fmuld %f0, %f2, %f54 | |
246 | faddd %f0, %f2, %f56 | |
247 | fmuld %f0, %f2, %f58 | |
248 | b,pt %xcc, fpdis_exit2 | |
249 | faddd %f0, %f2, %f60 | |
250 | 1: mov SECONDARY_CONTEXT, %g3 | |
251 | add %g6, TI_FPREGS + 0x80, %g1 | |
252 | faddd %f0, %f2, %f4 | |
253 | fmuld %f0, %f2, %f6 | |
254 | ldxa [%g3] ASI_DMMU, %g5 | |
255 | cplus_fptrap_insn_1: | |
256 | sethi %hi(0), %g2 | |
257 | stxa %g2, [%g3] ASI_DMMU | |
258 | membar #Sync | |
259 | add %g6, TI_FPREGS + 0xc0, %g2 | |
260 | faddd %f0, %f2, %f8 | |
261 | fmuld %f0, %f2, %f10 | |
262 | ldda [%g1] ASI_BLK_S, %f32 ! grrr, where is ASI_BLK_NUCLEUS 8-( | |
263 | ldda [%g2] ASI_BLK_S, %f48 | |
264 | faddd %f0, %f2, %f12 | |
265 | fmuld %f0, %f2, %f14 | |
266 | faddd %f0, %f2, %f16 | |
267 | fmuld %f0, %f2, %f18 | |
268 | faddd %f0, %f2, %f20 | |
269 | fmuld %f0, %f2, %f22 | |
270 | faddd %f0, %f2, %f24 | |
271 | fmuld %f0, %f2, %f26 | |
272 | faddd %f0, %f2, %f28 | |
273 | fmuld %f0, %f2, %f30 | |
274 | b,pt %xcc, fpdis_exit | |
275 | membar #Sync | |
276 | 2: andcc %g5, FPRS_DU, %g0 | |
277 | bne,pt %icc, 3f | |
278 | fzero %f32 | |
279 | mov SECONDARY_CONTEXT, %g3 | |
280 | fzero %f34 | |
281 | ldxa [%g3] ASI_DMMU, %g5 | |
282 | add %g6, TI_FPREGS, %g1 | |
283 | cplus_fptrap_insn_2: | |
284 | sethi %hi(0), %g2 | |
285 | stxa %g2, [%g3] ASI_DMMU | |
286 | membar #Sync | |
287 | add %g6, TI_FPREGS + 0x40, %g2 | |
288 | faddd %f32, %f34, %f36 | |
289 | fmuld %f32, %f34, %f38 | |
290 | ldda [%g1] ASI_BLK_S, %f0 ! grrr, where is ASI_BLK_NUCLEUS 8-( | |
291 | ldda [%g2] ASI_BLK_S, %f16 | |
292 | faddd %f32, %f34, %f40 | |
293 | fmuld %f32, %f34, %f42 | |
294 | faddd %f32, %f34, %f44 | |
295 | fmuld %f32, %f34, %f46 | |
296 | faddd %f32, %f34, %f48 | |
297 | fmuld %f32, %f34, %f50 | |
298 | faddd %f32, %f34, %f52 | |
299 | fmuld %f32, %f34, %f54 | |
300 | faddd %f32, %f34, %f56 | |
301 | fmuld %f32, %f34, %f58 | |
302 | faddd %f32, %f34, %f60 | |
303 | fmuld %f32, %f34, %f62 | |
304 | ba,pt %xcc, fpdis_exit | |
305 | membar #Sync | |
306 | 3: mov SECONDARY_CONTEXT, %g3 | |
307 | add %g6, TI_FPREGS, %g1 | |
308 | ldxa [%g3] ASI_DMMU, %g5 | |
309 | cplus_fptrap_insn_3: | |
310 | sethi %hi(0), %g2 | |
311 | stxa %g2, [%g3] ASI_DMMU | |
312 | membar #Sync | |
313 | mov 0x40, %g2 | |
314 | ldda [%g1] ASI_BLK_S, %f0 ! grrr, where is ASI_BLK_NUCLEUS 8-( | |
315 | ldda [%g1 + %g2] ASI_BLK_S, %f16 | |
316 | add %g1, 0x80, %g1 | |
317 | ldda [%g1] ASI_BLK_S, %f32 | |
318 | ldda [%g1 + %g2] ASI_BLK_S, %f48 | |
319 | membar #Sync | |
320 | fpdis_exit: | |
321 | stxa %g5, [%g3] ASI_DMMU | |
322 | membar #Sync | |
323 | fpdis_exit2: | |
324 | wr %g7, 0, %gsr | |
325 | ldx [%g6 + TI_XFSR], %fsr | |
326 | rdpr %tstate, %g3 | |
327 | or %g3, %g4, %g3 ! anal... | |
328 | wrpr %g3, %tstate | |
329 | wr %g0, FPRS_FEF, %fprs ! clean DU/DL bits | |
330 | retry | |
331 | ||
332 | .align 32 | |
333 | fp_other_bounce: | |
334 | call do_fpother | |
335 | add %sp, PTREGS_OFF, %o0 | |
336 | ba,pt %xcc, rtrap | |
337 | clr %l6 | |
338 | ||
339 | .globl do_fpother_check_fitos | |
340 | .align 32 | |
341 | do_fpother_check_fitos: | |
342 | sethi %hi(fp_other_bounce - 4), %g7 | |
343 | or %g7, %lo(fp_other_bounce - 4), %g7 | |
344 | ||
345 | /* NOTE: Need to preserve %g7 until we fully commit | |
346 | * to the fitos fixup. | |
347 | */ | |
348 | stx %fsr, [%g6 + TI_XFSR] | |
349 | rdpr %tstate, %g3 | |
350 | andcc %g3, TSTATE_PRIV, %g0 | |
351 | bne,pn %xcc, do_fptrap_after_fsr | |
352 | nop | |
353 | ldx [%g6 + TI_XFSR], %g3 | |
354 | srlx %g3, 14, %g1 | |
355 | and %g1, 7, %g1 | |
356 | cmp %g1, 2 ! Unfinished FP-OP | |
357 | bne,pn %xcc, do_fptrap_after_fsr | |
358 | sethi %hi(1 << 23), %g1 ! Inexact | |
359 | andcc %g3, %g1, %g0 | |
360 | bne,pn %xcc, do_fptrap_after_fsr | |
361 | rdpr %tpc, %g1 | |
362 | lduwa [%g1] ASI_AIUP, %g3 ! This cannot ever fail | |
363 | #define FITOS_MASK 0xc1f83fe0 | |
364 | #define FITOS_COMPARE 0x81a01880 | |
365 | sethi %hi(FITOS_MASK), %g1 | |
366 | or %g1, %lo(FITOS_MASK), %g1 | |
367 | and %g3, %g1, %g1 | |
368 | sethi %hi(FITOS_COMPARE), %g2 | |
369 | or %g2, %lo(FITOS_COMPARE), %g2 | |
370 | cmp %g1, %g2 | |
371 | bne,pn %xcc, do_fptrap_after_fsr | |
372 | nop | |
373 | std %f62, [%g6 + TI_FPREGS + (62 * 4)] | |
374 | sethi %hi(fitos_table_1), %g1 | |
375 | and %g3, 0x1f, %g2 | |
376 | or %g1, %lo(fitos_table_1), %g1 | |
377 | sllx %g2, 2, %g2 | |
378 | jmpl %g1 + %g2, %g0 | |
379 | ba,pt %xcc, fitos_emul_continue | |
380 | ||
381 | fitos_table_1: | |
382 | fitod %f0, %f62 | |
383 | fitod %f1, %f62 | |
384 | fitod %f2, %f62 | |
385 | fitod %f3, %f62 | |
386 | fitod %f4, %f62 | |
387 | fitod %f5, %f62 | |
388 | fitod %f6, %f62 | |
389 | fitod %f7, %f62 | |
390 | fitod %f8, %f62 | |
391 | fitod %f9, %f62 | |
392 | fitod %f10, %f62 | |
393 | fitod %f11, %f62 | |
394 | fitod %f12, %f62 | |
395 | fitod %f13, %f62 | |
396 | fitod %f14, %f62 | |
397 | fitod %f15, %f62 | |
398 | fitod %f16, %f62 | |
399 | fitod %f17, %f62 | |
400 | fitod %f18, %f62 | |
401 | fitod %f19, %f62 | |
402 | fitod %f20, %f62 | |
403 | fitod %f21, %f62 | |
404 | fitod %f22, %f62 | |
405 | fitod %f23, %f62 | |
406 | fitod %f24, %f62 | |
407 | fitod %f25, %f62 | |
408 | fitod %f26, %f62 | |
409 | fitod %f27, %f62 | |
410 | fitod %f28, %f62 | |
411 | fitod %f29, %f62 | |
412 | fitod %f30, %f62 | |
413 | fitod %f31, %f62 | |
414 | ||
415 | fitos_emul_continue: | |
416 | sethi %hi(fitos_table_2), %g1 | |
417 | srl %g3, 25, %g2 | |
418 | or %g1, %lo(fitos_table_2), %g1 | |
419 | and %g2, 0x1f, %g2 | |
420 | sllx %g2, 2, %g2 | |
421 | jmpl %g1 + %g2, %g0 | |
422 | ba,pt %xcc, fitos_emul_fini | |
423 | ||
424 | fitos_table_2: | |
425 | fdtos %f62, %f0 | |
426 | fdtos %f62, %f1 | |
427 | fdtos %f62, %f2 | |
428 | fdtos %f62, %f3 | |
429 | fdtos %f62, %f4 | |
430 | fdtos %f62, %f5 | |
431 | fdtos %f62, %f6 | |
432 | fdtos %f62, %f7 | |
433 | fdtos %f62, %f8 | |
434 | fdtos %f62, %f9 | |
435 | fdtos %f62, %f10 | |
436 | fdtos %f62, %f11 | |
437 | fdtos %f62, %f12 | |
438 | fdtos %f62, %f13 | |
439 | fdtos %f62, %f14 | |
440 | fdtos %f62, %f15 | |
441 | fdtos %f62, %f16 | |
442 | fdtos %f62, %f17 | |
443 | fdtos %f62, %f18 | |
444 | fdtos %f62, %f19 | |
445 | fdtos %f62, %f20 | |
446 | fdtos %f62, %f21 | |
447 | fdtos %f62, %f22 | |
448 | fdtos %f62, %f23 | |
449 | fdtos %f62, %f24 | |
450 | fdtos %f62, %f25 | |
451 | fdtos %f62, %f26 | |
452 | fdtos %f62, %f27 | |
453 | fdtos %f62, %f28 | |
454 | fdtos %f62, %f29 | |
455 | fdtos %f62, %f30 | |
456 | fdtos %f62, %f31 | |
457 | ||
458 | fitos_emul_fini: | |
459 | ldd [%g6 + TI_FPREGS + (62 * 4)], %f62 | |
460 | done | |
461 | ||
462 | .globl do_fptrap | |
463 | .align 32 | |
464 | do_fptrap: | |
465 | stx %fsr, [%g6 + TI_XFSR] | |
466 | do_fptrap_after_fsr: | |
467 | ldub [%g6 + TI_FPSAVED], %g3 | |
468 | rd %fprs, %g1 | |
469 | or %g3, %g1, %g3 | |
470 | stb %g3, [%g6 + TI_FPSAVED] | |
471 | rd %gsr, %g3 | |
472 | stx %g3, [%g6 + TI_GSR] | |
473 | mov SECONDARY_CONTEXT, %g3 | |
474 | ldxa [%g3] ASI_DMMU, %g5 | |
475 | cplus_fptrap_insn_4: | |
476 | sethi %hi(0), %g2 | |
477 | stxa %g2, [%g3] ASI_DMMU | |
478 | membar #Sync | |
479 | add %g6, TI_FPREGS, %g2 | |
480 | andcc %g1, FPRS_DL, %g0 | |
481 | be,pn %icc, 4f | |
482 | mov 0x40, %g3 | |
483 | stda %f0, [%g2] ASI_BLK_S | |
484 | stda %f16, [%g2 + %g3] ASI_BLK_S | |
485 | andcc %g1, FPRS_DU, %g0 | |
486 | be,pn %icc, 5f | |
487 | 4: add %g2, 128, %g2 | |
488 | stda %f32, [%g2] ASI_BLK_S | |
489 | stda %f48, [%g2 + %g3] ASI_BLK_S | |
490 | 5: mov SECONDARY_CONTEXT, %g1 | |
491 | membar #Sync | |
492 | stxa %g5, [%g1] ASI_DMMU | |
493 | membar #Sync | |
494 | ba,pt %xcc, etrap | |
495 | wr %g0, 0, %fprs | |
496 | ||
497 | cplus_fptrap_1: | |
498 | sethi %hi(CTX_CHEETAH_PLUS_CTX0), %g2 | |
499 | ||
500 | .globl cheetah_plus_patch_fpdis | |
501 | cheetah_plus_patch_fpdis: | |
502 | /* We configure the dTLB512_0 for 4MB pages and the | |
503 | * dTLB512_1 for 8K pages when in context zero. | |
504 | */ | |
505 | sethi %hi(cplus_fptrap_1), %o0 | |
506 | lduw [%o0 + %lo(cplus_fptrap_1)], %o1 | |
507 | ||
508 | set cplus_fptrap_insn_1, %o2 | |
509 | stw %o1, [%o2] | |
510 | flush %o2 | |
511 | set cplus_fptrap_insn_2, %o2 | |
512 | stw %o1, [%o2] | |
513 | flush %o2 | |
514 | set cplus_fptrap_insn_3, %o2 | |
515 | stw %o1, [%o2] | |
516 | flush %o2 | |
517 | set cplus_fptrap_insn_4, %o2 | |
518 | stw %o1, [%o2] | |
519 | flush %o2 | |
520 | ||
521 | retl | |
522 | nop | |
523 | ||
524 | /* The registers for cross calls will be: | |
525 | * | |
526 | * DATA 0: [low 32-bits] Address of function to call, jmp to this | |
527 | * [high 32-bits] MMU Context Argument 0, place in %g5 | |
528 | * DATA 1: Address Argument 1, place in %g6 | |
529 | * DATA 2: Address Argument 2, place in %g7 | |
530 | * | |
531 | * With this method we can do most of the cross-call tlb/cache | |
532 | * flushing very quickly. | |
533 | * | |
534 | * Current CPU's IRQ worklist table is locked into %g1, | |
535 | * don't touch. | |
536 | */ | |
537 | .text | |
538 | .align 32 | |
539 | .globl do_ivec | |
540 | do_ivec: | |
541 | mov 0x40, %g3 | |
542 | ldxa [%g3 + %g0] ASI_INTR_R, %g3 | |
543 | sethi %hi(KERNBASE), %g4 | |
544 | cmp %g3, %g4 | |
545 | bgeu,pn %xcc, do_ivec_xcall | |
546 | srlx %g3, 32, %g5 | |
547 | stxa %g0, [%g0] ASI_INTR_RECEIVE | |
548 | membar #Sync | |
549 | ||
550 | sethi %hi(ivector_table), %g2 | |
551 | sllx %g3, 5, %g3 | |
552 | or %g2, %lo(ivector_table), %g2 | |
553 | add %g2, %g3, %g3 | |
554 | ldx [%g3 + 0x08], %g2 /* irq_info */ | |
555 | ldub [%g3 + 0x04], %g4 /* pil */ | |
556 | brz,pn %g2, do_ivec_spurious | |
557 | mov 1, %g2 | |
558 | ||
559 | sllx %g2, %g4, %g2 | |
560 | sllx %g4, 2, %g4 | |
561 | lduw [%g6 + %g4], %g5 /* g5 = irq_work(cpu, pil) */ | |
562 | stw %g5, [%g3 + 0x00] /* bucket->irq_chain = g5 */ | |
563 | stw %g3, [%g6 + %g4] /* irq_work(cpu, pil) = bucket */ | |
564 | wr %g2, 0x0, %set_softint | |
565 | retry | |
566 | do_ivec_xcall: | |
567 | mov 0x50, %g1 | |
568 | ||
569 | ldxa [%g1 + %g0] ASI_INTR_R, %g1 | |
570 | srl %g3, 0, %g3 | |
571 | mov 0x60, %g7 | |
572 | ldxa [%g7 + %g0] ASI_INTR_R, %g7 | |
573 | stxa %g0, [%g0] ASI_INTR_RECEIVE | |
574 | membar #Sync | |
575 | ba,pt %xcc, 1f | |
576 | nop | |
577 | ||
578 | .align 32 | |
579 | 1: jmpl %g3, %g0 | |
580 | nop | |
581 | ||
582 | do_ivec_spurious: | |
583 | stw %g3, [%g6 + 0x00] /* irq_work(cpu, 0) = bucket */ | |
584 | rdpr %pstate, %g5 | |
585 | ||
586 | wrpr %g5, PSTATE_IG | PSTATE_AG, %pstate | |
587 | sethi %hi(109f), %g7 | |
588 | ba,pt %xcc, etrap | |
589 | 109: or %g7, %lo(109b), %g7 | |
590 | call catch_disabled_ivec | |
591 | add %sp, PTREGS_OFF, %o0 | |
592 | ba,pt %xcc, rtrap | |
593 | clr %l6 | |
594 | ||
595 | .globl save_alternate_globals | |
596 | save_alternate_globals: /* %o0 = save_area */ | |
597 | rdpr %pstate, %o5 | |
598 | andn %o5, PSTATE_IE, %o1 | |
599 | wrpr %o1, PSTATE_AG, %pstate | |
600 | stx %g0, [%o0 + 0x00] | |
601 | stx %g1, [%o0 + 0x08] | |
602 | stx %g2, [%o0 + 0x10] | |
603 | stx %g3, [%o0 + 0x18] | |
604 | stx %g4, [%o0 + 0x20] | |
605 | stx %g5, [%o0 + 0x28] | |
606 | stx %g6, [%o0 + 0x30] | |
607 | stx %g7, [%o0 + 0x38] | |
608 | wrpr %o1, PSTATE_IG, %pstate | |
609 | stx %g0, [%o0 + 0x40] | |
610 | stx %g1, [%o0 + 0x48] | |
611 | stx %g2, [%o0 + 0x50] | |
612 | stx %g3, [%o0 + 0x58] | |
613 | stx %g4, [%o0 + 0x60] | |
614 | stx %g5, [%o0 + 0x68] | |
615 | stx %g6, [%o0 + 0x70] | |
616 | stx %g7, [%o0 + 0x78] | |
617 | wrpr %o1, PSTATE_MG, %pstate | |
618 | stx %g0, [%o0 + 0x80] | |
619 | stx %g1, [%o0 + 0x88] | |
620 | stx %g2, [%o0 + 0x90] | |
621 | stx %g3, [%o0 + 0x98] | |
622 | stx %g4, [%o0 + 0xa0] | |
623 | stx %g5, [%o0 + 0xa8] | |
624 | stx %g6, [%o0 + 0xb0] | |
625 | stx %g7, [%o0 + 0xb8] | |
626 | wrpr %o5, 0x0, %pstate | |
627 | retl | |
628 | nop | |
629 | ||
630 | .globl restore_alternate_globals | |
631 | restore_alternate_globals: /* %o0 = save_area */ | |
632 | rdpr %pstate, %o5 | |
633 | andn %o5, PSTATE_IE, %o1 | |
634 | wrpr %o1, PSTATE_AG, %pstate | |
635 | ldx [%o0 + 0x00], %g0 | |
636 | ldx [%o0 + 0x08], %g1 | |
637 | ldx [%o0 + 0x10], %g2 | |
638 | ldx [%o0 + 0x18], %g3 | |
639 | ldx [%o0 + 0x20], %g4 | |
640 | ldx [%o0 + 0x28], %g5 | |
641 | ldx [%o0 + 0x30], %g6 | |
642 | ldx [%o0 + 0x38], %g7 | |
643 | wrpr %o1, PSTATE_IG, %pstate | |
644 | ldx [%o0 + 0x40], %g0 | |
645 | ldx [%o0 + 0x48], %g1 | |
646 | ldx [%o0 + 0x50], %g2 | |
647 | ldx [%o0 + 0x58], %g3 | |
648 | ldx [%o0 + 0x60], %g4 | |
649 | ldx [%o0 + 0x68], %g5 | |
650 | ldx [%o0 + 0x70], %g6 | |
651 | ldx [%o0 + 0x78], %g7 | |
652 | wrpr %o1, PSTATE_MG, %pstate | |
653 | ldx [%o0 + 0x80], %g0 | |
654 | ldx [%o0 + 0x88], %g1 | |
655 | ldx [%o0 + 0x90], %g2 | |
656 | ldx [%o0 + 0x98], %g3 | |
657 | ldx [%o0 + 0xa0], %g4 | |
658 | ldx [%o0 + 0xa8], %g5 | |
659 | ldx [%o0 + 0xb0], %g6 | |
660 | ldx [%o0 + 0xb8], %g7 | |
661 | wrpr %o5, 0x0, %pstate | |
662 | retl | |
663 | nop | |
664 | ||
665 | .globl getcc, setcc | |
666 | getcc: | |
667 | ldx [%o0 + PT_V9_TSTATE], %o1 | |
668 | srlx %o1, 32, %o1 | |
669 | and %o1, 0xf, %o1 | |
670 | retl | |
671 | stx %o1, [%o0 + PT_V9_G1] | |
672 | setcc: | |
673 | ldx [%o0 + PT_V9_TSTATE], %o1 | |
674 | ldx [%o0 + PT_V9_G1], %o2 | |
675 | or %g0, %ulo(TSTATE_ICC), %o3 | |
676 | sllx %o3, 32, %o3 | |
677 | andn %o1, %o3, %o1 | |
678 | sllx %o2, 32, %o2 | |
679 | and %o2, %o3, %o2 | |
680 | or %o1, %o2, %o1 | |
681 | retl | |
682 | stx %o1, [%o0 + PT_V9_TSTATE] | |
683 | ||
684 | .globl utrap, utrap_ill | |
685 | utrap: brz,pn %g1, etrap | |
686 | nop | |
687 | save %sp, -128, %sp | |
688 | rdpr %tstate, %l6 | |
689 | rdpr %cwp, %l7 | |
690 | andn %l6, TSTATE_CWP, %l6 | |
691 | wrpr %l6, %l7, %tstate | |
692 | rdpr %tpc, %l6 | |
693 | rdpr %tnpc, %l7 | |
694 | wrpr %g1, 0, %tnpc | |
695 | done | |
696 | utrap_ill: | |
697 | call bad_trap | |
698 | add %sp, PTREGS_OFF, %o0 | |
699 | ba,pt %xcc, rtrap | |
700 | clr %l6 | |
701 | ||
702 | #ifdef CONFIG_BLK_DEV_FD | |
703 | .globl floppy_hardint | |
704 | floppy_hardint: | |
705 | wr %g0, (1 << 11), %clear_softint | |
706 | sethi %hi(doing_pdma), %g1 | |
707 | ld [%g1 + %lo(doing_pdma)], %g2 | |
708 | brz,pn %g2, floppy_dosoftint | |
709 | sethi %hi(fdc_status), %g3 | |
710 | ldx [%g3 + %lo(fdc_status)], %g3 | |
711 | sethi %hi(pdma_vaddr), %g5 | |
712 | ldx [%g5 + %lo(pdma_vaddr)], %g4 | |
713 | sethi %hi(pdma_size), %g5 | |
714 | ldx [%g5 + %lo(pdma_size)], %g5 | |
715 | ||
716 | next_byte: | |
717 | lduba [%g3] ASI_PHYS_BYPASS_EC_E, %g7 | |
718 | andcc %g7, 0x80, %g0 | |
719 | be,pn %icc, floppy_fifo_emptied | |
720 | andcc %g7, 0x20, %g0 | |
721 | be,pn %icc, floppy_overrun | |
722 | andcc %g7, 0x40, %g0 | |
723 | be,pn %icc, floppy_write | |
724 | sub %g5, 1, %g5 | |
725 | ||
726 | inc %g3 | |
727 | lduba [%g3] ASI_PHYS_BYPASS_EC_E, %g7 | |
728 | dec %g3 | |
729 | orcc %g0, %g5, %g0 | |
730 | stb %g7, [%g4] | |
731 | bne,pn %xcc, next_byte | |
732 | add %g4, 1, %g4 | |
733 | ||
734 | b,pt %xcc, floppy_tdone | |
735 | nop | |
736 | ||
737 | floppy_write: | |
738 | ldub [%g4], %g7 | |
739 | orcc %g0, %g5, %g0 | |
740 | inc %g3 | |
741 | stba %g7, [%g3] ASI_PHYS_BYPASS_EC_E | |
742 | dec %g3 | |
743 | bne,pn %xcc, next_byte | |
744 | add %g4, 1, %g4 | |
745 | ||
746 | floppy_tdone: | |
747 | sethi %hi(pdma_vaddr), %g1 | |
748 | stx %g4, [%g1 + %lo(pdma_vaddr)] | |
749 | sethi %hi(pdma_size), %g1 | |
750 | stx %g5, [%g1 + %lo(pdma_size)] | |
751 | sethi %hi(auxio_register), %g1 | |
752 | ldx [%g1 + %lo(auxio_register)], %g7 | |
753 | lduba [%g7] ASI_PHYS_BYPASS_EC_E, %g5 | |
754 | or %g5, AUXIO_AUX1_FTCNT, %g5 | |
755 | /* andn %g5, AUXIO_AUX1_MASK, %g5 */ | |
756 | stba %g5, [%g7] ASI_PHYS_BYPASS_EC_E | |
757 | andn %g5, AUXIO_AUX1_FTCNT, %g5 | |
758 | /* andn %g5, AUXIO_AUX1_MASK, %g5 */ | |
759 | ||
760 | nop; nop; nop; nop; nop; nop; | |
761 | nop; nop; nop; nop; nop; nop; | |
762 | ||
763 | stba %g5, [%g7] ASI_PHYS_BYPASS_EC_E | |
764 | sethi %hi(doing_pdma), %g1 | |
765 | b,pt %xcc, floppy_dosoftint | |
766 | st %g0, [%g1 + %lo(doing_pdma)] | |
767 | ||
768 | floppy_fifo_emptied: | |
769 | sethi %hi(pdma_vaddr), %g1 | |
770 | stx %g4, [%g1 + %lo(pdma_vaddr)] | |
771 | sethi %hi(pdma_size), %g1 | |
772 | stx %g5, [%g1 + %lo(pdma_size)] | |
773 | sethi %hi(irq_action), %g1 | |
774 | or %g1, %lo(irq_action), %g1 | |
775 | ldx [%g1 + (11 << 3)], %g3 ! irqaction[floppy_irq] | |
776 | ldx [%g3 + 0x08], %g4 ! action->flags>>48==ino | |
777 | sethi %hi(ivector_table), %g3 | |
778 | srlx %g4, 48, %g4 | |
779 | or %g3, %lo(ivector_table), %g3 | |
780 | sllx %g4, 5, %g4 | |
781 | ldx [%g3 + %g4], %g4 ! &ivector_table[ino] | |
782 | ldx [%g4 + 0x10], %g4 ! bucket->iclr | |
783 | stwa %g0, [%g4] ASI_PHYS_BYPASS_EC_E ! ICLR_IDLE | |
784 | membar #Sync ! probably not needed... | |
785 | retry | |
786 | ||
787 | floppy_overrun: | |
788 | sethi %hi(pdma_vaddr), %g1 | |
789 | stx %g4, [%g1 + %lo(pdma_vaddr)] | |
790 | sethi %hi(pdma_size), %g1 | |
791 | stx %g5, [%g1 + %lo(pdma_size)] | |
792 | sethi %hi(doing_pdma), %g1 | |
793 | st %g0, [%g1 + %lo(doing_pdma)] | |
794 | ||
795 | floppy_dosoftint: | |
796 | rdpr %pil, %g2 | |
797 | wrpr %g0, 15, %pil | |
798 | sethi %hi(109f), %g7 | |
799 | b,pt %xcc, etrap_irq | |
800 | 109: or %g7, %lo(109b), %g7 | |
801 | ||
802 | mov 11, %o0 | |
803 | mov 0, %o1 | |
804 | call sparc_floppy_irq | |
805 | add %sp, PTREGS_OFF, %o2 | |
806 | ||
807 | b,pt %xcc, rtrap_irq | |
808 | nop | |
809 | ||
810 | #endif /* CONFIG_BLK_DEV_FD */ | |
811 | ||
812 | /* XXX Here is stuff we still need to write... -DaveM XXX */ | |
813 | .globl netbsd_syscall | |
814 | netbsd_syscall: | |
815 | retl | |
816 | nop | |
817 | ||
818 | /* These next few routines must be sure to clear the | |
819 | * SFSR FaultValid bit so that the fast tlb data protection | |
820 | * handler does not flush the wrong context and lock up the | |
821 | * box. | |
822 | */ | |
823 | .globl __do_data_access_exception | |
824 | .globl __do_data_access_exception_tl1 | |
825 | __do_data_access_exception_tl1: | |
826 | rdpr %pstate, %g4 | |
827 | wrpr %g4, PSTATE_MG|PSTATE_AG, %pstate | |
828 | mov TLB_SFSR, %g3 | |
829 | mov DMMU_SFAR, %g5 | |
830 | ldxa [%g3] ASI_DMMU, %g4 ! Get SFSR | |
831 | ldxa [%g5] ASI_DMMU, %g5 ! Get SFAR | |
832 | stxa %g0, [%g3] ASI_DMMU ! Clear SFSR.FaultValid bit | |
833 | membar #Sync | |
834 | ba,pt %xcc, winfix_dax | |
835 | rdpr %tpc, %g3 | |
836 | __do_data_access_exception: | |
837 | rdpr %pstate, %g4 | |
838 | wrpr %g4, PSTATE_MG|PSTATE_AG, %pstate | |
839 | mov TLB_SFSR, %g3 | |
840 | mov DMMU_SFAR, %g5 | |
841 | ldxa [%g3] ASI_DMMU, %g4 ! Get SFSR | |
842 | ldxa [%g5] ASI_DMMU, %g5 ! Get SFAR | |
843 | stxa %g0, [%g3] ASI_DMMU ! Clear SFSR.FaultValid bit | |
844 | membar #Sync | |
845 | sethi %hi(109f), %g7 | |
846 | ba,pt %xcc, etrap | |
847 | 109: or %g7, %lo(109b), %g7 | |
848 | mov %l4, %o1 | |
849 | mov %l5, %o2 | |
850 | call data_access_exception | |
851 | add %sp, PTREGS_OFF, %o0 | |
852 | ba,pt %xcc, rtrap | |
853 | clr %l6 | |
854 | ||
855 | .globl __do_instruction_access_exception | |
856 | .globl __do_instruction_access_exception_tl1 | |
857 | __do_instruction_access_exception_tl1: | |
858 | rdpr %pstate, %g4 | |
859 | wrpr %g4, PSTATE_MG|PSTATE_AG, %pstate | |
860 | mov TLB_SFSR, %g3 | |
861 | mov DMMU_SFAR, %g5 | |
862 | ldxa [%g3] ASI_DMMU, %g4 ! Get SFSR | |
863 | ldxa [%g5] ASI_DMMU, %g5 ! Get SFAR | |
864 | stxa %g0, [%g3] ASI_IMMU ! Clear FaultValid bit | |
865 | membar #Sync | |
866 | sethi %hi(109f), %g7 | |
867 | ba,pt %xcc, etraptl1 | |
868 | 109: or %g7, %lo(109b), %g7 | |
869 | mov %l4, %o1 | |
870 | mov %l5, %o2 | |
871 | call instruction_access_exception_tl1 | |
872 | add %sp, PTREGS_OFF, %o0 | |
873 | ba,pt %xcc, rtrap | |
874 | clr %l6 | |
875 | ||
876 | __do_instruction_access_exception: | |
877 | rdpr %pstate, %g4 | |
878 | wrpr %g4, PSTATE_MG|PSTATE_AG, %pstate | |
879 | mov TLB_SFSR, %g3 | |
880 | mov DMMU_SFAR, %g5 | |
881 | ldxa [%g3] ASI_DMMU, %g4 ! Get SFSR | |
882 | ldxa [%g5] ASI_DMMU, %g5 ! Get SFAR | |
883 | stxa %g0, [%g3] ASI_IMMU ! Clear FaultValid bit | |
884 | membar #Sync | |
885 | sethi %hi(109f), %g7 | |
886 | ba,pt %xcc, etrap | |
887 | 109: or %g7, %lo(109b), %g7 | |
888 | mov %l4, %o1 | |
889 | mov %l5, %o2 | |
890 | call instruction_access_exception | |
891 | add %sp, PTREGS_OFF, %o0 | |
892 | ba,pt %xcc, rtrap | |
893 | clr %l6 | |
894 | ||
895 | /* This is the trap handler entry point for ECC correctable | |
896 | * errors. They are corrected, but we listen for the trap | |
897 | * so that the event can be logged. | |
898 | * | |
899 | * Disrupting errors are either: | |
900 | * 1) single-bit ECC errors during UDB reads to system | |
901 | * memory | |
902 | * 2) data parity errors during write-back events | |
903 | * | |
904 | * As far as I can make out from the manual, the CEE trap | |
905 | * is only for correctable errors during memory read | |
906 | * accesses by the front-end of the processor. | |
907 | * | |
908 | * The code below is only for trap level 1 CEE events, | |
909 | * as it is the only situation where we can safely record | |
910 | * and log. For trap level >1 we just clear the CE bit | |
911 | * in the AFSR and return. | |
912 | */ | |
913 | ||
914 | /* Our trap handling infrastructure allows us to preserve | |
915 | * two 64-bit values during etrap for arguments to | |
916 | * subsequent C code. Therefore we encode the information | |
917 | * as follows: | |
918 | * | |
919 | * value 1) Full 64-bits of AFAR | |
920 | * value 2) Low 33-bits of AFSR, then bits 33-->42 | |
921 | * are UDBL error status and bits 43-->52 | |
922 | * are UDBH error status | |
923 | */ | |
924 | .align 64 | |
925 | .globl cee_trap | |
926 | cee_trap: | |
927 | ldxa [%g0] ASI_AFSR, %g1 ! Read AFSR | |
928 | ldxa [%g0] ASI_AFAR, %g2 ! Read AFAR | |
929 | sllx %g1, 31, %g1 ! Clear reserved bits | |
930 | srlx %g1, 31, %g1 ! in AFSR | |
931 | ||
932 | /* NOTE: UltraSparc-I/II have high and low UDB error | |
933 | * registers, corresponding to the two UDB units | |
934 | * present on those chips. UltraSparc-IIi only | |
935 | * has a single UDB, called "SDB" in the manual. | |
936 | * For IIi the upper UDB register always reads | |
937 | * as zero so for our purposes things will just | |
938 | * work with the checks below. | |
939 | */ | |
940 | ldxa [%g0] ASI_UDBL_ERROR_R, %g3 ! Read UDB-Low error status | |
941 | andcc %g3, (1 << 8), %g4 ! Check CE bit | |
942 | sllx %g3, (64 - 10), %g3 ! Clear reserved bits | |
943 | srlx %g3, (64 - 10), %g3 ! in UDB-Low error status | |
944 | ||
945 | sllx %g3, (33 + 0), %g3 ! Shift up to encoding area | |
946 | or %g1, %g3, %g1 ! Or it in | |
947 | be,pn %xcc, 1f ! Branch if CE bit was clear | |
948 | nop | |
949 | stxa %g4, [%g0] ASI_UDB_ERROR_W ! Clear CE sticky bit in UDBL | |
950 | membar #Sync ! Synchronize ASI stores | |
951 | 1: mov 0x18, %g5 ! Addr of UDB-High error status | |
952 | ldxa [%g5] ASI_UDBH_ERROR_R, %g3 ! Read it | |
953 | ||
954 | andcc %g3, (1 << 8), %g4 ! Check CE bit | |
955 | sllx %g3, (64 - 10), %g3 ! Clear reserved bits | |
956 | srlx %g3, (64 - 10), %g3 ! in UDB-High error status | |
957 | sllx %g3, (33 + 10), %g3 ! Shift up to encoding area | |
958 | or %g1, %g3, %g1 ! Or it in | |
959 | be,pn %xcc, 1f ! Branch if CE bit was clear | |
960 | nop | |
961 | nop | |
962 | ||
963 | stxa %g4, [%g5] ASI_UDB_ERROR_W ! Clear CE sticky bit in UDBH | |
964 | membar #Sync ! Synchronize ASI stores | |
965 | 1: mov 1, %g5 ! AFSR CE bit is | |
966 | sllx %g5, 20, %g5 ! bit 20 | |
967 | stxa %g5, [%g0] ASI_AFSR ! Clear CE sticky bit in AFSR | |
968 | membar #Sync ! Synchronize ASI stores | |
969 | sllx %g2, (64 - 41), %g2 ! Clear reserved bits | |
970 | srlx %g2, (64 - 41), %g2 ! in latched AFAR | |
971 | ||
972 | andn %g2, 0x0f, %g2 ! Finish resv bit clearing | |
973 | mov %g1, %g4 ! Move AFSR+UDB* into save reg | |
974 | mov %g2, %g5 ! Move AFAR into save reg | |
975 | rdpr %pil, %g2 | |
976 | wrpr %g0, 15, %pil | |
977 | ba,pt %xcc, etrap_irq | |
978 | rd %pc, %g7 | |
979 | mov %l4, %o0 | |
980 | ||
981 | mov %l5, %o1 | |
982 | call cee_log | |
983 | add %sp, PTREGS_OFF, %o2 | |
984 | ba,a,pt %xcc, rtrap_irq | |
985 | ||
/* Capture I/D/E-cache state into per-cpu error scoreboard.
 *
 * Register contract on entry:
 * %g1: (TL>=0) ? 1 : 0
 * %g2: scratch
 * %g3: scratch
 * %g4: AFSR
 * %g5: AFAR
 * %g6: current thread ptr
 * %g7: scratch
 *
 * The scoreboard slot is claimed by CAS-free convention: a slot whose
 * first word is -1 is free; anything else means a previous unlogged
 * error is still pending there and we bail to label 80 without
 * overwriting it.  The D-cache, I-cache and E-cache lines that cover
 * the faulting AFAR are snapshotted via diagnostic ASI reads.
 */
#define CHEETAH_LOG_ERROR						\
	/* Put "TL1" software bit into AFSR. */				\
	and	%g1, 0x1, %g1;						\
	sllx	%g1, 63, %g2;						\
	or	%g4, %g2, %g4;						\
	/* Get log entry pointer for this cpu at this trap level. */	\
	/* CPU id: Safari config AID on Cheetah, JBUS node id on	\
	 * Jalapeno; each cpu gets a 512-byte (1 << 9) slot pair.	\
	 */								\
	BRANCH_IF_JALAPENO(g2,g3,50f)					\
	ldxa	[%g0] ASI_SAFARI_CONFIG, %g2;				\
	srlx	%g2, 17, %g2;						\
	ba,pt	%xcc, 60f;						\
	and	%g2, 0x3ff, %g2;					\
50:	ldxa	[%g0] ASI_JBUS_CONFIG, %g2;				\
	srlx	%g2, 17, %g2;						\
	and	%g2, 0x1f, %g2;						\
60:	sllx	%g2, 9, %g2;						\
	sethi	%hi(cheetah_error_log), %g3;				\
	ldx	[%g3 + %lo(cheetah_error_log)], %g3;			\
	brz,pn	%g3, 80f;	/* Log buffer not allocated yet. */	\
	nop;								\
	add	%g3, %g2, %g3;						\
	sllx	%g1, 8, %g1;	/* TL1 flag selects second 256b slot */	\
	add	%g3, %g1, %g1;						\
	/* %g1 holds pointer to the top of the logging scoreboard */	\
	ldx	[%g1 + 0x0], %g7;					\
	cmp	%g7, -1;						\
	bne,pn	%xcc, 80f;	/* Slot busy, don't clobber it. */	\
	nop;								\
	stx	%g4, [%g1 + 0x0];	/* AFSR */			\
	stx	%g5, [%g1 + 0x8];	/* AFAR */			\
	add	%g1, 0x10, %g1;						\
	/* %g1 now points to D-cache logging area */			\
	set	0x3ff8, %g2;	/* DC_addr mask */			\
	and	%g5, %g2, %g2;	/* DC_addr bits of AFAR */		\
	srlx	%g5, 12, %g3;						\
	or	%g3, 1, %g3;	/* PHYS tag + valid */			\
10:	ldxa	[%g2] ASI_DCACHE_TAG, %g7;				\
	cmp	%g3, %g7;	/* TAG match? */			\
	bne,pt	%xcc, 13f;						\
	nop;								\
	/* Yep, what we want, capture state. */				\
	stx	%g2, [%g1 + 0x20];					\
	stx	%g7, [%g1 + 0x28];					\
	/* A membar Sync is required before and after utag access. */	\
	membar	#Sync;							\
	ldxa	[%g2] ASI_DCACHE_UTAG, %g7;				\
	membar	#Sync;							\
	stx	%g7, [%g1 + 0x30];					\
	ldxa	[%g2] ASI_DCACHE_SNOOP_TAG, %g7;			\
	stx	%g7, [%g1 + 0x38];					\
	clr	%g3;							\
	/* Copy the four 8-byte words of the D-cache line. */		\
12:	ldxa	[%g2 + %g3] ASI_DCACHE_DATA, %g7;			\
	stx	%g7, [%g1];						\
	add	%g3, (1 << 5), %g3;					\
	cmp	%g3, (4 << 5);						\
	bl,pt	%xcc, 12b;						\
	add	%g1, 0x8, %g1;						\
	ba,pt	%xcc, 20f;						\
	add	%g1, 0x20, %g1;						\
	/* No match: advance to the next of the 4 D-cache ways. */	\
13:	sethi	%hi(1 << 14), %g7;					\
	add	%g2, %g7, %g2;						\
	srlx	%g2, 14, %g7;						\
	cmp	%g7, 4;							\
	bl,pt	%xcc, 10b;						\
	nop;								\
	add	%g1, 0x40, %g1;						\
20:	/* %g1 now points to I-cache logging area */			\
	set	0x1fe0, %g2;	/* IC_addr mask */			\
	and	%g5, %g2, %g2;	/* IC_addr bits of AFAR */		\
	sllx	%g2, 1, %g2;	/* IC_addr[13:6]==VA[12:5] */		\
	srlx	%g5, (13 - 8), %g3; /* Make PTAG */			\
	andn	%g3, 0xff, %g3;	/* Mask off undefined bits */		\
21:	ldxa	[%g2] ASI_IC_TAG, %g7;					\
	andn	%g7, 0xff, %g7;						\
	cmp	%g3, %g7;						\
	bne,pt	%xcc, 23f;						\
	nop;								\
	/* Yep, what we want, capture state. */				\
	stx	%g2, [%g1 + 0x40];					\
	stx	%g7, [%g1 + 0x48];					\
	add	%g2, (1 << 3), %g2;					\
	ldxa	[%g2] ASI_IC_TAG, %g7;					\
	add	%g2, (1 << 3), %g2;					\
	stx	%g7, [%g1 + 0x50];					\
	ldxa	[%g2] ASI_IC_TAG, %g7;					\
	add	%g2, (1 << 3), %g2;					\
	stx	%g7, [%g1 + 0x60];					\
	ldxa	[%g2] ASI_IC_TAG, %g7;					\
	stx	%g7, [%g1 + 0x68];					\
	sub	%g2, (3 << 3), %g2;					\
	ldxa	[%g2] ASI_IC_STAG, %g7;					\
	stx	%g7, [%g1 + 0x58];					\
	clr	%g3;							\
	srlx	%g2, 2, %g2;						\
	/* Copy the eight instruction words of the I-cache line. */	\
22:	ldxa	[%g2 + %g3] ASI_IC_INSTR, %g7;				\
	stx	%g7, [%g1];						\
	add	%g3, (1 << 3), %g3;					\
	cmp	%g3, (8 << 3);						\
	bl,pt	%xcc, 22b;						\
	add	%g1, 0x8, %g1;						\
	ba,pt	%xcc, 30f;						\
	add	%g1, 0x30, %g1;						\
	/* No match: advance to the next I-cache way. */		\
23:	sethi	%hi(1 << 14), %g7;					\
	add	%g2, %g7, %g2;						\
	srlx	%g2, 14, %g7;						\
	cmp	%g7, 4;							\
	bl,pt	%xcc, 21b;						\
	nop;								\
	add	%g1, 0x70, %g1;						\
30:	/* %g1 now points to E-cache logging area */			\
	andn	%g5, (32 - 1), %g2;	/* E-cache subblock */		\
	stx	%g2, [%g1 + 0x20];					\
	ldxa	[%g2] ASI_EC_TAG_DATA, %g7;				\
	stx	%g7, [%g1 + 0x28];					\
	/* Read the subblock into the staging registers, then pull	\
	 * the four 8-byte words out via ASI_EC_DATA.			\
	 */								\
	ldxa	[%g2] ASI_EC_R, %g0;					\
	clr	%g3;							\
31:	ldxa	[%g3] ASI_EC_DATA, %g7;					\
	stx	%g7, [%g1 + %g3];					\
	add	%g3, 0x8, %g3;						\
	cmp	%g3, 0x20;						\
	bl,pt	%xcc, 31b;						\
	nop;								\
80:	/* DONE */
1118 | ||
/* These get patched into the trap table at boot time
 * once we know we have a cheetah processor.
 *
 * Each vector disables the caches whose contents may be corrupt
 * (D+I for fast-ECC and deferred errors, I only for correctable
 * errors), then jumps to the common handler with %g1 = 0 for a
 * TL0 trap or %g1 = 1 for a TL>0 trap.
 */
	.globl	cheetah_fecc_trap_vector, cheetah_fecc_trap_vector_tl1
cheetah_fecc_trap_vector:
	membar	#Sync
	ldxa	[%g0] ASI_DCU_CONTROL_REG, %g1
	andn	%g1, DCU_DC | DCU_IC, %g1	! Turn off D- and I-cache
	stxa	%g1, [%g0] ASI_DCU_CONTROL_REG
	membar	#Sync
	sethi	%hi(cheetah_fast_ecc), %g2
	jmpl	%g2 + %lo(cheetah_fast_ecc), %g0
	mov	0, %g1				! Trapped at TL0
cheetah_fecc_trap_vector_tl1:
	membar	#Sync
	ldxa	[%g0] ASI_DCU_CONTROL_REG, %g1
	andn	%g1, DCU_DC | DCU_IC, %g1
	stxa	%g1, [%g0] ASI_DCU_CONTROL_REG
	membar	#Sync
	sethi	%hi(cheetah_fast_ecc), %g2
	jmpl	%g2 + %lo(cheetah_fast_ecc), %g0
	mov	1, %g1				! Trapped at TL>0
	.globl	cheetah_cee_trap_vector, cheetah_cee_trap_vector_tl1
cheetah_cee_trap_vector:
	membar	#Sync
	ldxa	[%g0] ASI_DCU_CONTROL_REG, %g1
	andn	%g1, DCU_IC, %g1		! Correctable: only I-cache off
	stxa	%g1, [%g0] ASI_DCU_CONTROL_REG
	membar	#Sync
	sethi	%hi(cheetah_cee), %g2
	jmpl	%g2 + %lo(cheetah_cee), %g0
	mov	0, %g1
cheetah_cee_trap_vector_tl1:
	membar	#Sync
	ldxa	[%g0] ASI_DCU_CONTROL_REG, %g1
	andn	%g1, DCU_IC, %g1
	stxa	%g1, [%g0] ASI_DCU_CONTROL_REG
	membar	#Sync
	sethi	%hi(cheetah_cee), %g2
	jmpl	%g2 + %lo(cheetah_cee), %g0
	mov	1, %g1
	.globl	cheetah_deferred_trap_vector, cheetah_deferred_trap_vector_tl1
cheetah_deferred_trap_vector:
	membar	#Sync
	ldxa	[%g0] ASI_DCU_CONTROL_REG, %g1;
	andn	%g1, DCU_DC | DCU_IC, %g1;
	stxa	%g1, [%g0] ASI_DCU_CONTROL_REG;
	membar	#Sync;
	sethi	%hi(cheetah_deferred_trap), %g2
	jmpl	%g2 + %lo(cheetah_deferred_trap), %g0
	mov	0, %g1
cheetah_deferred_trap_vector_tl1:
	membar	#Sync;
	ldxa	[%g0] ASI_DCU_CONTROL_REG, %g1;
	andn	%g1, DCU_DC | DCU_IC, %g1;
	stxa	%g1, [%g0] ASI_DCU_CONTROL_REG;
	membar	#Sync;
	sethi	%hi(cheetah_deferred_trap), %g2
	jmpl	%g2 + %lo(cheetah_deferred_trap), %g0
	mov	1, %g1
1179 | ||
/* Cheetah+ specific traps. These are for the new I/D cache parity
 * error traps. The first argument to cheetah_plus_parity_handler
 * is encoded as follows:
 *
 * Bit0: 0=dcache,1=icache
 * Bit1: 0=recoverable,1=unrecoverable
 *
 * The TL0 vectors etrap into C; the TL1 vectors switch to the
 * interrupt globals (PSTATE_IG) and jump to the do_*_tl1 handlers
 * below, which must scrub the cache in assembly before retrying.
 */
	.globl	cheetah_plus_dcpe_trap_vector, cheetah_plus_dcpe_trap_vector_tl1
cheetah_plus_dcpe_trap_vector:
	membar	#Sync
	sethi	%hi(do_cheetah_plus_data_parity), %g7
	jmpl	%g7 + %lo(do_cheetah_plus_data_parity), %g0
	nop
	nop
	nop
	nop
	nop

do_cheetah_plus_data_parity:
	/* Recoverable D-cache parity at TL0: report via C handler. */
	ba,pt	%xcc, etrap
	rd	%pc, %g7
	mov	0x0, %o0		! dcache, recoverable
	call	cheetah_plus_parity_error
	add	%sp, PTREGS_OFF, %o1
	ba,pt	%xcc, rtrap
	clr	%l6

cheetah_plus_dcpe_trap_vector_tl1:
	membar	#Sync
	wrpr	PSTATE_IG | PSTATE_PEF | PSTATE_PRIV, %pstate
	sethi	%hi(do_dcpe_tl1), %g3
	jmpl	%g3 + %lo(do_dcpe_tl1), %g0
	nop
	nop
	nop
	nop

	.globl	cheetah_plus_icpe_trap_vector, cheetah_plus_icpe_trap_vector_tl1
cheetah_plus_icpe_trap_vector:
	membar	#Sync
	sethi	%hi(do_cheetah_plus_insn_parity), %g7
	jmpl	%g7 + %lo(do_cheetah_plus_insn_parity), %g0
	nop
	nop
	nop
	nop
	nop

do_cheetah_plus_insn_parity:
	/* Recoverable I-cache parity at TL0: report via C handler. */
	ba,pt	%xcc, etrap
	rd	%pc, %g7
	mov	0x1, %o0		! icache, recoverable
	call	cheetah_plus_parity_error
	add	%sp, PTREGS_OFF, %o1
	ba,pt	%xcc, rtrap
	clr	%l6

cheetah_plus_icpe_trap_vector_tl1:
	membar	#Sync
	wrpr	PSTATE_IG | PSTATE_PEF | PSTATE_PRIV, %pstate
	sethi	%hi(do_icpe_tl1), %g3
	jmpl	%g3 + %lo(do_icpe_tl1), %g0
	nop
	nop
	nop
	nop
1246 | ||
/* If we take one of these traps when tl >= 1, then we
 * jump to interrupt globals. If some trap level above us
 * was also using interrupt globals, we cannot recover.
 * We may use all interrupt global registers except %g6.
 *
 * Both handlers first walk every trap level's TSTATE to verify no
 * outer level had PSTATE_IG set; if one did the state is lost and
 * we punt to the fatal path (which etraps and reports code 0x2/0x3,
 * i.e. "unrecoverable").  Otherwise the offending cache is scrubbed
 * and the trapping instruction retried via dcpe_icpe_tl1_common.
 */
	.globl	do_dcpe_tl1, do_icpe_tl1
do_dcpe_tl1:
	rdpr	%tl, %g1		! Save original trap level
	mov	1, %g2			! Setup TSTATE checking loop
	sethi	%hi(TSTATE_IG), %g3	! TSTATE mask bit
1:	wrpr	%g2, %tl		! Set trap level to check
	rdpr	%tstate, %g4		! Read TSTATE for this level
	andcc	%g4, %g3, %g0		! Interrupt globals in use?
	bne,a,pn %xcc, do_dcpe_tl1_fatal ! Yep, irrecoverable
	wrpr	%g1, %tl		! Restore original trap level
	add	%g2, 1, %g2		! Next trap level
	cmp	%g2, %g1		! Hit them all yet?
	ble,pt	%icc, 1b		! Not yet
	nop
	wrpr	%g1, %tl		! Restore original trap level
do_dcpe_tl1_nonfatal:	/* Ok we may use interrupt globals safely. */
	/* Reset D-cache parity */
	sethi	%hi(1 << 16), %g1	! D-cache size
	mov	(1 << 5), %g2		! D-cache line size
	sub	%g1, %g2, %g1		! Move down 1 cacheline
1:	srl	%g1, 14, %g3		! Compute UTAG
	membar	#Sync
	stxa	%g3, [%g1] ASI_DCACHE_UTAG
	membar	#Sync
	sub	%g2, 8, %g3		! 64-bit data word within line
2:	membar	#Sync
	stxa	%g0, [%g1 + %g3] ASI_DCACHE_DATA
	membar	#Sync
	subcc	%g3, 8, %g3		! Next 64-bit data word
	bge,pt	%icc, 2b
	nop
	subcc	%g1, %g2, %g1		! Next cacheline
	bge,pt	%icc, 1b
	nop
	ba,pt	%xcc, dcpe_icpe_tl1_common
	nop

do_dcpe_tl1_fatal:
	sethi	%hi(1f), %g7
	ba,pt	%xcc, etraptl1
1:	or	%g7, %lo(1b), %g7
	mov	0x2, %o0		! dcache, unrecoverable
	call	cheetah_plus_parity_error
	add	%sp, PTREGS_OFF, %o1
	ba,pt	%xcc, rtrap
	clr	%l6

do_icpe_tl1:
	rdpr	%tl, %g1		! Save original trap level
	mov	1, %g2			! Setup TSTATE checking loop
	sethi	%hi(TSTATE_IG), %g3	! TSTATE mask bit
1:	wrpr	%g2, %tl		! Set trap level to check
	rdpr	%tstate, %g4		! Read TSTATE for this level
	andcc	%g4, %g3, %g0		! Interrupt globals in use?
	bne,a,pn %xcc, do_icpe_tl1_fatal ! Yep, irrecoverable
	wrpr	%g1, %tl		! Restore original trap level
	add	%g2, 1, %g2		! Next trap level
	cmp	%g2, %g1		! Hit them all yet?
	ble,pt	%icc, 1b		! Not yet
	nop
	wrpr	%g1, %tl		! Restore original trap level
do_icpe_tl1_nonfatal:	/* Ok we may use interrupt globals safely. */
	/* Flush I-cache */
	sethi	%hi(1 << 15), %g1	! I-cache size
	mov	(1 << 5), %g2		! I-cache line size
	sub	%g1, %g2, %g1
1:	or	%g1, (2 << 3), %g3	! Select tag field of the line
	stxa	%g0, [%g3] ASI_IC_TAG
	membar	#Sync
	subcc	%g1, %g2, %g1
	bge,pt	%icc, 1b
	nop
	ba,pt	%xcc, dcpe_icpe_tl1_common
	nop

do_icpe_tl1_fatal:
	sethi	%hi(1f), %g7
	ba,pt	%xcc, etraptl1
1:	or	%g7, %lo(1b), %g7
	mov	0x3, %o0		! icache, unrecoverable
	call	cheetah_plus_parity_error
	add	%sp, PTREGS_OFF, %o1
	ba,pt	%xcc, rtrap
	clr	%l6

dcpe_icpe_tl1_common:
	/* Flush D-cache, re-enable D/I caches in DCU and finally
	 * retry the trapping instruction.
	 */
	sethi	%hi(1 << 16), %g1	! D-cache size
	mov	(1 << 5), %g2		! D-cache line size
	sub	%g1, %g2, %g1
1:	stxa	%g0, [%g1] ASI_DCACHE_TAG
	membar	#Sync
	subcc	%g1, %g2, %g1
	bge,pt	%icc, 1b
	nop
	ldxa	[%g0] ASI_DCU_CONTROL_REG, %g1
	or	%g1, (DCU_DC | DCU_IC), %g1
	stxa	%g1, [%g0] ASI_DCU_CONTROL_REG
	membar	#Sync
	retry
1354 | ||
/* Cheetah FECC trap handling, we get here from tl{0,1}_fecc
 * in the trap table. That code has done a memory barrier
 * and has disabled both the I-cache and D-cache in the DCU
 * control register. The I-cache is disabled so that we may
 * capture the corrupted cache line, and the D-cache is disabled
 * because corrupt data may have been placed there and we don't
 * want to reference it.
 *
 * %g1 is one if this trap occurred at %tl >= 1.
 *
 * Next, we turn off error reporting so that we don't recurse.
 */
	.globl	cheetah_fast_ecc
cheetah_fast_ecc:
	/* Mask both correctable and uncorrectable error reporting. */
	ldxa	[%g0] ASI_ESTATE_ERROR_EN, %g2
	andn	%g2, ESTATE_ERROR_NCEEN | ESTATE_ERROR_CEEN, %g2
	stxa	%g2, [%g0] ASI_ESTATE_ERROR_EN
	membar	#Sync

	/* Fetch and clear AFSR/AFAR */
	ldxa	[%g0] ASI_AFSR, %g4
	ldxa	[%g0] ASI_AFAR, %g5
	stxa	%g4, [%g0] ASI_AFSR	! Write-one-to-clear
	membar	#Sync

	/* Snapshot cache state; leaves AFSR/AFAR in %g4/%g5. */
	CHEETAH_LOG_ERROR

	/* Raise PIL to 15 and enter the C handler via etrap_irq.
	 * etrap saves %g4/%g5 into %l4/%l5 which we forward as args.
	 */
	rdpr	%pil, %g2
	wrpr	%g0, 15, %pil
	ba,pt	%xcc, etrap_irq
	rd	%pc, %g7
	mov	%l4, %o1		! AFSR
	mov	%l5, %o2		! AFAR
	call	cheetah_fecc_handler
	add	%sp, PTREGS_OFF, %o0	! pt_regs
	ba,a,pt	%xcc, rtrap_irq
1391 | ||
/* Our caller has disabled I-cache and performed membar Sync.
 *
 * Correctable-ECC error path: only CEEN is masked (the error was
 * corrected in hardware, uncorrectable reporting stays armed),
 * then the same log-and-dispatch sequence as cheetah_fast_ecc.
 */
	.globl	cheetah_cee
cheetah_cee:
	ldxa	[%g0] ASI_ESTATE_ERROR_EN, %g2
	andn	%g2, ESTATE_ERROR_CEEN, %g2	! Mask correctable reporting only
	stxa	%g2, [%g0] ASI_ESTATE_ERROR_EN
	membar	#Sync

	/* Fetch and clear AFSR/AFAR */
	ldxa	[%g0] ASI_AFSR, %g4
	ldxa	[%g0] ASI_AFAR, %g5
	stxa	%g4, [%g0] ASI_AFSR	! Write-one-to-clear
	membar	#Sync

	CHEETAH_LOG_ERROR

	rdpr	%pil, %g2
	wrpr	%g0, 15, %pil
	ba,pt	%xcc, etrap_irq
	rd	%pc, %g7
	mov	%l4, %o1		! AFSR
	mov	%l5, %o2		! AFAR
	call	cheetah_cee_handler
	add	%sp, PTREGS_OFF, %o0	! pt_regs
	ba,a,pt	%xcc, rtrap_irq
1417 | ||
/* Our caller has disabled I-cache+D-cache and performed membar Sync.
 *
 * Deferred (asynchronous) error path: mask both NCEEN and CEEN,
 * latch and clear AFSR/AFAR, log cache state, then hand off to
 * the C handler at PIL 15.
 */
	.globl	cheetah_deferred_trap
cheetah_deferred_trap:
	ldxa	[%g0] ASI_ESTATE_ERROR_EN, %g2
	andn	%g2, ESTATE_ERROR_NCEEN | ESTATE_ERROR_CEEN, %g2
	stxa	%g2, [%g0] ASI_ESTATE_ERROR_EN
	membar	#Sync

	/* Fetch and clear AFSR/AFAR */
	ldxa	[%g0] ASI_AFSR, %g4
	ldxa	[%g0] ASI_AFAR, %g5
	stxa	%g4, [%g0] ASI_AFSR	! Write-one-to-clear
	membar	#Sync

	CHEETAH_LOG_ERROR

	rdpr	%pil, %g2
	wrpr	%g0, 15, %pil
	ba,pt	%xcc, etrap_irq
	rd	%pc, %g7
	mov	%l4, %o1		! AFSR
	mov	%l5, %o2		! AFAR
	call	cheetah_deferred_handler
	add	%sp, PTREGS_OFF, %o0	! pt_regs
	ba,a,pt	%xcc, rtrap_irq
1443 | ||
	/* Privileged-action trap: clear the DMMU fault status, etrap
	 * into the kernel and dispatch to the C handler do_privact().
	 */
	.globl	__do_privact
__do_privact:
	mov	TLB_SFSR, %g3
	stxa	%g0, [%g3] ASI_DMMU	! Clear FaultValid bit
	membar	#Sync
	sethi	%hi(109f), %g7
	ba,pt	%xcc, etrap
109:	or	%g7, %lo(109b), %g7	! etrap return point
	call	do_privact
	add	%sp, PTREGS_OFF, %o0	! pt_regs
	ba,pt	%xcc, rtrap
	clr	%l6
1456 | ||
	/* Memory-address-not-aligned trap.
	 *
	 * NOTE: the condition codes from the "cmp %g3, 1" (trap level
	 * check) stay live across the following loads/stores and are
	 * consumed by the bgu below — if TL > 1 the fault happened in
	 * window spill/fill code and must go to winfix_mna instead.
	 */
	.globl	do_mna
do_mna:
	rdpr	%tl, %g3
	cmp	%g3, 1

	/* Setup %g4/%g5 now as they are used in the
	 * winfixup code.
	 */
	mov	TLB_SFSR, %g3
	mov	DMMU_SFAR, %g4
	ldxa	[%g4] ASI_DMMU, %g4	! %g4 = fault address
	ldxa	[%g3] ASI_DMMU, %g5	! %g5 = fault status
	stxa	%g0, [%g3] ASI_DMMU	! Clear FaultValid bit
	membar	#Sync
	bgu,pn	%icc, winfix_mna	! TL > 1: window fixup path
	rdpr	%tpc, %g3

1:	sethi	%hi(109f), %g7
	ba,pt	%xcc, etrap
109:	or	%g7, %lo(109b), %g7	! etrap return point
	mov	%l4, %o1		! SFAR (saved by etrap)
	mov	%l5, %o2		! SFSR (saved by etrap)
	call	mem_address_unaligned
	add	%sp, PTREGS_OFF, %o0	! pt_regs
	ba,pt	%xcc, rtrap
	clr	%l6
1483 | ||
	/* Unaligned lddf (load double FP) trap: capture SFSR/SFAR,
	 * clear the fault latch, and let C code emulate the access.
	 */
	.globl	do_lddfmna
do_lddfmna:
	sethi	%hi(109f), %g7
	mov	TLB_SFSR, %g4
	ldxa	[%g4] ASI_DMMU, %g5	! %g5 = fault status
	stxa	%g0, [%g4] ASI_DMMU	! Clear FaultValid bit
	membar	#Sync
	mov	DMMU_SFAR, %g4
	ldxa	[%g4] ASI_DMMU, %g4	! %g4 = fault address
	ba,pt	%xcc, etrap
109:	or	%g7, %lo(109b), %g7	! etrap return point
	mov	%l4, %o1		! SFAR
	mov	%l5, %o2		! SFSR
	call	handle_lddfmna
	add	%sp, PTREGS_OFF, %o0	! pt_regs
	ba,pt	%xcc, rtrap
	clr	%l6

	/* Unaligned stdf (store double FP) trap: same shape as
	 * do_lddfmna but dispatches to handle_stdfmna.
	 */
	.globl	do_stdfmna
do_stdfmna:
	sethi	%hi(109f), %g7
	mov	TLB_SFSR, %g4
	ldxa	[%g4] ASI_DMMU, %g5	! %g5 = fault status
	stxa	%g0, [%g4] ASI_DMMU	! Clear FaultValid bit
	membar	#Sync
	mov	DMMU_SFAR, %g4
	ldxa	[%g4] ASI_DMMU, %g4	! %g4 = fault address
	ba,pt	%xcc, etrap
109:	or	%g7, %lo(109b), %g7	! etrap return point
	mov	%l4, %o1		! SFAR
	mov	%l5, %o2		! SFSR
	call	handle_stdfmna
	add	%sp, PTREGS_OFF, %o0	! pt_regs
	ba,pt	%xcc, rtrap
	clr	%l6
1519 | ||
	/* Breakpoint trap: already etrapped by the caller (we are on a
	 * kernel stack with pt_regs saved); just call the C handler.
	 */
	.globl	breakpoint_trap
breakpoint_trap:
	call	sparc_breakpoint
	add	%sp, PTREGS_OFF, %o0	! pt_regs
	ba,pt	%xcc, rtrap
	nop
1526 | ||
#if defined(CONFIG_SUNOS_EMUL) || defined(CONFIG_SOLARIS_EMUL) || \
	defined(CONFIG_SOLARIS_EMUL_MODULE)
	/* SunOS uses syscall zero as the 'indirect syscall' it looks
	 * like indir_syscall(scall_num, arg0, arg1, arg2...); etc.
	 * This is complete brain damage.
	 *
	 * Validate the inner syscall number, look up its handler in
	 * sunos_sys_table (or fall back to sunos_nosys), shift all
	 * arguments down one slot, and tail-call the handler with the
	 * original return address restored in %o7.
	 */
	.globl	sunos_indir
sunos_indir:
	srl	%o0, 0, %o0		! Zero-extend 32-bit syscall number
	mov	%o7, %l4		! Preserve return address
	cmp	%o0, NR_SYSCALLS
	blu,a,pt %icc, 1f
	sll	%o0, 0x2, %o0		! Table offset (4-byte entries)
	sethi	%hi(sunos_nosys), %l6
	b,pt	%xcc, 2f
	or	%l6, %lo(sunos_nosys), %l6
1:	sethi	%hi(sunos_sys_table), %l7
	or	%l7, %lo(sunos_sys_table), %l7
	lduw	[%l7 + %o0], %l6	! Handler address
2:	mov	%o1, %o0		! Shift args down one slot
	mov	%o2, %o1
	mov	%o3, %o2
	mov	%o4, %o3
	mov	%o5, %o4
	call	%l6
	mov	%l4, %o7		! Tail-call: handler returns to caller

	/* SunOS getpid() returns pid in %o0 and ppid in %o1. */
	.globl	sunos_getpid
sunos_getpid:
	call	sys_getppid
	nop
	call	sys_getpid
	stx	%o0, [%sp + PTREGS_OFF + PT_V9_I1]	! ppid -> %i1
	b,pt	%xcc, ret_sys_call
	stx	%o0, [%sp + PTREGS_OFF + PT_V9_I0]	! pid -> %i0

	/* SunOS getuid() returns uid in %o0 and euid in %o1 */
	.globl	sunos_getuid
sunos_getuid:
	call	sys32_geteuid16
	nop
	call	sys32_getuid16
	stx	%o0, [%sp + PTREGS_OFF + PT_V9_I1]	! euid -> %i1
	b,pt	%xcc, ret_sys_call
	stx	%o0, [%sp + PTREGS_OFF + PT_V9_I0]	! uid -> %i0

	/* SunOS getgid() returns gid in %o0 and egid in %o1 */
	.globl	sunos_getgid
sunos_getgid:
	call	sys32_getegid16
	nop
	call	sys32_getgid16
	stx	%o0, [%sp + PTREGS_OFF + PT_V9_I1]	! egid -> %i1
	b,pt	%xcc, ret_sys_call
	stx	%o0, [%sp + PTREGS_OFF + PT_V9_I0]	! gid -> %i0
#endif
1583 | ||
/* SunOS's execv() call only specifies the argv argument, the
 * environment settings are the same as the calling processes.
 *
 * All entry points load the address of the C implementation into
 * %g1 and reach execve_merge, which flushes the register windows
 * and tail-calls it with a pointer to the saved pt_regs in %o0.
 *
 * BUGFIX: the .globl directives were crossed — "sunos_execv" was
 * exported unconditionally in front of sys_execve: while
 * ".globl sys_execve" was hidden inside #ifdef CONFIG_COMPAT in
 * front of sunos_execv:.  With CONFIG_COMPAT disabled that left
 * sys_execve (referenced from the syscall table) non-global and
 * declared an undefined global sunos_execv.  Each .globl now
 * pairs with its own label, and the sunos export lives under the
 * compat guard where its definition is.
 */
	.globl	sys_execve
sys_execve:
	sethi	%hi(sparc_execve), %g1
	ba,pt	%xcc, execve_merge
	or	%g1, %lo(sparc_execve), %g1
#ifdef CONFIG_COMPAT
	.globl	sunos_execv
sunos_execv:
	/* SunOS supplies no envp: zero the third argument slot and
	 * fall through into the compat execve path.
	 */
	stx	%g0, [%sp + PTREGS_OFF + PT_V9_I2]
	.globl	sys32_execve
sys32_execve:
	sethi	%hi(sparc32_execve), %g1
	or	%g1, %lo(sparc32_execve), %g1
#endif
execve_merge:
	flushw
	jmpl	%g1, %g0
	add	%sp, PTREGS_OFF, %o0	! pt_regs
1605 | ||
	/* Thin syscall entry stubs.  The first group tail-branches to
	 * C helpers that need the pt_regs pointer; the second group
	 * (sigsuspend/sigreturn/ptrace) uses the "add %o7, 1f-.-4, %o7"
	 * trick to make the called C function return to the shared
	 * syscall-trace check at local label 1 below instead of here.
	 */
	.globl	sys_pipe, sys_sigpause, sys_nis_syscall
	.globl	sys_sigsuspend, sys_rt_sigsuspend
	.globl	sys_rt_sigreturn
	.globl	sys_ptrace
	.globl	sys_sigaltstack
	.align	32
sys_pipe:	ba,pt	%xcc, sparc_pipe
	add	%sp, PTREGS_OFF, %o0
sys_nis_syscall:ba,pt	%xcc, c_sys_nis_syscall
	add	%sp, PTREGS_OFF, %o0
sys_memory_ordering:
	ba,pt	%xcc, sparc_memory_ordering
	add	%sp, PTREGS_OFF, %o1
sys_sigaltstack:ba,pt	%xcc, do_sigaltstack
	add	%i6, STACK_BIAS, %o2	! User stack pointer
#ifdef CONFIG_COMPAT
	.globl	sys32_sigstack
sys32_sigstack:	ba,pt	%xcc, do_sys32_sigstack
	mov	%i6, %o2
	.globl	sys32_sigaltstack
sys32_sigaltstack:
	ba,pt	%xcc, do_sys32_sigaltstack
	mov	%i6, %o2
#endif
	.align	32
sys_sigsuspend:	add	%sp, PTREGS_OFF, %o0
	call	do_sigsuspend
	add	%o7, 1f-.-4, %o7	! Return to trace check below
	nop
sys_rt_sigsuspend: /* NOTE: %o0,%o1 have a correct value already */
	add	%sp, PTREGS_OFF, %o2
	call	do_rt_sigsuspend
	add	%o7, 1f-.-4, %o7	! Return to trace check below
	nop
#ifdef CONFIG_COMPAT
	.globl	sys32_rt_sigsuspend
sys32_rt_sigsuspend: /* NOTE: %o0,%o1 have a correct value already */
	srl	%o0, 0, %o0		! Zero-extend 32-bit mask arg
	add	%sp, PTREGS_OFF, %o2
	call	do_rt_sigsuspend32
	add	%o7, 1f-.-4, %o7
#endif
	/* NOTE: %o0 has a correct value already */
sys_sigpause:	add	%sp, PTREGS_OFF, %o1
	call	do_sigpause
	add	%o7, 1f-.-4, %o7
	nop
#ifdef CONFIG_COMPAT
	.globl	sys32_sigreturn
sys32_sigreturn:
	add	%sp, PTREGS_OFF, %o0
	call	do_sigreturn32
	add	%o7, 1f-.-4, %o7
	nop
#endif
sys_rt_sigreturn:
	add	%sp, PTREGS_OFF, %o0
	call	do_rt_sigreturn
	add	%o7, 1f-.-4, %o7
	nop
#ifdef CONFIG_COMPAT
	.globl	sys32_rt_sigreturn
sys32_rt_sigreturn:
	add	%sp, PTREGS_OFF, %o0
	call	do_rt_sigreturn32
	add	%o7, 1f-.-4, %o7
	nop
#endif
sys_ptrace:	add	%sp, PTREGS_OFF, %o0
	call	do_ptrace
	add	%o7, 1f-.-4, %o7
	nop
	.align	32
	/* Common return point for the stubs above: run syscall_trace
	 * if the task is being traced, then fall into rtrap.
	 */
1:	ldx	[%curptr + TI_FLAGS], %l5
	andcc	%l5, _TIF_SYSCALL_TRACE, %g0
	be,pt	%icc, rtrap
	clr	%l6
	call	syscall_trace
	nop

	ba,pt	%xcc, rtrap
	clr	%l6
1688 | ||
/* This is how fork() was meant to be done, 8 instruction entry.
 *
 * I questioned the following code briefly, let me clear things
 * up so you must not reason on it like I did.
 *
 * Know the fork_kpsr etc. we use in the sparc32 port? We don't
 * need it here because the only piece of window state we copy to
 * the child is the CWP register. Even if the parent sleeps,
 * we are safe because we stuck it into pt_regs of the parent
 * so it will not change.
 *
 * XXX This raises the question, whether we can do the same on
 * XXX sparc32 to get rid of fork_kpsr _and_ fork_kwim. The
 * XXX answer is yes. We stick fork_kpsr in UREG_G0 and
 * XXX fork_kwim in UREG_G1 (global registers are considered
 * XXX volatile across a system call in the sparc ABI I think
 * XXX if it isn't we can use regs->y instead, anyone who depends
 * XXX upon the Y register being preserved across a fork deserves
 * XXX to lose).
 *
 * In fact we should take advantage of that fact for other things
 * during system calls...
 */
	.globl	sys_fork, sys_vfork, sys_clone, sparc_exit
	.globl	ret_from_syscall
	.align	32
sys_vfork:	/* Under Linux, vfork and fork are just special cases of clone. */
	sethi	%hi(0x4000 | 0x0100 | SIGCHLD), %o0	! CLONE_VFORK|CLONE_VM|SIGCHLD
	or	%o0, %lo(0x4000 | 0x0100 | SIGCHLD), %o0
	ba,pt	%xcc, sys_clone
sys_fork:	clr	%o1			! Delay slot for vfork path too
	mov	SIGCHLD, %o0
sys_clone:	flushw
	movrz	%o1, %fp, %o1		! Default child stack = caller's
	mov	0, %o3
	ba,pt	%xcc, sparc_do_fork
	add	%sp, PTREGS_OFF, %o2	! pt_regs
ret_from_syscall:
	/* Clear _TIF_NEWCHILD, switch_to leaves thread.flags in
	 * %o7 for us. Check performance counter stuff too.
	 */
	andn	%o7, _TIF_NEWCHILD, %l0
	stx	%l0, [%g6 + TI_FLAGS]
	call	schedule_tail
	mov	%g7, %o0
	andcc	%l0, _TIF_PERFCTR, %g0
	be,pt	%icc, 1f
	nop
	ldx	[%g6 + TI_PCR], %o7
	wr	%g0, %o7, %pcr		! Restore child's %pcr

	/* Blackbird errata workaround. See commentary in
	 * smp.c:smp_percpu_timer_interrupt() for more
	 * information.
	 */
	ba,pt	%xcc, 99f
	nop
	.align	64
99:	wr	%g0, %g0, %pic
	rd	%pic, %g0

1:	b,pt	%xcc, ret_sys_call
	ldx	[%sp + PTREGS_OFF + PT_V9_I0], %o0
sparc_exit:	wrpr	%g0, (PSTATE_RMO | PSTATE_PEF | PSTATE_PRIV), %pstate
	/* Fold all other-windows into cansave and drop any user
	 * window state before calling sys_exit -- the task is dying,
	 * its user windows need never be spilled.
	 */
	rdpr	%otherwin, %g1
	rdpr	%cansave, %g3
	add	%g3, %g1, %g3
	wrpr	%g3, 0x0, %cansave
	wrpr	%g0, 0x0, %otherwin
	wrpr	%g0, (PSTATE_RMO | PSTATE_PEF | PSTATE_PRIV | PSTATE_IE), %pstate
	ba,pt	%xcc, sys_exit
	stb	%g0, [%g6 + TI_WSAVED]
1761 | ||
	/* Out-of-range syscall number: substitute sys_ni_syscall as
	 * the handler and rejoin the native dispatch path at label 4.
	 */
linux_sparc_ni_syscall:
	sethi	%hi(sys_ni_syscall), %l7
	b,pt	%xcc, 4f
	or	%l7, %lo(sys_ni_syscall), %l7
1766 | ||
	/* Traced 32-bit syscall: after notifying the tracer, reload
	 * all six arguments from the input registers (the tracer may
	 * have modified them), zero-extending for the compat ABI,
	 * then rejoin the dispatch at label 2.
	 */
linux_syscall_trace32:
	call	syscall_trace
	nop
	srl	%i0, 0, %o0
	mov	%i4, %o4
	srl	%i1, 0, %o1
	srl	%i2, 0, %o2
	b,pt	%xcc, 2f
	srl	%i3, 0, %o3

	/* Traced native syscall: same as above without zero-extension. */
linux_syscall_trace:
	call	syscall_trace
	nop
	mov	%i0, %o0
	mov	%i1, %o1
	mov	%i2, %o2
	mov	%i3, %o3
	b,pt	%xcc, 2f
	mov	%i4, %o4
1786 | ||
1787 | ||
/* Linux 32-bit and SunOS system calls enter here...
 *
 * %g1 = syscall number, %l7 = syscall table base (set by the trap
 * table entry).  Arguments arrive in %i0-%i5 and are copied to
 * %o0-%o5 (zero-extended here for the 32-bit ABI).  The original
 * first argument is preserved in %l5 for restart handling.
 */
	.align	32
	.globl	linux_sparc_syscall32
linux_sparc_syscall32:
	/* Direct access to user regs, much faster. */
	cmp	%g1, NR_SYSCALLS			! IEU1	Group
	bgeu,pn	%xcc, linux_sparc_ni_syscall		! CTI
	srl	%i0, 0, %o0				! IEU0
	sll	%g1, 2, %l4				! IEU0	Group
#ifdef SYSCALL_TRACING
	call	syscall_trace_entry
	add	%sp, PTREGS_OFF, %o0
	srl	%i0, 0, %o0
#endif
	srl	%i4, 0, %o4				! IEU1
	lduw	[%l7 + %l4], %l7			! Load	syscall handler
	srl	%i1, 0, %o1				! IEU0	Group
	ldx	[%curptr + TI_FLAGS], %l0		! Load

	srl	%i5, 0, %o5				! IEU1
	srl	%i2, 0, %o2				! IEU0	Group
	andcc	%l0, _TIF_SYSCALL_TRACE, %g0		! IEU0	Group
	bne,pn	%icc, linux_syscall_trace32		! CTI
	mov	%i0, %l5				! IEU1	Save orig arg0
	call	%l7					! CTI	Group brk forced
	srl	%i3, 0, %o3				! IEU0
	ba,a,pt	%xcc, 3f

/* Linux native and SunOS system calls enter here... */
	.align	32
	.globl	linux_sparc_syscall, ret_sys_call
linux_sparc_syscall:
	/* Direct access to user regs, much faster. */
	cmp	%g1, NR_SYSCALLS			! IEU1	Group
	bgeu,pn	%xcc, linux_sparc_ni_syscall		! CTI
	mov	%i0, %o0				! IEU0
	sll	%g1, 2, %l4				! IEU0	Group
#ifdef SYSCALL_TRACING
	call	syscall_trace_entry
	add	%sp, PTREGS_OFF, %o0
	mov	%i0, %o0
#endif
	mov	%i1, %o1				! IEU1
	lduw	[%l7 + %l4], %l7			! Load	syscall handler
4:	mov	%i2, %o2				! IEU0	Group
	ldx	[%curptr + TI_FLAGS], %l0		! Load

	mov	%i3, %o3				! IEU1
	mov	%i4, %o4				! IEU0	Group
	andcc	%l0, _TIF_SYSCALL_TRACE, %g0		! IEU1	Group+1 bubble
	bne,pn	%icc, linux_syscall_trace		! CTI	Group
	mov	%i0, %l5				! IEU0	Save orig arg0
2:	call	%l7					! CTI	Group brk forced
	mov	%i5, %o5				! IEU0
	nop
1843 | ||
	/* Common syscall return: store the handler's return value,
	 * translate it into user-visible %o0 + carry flag (carry set
	 * means error with abs(errno) in %o0), advance TPC/TNPC past
	 * the trap instruction, and honor force_successful_syscall_return().
	 */
3:	stx	%o0, [%sp + PTREGS_OFF + PT_V9_I0]
ret_sys_call:
#ifdef SYSCALL_TRACING
	mov	%o0, %o1
	call	syscall_trace_exit
	add	%sp, PTREGS_OFF, %o0
	mov	%o1, %o0
#endif
	ldx	[%sp + PTREGS_OFF + PT_V9_TSTATE], %g3
	ldx	[%sp + PTREGS_OFF + PT_V9_TNPC], %l1 ! pc = npc
	sra	%o0, 0, %o0		! Sign-extend 32-bit return value
	mov	%ulo(TSTATE_XCARRY | TSTATE_ICARRY), %g2
	sllx	%g2, 32, %g2		! Carry bits live in TSTATE<33:32>

	/* Check if force_successful_syscall_return()
	 * was invoked.
	 */
	ldx	[%curptr + TI_FLAGS], %l0
	andcc	%l0, _TIF_SYSCALL_SUCCESS, %g0
	be,pt	%icc, 1f
	andn	%l0, _TIF_SYSCALL_SUCCESS, %l0
	ba,pt	%xcc, 80f		! Forced success: skip errno check
	stx	%l0, [%curptr + TI_FLAGS]

1:
	cmp	%o0, -ERESTART_RESTARTBLOCK
	bgeu,pn	%xcc, 1f		! In errno range: failure path
	andcc	%l0, _TIF_SYSCALL_TRACE, %l6
80:
	/* System call success, clear Carry condition code. */
	andn	%g3, %g2, %g3
	stx	%g3, [%sp + PTREGS_OFF + PT_V9_TSTATE]
	bne,pn	%icc, linux_syscall_trace2
	add	%l1, 0x4, %l2				! npc = npc+4
	stx	%l1, [%sp + PTREGS_OFF + PT_V9_TPC]
	ba,pt	%xcc, rtrap_clr_l6
	stx	%l2, [%sp + PTREGS_OFF + PT_V9_TNPC]

1:
	/* System call failure, set Carry condition code.
	 * Also, get abs(errno) to return to the process.
	 */
	andcc	%l0, _TIF_SYSCALL_TRACE, %l6
	sub	%g0, %o0, %o0		! %o0 = -retval = abs(errno)
	or	%g3, %g2, %g3		! Set carry in saved TSTATE
	stx	%o0, [%sp + PTREGS_OFF + PT_V9_I0]
	mov	1, %l6
	stx	%g3, [%sp + PTREGS_OFF + PT_V9_TSTATE]
	bne,pn	%icc, linux_syscall_trace2
	add	%l1, 0x4, %l2				! npc = npc+4
	stx	%l1, [%sp + PTREGS_OFF + PT_V9_TPC]

	b,pt	%xcc, rtrap
	stx	%l2, [%sp + PTREGS_OFF + PT_V9_TNPC]
	/* Traced syscall exit: notify tracer, then commit TPC/TNPC. */
linux_syscall_trace2:
	call	syscall_trace
	nop
	stx	%l1, [%sp + PTREGS_OFF + PT_V9_TPC]
	ba,pt	%xcc, rtrap
	stx	%l2, [%sp + PTREGS_OFF + PT_V9_TNPC]
1904 | ||
	/* Flush all user register windows to the stack: walk down
	 * through %otherwin windows with save (counting them in %g2),
	 * then restore back up the same number of times.
	 */
	.align	32
	.globl	__flushw_user
__flushw_user:
	rdpr	%otherwin, %g1
	brz,pn	%g1, 2f			! No user windows, done
	clr	%g2
1:	save	%sp, -128, %sp		! Push into next user window
	rdpr	%otherwin, %g1
	brnz,pt	%g1, 1b
	add	%g2, 1, %g2		! Count windows traversed
1:	sub	%g2, 1, %g2
	brnz,pt	%g2, 1b			! Unwind the saves
	restore	%g0, %g0, %g0
2:	retl
	nop