Annotated listing (git blame). Columns were: Commit | Line | Data.
Base commit 1da177e4 (LT); later touches: c2648527 (TS), a18815ab, b59a9504.
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994 - 2000, 2001, 2003 Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2001 MIPS Technologies, Inc.
 */
#include <linux/config.h>

#include <asm/asm.h>
#include <asm/asmmacro.h>
#include <asm/regdef.h>
#include <asm/mipsregs.h>
#include <asm/stackframe.h>
#include <asm/isadep.h>
#include <asm/thread_info.h>
#include <asm/war.h>
21 | #ifdef CONFIG_PREEMPT | |
c2648527 | 22 | .macro preempt_stop |
1da177e4 LT |
23 | .endm |
24 | #else | |
c2648527 TS |
25 | .macro preempt_stop |
26 | local_irq_disable | |
1da177e4 LT |
27 | .endm |
28 | #define resume_kernel restore_all | |
29 | #endif | |
30 | ||
31 | .text | |
32 | .align 5 | |
33 | FEXPORT(ret_from_exception) | |
34 | preempt_stop | |
35 | FEXPORT(ret_from_irq) | |
36 | LONG_L t0, PT_STATUS(sp) # returning to kernel mode? | |
37 | andi t0, t0, KU_USER | |
38 | beqz t0, resume_kernel | |
39 | ||
c2648527 TS |
40 | resume_userspace: |
41 | local_irq_disable # make sure we dont miss an | |
1da177e4 LT |
42 | # interrupt setting need_resched |
43 | # between sampling and return | |
44 | LONG_L a2, TI_FLAGS($28) # current->work | |
c2648527 TS |
45 | andi t0, a2, _TIF_WORK_MASK # (ignoring syscall_trace) |
46 | bnez t0, work_pending | |
1da177e4 LT |
47 | j restore_all |
48 | ||
49 | #ifdef CONFIG_PREEMPT | |
c2648527 | 50 | resume_kernel: |
a18815ab | 51 | local_irq_disable |
1da177e4 LT |
52 | lw t0, TI_PRE_COUNT($28) |
53 | bnez t0, restore_all | |
54 | need_resched: | |
55 | LONG_L t0, TI_FLAGS($28) | |
56 | andi t1, t0, _TIF_NEED_RESCHED | |
57 | beqz t1, restore_all | |
58 | LONG_L t0, PT_STATUS(sp) # Interrupts off? | |
59 | andi t0, 1 | |
60 | beqz t0, restore_all | |
61 | li t0, PREEMPT_ACTIVE | |
62 | sw t0, TI_PRE_COUNT($28) | |
a18815ab | 63 | jal preempt_schedule_irq |
1da177e4 LT |
64 | #endif |
65 | ||
66 | FEXPORT(ret_from_fork) | |
67 | jal schedule_tail # a0 = task_t *prev | |
68 | ||
69 | FEXPORT(syscall_exit) | |
70 | local_irq_disable # make sure need_resched and | |
71 | # signals dont change between | |
72 | # sampling and return | |
73 | LONG_L a2, TI_FLAGS($28) # current->work | |
74 | li t0, _TIF_ALLWORK_MASK | |
75 | and t0, a2, t0 | |
76 | bnez t0, syscall_exit_work | |
77 | ||
78 | FEXPORT(restore_all) # restore full frame | |
79 | .set noat | |
80 | RESTORE_TEMP | |
81 | RESTORE_AT | |
82 | RESTORE_STATIC | |
83 | FEXPORT(restore_partial) # restore partial frame | |
84 | RESTORE_SOME | |
85 | RESTORE_SP_AND_RET | |
86 | .set at | |
87 | ||
c2648527 TS |
88 | work_pending: |
89 | andi t0, a2, _TIF_NEED_RESCHED # a2 is preloaded with TI_FLAGS | |
1da177e4 LT |
90 | beqz t0, work_notifysig |
91 | work_resched: | |
92 | jal schedule | |
93 | ||
c2648527 | 94 | local_irq_disable # make sure need_resched and |
1da177e4 LT |
95 | # signals dont change between |
96 | # sampling and return | |
97 | LONG_L a2, TI_FLAGS($28) | |
98 | andi t0, a2, _TIF_WORK_MASK # is there any work to be done | |
99 | # other than syscall tracing? | |
100 | beqz t0, restore_all | |
101 | andi t0, a2, _TIF_NEED_RESCHED | |
102 | bnez t0, work_resched | |
103 | ||
104 | work_notifysig: # deal with pending signals and | |
105 | # notify-resume requests | |
106 | move a0, sp | |
107 | li a1, 0 | |
108 | jal do_notify_resume # a2 already loaded | |
109 | j restore_all | |
110 | ||
111 | FEXPORT(syscall_exit_work_partial) | |
112 | SAVE_STATIC | |
c2648527 TS |
113 | syscall_exit_work: |
114 | li t0, _TIF_SYSCALL_TRACE | _TIF_SYSCALL_AUDIT | |
115 | and t0, a2 # a2 is preloaded with TI_FLAGS | |
116 | beqz t0, work_pending # trace bit set? | |
1da177e4 LT |
117 | local_irq_enable # could let do_syscall_trace() |
118 | # call schedule() instead | |
119 | move a0, sp | |
120 | li a1, 1 | |
121 | jal do_syscall_trace | |
122 | b resume_userspace | |
123 | ||
124 | /* | |
125 | * Common spurious interrupt handler. | |
126 | */ | |
1da177e4 LT |
127 | LEAF(spurious_interrupt) |
128 | /* | |
129 | * Someone tried to fool us by sending an interrupt but we | |
130 | * couldn't find a cause for it. | |
131 | */ | |
b59a9504 | 132 | PTR_LA t1, irq_err_count |
1da177e4 | 133 | #ifdef CONFIG_SMP |
b59a9504 | 134 | 1: ll t0, (t1) |
1da177e4 | 135 | addiu t0, 1 |
b59a9504 | 136 | sc t0, (t1) |
1da177e4 LT |
137 | #if R10000_LLSC_WAR |
138 | beqzl t0, 1b | |
139 | #else | |
140 | beqz t0, 1b | |
141 | #endif | |
142 | #else | |
b59a9504 | 143 | lw t0, (t1) |
1da177e4 | 144 | addiu t0, 1 |
b59a9504 | 145 | sw t0, (t1) |
1da177e4 LT |
146 | #endif |
147 | j ret_from_irq | |
148 | END(spurious_interrupt) |