author     Yoshinori Sato <ysato@users.sourceforge.jp>            2007-07-16 02:38:36 -0400
committer  Linus Torvalds <torvalds@woody.linux-foundation.org>   2007-07-16 12:05:37 -0400
commit     2fea299f74c846157b068be8ae15b406db0719d9 (patch)
tree       649ee49eb7568927f2379f9ea2d213f2ec40e873
parent     542f739d12159e3198611aa471359cc63600be1a (diff)

h8300 entry.S update
Signed-off-by: Yoshinori Sato <ysato@users.sourceforge.jp>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>

-rw-r--r--  arch/h8300/kernel/Makefile | 3
-rw-r--r--  arch/h8300/kernel/entry.S (renamed from arch/h8300/platform/h8s/entry.S) | 302
-rw-r--r--  arch/h8300/kernel/ptrace.c | 2
-rw-r--r--  arch/h8300/kernel/signal.c | 6
-rw-r--r--  arch/h8300/platform/h8300h/Makefile | 2
-rw-r--r--  arch/h8300/platform/h8300h/entry.S | 332
-rw-r--r--  arch/h8300/platform/h8s/Makefile | 2
-rw-r--r--  include/asm-h8300/thread_info.h | 2
8 files changed, 195 insertions, 456 deletions

diff --git a/arch/h8300/kernel/Makefile b/arch/h8300/kernel/Makefile
index ccc1a7fbf94b..874f6aefee65 100644
--- a/arch/h8300/kernel/Makefile
+++ b/arch/h8300/kernel/Makefile
@@ -6,6 +6,7 @@ extra-y := vmlinux.lds
 
 obj-y := process.o traps.o ptrace.o irq.o \
 sys_h8300.o time.o semaphore.o signal.o \
-setup.o gpio.o init_task.o syscalls.o
+setup.o gpio.o init_task.o syscalls.o \
+entry.o
 
 obj-$(CONFIG_MODULES) += module.o h8300_ksyms.o
diff --git a/arch/h8300/platform/h8s/entry.S b/arch/h8300/kernel/entry.S
index f3d6b8e8f959..ca7431690300 100644
--- a/arch/h8300/platform/h8s/entry.S
+++ b/arch/h8300/kernel/entry.S
@@ -1,11 +1,10 @@
 /* -*- mode: asm -*-
  *
- * linux/arch/h8300/platform/h8s/entry.S
+ * linux/arch/h8300/platform/h8300h/entry.S
  *
  * Yoshinori Sato <ysato@users.sourceforge.jp>
+ * David McCullough <davidm@snapgear.com>
  *
- * fairly heavy changes to fix syscall args and signal processing
- * by David McCullough <davidm@snapgear.com>
  */
 
 /*
@@ -23,13 +22,66 @@
 #include <asm/thread_info.h>
 #include <asm/errno.h>
 
+#if defined(CONFIG_CPU_H8300H)
+#define USERRET 8
+INTERRUPTS = 64
+.h8300h
+.macro SHLL2 reg
+shll.l \reg
+shll.l \reg
+.endm
+.macro SHLR2 reg
+shlr.l \reg
+shlr.l \reg
+.endm
+.macro SAVEREGS
+mov.l er0,@-sp
+mov.l er1,@-sp
+mov.l er2,@-sp
+mov.l er3,@-sp
+.endm
+.macro RESTOREREGS
+mov.l @sp+,er3
+mov.l @sp+,er2
+.endm
+.macro SAVEEXR
+.endm
+.macro RESTOREEXR
+.endm
+#endif
+#if defined(CONFIG_CPU_H8S)
+#define USERRET 10
+#define USEREXR 8
+INTERRUPTS = 128
 .h8300s
+.macro SHLL2 reg
+shll.l #2,\reg
+.endm
+.macro SHLR2 reg
+shlr.l #2,\reg
+.endm
+.macro SAVEREGS
+stm.l er0-er3,@-sp
+.endm
+.macro RESTOREREGS
+ldm.l @sp+,er2-er3
+.endm
+.macro SAVEEXR
+mov.w @(USEREXR:16,er0),r1
+mov.w r1,@(LEXR-LER3:16,sp) /* copy EXR */
+.endm
+.macro RESTOREEXR
+mov.w @(LEXR-LER1:16,sp),r1 /* restore EXR */
+mov.b r1l,r1h
+mov.w r1,@(USEREXR:16,er0)
+.endm
+#endif
+
 
 /* CPU context save/restore macros. */
 
 .macro SAVE_ALL
 mov.l er0,@-sp
-
 stc ccr,r0l /* check kernel mode */
 btst #4,r0l
 bne 5f
@@ -39,42 +91,38 @@
 mov.l @sp,er0 /* restore saved er0 */
 orc #0x10,ccr /* switch kernel stack */
 mov.l @SYMBOL_NAME(sw_ksp),sp
 sub.l #(LRET-LORIG),sp /* allocate LORIG - LRET */
-stm.l er0-er3,@-sp
+SAVEREGS
 mov.l @SYMBOL_NAME(sw_usp),er0
-mov.l @(10:16,er0),er1 /* copy the RET addr */
+mov.l @(USERRET:16,er0),er1 /* copy the RET addr */
 mov.l er1,@(LRET-LER3:16,sp)
-mov.w @(8:16,er0),r1
-mov.w r1,@(LEXR-LER3:16,sp) /* copy EXR */
+SAVEEXR
 
-mov.w e1,r1 /* e1 highbyte = ccr */
-and #0xef,r1h /* mask mode? flag */
-sub.w r0,r0
-mov.b r1h,r0l
-mov.w r0,@(LCCR-LER3:16,sp) /* copy ccr */
 mov.l @(LORIG-LER3:16,sp),er0
 mov.l er0,@(LER0-LER3:16,sp) /* copy ER0 */
+mov.w e1,r1 /* e1 highbyte = ccr */
+and #0xef,r1h /* mask mode? flag */
 bra 6f
 5:
 /* kernel mode */
 mov.l @sp,er0 /* restore saved er0 */
 subs #2,sp /* set dummy ccr */
-stm.l er0-er3,@-sp
+SAVEREGS
 mov.w @(LRET-LER3:16,sp),r1 /* copy old ccr */
+6:
 mov.b r1h,r1l
 mov.b #0,r1h
-mov.w r1,@(LCCR-LER3:16,sp)
-6:
+mov.w r1,@(LCCR-LER3:16,sp) /* set ccr */
 mov.l er6,@-sp /* syscall arg #6 */
 mov.l er5,@-sp /* syscall arg #5 */
 mov.l er4,@-sp /* syscall arg #4 */
-.endm
+.endm /* r1 = ccr */
 
 .macro RESTORE_ALL
 mov.l @sp+,er4
 mov.l @sp+,er5
 mov.l @sp+,er6
-ldm.l @sp+,er2-er3
+RESTOREREGS
 mov.w @(LCCR-LER1:16,sp),r0 /* check kernel mode */
 btst #4,r0l
 bne 7f
@@ -83,18 +131,16 @@
 mov.l @SYMBOL_NAME(sw_usp),er0
 mov.l @(LER0-LER1:16,sp),er1 /* restore ER0 */
 mov.l er1,@er0
-mov.w @(LEXR-LER1:16,sp),r1 /* restore EXR */
-mov.b r1l,r1h
-mov.w r1,@(8:16,er0)
+RESTOREEXR
 mov.w @(LCCR-LER1:16,sp),r1 /* restore the RET addr */
 mov.b r1l,r1h
 mov.b @(LRET+1-LER1:16,sp),r1l
 mov.w r1,e1
 mov.w @(LRET+2-LER1:16,sp),r1
-mov.l er1,@(10:16,er0)
+mov.l er1,@(USERRET:16,er0)
 
 mov.l @sp+,er1
 add.l #(LRET-LER1),sp /* remove LORIG - LRET */
 mov.l sp,@SYMBOL_NAME(sw_ksp)
 andc #0xef,ccr /* switch to user mode */
 mov.l er0,sp
@@ -108,7 +154,7 @@
 adds #4,sp /* remove the sw created LVEC */
 rte
 .endm
 
 .globl SYMBOL_NAME(system_call)
 .globl SYMBOL_NAME(ret_from_exception)
 .globl SYMBOL_NAME(ret_from_fork)
@@ -116,16 +162,25 @@
 .globl SYMBOL_NAME(interrupt_redirect_table)
 .globl SYMBOL_NAME(sw_ksp),SYMBOL_NAME(sw_usp)
 .globl SYMBOL_NAME(resume)
-.globl SYMBOL_NAME(trace_break)
 .globl SYMBOL_NAME(interrupt_entry)
+.globl SYMBOL_NAME(trace_break)
 
-INTERRUPTS = 128
 #if defined(CONFIG_ROMKERNEL)
 .section .int_redirect,"ax"
 SYMBOL_NAME_LABEL(interrupt_redirect_table)
+#if defined(CONFIG_CPU_H8300H)
 .rept 7
 .long 0
 .endr
+#endif
+#if defined(CONFIG_CPU_H8S)
+.rept 5
+.long 0
+.endr
+jmp @SYMBOL_NAME(trace_break)
+.long 0
+#endif
+
 jsr @SYMBOL_NAME(interrupt_entry) /* NMI */
 jmp @SYMBOL_NAME(system_call) /* TRAPA #0 (System call) */
 .long 0
@@ -141,20 +196,20 @@ SYMBOL_NAME_LABEL(interrupt_redirect_table)
 SYMBOL_NAME_LABEL(interrupt_redirect_table)
 .space 4
 #endif
 
 .section .text
 .align 2
 SYMBOL_NAME_LABEL(interrupt_entry)
 SAVE_ALL
-mov.w @(LCCR,sp),r0
-btst #4,r0l
+mov.l sp,er0
+add.l #LVEC,er0
+btst #4,r1l
 bne 1f
+/* user LVEC */
 mov.l @SYMBOL_NAME(sw_usp),er0
-mov.l @(4:16,er0),er0
-bra 2f
+adds #4,er0
 1:
-mov.l @(LVEC:16,sp),er0
-2:
+mov.l @er0,er0 /* LVEC address */
 #if defined(CONFIG_ROMKERNEL)
 sub.l #SYMBOL_NAME(interrupt_redirect_table),er0
 #endif
@@ -162,69 +217,62 @@ SYMBOL_NAME_LABEL(interrupt_entry)
 mov.l @SYMBOL_NAME(interrupt_redirect_table),er1
 sub.l er1,er0
 #endif
-shlr.l #2,er0
+SHLR2 er0
 dec.l #1,er0
 mov.l sp,er1
 subs #4,er1 /* adjust ret_pc */
-jsr @SYMBOL_NAME(process_int)
-mov.l @SYMBOL_NAME(irq_stat)+CPUSTAT_SOFTIRQ_PENDING,er0
-beq 1f
-jsr @SYMBOL_NAME(do_softirq)
-1:
-jmp @SYMBOL_NAME(ret_from_exception)
+jsr @SYMBOL_NAME(do_IRQ)
+jmp @SYMBOL_NAME(ret_from_interrupt)
 
 SYMBOL_NAME_LABEL(system_call)
 subs #4,sp /* dummy LVEC */
 SAVE_ALL
+andc #0x7f,ccr
 mov.l er0,er4
-mov.l #-ENOSYS,er0
-mov.l er0,@(LER0:16,sp)
 
 /* save top of frame */
 mov.l sp,er0
 jsr @SYMBOL_NAME(set_esp0)
-cmp.l #NR_syscalls,er4
-bcc SYMBOL_NAME(ret_from_exception):16
-shll.l #2,er4
-mov.l #SYMBOL_NAME(sys_call_table),er0
-add.l er4,er0
-mov.l @er0,er0
-mov.l er0,er4
-beq SYMBOL_NAME(ret_from_exception):16
 mov.l sp,er2
 and.w #0xe000,r2
-mov.b @((TASK_FLAGS+3-(TIF_SYSCALL_TRACE >> 3)):16,er2),r2l
+mov.b @((TI_FLAGS+3-(TIF_SYSCALL_TRACE >> 3)):16,er2),r2l
 btst #(TIF_SYSCALL_TRACE & 7),r2l
+beq 1f
+jsr @SYMBOL_NAME(do_syscall_trace)
+1:
+cmp.l #NR_syscalls,er4
+bcc badsys
+SHLL2 er4
+mov.l #SYMBOL_NAME(sys_call_table),er0
+add.l er4,er0
+mov.l @er0,er4
+beq SYMBOL_NAME(ret_from_exception):16
 mov.l @(LER1:16,sp),er0
 mov.l @(LER2:16,sp),er1
 mov.l @(LER3:16,sp),er2
-andc #0x7f,ccr
 jsr @er4
 mov.l er0,@(LER0:16,sp) /* save the return value */
+mov.l sp,er2
+and.w #0xe000,r2
+mov.b @((TI_FLAGS+3-(TIF_SYSCALL_TRACE >> 3)):16,er2),r2l
+btst #(TIF_SYSCALL_TRACE & 7),r2l
+beq 2f
+jsr @SYMBOL_NAME(do_syscall_trace)
+2:
 #if defined(CONFIG_SYSCALL_PRINT)
 jsr @SYMBOL_NAME(syscall_print)
 #endif
-bra SYMBOL_NAME(ret_from_exception):8
-1:
-jsr SYMBOL_NAME(syscall_trace)
-mov.l @(LER1:16,sp),er0
-mov.l @(LER2:16,sp),er1
-mov.l @(LER3:16,sp),er2
-jsr @er4
-mov.l er0,@(LER0:16,sp) /* save the return value */
-jsr @SYMBOL_NAME(syscall_trace)
-bra SYMBOL_NAME(ret_from_exception):8
+orc #0x80,ccr
+bra resume_userspace
 
-SYMBOL_NAME_LABEL(ret_from_fork)
-mov.l er2,er0
-jsr @SYMBOL_NAME(schedule_tail)
-bra SYMBOL_NAME(ret_from_exception):8
+badsys:
+mov.l #-ENOSYS,er0
+mov.l er0,@(LER0:16,sp)
+bra resume_userspace
 
-SYMBOL_NAME_LABEL(reschedule)
-/* save top of frame */
-mov.l sp,er0
-jsr @SYMBOL_NAME(set_esp0)
-jsr @SYMBOL_NAME(schedule)
+#if !defined(CONFIG_PREEMPT)
+#define resume_kernel restore_all
+#endif
 
 SYMBOL_NAME_LABEL(ret_from_exception)
 #if defined(CONFIG_PREEMPT)
@@ -232,58 +280,68 @@ SYMBOL_NAME_LABEL(ret_from_exception)
 #endif
 SYMBOL_NAME_LABEL(ret_from_interrupt)
 mov.b @(LCCR+1:16,sp),r0l
-btst #4,r0l /* check if returning to kernel */
-bne done:8 /* if so, skip resched, signals */
+btst #4,r0l
+bne resume_kernel:8 /* return from kernel */
+resume_userspace:
 andc #0x7f,ccr
 mov.l sp,er4
-and.w #0xe000,r4
+and.w #0xe000,r4 /* er4 <- current thread info */
 mov.l @(TI_FLAGS:16,er4),er1
 and.l #_TIF_WORK_MASK,er1
-beq done:8
-1:
-mov.l @(TI_FLAGS:16,er4),er1
+beq restore_all:8
+work_pending:
 btst #TIF_NEED_RESCHED,r1l
-bne SYMBOL_NAME(reschedule):16
+bne work_resched:8
+/* work notifysig */
 mov.l sp,er0
-subs #4,er0 /* adjust retpc */
-mov.l er2,er1
-jsr @SYMBOL_NAME(do_signal)
+subs #4,er0 /* er0: pt_regs */
+jsr @SYMBOL_NAME(do_notify_resume)
+bra restore_all:8
+work_resched:
+mov.l sp,er0
+jsr @SYMBOL_NAME(set_esp0)
+jsr @SYMBOL_NAME(schedule)
+bra resume_userspace:8
+restore_all:
+RESTORE_ALL /* Does RTE */
+
 #if defined(CONFIG_PREEMPT)
-bra done:8 /* userspace thoru */
-3:
-btst #4,r0l
-beq done:8 /* userspace thoru */
-4:
-mov.l @(TI_PRE_COUNT:16,er4),er1
-bne done:8
-mov.l @(TI_FLAGS:16,er4),er1
-btst #TIF_NEED_RESCHED,r1l
-beq done:8
-mov.b r0l,r0l
-bpl done:8 /* interrupt off (exception path?) */
-mov.l #PREEMPT_ACTIVE,er1
-mov.l er1,@(TI_PRE_COUNT:16,er4)
+resume_kernel:
+mov.l @(TI_PRE_COUNT:16,er4),er0
+bne restore_all:8
+need_resched:
+mov.l @(TI_FLAGS:16,er4),er0
+btst #TIF_NEED_RESCHED,r0l
+beq restore_all:8
+mov.b @(LCCR+1:16,sp),r0l /* Interrupt Enabled? */
+bmi restore_all:8
+mov.l #PREEMPT_ACTIVE,er0
+mov.l er0,@(TI_PRE_COUNT:16,er4)
 andc #0x7f,ccr
+mov.l sp,er0
+jsr @SYMBOL_NAME(set_esp0)
 jsr @SYMBOL_NAME(schedule)
-sub.l er1,er1
-mov.l er1,@(TI_PRE_COUNT:16,er4)
 orc #0x80,ccr
-bra 4b:8
+bra need_resched:8
 #endif
-done:
-RESTORE_ALL /* Does RTE */
+
+SYMBOL_NAME_LABEL(ret_from_fork)
+mov.l er2,er0
+jsr @SYMBOL_NAME(schedule_tail)
+jmp @SYMBOL_NAME(ret_from_exception)
 
 SYMBOL_NAME_LABEL(resume)
 /*
- * er0 = prev
- * er1 = next
- * return last in er2
+ * Beware - when entering resume, offset of tss is in d1,
+ * prev (the current task) is in a0, next (the new task)
+ * is in a1 and d2.b is non-zero if the mm structure is
+ * shared between the tasks, so don't change these
+ * registers until their contents are no longer needed.
  */
 
 /* save sr */
 sub.w r3,r3
 stc ccr,r3l
-stc exr,r3h
 mov.w r3,@(THREAD_CCR+2:16,er0)
 
 /* disable interrupts */
@@ -291,41 +349,45 @@ SYMBOL_NAME_LABEL(resume)
 mov.l @SYMBOL_NAME(sw_usp),er3
 mov.l er3,@(THREAD_USP:16,er0)
 mov.l sp,@(THREAD_KSP:16,er0)
 
 /* Skip address space switching if they are the same. */
 /* FIXME: what did we hack out of here, this does nothing! */
 
 mov.l @(THREAD_USP:16,er1),er0
 mov.l er0,@SYMBOL_NAME(sw_usp)
 mov.l @(THREAD_KSP:16,er1),sp
 
 /* restore status register */
 mov.w @(THREAD_CCR+2:16,er1),r3
 
 ldc r3l,ccr
-ldc r3h,exr
-
 rts
 
 SYMBOL_NAME_LABEL(trace_break)
-subs #4,sp /* dummy LVEC */
+subs #4,sp
 SAVE_ALL
 sub.l er1,er1
 dec.l #1,er1
 mov.l er1,@(LORIG,sp)
 mov.l sp,er0
 jsr @SYMBOL_NAME(set_esp0)
 mov.l @SYMBOL_NAME(sw_usp),er0
 mov.l @er0,er1
+mov.w @(-2:16,er1),r2
+cmp.w #0x5730,r2
+beq 1f
 subs #2,er1
 mov.l er1,@er0
+1:
 and.w #0xff,e1
 mov.l er1,er0
 jsr @SYMBOL_NAME(trace_trap)
 jmp @SYMBOL_NAME(ret_from_exception)
 
 .section .bss
 SYMBOL_NAME_LABEL(sw_ksp)
 .space 4
 SYMBOL_NAME_LABEL(sw_usp)
 .space 4
+
+.end
diff --git a/arch/h8300/kernel/ptrace.c b/arch/h8300/kernel/ptrace.c
index 8f2411db7eaf..8a7a991b8f76 100644
--- a/arch/h8300/kernel/ptrace.c
+++ b/arch/h8300/kernel/ptrace.c
@@ -219,7 +219,7 @@ long arch_ptrace(struct task_struct *child, long request, long addr, long data)
 	return ret;
 }
 
-asmlinkage void syscall_trace(void)
+asmlinkage void do_syscall_trace(void)
 {
 	if (!test_thread_flag(TIF_SYSCALL_TRACE))
 		return;
diff --git a/arch/h8300/kernel/signal.c b/arch/h8300/kernel/signal.c
index 02955604d760..62ea12d339b9 100644
--- a/arch/h8300/kernel/signal.c
+++ b/arch/h8300/kernel/signal.c
@@ -547,3 +547,9 @@ asmlinkage int do_signal(struct pt_regs *regs, sigset_t *oldset)
 	}
 	return 0;
 }
+
+asmlinkage void do_notify_resume(struct pt_regs *regs, u32 thread_info_flags)
+{
+	if (thread_info_flags & (_TIF_SIGPENDING | _TIF_RESTORE_SIGMASK))
+		do_signal(regs, NULL);
+}
diff --git a/arch/h8300/platform/h8300h/Makefile b/arch/h8300/platform/h8300h/Makefile
index b24ea08aa0a7..c5096369ea50 100644
--- a/arch/h8300/platform/h8300h/Makefile
+++ b/arch/h8300/platform/h8300h/Makefile
@@ -4,4 +4,4 @@
 # Reuse any files we can from the H8/300H
 #
 
-obj-y := entry.o irq_pin.o ptrace_h8300h.o
+obj-y := irq_pin.o ptrace_h8300h.o
diff --git a/arch/h8300/platform/h8300h/entry.S b/arch/h8300/platform/h8300h/entry.S
deleted file mode 100644
index f86ac3b5d4de..000000000000
--- a/arch/h8300/platform/h8300h/entry.S
+++ /dev/null
@@ -1,332 +0,0 @@
-/* -*- mode: asm -*-
- *
- * linux/arch/h8300/platform/h8300h/entry.S
- *
- * Yoshinori Sato <ysato@users.sourceforge.jp>
- * David McCullough <davidm@snapgear.com>
- *
- */
-
-/*
- * entry.S
- * include exception/interrupt gateway
- * system call entry
- */
-
-#include <linux/sys.h>
-#include <asm/unistd.h>
-#include <asm/setup.h>
-#include <asm/segment.h>
-#include <asm/linkage.h>
-#include <asm/asm-offsets.h>
-#include <asm/thread_info.h>
-#include <asm/errno.h>
-
-.h8300h
-
-/* CPU context save/restore macros. */
-
-.macro SAVE_ALL
-mov.l er0,@-sp
-
-stc ccr,r0l /* check kernel mode */
-btst #4,r0l
-bne 5f
-
-mov.l sp,@SYMBOL_NAME(sw_usp) /* user mode */
-mov.l @sp,er0
-orc #0x10,ccr
-mov.l @SYMBOL_NAME(sw_ksp),sp
-sub.l #(LRET-LORIG),sp /* allocate LORIG - LRET */
-mov.l er0,@-sp
-mov.l er1,@-sp
-mov.l @SYMBOL_NAME(sw_usp),er0
-mov.l @(8:16,er0),er1 /* copy the RET addr */
-mov.l er1,@(LRET-LER1:16,sp)
-
-mov.w e1,r1 /* e1 highbyte = ccr */
-and #0xef,r1h /* mask mode? flag */
-sub.w r0,r0
-mov.b r1h,r0l
-mov.w r0,@(LCCR-LER1:16,sp) /* copy ccr */
-mov.l @(LORIG-LER1:16,sp),er0
-mov.l er0,@(LER0-LER1:16,sp) /* copy ER0 */
-bra 6f
-5:
-mov.l @sp,er0 /* kernel mode */
-subs #2,sp /* dummy ccr */
-mov.l er0,@-sp
-mov.l er1,@-sp
-mov.w @(LRET-LER1:16,sp),r1 /* copy old ccr */
-mov.b r1h,r1l
-mov.b #0,r1h
-mov.w r1,@(LCCR-LER1:16,sp) /* set ccr */
-6:
-mov.l er2,@-sp
-mov.l er3,@-sp
-mov.l er6,@-sp /* syscall arg #6 */
-mov.l er5,@-sp /* syscall arg #5 */
-mov.l er4,@-sp /* syscall arg #4 */
-.endm
-
-.macro RESTORE_ALL
-mov.l @sp+,er4
-mov.l @sp+,er5
-mov.l @sp+,er6
-mov.l @sp+,er3
-mov.l @sp+,er2
-mov.w @(LCCR-LER1:16,sp),r0 /* check kernel mode */
-btst #4,r0l
-bne 7f
-
-orc #0x80,ccr
-mov.l @SYMBOL_NAME(sw_usp),er0
-mov.l @(LER0-LER1:16,sp),er1 /* restore ER0 */
-mov.l er1,@er0
-mov.w @(LCCR-LER1:16,sp),r1 /* restore the RET addr */
-mov.b r1l,r1h
-mov.b @(LRET+1-LER1:16,sp),r1l
-mov.w r1,e1
-mov.w @(LRET+2-LER1:16,sp),r1
-mov.l er1,@(8:16,er0)
-
-mov.l @sp+,er1
-add.l #(LRET-LER1),sp /* remove LORIG - LRET */
-mov.l sp,@SYMBOL_NAME(sw_ksp)
-mov.l er0,sp
-bra 8f
-7:
-mov.l @sp+,er1
-adds #4,sp
-adds #2,sp
-8:
-mov.l @sp+,er0
-adds #4,sp /* remove the sw created LVEC */
-rte
-.endm
-
-.globl SYMBOL_NAME(system_call)
-.globl SYMBOL_NAME(ret_from_exception)
-.globl SYMBOL_NAME(ret_from_fork)
-.globl SYMBOL_NAME(ret_from_interrupt)
-.globl SYMBOL_NAME(interrupt_redirect_table)
-.globl SYMBOL_NAME(sw_ksp),SYMBOL_NAME(sw_usp)
-.globl SYMBOL_NAME(resume)
-.globl SYMBOL_NAME(interrupt_redirect_table)
-.globl SYMBOL_NAME(interrupt_entry)
-.globl SYMBOL_NAME(system_call)
-.globl SYMBOL_NAME(trace_break)
-
-#if defined(CONFIG_ROMKERNEL)
-INTERRUPTS = 64
-.section .int_redirect,"ax"
-SYMBOL_NAME_LABEL(interrupt_redirect_table)
-.rept 7
-.long 0
-.endr
-jsr @SYMBOL_NAME(interrupt_entry) /* NMI */
-jmp @SYMBOL_NAME(system_call) /* TRAPA #0 (System call) */
-.long 0
-.long 0
-jmp @SYMBOL_NAME(trace_break) /* TRAPA #3 (breakpoint) */
-.rept INTERRUPTS-12
-jsr @SYMBOL_NAME(interrupt_entry)
-.endr
-#endif
-#if defined(CONFIG_RAMKERNEL)
-.globl SYMBOL_NAME(interrupt_redirect_table)
-.section .bss
-SYMBOL_NAME_LABEL(interrupt_redirect_table)
-.space 4
-#endif
-
-.section .text
-.align 2
-SYMBOL_NAME_LABEL(interrupt_entry)
-SAVE_ALL
-mov.w @(LCCR,sp),r0
-btst #4,r0l
-bne 1f
-mov.l @SYMBOL_NAME(sw_usp),er0
-mov.l @(4:16,er0),er0
-bra 2f
-1:
-mov.l @(LVEC,sp),er0
-2:
-#if defined(CONFIG_ROMKERNEL)
-sub.l #SYMBOL_NAME(interrupt_redirect_table),er0
-#endif
-#if defined(CONFIG_RAMKERNEL)
-mov.l @SYMBOL_NAME(interrupt_redirect_table),er1
-sub.l er1,er0
-#endif
-shlr.l er0
-shlr.l er0
-dec.l #1,er0
-mov.l sp,er1
-subs #4,er1 /* adjust ret_pc */
-jsr @SYMBOL_NAME(do_IRQ)
-mov.l @SYMBOL_NAME(irq_stat)+CPUSTAT_SOFTIRQ_PENDING,er0
-beq 1f
-jsr @SYMBOL_NAME(do_softirq)
-1:
-jmp @SYMBOL_NAME(ret_from_interrupt)
-
-SYMBOL_NAME_LABEL(system_call)
-subs #4,sp /* dummy LVEC */
-SAVE_ALL
-mov.w @(LCCR:16,sp),r1
-bset #4,r1l
-ldc r1l,ccr
-mov.l er0,er4
-mov.l #-ENOSYS,er0
-mov.l er0,@(LER0:16,sp)
-
-/* save top of frame */
-mov.l sp,er0
-jsr @SYMBOL_NAME(set_esp0)
-cmp.l #NR_syscalls,er4
-bcc SYMBOL_NAME(ret_from_exception):16
-shll.l er4
-shll.l er4
-mov.l #SYMBOL_NAME(sys_call_table),er0
-add.l er4,er0
-mov.l @er0,er4
-beq SYMBOL_NAME(ret_from_exception):16
-mov.l sp,er2
-and.w #0xe000,r2
-mov.b @((TASK_FLAGS+3-(TIF_SYSCALL_TRACE >> 3)):16,er2),r2l
-btst #(TIF_SYSCALL_TRACE & 7),r2l
-bne 1f
-mov.l @(LER1:16,sp),er0
-mov.l @(LER2:16,sp),er1
-mov.l @(LER3:16,sp),er2
-jsr @er4
-mov.l er0,@(LER0:16,sp) /* save the return value */
-#if defined(CONFIG_SYSCALL_PRINT)
-jsr @SYMBOL_NAME(syscall_print)
-#endif
-bra SYMBOL_NAME(ret_from_exception):8
-1:
-jsr SYMBOL_NAME(syscall_trace)
-mov.l @(LER1:16,sp),er0
-mov.l @(LER2:16,sp),er1
-mov.l @(LER3:16,sp),er2
-jsr @er4
-mov.l er0,@(LER0:16,sp) /* save the return value */
-jsr @SYMBOL_NAME(syscall_trace)
-bra SYMBOL_NAME(ret_from_exception):8
-
-SYMBOL_NAME_LABEL(ret_from_fork)
-mov.l er2,er0
-jsr @SYMBOL_NAME(schedule_tail)
-bra SYMBOL_NAME(ret_from_exception):8
-
-SYMBOL_NAME_LABEL(reschedule)
-/* save top of frame */
-mov.l sp,er0
-jsr @SYMBOL_NAME(set_esp0)
-jsr @SYMBOL_NAME(schedule)
-
-SYMBOL_NAME_LABEL(ret_from_exception)
-#if defined(CONFIG_PREEMPT)
-orc #0x80,ccr
-#endif
-SYMBOL_NAME_LABEL(ret_from_interrupt)
-mov.b @(LCCR+1:16,sp),r0l
-btst #4,r0l /* check if returning to kernel */
-bne done:8 /* if so, skip resched, signals */
-andc #0x7f,ccr
-mov.l sp,er4
-and.w #0xe000,r4
-mov.l @(TI_FLAGS:16,er4),er1
-and.l #_TIF_WORK_MASK,er1
-beq done:8
-1:
-mov.l @(TI_FLAGS:16,er4),er1
-btst #TIF_NEED_RESCHED,r1l
-bne SYMBOL_NAME(reschedule):16
-mov.l sp,er0
-subs #4,er0 /* adjust retpc */
-mov.l er2,er1
-jsr @SYMBOL_NAME(do_signal)
-#if defined(CONFIG_PREEMPT)
-bra done:8 /* userspace thoru */
-3:
-btst #4,r0l
-beq done:8 /* userspace thoru */
-4:
-mov.l @(TI_PRE_COUNT:16,er4),er1
-bne done:8
-mov.l @(TI_FLAGS:16,er4),er1
-btst #TIF_NEED_RESCHED,r1l
-beq done:8
-mov.b r0l,r0l
-bpl done:8 /* interrupt off (exception path?) */
-mov.l #PREEMPT_ACTIVE,er1
-mov.l er1,@(TI_PRE_COUNT:16,er4)
-andc #0x7f,ccr
-jsr @SYMBOL_NAME(schedule)
-sub.l er1,er1
-mov.l er1,@(TI_PRE_COUNT:16,er4)
-orc #0x80,ccr
-bra 4b:8
-#endif
-done:
-RESTORE_ALL /* Does RTE */
-
-SYMBOL_NAME_LABEL(resume)
-/*
- * Beware - when entering resume, offset of tss is in d1,
- * prev (the current task) is in a0, next (the new task)
- * is in a1 and d2.b is non-zero if the mm structure is
- * shared between the tasks, so don't change these
- * registers until their contents are no longer needed.
- */
-
-/* save sr */
-sub.w r3,r3
-stc ccr,r3l
-mov.w r3,@(THREAD_CCR+2:16,er0)
-
-/* disable interrupts */
-orc #0x80,ccr
-mov.l @SYMBOL_NAME(sw_usp),er3
-mov.l er3,@(THREAD_USP:16,er0)
-mov.l sp,@(THREAD_KSP:16,er0)
-
-/* Skip address space switching if they are the same. */
-/* FIXME: what did we hack out of here, this does nothing! */
-
-mov.l @(THREAD_USP:16,er1),er0
-mov.l er0,@SYMBOL_NAME(sw_usp)
-mov.l @(THREAD_KSP:16,er1),sp
-
-/* restore status register */
-mov.w @(THREAD_CCR+2:16,er1),r3
-
-ldc r3l,ccr
-rts
-
-SYMBOL_NAME_LABEL(trace_break)
-subs #4,sp
-SAVE_ALL
-sub.l er1,er1
-dec.l #1,er1
-mov.l er1,@(LORIG,sp)
-mov.l sp,er0
-jsr @SYMBOL_NAME(set_esp0)
-mov.l @SYMBOL_NAME(sw_usp),er0
-mov.l @er0,er1
-subs #2,er1
-mov.l er1,@er0
-and.w #0xff,e1
-mov.l er1,er0
-jsr @SYMBOL_NAME(trace_trap)
-jmp @SYMBOL_NAME(ret_from_exception)
-
-.section .bss
-SYMBOL_NAME_LABEL(sw_ksp)
-.space 4
-SYMBOL_NAME_LABEL(sw_usp)
-.space 4
diff --git a/arch/h8300/platform/h8s/Makefile b/arch/h8300/platform/h8s/Makefile
index 0847b15d4256..bf1241883766 100644
--- a/arch/h8300/platform/h8s/Makefile
+++ b/arch/h8300/platform/h8s/Makefile
@@ -4,4 +4,4 @@
 # Reuse any files we can from the H8S
 #
 
-obj-y := entry.o ints_h8s.o ptrace_h8s.o
+obj-y := ints_h8s.o ptrace_h8s.o
diff --git a/include/asm-h8300/thread_info.h b/include/asm-h8300/thread_info.h
index 45f09dc9caff..aee4009a498e 100644
--- a/include/asm-h8300/thread_info.h
+++ b/include/asm-h8300/thread_info.h
@@ -92,6 +92,7 @@ static inline struct thread_info *current_thread_info(void)
 #define TIF_POLLING_NRFLAG 4 /* true if poll_idle() is polling
                                 TIF_NEED_RESCHED */
 #define TIF_MEMDIE 5
+#define TIF_RESTORE_SIGMASK 6 /* restore signal mask in do_signal() */
 
 /* as above, but as bit values */
 #define _TIF_SYSCALL_TRACE (1<<TIF_SYSCALL_TRACE)
@@ -99,6 +100,7 @@ static inline struct thread_info *current_thread_info(void)
 #define _TIF_SIGPENDING (1<<TIF_SIGPENDING)
 #define _TIF_NEED_RESCHED (1<<TIF_NEED_RESCHED)
 #define _TIF_POLLING_NRFLAG (1<<TIF_POLLING_NRFLAG)
+#define _TIF_RESTORE_SIGMASK (1<<TIF_RESTORE_SIGMASK)
 
 #define _TIF_WORK_MASK 0x0000FFFE /* work to do on interrupt/exception return */
 