about summary refs log tree commit diff stats
path: root/arch/h8300/kernel/entry.S
diff options
context:
space:
mode:
Diffstat (limited to 'arch/h8300/kernel/entry.S')
-rw-r--r--  arch/h8300/kernel/entry.S  402
1 files changed, 0 insertions, 402 deletions
diff --git a/arch/h8300/kernel/entry.S b/arch/h8300/kernel/entry.S
deleted file mode 100644
index 94bd30f11df6..000000000000
--- a/arch/h8300/kernel/entry.S
+++ /dev/null
@@ -1,402 +0,0 @@
1/* -*- mode: asm -*-
2 *
3 * linux/arch/h8300/platform/h8300h/entry.S
4 *
5 * Yoshinori Sato <ysato@users.sourceforge.jp>
6 * David McCullough <davidm@snapgear.com>
7 *
8 */
9
10/*
11 * entry.S
12 * include exception/interrupt gateway
13 * system call entry
14 */
15
16#include <linux/sys.h>
17#include <asm/unistd.h>
18#include <asm/setup.h>
19#include <asm/segment.h>
20#include <asm/linkage.h>
21#include <asm/asm-offsets.h>
22#include <asm/thread_info.h>
23#include <asm/errno.h>
24
25#if defined(CONFIG_CPU_H8300H)
26#define USERRET 8
27INTERRUPTS = 64
28 .h8300h
29 .macro SHLL2 reg
30 shll.l \reg
31 shll.l \reg
32 .endm
33 .macro SHLR2 reg
34 shlr.l \reg
35 shlr.l \reg
36 .endm
37 .macro SAVEREGS
38 mov.l er0,@-sp
39 mov.l er1,@-sp
40 mov.l er2,@-sp
41 mov.l er3,@-sp
42 .endm
43 .macro RESTOREREGS
44 mov.l @sp+,er3
45 mov.l @sp+,er2
46 .endm
47 .macro SAVEEXR
48 .endm
49 .macro RESTOREEXR
50 .endm
51#endif
52#if defined(CONFIG_CPU_H8S)
53#define USERRET 10
54#define USEREXR 8
55INTERRUPTS = 128
56 .h8300s
57 .macro SHLL2 reg
58 shll.l #2,\reg
59 .endm
60 .macro SHLR2 reg
61 shlr.l #2,\reg
62 .endm
63 .macro SAVEREGS
64 stm.l er0-er3,@-sp
65 .endm
66 .macro RESTOREREGS
67 ldm.l @sp+,er2-er3
68 .endm
69 .macro SAVEEXR
70 mov.w @(USEREXR:16,er0),r1
71 mov.w r1,@(LEXR-LER3:16,sp) /* copy EXR */
72 .endm
73 .macro RESTOREEXR
74 mov.w @(LEXR-LER1:16,sp),r1 /* restore EXR */
75 mov.b r1l,r1h
76 mov.w r1,@(USEREXR:16,er0)
77 .endm
78#endif
79
80
/* CPU context save/restore macros. */

/*
 * SAVE_ALL - build a pt_regs frame on the kernel stack.
 *
 * CCR bit 4 is used as the kernel-mode flag here: clear means the
 * exception came from user mode, set means it came from the kernel.
 * On user-mode entry the user SP is parked in _sw_usp and execution
 * switches onto the kernel stack loaded from _sw_ksp.
 * On exit r1 holds the CCR value at entry (see the "6:" join point).
 */
	.macro SAVE_ALL
	mov.l	er0,@-sp
	stc	ccr,r0l			/* check kernel mode */
	btst	#4,r0l
	bne	5f

	/* user mode */
	mov.l	sp,@_sw_usp		/* park the user SP */
	mov.l	@sp,er0			/* restore saved er0 */
	orc	#0x10,ccr		/* switch kernel stack */
	mov.l	@_sw_ksp,sp
	sub.l	#(LRET-LORIG),sp	/* allocate LORIG - LRET */
	SAVEREGS
	mov.l	@_sw_usp,er0
	mov.l	@(USERRET:16,er0),er1	/* copy the RET addr */
	mov.l	er1,@(LRET-LER3:16,sp)
	SAVEEXR

	mov.l	@(LORIG-LER3:16,sp),er0
	mov.l	er0,@(LER0-LER3:16,sp)	/* copy ER0 */
	mov.w	e1,r1			/* e1 highbyte = ccr */
	and	#0xef,r1h		/* mask mode? flag */
	bra	6f
5:
	/* kernel mode */
	mov.l	@sp,er0			/* restore saved er0 */
	subs	#2,sp			/* set dummy ccr */
	SAVEREGS
	mov.w	@(LRET-LER3:16,sp),r1	/* copy old ccr */
6:
	mov.b	r1h,r1l
	mov.b	#0,r1h
	mov.w	r1,@(LCCR-LER3:16,sp)	/* set ccr */
	mov.l	er6,@-sp		/* syscall arg #6 */
	mov.l	er5,@-sp		/* syscall arg #5 */
	mov.l	er4,@-sp		/* syscall arg #4 */
	.endm				/* r1 = ccr */
120
	/*
	 * RESTORE_ALL - unwind the pt_regs frame built by SAVE_ALL and rte.
	 *
	 * The saved CCR (bit 4) decides whether we return to user or to
	 * kernel mode.  On a user return the saved er0 and the return
	 * address are copied back to the user stack before switching
	 * stacks; on a kernel return the dummy CCR slot is just dropped.
	 */
	.macro RESTORE_ALL
	mov.l	@sp+,er4
	mov.l	@sp+,er5
	mov.l	@sp+,er6
	RESTOREREGS
	mov.w	@(LCCR-LER1:16,sp),r0	/* check kernel mode */
	btst	#4,r0l
	bne	7f

	/* returning to user mode */
	orc	#0x80,ccr		/* disable interrupts */
	mov.l	@_sw_usp,er0
	mov.l	@(LER0-LER1:16,sp),er1	/* restore ER0 */
	mov.l	er1,@er0
	RESTOREEXR
	mov.w	@(LCCR-LER1:16,sp),r1	/* restore the RET addr */
	mov.b	r1l,r1h
	mov.b	@(LRET+1-LER1:16,sp),r1l
	mov.w	r1,e1
	mov.w	@(LRET+2-LER1:16,sp),r1
	mov.l	er1,@(USERRET:16,er0)

	mov.l	@sp+,er1
	add.l	#(LRET-LER1),sp		/* remove LORIG - LRET */
	mov.l	sp,@_sw_ksp		/* remember kernel SP for next entry */
	andc	#0xef,ccr		/* switch to user mode */
	mov.l	er0,sp			/* back onto the user stack */
	bra	8f
7:
	/* returning to kernel mode: drop er1 and the dummy ccr slot */
	mov.l	@sp+,er1
	adds	#4,sp
	adds	#2,sp
8:
	mov.l	@sp+,er0
	adds	#4,sp			/* remove the sw created LVEC */
	rte
	.endm
157
/* Entry points and variables exported to the rest of the kernel. */
.globl _system_call
.globl _ret_from_exception
.globl _ret_from_fork
.globl _ret_from_kernel_thread
.globl _ret_from_interrupt
.globl _interrupt_redirect_table
.globl _sw_ksp,_sw_usp
.globl _resume
.globl _interrupt_entry
.globl _trace_break
168
#if defined(CONFIG_ROMKERNEL)
/*
 * ROM kernel: the vector redirect table is assembled directly into
 * the .int_redirect section.  The leading entries pad over the CPU's
 * reserved/exception vectors; every remaining vector funnels into
 * _interrupt_entry via jsr, whose pushed return address encodes the
 * vector number (decoded in _interrupt_entry).
 */
	.section .int_redirect,"ax"
_interrupt_redirect_table:
#if defined(CONFIG_CPU_H8300H)
	.rept	7
	.long	0
	.endr
#endif
#if defined(CONFIG_CPU_H8S)
	.rept	5
	.long	0
	.endr
	jmp	@_trace_break
	.long	0
#endif

	jsr	@_interrupt_entry	/* NMI */
	jmp	@_system_call		/* TRAPA #0 (System call) */
	.long	0
	.long	0
	jmp	@_trace_break		/* TRAPA #3 (breakpoint) */
	.rept	INTERRUPTS-12
	jsr	@_interrupt_entry
	.endr
#endif
#if defined(CONFIG_RAMKERNEL)
/* RAM kernel: only a pointer to the runtime-built table lives here. */
.globl _interrupt_redirect_table
	.section .bss
_interrupt_redirect_table:
	.space	4
#endif
200
	.section .text
	.align	2
/*
 * Common interrupt entry.  Reached via a "jsr @_interrupt_entry" in
 * the redirect table, so the address pushed by that jsr (saved as
 * LVEC) identifies which table slot - and hence which vector - fired.
 * Computes the IRQ number and dispatches to do_IRQ(irq, regs).
 */
_interrupt_entry:
	SAVE_ALL
	mov.l	sp,er0
	add.l	#LVEC,er0		/* er0 = &saved LVEC */
	btst	#4,r1l			/* r1 = entry CCR from SAVE_ALL */
	bne	1f
	/* user LVEC: the jsr pushed it onto the user stack */
	mov.l	@_sw_usp,er0
	adds	#4,er0
1:
	mov.l	@er0,er0		/* LVEC address */
#if defined(CONFIG_ROMKERNEL)
	sub.l	#_interrupt_redirect_table,er0
#endif
#if defined(CONFIG_RAMKERNEL)
	mov.l	@_interrupt_redirect_table,er1
	sub.l	er1,er0
#endif
	SHLR2	er0			/* byte offset -> table index */
	dec.l	#1,er0			/* jsr pushed the *next* slot's address */
	mov.l	sp,er1
	subs	#4,er1			/* adjust ret_pc */
	jsr	@_do_IRQ
	jmp	@_ret_from_interrupt
227
/*
 * System call entry (TRAPA #0).  On entry er0 holds the syscall
 * number; the user's er1-er3 carry the first three arguments
 * (reloaded from the frame below) and er4-er6 were pushed into the
 * frame by SAVE_ALL as arguments #4-#6.
 */
_system_call:
	subs	#4,sp			/* dummy LVEC */
	SAVE_ALL
	andc	#0x7f,ccr		/* re-enable interrupts */
	mov.l	er0,er4			/* er4 = syscall number */

	/* save top of frame */
	mov.l	sp,er0
	jsr	@_set_esp0
	/* thread_info sits at the 0x2000-aligned base of the kernel stack */
	mov.l	sp,er2
	and.w	#0xe000,r2
	mov.b	@((TI_FLAGS+3-(TIF_SYSCALL_TRACE >> 3)):16,er2),r2l
	btst	#(TIF_SYSCALL_TRACE & 7),r2l
	beq	1f
	jsr	@_do_syscall_trace	/* syscall-entry tracing */
1:
	cmp.l	#NR_syscalls,er4
	bcc	badsys			/* number out of range */
	SHLL2	er4			/* index -> table byte offset */
	mov.l	#_sys_call_table,er0
	add.l	er4,er0
	mov.l	@er0,er4
	beq	_ret_from_exception:16	/* NULL slot: nothing to call */
	/* reload the first three arguments from the saved user er1-er3 */
	mov.l	@(LER1:16,sp),er0
	mov.l	@(LER2:16,sp),er1
	mov.l	@(LER3:16,sp),er2
	jsr	@er4			/* call the handler */
	mov.l	er0,@(LER0:16,sp)	/* save the return value */
	mov.l	sp,er2
	and.w	#0xe000,r2
	mov.b	@((TI_FLAGS+3-(TIF_SYSCALL_TRACE >> 3)):16,er2),r2l
	btst	#(TIF_SYSCALL_TRACE & 7),r2l
	beq	2f
	jsr	@_do_syscall_trace	/* syscall-exit tracing */
2:
#if defined(CONFIG_SYSCALL_PRINT)
	jsr	@_syscall_print
#endif
	orc	#0x80,ccr		/* disable interrupts */
	bra	resume_userspace

badsys:
	mov.l	#-ENOSYS,er0
	mov.l	er0,@(LER0:16,sp)	/* return -ENOSYS to the caller */
	bra	resume_userspace
273
#if !defined(CONFIG_PREEMPT)
#define resume_kernel restore_all
#endif

/*
 * Common exception/interrupt return paths.  The work flags in the
 * current thread_info decide between rescheduling, signal delivery
 * (do_notify_resume) and a plain register restore.
 */
_ret_from_exception:
#if defined(CONFIG_PREEMPT)
	orc	#0x80,ccr		/* disable interrupts */
#endif
_ret_from_interrupt:
	mov.b	@(LCCR+1:16,sp),r0l
	btst	#4,r0l
	bne	resume_kernel:8		/* return from kernel */
resume_userspace:
	andc	#0x7f,ccr		/* enable interrupts */
	mov.l	sp,er4
	and.w	#0xe000,r4		/* er4 <- current thread info */
	mov.l	@(TI_FLAGS:16,er4),er1
	and.l	#_TIF_WORK_MASK,er1
	beq	restore_all:8		/* no work pending */
work_pending:
	btst	#TIF_NEED_RESCHED,r1l
	bne	work_resched:8
	/* work notifysig */
	mov.l	sp,er0
	subs	#4,er0			/* er0: pt_regs */
	jsr	@_do_notify_resume
	bra	restore_all:8
work_resched:
	mov.l	sp,er0
	jsr	@_set_esp0
	jsr	@_schedule
	bra	resume_userspace:8	/* recheck the work flags */
restore_all:
	RESTORE_ALL			/* Does RTE */
308
#if defined(CONFIG_PREEMPT)
/*
 * Kernel preemption: on return to kernel mode, reschedule when
 * preempt_count is zero, TIF_NEED_RESCHED is set and the interrupted
 * context had interrupts enabled.
 */
resume_kernel:
	mov.l	@(TI_PRE_COUNT:16,er4),er0
	bne	restore_all:8		/* preemption disabled */
need_resched:
	mov.l	@(TI_FLAGS:16,er4),er0
	btst	#TIF_NEED_RESCHED,r0l
	beq	restore_all:8
	mov.b	@(LCCR+1:16,sp),r0l	/* Interrupt Enabled? */
	bmi	restore_all:8		/* no: saved I flag was set */
	mov.l	#PREEMPT_ACTIVE,er0
	mov.l	er0,@(TI_PRE_COUNT:16,er4)
	andc	#0x7f,ccr		/* enable interrupts */
	mov.l	sp,er0
	jsr	@_set_esp0
	jsr	@_schedule
	orc	#0x80,ccr		/* disable interrupts */
	bra	need_resched:8		/* loop until nothing left to do */
#endif
328
/* First return of a forked child: er2 = prev task for schedule_tail(). */
_ret_from_fork:
	mov.l	er2,er0
	jsr	@_schedule_tail
	jmp	@_ret_from_exception
333
/*
 * First return of a kernel thread: call the thread function (saved
 * in the LER5 frame slot) with its argument (LER5's sibling slot
 * LER4), then take the normal exception return path.
 */
_ret_from_kernel_thread:
	mov.l	er2,er0			/* prev task for schedule_tail() */
	jsr	@_schedule_tail
	mov.l	@(LER4:16,sp),er0	/* thread function argument */
	mov.l	@(LER5:16,sp),er1	/* thread function */
	jsr	@er1
	jmp	@_ret_from_exception
341
_resume:
	/*
	 * Task switch helper for switch_to().
	 *
	 * Judging by the THREAD_* offsets applied below: er0 points at
	 * the outgoing ("prev") task's thread struct and er1 at the
	 * incoming ("next") task's thread struct — TODO confirm against
	 * the switch_to() caller.  (The previous comment here described
	 * m68k registers d1/a0/a1/d2 and was stale.)
	 *
	 * Saves CCR, the user SP (_sw_usp) and the kernel SP of "prev",
	 * then loads the same state for "next" and returns on its
	 * kernel stack.
	 */

	/* save sr (CCR) of the outgoing task */
	sub.w	r3,r3
	stc	ccr,r3l
	mov.w	r3,@(THREAD_CCR+2:16,er0)

	/* disable interrupts */
	orc	#0x80,ccr
	mov.l	@_sw_usp,er3
	mov.l	er3,@(THREAD_USP:16,er0)
	mov.l	sp,@(THREAD_KSP:16,er0)

	/* Skip address space switching if they are the same. */
	/* FIXME: what did we hack out of here, this does nothing! */

	mov.l	@(THREAD_USP:16,er1),er0
	mov.l	er0,@_sw_usp
	mov.l	@(THREAD_KSP:16,er1),sp

	/* restore status register */
	mov.w	@(THREAD_CCR+2:16,er1),r3

	ldc	r3l,ccr
	rts
374
/*
 * Breakpoint trap (TRAPA #3) entry.
 * Marks the frame as a non-syscall (LORIG = -1), then rewinds the
 * saved user PC by one instruction word unless the word preceding it
 * is 0x5730 (presumably the TRAPA #3 opcode itself, i.e. a planted
 * breakpoint — TODO confirm encoding), and hands the address to
 * trace_trap().
 */
_trace_break:
	subs	#4,sp			/* dummy LVEC */
	SAVE_ALL
	sub.l	er1,er1
	dec.l	#1,er1
	mov.l	er1,@(LORIG,sp)		/* LORIG = -1: not a syscall */
	mov.l	sp,er0
	jsr	@_set_esp0
	mov.l	@_sw_usp,er0
	mov.l	@er0,er1		/* user return address */
	mov.w	@(-2:16,er1),r2		/* instruction word before the PC */
	cmp.w	#0x5730,r2		/* TRAPA #3 ? */
	beq	1f
	subs	#2,er1			/* back up to re-execute the insn */
	mov.l	er1,@er0
1:
	and.w	#0xff,e1
	mov.l	er1,er0			/* er0 = breakpoint address */
	jsr	@_trace_trap
	jmp	@_ret_from_exception
395
	.section .bss
/* Parked kernel / user stack pointers used by SAVE_ALL and RESTORE_ALL. */
_sw_ksp:
	.space	4
_sw_usp:
	.space	4

	.end