author    Yoshinori Sato <ysato@users.sourceforge.jp>    2015-05-10 13:32:13 -0400
committer Yoshinori Sato <ysato@users.sourceforge.jp>    2015-06-23 00:35:51 -0400
commit    df2078b8daa7bb15cc8e3f791389b9f44e12bb25 (patch)
tree      773e538c2f7e3470ae7e3853854c7bede57e5a27 /arch/h8300
parent    d8b0bdb444ed0e2bd5f9d3ea7412cb90bdec58cf (diff)
h8300: Low level entry
Signed-off-by: Yoshinori Sato <ysato@users.sourceforge.jp>
Diffstat (limited to 'arch/h8300')

-rw-r--r--  arch/h8300/kernel/entry.S    | 414
-rw-r--r--  arch/h8300/kernel/head_ram.S |  60
-rw-r--r--  arch/h8300/kernel/head_rom.S | 110

3 files changed, 584 insertions(+), 0 deletions(-)
diff --git a/arch/h8300/kernel/entry.S b/arch/h8300/kernel/entry.S
new file mode 100644
index 000000000000..797dfa8ddeb2
--- /dev/null
+++ b/arch/h8300/kernel/entry.S
@@ -0,0 +1,414 @@
/*
 *
 * linux/arch/h8300/kernel/entry.S
 *
 * Yoshinori Sato <ysato@users.sourceforge.jp>
 * David McCullough <davidm@snapgear.com>
 *
 */

/*
 * entry.S
 * contains the exception/interrupt gateways
 * and the system call entry
 */

#include <linux/sys.h>
#include <asm/unistd.h>
#include <asm/setup.h>
#include <asm/segment.h>
#include <asm/linkage.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/errno.h>

#if defined(CONFIG_CPU_H8300H)
#define USERRET 8
INTERRUPTS = 64
        .h8300h
        .macro SHLL2 reg
        shll.l \reg
        shll.l \reg
        .endm
        .macro SHLR2 reg
        shlr.l \reg
        shlr.l \reg
        .endm
        .macro SAVEREGS
        mov.l er0,@-sp
        mov.l er1,@-sp
        mov.l er2,@-sp
        mov.l er3,@-sp
        .endm
        .macro RESTOREREGS
        mov.l @sp+,er3
        mov.l @sp+,er2
        .endm
        .macro SAVEEXR
        .endm
        .macro RESTOREEXR
        .endm
#endif
#if defined(CONFIG_CPU_H8S)
#define USERRET 10
#define USEREXR 8
INTERRUPTS = 128
        .h8300s
        .macro SHLL2 reg
        shll.l #2,\reg
        .endm
        .macro SHLR2 reg
        shlr.l #2,\reg
        .endm
        .macro SAVEREGS
        stm.l er0-er3,@-sp
        .endm
        .macro RESTOREREGS
        ldm.l @sp+,er2-er3
        .endm
        .macro SAVEEXR
        mov.w @(USEREXR:16,er0),r1
        mov.w r1,@(LEXR-LER3:16,sp)     /* copy EXR */
        .endm
        .macro RESTOREEXR
        mov.w @(LEXR-LER1:16,sp),r1     /* restore EXR */
        mov.b r1l,r1h
        mov.w r1,@(USEREXR:16,er0)
        .endm
#endif


/* CPU context save/restore macros. */

        .macro SAVE_ALL
        mov.l er0,@-sp
        stc ccr,r0l                     /* check kernel mode */
        btst #4,r0l
        bne 5f

        /* user mode */
        mov.l sp,@_sw_usp
        mov.l @sp,er0                   /* restore saved er0 */
        orc #0x10,ccr                   /* switch kernel stack */
        mov.l @_sw_ksp,sp
        sub.l #(LRET-LORIG),sp          /* allocate LORIG - LRET */
        SAVEREGS
        mov.l @_sw_usp,er0
        mov.l @(USERRET:16,er0),er1     /* copy the RET addr */
        mov.l er1,@(LRET-LER3:16,sp)
        SAVEEXR

        mov.l @(LORIG-LER3:16,sp),er0
        mov.l er0,@(LER0-LER3:16,sp)    /* copy ER0 */
        mov.w e1,r1                     /* e1 highbyte = ccr */
        and #0xef,r1h                   /* mask the mode flag */
        bra 6f
5:
        /* kernel mode */
        mov.l @sp,er0                   /* restore saved er0 */
        subs #2,sp                      /* set dummy ccr */
        subs #4,sp                      /* set dummy sp */
        SAVEREGS
        mov.w @(LRET-LER3:16,sp),r1     /* copy old ccr */
6:
        mov.b r1h,r1l
        mov.b #0,r1h
        mov.w r1,@(LCCR-LER3:16,sp)     /* set ccr */
        mov.l @_sw_usp,er2
        mov.l er2,@(LSP-LER3:16,sp)     /* set usp */
        mov.l er6,@-sp                  /* syscall arg #6 */
        mov.l er5,@-sp                  /* syscall arg #5 */
        mov.l er4,@-sp                  /* syscall arg #4 */
        .endm                           /* r1 = ccr */

        .macro RESTORE_ALL
        mov.l @sp+,er4
        mov.l @sp+,er5
        mov.l @sp+,er6
        RESTOREREGS
        mov.w @(LCCR-LER1:16,sp),r0     /* check kernel mode */
        btst #4,r0l
        bne 7f

        orc #0xc0,ccr
        mov.l @(LSP-LER1:16,sp),er0
        mov.l @(LER0-LER1:16,sp),er1    /* restore ER0 */
        mov.l er1,@er0
        RESTOREEXR
        mov.w @(LCCR-LER1:16,sp),r1     /* restore the RET addr */
        mov.b r1l,r1h
        mov.b @(LRET+1-LER1:16,sp),r1l
        mov.w r1,e1
        mov.w @(LRET+2-LER1:16,sp),r1
        mov.l er1,@(USERRET:16,er0)

        mov.l @sp+,er1
        add.l #(LRET-LER1),sp           /* remove LORIG - LRET */
        mov.l sp,@_sw_ksp
        andc #0xef,ccr                  /* switch to user mode */
        mov.l er0,sp
        bra 8f
7:
        mov.l @sp+,er1
        add.l #10,sp
8:
        mov.l @sp+,er0
        adds #4,sp                      /* remove the sw created LVEC */
        rte
        .endm

        .globl _system_call
        .globl ret_from_exception
        .globl ret_from_fork
        .globl ret_from_kernel_thread
        .globl ret_from_interrupt
        .globl _interrupt_redirect_table
        .globl _sw_ksp,_sw_usp
        .globl _resume
        .globl _interrupt_entry
        .globl _trace_break
        .globl _nmi

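/*
 * Interrupt redirect table.  For ROM kernels the hardware vector table
 * in head_rom.S points vector n at slot n of this table; every IRQ
 * slot is a jsr to _interrupt_entry, so the return address pushed by
 * the jsr identifies which vector was taken.
 */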
#if defined(CONFIG_ROMKERNEL)
        .section .int_redirect,"ax"
_interrupt_redirect_table:
#if defined(CONFIG_CPU_H8300H)
        .rept 7
        .long 0
        .endr
#endif
#if defined(CONFIG_CPU_H8S)
        .rept 5
        .long 0
        .endr
        jmp @_trace_break
        .long 0
#endif

        jsr @_interrupt_entry           /* NMI */
        jmp @_system_call               /* TRAPA #0 (System call) */
        .long 0
        .long 0
        jmp @_trace_break               /* TRAPA #3 (breakpoint) */
        .rept INTERRUPTS-12
        jsr @_interrupt_entry
        .endr
#endif
#if defined(CONFIG_RAMKERNEL)
        .globl _interrupt_redirect_table
        .section .bss
_interrupt_redirect_table:
        .space 4
#endif

        .section .text
        .align 2
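/*
 * Common interrupt entry.  The slot address (LVEC) pushed by the jsr
 * in the redirect table is converted back into a vector number
 * ((LVEC - table base) / 4 - 1) and passed to do_IRQ together with a
 * pointer to the saved registers.
 */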
_interrupt_entry:
        SAVE_ALL
        /* r1l is saved ccr */
        mov.l sp,er0
        add.l #LVEC,er0
        btst #4,r1l
        bne 1f
        /* user LVEC */
        mov.l @_sw_usp,er0
        adds #4,er0
1:
        mov.l @er0,er0                  /* LVEC address */
#if defined(CONFIG_ROMKERNEL)
        sub.l #_interrupt_redirect_table,er0
#endif
#if defined(CONFIG_RAMKERNEL)
        mov.l @_interrupt_redirect_table,er1
        sub.l er1,er0
#endif
        SHLR2 er0
        dec.l #1,er0
        mov.l sp,er1
        subs #4,er1                     /* adjust ret_pc */
#if defined(CONFIG_CPU_H8S)
        orc #7,exr
#endif
        jsr @do_IRQ
        jmp @ret_from_interrupt

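/*
 * System call entry (TRAPA #0).  The syscall number arrives in er0 and
 * the first three arguments in er1-er3; er4-er6 were pushed by
 * SAVE_ALL as arguments 4-6.  The result is written back to the saved
 * er0 slot so that RESTORE_ALL delivers it to user space.
 */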
_system_call:
        subs #4,sp                      /* dummy LVEC */
        SAVE_ALL
        /* er0: syscall nr */
        andc #0xbf,ccr
        mov.l er0,er4

        /* save top of frame */
        mov.l sp,er0
        jsr @set_esp0
        mov.l sp,er2
        and.w #0xe000,r2
        mov.l @(TI_FLAGS:16,er2),er2
        and.w #_TIF_WORK_SYSCALL_MASK,r2
        beq 1f
        mov.l sp,er0
        jsr @do_syscall_trace_enter
1:
        cmp.l #__NR_syscalls,er4
        bcc badsys
        SHLL2 er4
        mov.l #_sys_call_table,er0
        add.l er4,er0
        mov.l @er0,er4
        beq ret_from_exception:16
        mov.l @(LER1:16,sp),er0
        mov.l @(LER2:16,sp),er1
        mov.l @(LER3:16,sp),er2
        jsr @er4
        mov.l er0,@(LER0:16,sp)         /* save the return value */
        mov.l sp,er2
        and.w #0xe000,r2
        mov.l @(TI_FLAGS:16,er2),er2
        and.w #_TIF_WORK_SYSCALL_MASK,r2
        beq 2f
        mov.l sp,er0
        jsr @do_syscall_trace_leave
2:
        orc #0xc0,ccr
        bra resume_userspace

badsys:
        mov.l #-ENOSYS,er0
        mov.l er0,@(LER0:16,sp)
        bra resume_userspace

#if !defined(CONFIG_PREEMPT)
#define resume_kernel restore_all
#endif

ret_from_exception:
#if defined(CONFIG_PREEMPT)
        orc #0xc0,ccr
#endif
ret_from_interrupt:
        mov.b @(LCCR+1:16,sp),r0l
        btst #4,r0l
        bne resume_kernel:16            /* return from kernel */
resume_userspace:
        andc #0xbf,ccr
        mov.l sp,er4
        and.w #0xe000,r4                /* er4 <- current thread info */
        mov.l @(TI_FLAGS:16,er4),er1
        and.l #_TIF_WORK_MASK,er1
        beq restore_all:8
work_pending:
        btst #TIF_NEED_RESCHED,r1l
        bne work_resched:8
        /* work notifysig */
        mov.l sp,er0
        subs #4,er0                     /* er0: pt_regs */
        jsr @do_notify_resume
        bra resume_userspace:8
work_resched:
        mov.l sp,er0
        jsr @set_esp0
        jsr @schedule
        bra resume_userspace:8
restore_all:
        RESTORE_ALL                     /* Does RTE */

#if defined(CONFIG_PREEMPT)
resume_kernel:
        mov.l @(TI_PRE_COUNT:16,er4),er0
        bne restore_all:8
need_resched:
        mov.l @(TI_FLAGS:16,er4),er0
        btst #TIF_NEED_RESCHED,r0l
        beq restore_all:8
        mov.b @(LCCR+1:16,sp),r0l       /* Interrupt Enabled? */
        bmi restore_all:8
        mov.l sp,er0
        jsr @set_esp0
        jsr @preempt_schedule_irq
        bra need_resched:8
#endif

ret_from_fork:
        mov.l er2,er0
        jsr @schedule_tail
        jmp @ret_from_exception

ret_from_kernel_thread:
        mov.l er2,er0
        jsr @schedule_tail
        mov.l @(LER4:16,sp),er0
        mov.l @(LER5:16,sp),er1
        jsr @er1
        jmp @ret_from_exception

_resume:
        /*
         * Beware - when entering resume, prev (the current task) is
         * in er0 and next (the new task) is in er1, so don't change
         * these registers until their contents are no longer needed.
         */

        /* save ccr */
        sub.w r3,r3
        stc ccr,r3l
        mov.w r3,@(THREAD_CCR+2:16,er0)

        /* disable interrupts */
        orc #0xc0,ccr
        mov.l @_sw_usp,er3
        mov.l er3,@(THREAD_USP:16,er0)
        mov.l sp,@(THREAD_KSP:16,er0)

        /* Skip address space switching if they are the same. */
        /* FIXME: what did we hack out of here, this does nothing! */

        mov.l @(THREAD_USP:16,er1),er0
        mov.l er0,@_sw_usp
        mov.l @(THREAD_KSP:16,er1),sp

        /* restore status register */
        mov.w @(THREAD_CCR+2:16,er1),r3

        ldc r3l,ccr
        rts

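/*
 * Trace/breakpoint trap entry (TRAPA #3 and, on the H8S, the trace
 * exception).  The frame is marked as not being a syscall (LORIG = -1)
 * before control is handed to trace_trap().
 */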
_trace_break:
        subs #4,sp
        SAVE_ALL
        sub.l er1,er1
        dec.l #1,er1
        mov.l er1,@(LORIG,sp)
        mov.l sp,er0
        jsr @set_esp0
        mov.l @_sw_usp,er0
        mov.l @er0,er1
        mov.w @(-2:16,er1),r2
        cmp.w #0x5730,r2
        beq 1f
        subs #2,er1
        mov.l er1,@er0
1:
        and.w #0xff,e1
        mov.l er1,er0
        jsr @trace_trap
        jmp @ret_from_exception

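/*
 * NMI entry: fakes the LVEC slot that a redirect-table jsr would have
 * pushed for the NMI vector, then joins the common interrupt path.
 */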
_nmi:
        subs #4, sp
        mov.l er0, @-sp
        mov.l @_interrupt_redirect_table, er0
        add.l #8*4, er0
        mov.l er0, @(4,sp)
        mov.l @sp+, er0
        jmp @_interrupt_entry

        .section .bss
_sw_ksp:
        .space 4
_sw_usp:
        .space 4

        .end
diff --git a/arch/h8300/kernel/head_ram.S b/arch/h8300/kernel/head_ram.S
new file mode 100644
index 000000000000..84ac5c3ed31a
--- /dev/null
+++ b/arch/h8300/kernel/head_ram.S
@@ -0,0 +1,60 @@

#include <linux/sys.h>
#include <linux/init.h>
#include <asm/unistd.h>
#include <asm/setup.h>
#include <asm/segment.h>
#include <asm/linkage.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/errno.h>

#if defined(CONFIG_CPU_H8300H)
        .h8300h
#define SYSCR 0xfee012
#define IRAMTOP 0xffff20
#endif
#if defined(CONFIG_CPU_H8S)
        .h8300s
#define INTCR 0xffff31
#define IRAMTOP 0xffc000
#endif

        __HEAD
        .global _start
_start:
        mov.l #IRAMTOP,sp
        /* .bss clear */
        mov.l #_sbss,er5
        mov.l #_ebss,er4
        sub.l er5,er4
        shlr er4
        shlr er4
        sub.l er2,er2
1:
        mov.l er2,@er5
        adds #4,er5
        dec.l #1,er4
        bne 1b
        jsr @h8300_fdt_init

        /* linux kernel start */
#if defined(CONFIG_CPU_H8300H)
        ldc #0xd0,ccr                   /* running kernel */
        mov.l #SYSCR,er0
        bclr #3,@er0
#endif
#if defined(CONFIG_CPU_H8S)
        ldc #0x07,exr
        bclr #4,@INTCR:8
        bset #5,@INTCR:8                /* Interrupt mode 2 */
        ldc #0x90,ccr                   /* running kernel */
#endif
        mov.l #init_thread_union,sp
        add.l #0x2000,sp
        jsr @start_kernel

1:
        bra 1b

        .end
diff --git a/arch/h8300/kernel/head_rom.S b/arch/h8300/kernel/head_rom.S
new file mode 100644
index 000000000000..9868a4121a1f
--- /dev/null
+++ b/arch/h8300/kernel/head_rom.S
@@ -0,0 +1,110 @@
#include <linux/init.h>
#include <asm/thread_info.h>

#if defined(CONFIG_CPU_H8300H)
        .h8300h
#define SYSCR 0xfee012
#define IRAMTOP 0xffff20
#define NR_INT 64
#endif
#if defined(CONFIG_CPU_H8S)
        .h8300s
#define INTCR 0xffff31
#define IRAMTOP 0xffc000
#define NR_INT 128
#endif

        __HEAD
        .global _start
_start:
        mov.l #IRAMTOP,sp
#if !defined(CONFIG_H8300H_SIM) && \
    !defined(CONFIG_H8S_SIM)
        jsr @lowlevel_init

        /* copy .data */
        mov.l #_begin_data,er5
        mov.l #_sdata,er6
        mov.l #_edata,er4
        sub.l er6,er4
        shlr.l er4
        shlr.l er4
1:
        mov.l @er5+,er0
        mov.l er0,@er6
        adds #4,er6
        dec.l #1,er4
        bne 1b
        /* .bss clear */
        mov.l #_sbss,er5
        mov.l #_ebss,er4
        sub.l er5,er4
        shlr er4
        shlr er4
        sub.l er0,er0
1:
        mov.l er0,@er5
        adds #4,er5
        dec.l #1,er4
        bne 1b
#else
        /* get cmdline from gdb */
        jsr @0xcc
        ;; er0 - argc
        ;; er1 - argv
        mov.l #command_line,er3
        adds #4,er1
        dec.l #1,er0
        beq 4f
1:
        mov.l @er1+,er2
2:
        mov.b @er2+,r4l
        beq 3f
        mov.b r4l,@er3
        adds #1,er3
        bra 2b
3:
        mov.b #' ',r4l
        mov.b r4l,@er3
        adds #1,er3
        dec.l #1,er0
        bne 1b
        subs #1,er3
        mov.b #0,r4l
        mov.b r4l,@er3
4:
#endif
        sub.l er0,er0
        jsr @h8300_fdt_init
        /* linux kernel start */
#if defined(CONFIG_CPU_H8300H)
        ldc #0xd0,ccr                   /* running kernel */
        mov.l #SYSCR,er0
        bclr #3,@er0
#endif
#if defined(CONFIG_CPU_H8S)
        ldc #0x07,exr
        bclr #4,@INTCR:8
        bset #5,@INTCR:8                /* Interrupt mode 2 */
        ldc #0x90,ccr                   /* running kernel */
#endif
        mov.l #init_thread_union,sp
        add.l #0x2000,sp
        jsr @start_kernel

1:
        bra 1b

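/*
 * ROM kernel hardware vector table: vectors 0 and 1 point at _start,
 * every remaining vector points at its own slot in
 * _interrupt_redirect_table (see entry.S).
 */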
#if defined(CONFIG_ROMKERNEL)
        /* interrupt vector */
        .section .vectors,"ax"
        .long _start
        .long _start
vector = 2
        .rept NR_INT - 2
        .long _interrupt_redirect_table+vector*4
vector = vector + 1
        .endr
#endif
        .end