author		Russell King <rmk@dyn-67.arm.linux.org.uk>	2005-06-18 04:06:59 -0400
committer	Russell King <rmk@dyn-67.arm.linux.org.uk>	2005-06-18 04:06:59 -0400
commit		5ab6091db0b644f59c2f16c5e41028496b709160 (patch)
tree		d81a8e028cccd80a52fbe9b6386e2751949b978f /arch/arm
parent		9ee1c939d1cb936b1f98e8d81aeffab57bae46ab (diff)
parent		ccea7a19e54349d4f40778304e1bb88da83d39e7 (diff)
Merge with ../linux-2.6-smp
Diffstat (limited to 'arch/arm')
-rw-r--r--	arch/arm/kernel/entry-armv.S	254
-rw-r--r--	arch/arm/kernel/entry-header.S	  7
-rw-r--r--	arch/arm/kernel/setup.c		 52
3 files changed, 190 insertions, 123 deletions
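
The entry macros below build a struct pt_regs frame on the SVC stack using the asm-offsets constants S_FRAME_SIZE, S_SP, S_PC and S_PSR. As a reading aid, here is a minimal C sketch of that frame, assuming ARM's usual 18-word pt_regs; struct pt_regs_sketch and its field names are illustrative, not code from this commit:

	#include <stddef.h>
	#include <stdint.h>

	/* Sketch of the exception frame the entry macros allocate with
	 * "sub sp, sp, #S_FRAME_SIZE"; offsets mirror the S_* constants
	 * generated by asm-offsets from ARM's 18-word struct pt_regs. */
	struct pt_regs_sketch {
		uint32_t r[13];		/* r1-r12 via "stmib sp, {r1 - r12}";
					   the real r0 via "str r1, [sp]" */
		uint32_t sp;		/* sp_usr, saved with "stmdb r0, {sp, lr}^" */
		uint32_t lr;		/* lr_usr */
		uint32_t pc;		/* lr_<exception>: where the exception hit */
		uint32_t cpsr;		/* spsr_<exception>: interrupted mode's PSR */
		uint32_t old_r0;	/* written as -1 on entry */
	};

	#define S_FRAME_SIZE	sizeof(struct pt_regs_sketch)		/* 72   */
	#define S_SP		offsetof(struct pt_regs_sketch, sp)	/* 0x34 */
	#define S_PC		offsetof(struct pt_regs_sketch, pc)	/* 0x3c */
	#define S_PSR		offsetof(struct pt_regs_sketch, cpsr)	/* 0x40 */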
diff --git a/arch/arm/kernel/entry-armv.S b/arch/arm/kernel/entry-armv.S
index e14278d59882..39a6c1b0b9a3 100644
--- a/arch/arm/kernel/entry-armv.S
+++ b/arch/arm/kernel/entry-armv.S
@@ -24,48 +24,91 @@
 #include "entry-header.S"
 
 /*
+ * Interrupt handling.  Preserves r7, r8, r9
+ */
+	.macro	irq_handler
+1:	get_irqnr_and_base r0, r6, r5, lr
+	movne	r1, sp
+	@
+	@ routine called with r0 = irq number, r1 = struct pt_regs *
+	@
+	adrne	lr, 1b
+	bne	asm_do_IRQ
+
+#ifdef CONFIG_SMP
+	/*
+	 * XXX
+	 *
+	 * this macro assumes that irqstat (r6) and base (r5) are
+	 * preserved from get_irqnr_and_base above
+	 */
+	test_for_ipi r0, r6, r5, lr
+	movne	r0, sp
+	adrne	lr, 1b
+	bne	do_IPI
+#endif
+
+	.endm
+
+/*
  * Invalid mode handlers
  */
-	.macro	inv_entry, sym, reason
-	sub	sp, sp, #S_FRAME_SIZE		@ Allocate frame size in one go
-	stmia	sp, {r0 - lr}			@ Save XXX r0 - lr
-	ldr	r4, .LC\sym
+	.macro	inv_entry, reason
+	sub	sp, sp, #S_FRAME_SIZE
+	stmib	sp, {r1 - lr}
 	mov	r1, #\reason
 	.endm
 
 __pabt_invalid:
-	inv_entry abt, BAD_PREFETCH
-	b	1f
+	inv_entry BAD_PREFETCH
+	b	common_invalid
 
 __dabt_invalid:
-	inv_entry abt, BAD_DATA
-	b	1f
+	inv_entry BAD_DATA
+	b	common_invalid
 
 __irq_invalid:
-	inv_entry irq, BAD_IRQ
-	b	1f
+	inv_entry BAD_IRQ
+	b	common_invalid
 
 __und_invalid:
-	inv_entry und, BAD_UNDEFINSTR
+	inv_entry BAD_UNDEFINSTR
+
+	@
+	@ XXX fall through to common_invalid
+	@
+
+@
+@ common_invalid - generic code for failed exception (re-entrant version of handlers)
+@
+common_invalid:
+	zero_fp
+
+	ldmia	r0, {r4 - r6}
+	add	r0, sp, #S_PC		@ here for interlock avoidance
+	mov	r7, #-1			@  ""   ""    ""        ""
+	str	r4, [sp]		@ save preserved r0
+	stmia	r0, {r5 - r7}		@ lr_<exception>,
+					@ cpsr_<exception>, "old_r0"
 
-1:	zero_fp
-	ldmia	r4, {r5 - r7}			@ Get XXX pc, cpsr, old_r0
-	add	r4, sp, #S_PC
-	stmia	r4, {r5 - r7}			@ Save XXX pc, cpsr, old_r0
 	mov	r0, sp
-	and	r2, r6, #31			@ int mode
+	and	r2, r6, #0x1f
 	b	bad_mode
 
 /*
  * SVC mode handlers
  */
-	.macro	svc_entry, sym
+	.macro	svc_entry
 	sub	sp, sp, #S_FRAME_SIZE
-	stmia	sp, {r0 - r12}			@ save r0 - r12
-	ldr	r2, .LC\sym
-	add	r0, sp, #S_FRAME_SIZE
-	ldmia	r2, {r2 - r4}			@ get pc, cpsr
-	add	r5, sp, #S_SP
+	stmib	sp, {r1 - r12}
+
+	ldmia	r0, {r1 - r3}
+	add	r5, sp, #S_SP		@ here for interlock avoidance
+	mov	r4, #-1			@  ""  ""      ""       ""
+	add	r0, sp, #S_FRAME_SIZE	@  ""  ""      ""       ""
+	str	r1, [sp]		@ save the "real" r0 copied
+					@ from the exception stack
+
 	mov	r1, lr
 
 	@
@@ -82,7 +125,7 @@ __und_invalid:
 
 	.align	5
 __dabt_svc:
-	svc_entry abt
+	svc_entry
 
 	@
 	@ get ready to re-enable interrupts if appropriate
@@ -129,28 +172,24 @@ __dabt_svc:
 
 	.align	5
 __irq_svc:
-	svc_entry irq
+	svc_entry
+
 #ifdef CONFIG_PREEMPT
-	get_thread_info r8
-	ldr	r9, [r8, #TI_PREEMPT]		@ get preempt count
-	add	r7, r9, #1			@ increment it
-	str	r7, [r8, #TI_PREEMPT]
+	get_thread_info tsk
+	ldr	r8, [tsk, #TI_PREEMPT]		@ get preempt count
+	add	r7, r8, #1			@ increment it
+	str	r7, [tsk, #TI_PREEMPT]
 #endif
-1:	get_irqnr_and_base r0, r6, r5, lr
-	movne	r1, sp
-	@
-	@ routine called with r0 = irq number, r1 = struct pt_regs *
-	@
-	adrne	lr, 1b
-	bne	asm_do_IRQ
+
+	irq_handler
 #ifdef CONFIG_PREEMPT
-	ldr	r0, [r8, #TI_FLAGS]		@ get flags
+	ldr	r0, [tsk, #TI_FLAGS]		@ get flags
 	tst	r0, #_TIF_NEED_RESCHED
 	blne	svc_preempt
 preempt_return:
-	ldr	r0, [r8, #TI_PREEMPT]		@ read preempt value
+	ldr	r0, [tsk, #TI_PREEMPT]		@ read preempt value
+	str	r8, [tsk, #TI_PREEMPT]		@ restore preempt count
 	teq	r0, r7
-	str	r9, [r8, #TI_PREEMPT]		@ restore preempt count
 	strne	r0, [r0, -r0]			@ bug()
 #endif
 	ldr	r0, [sp, #S_PSR]		@ irqs are already disabled
@@ -161,7 +200,7 @@ preempt_return:
 
 #ifdef CONFIG_PREEMPT
 svc_preempt:
-	teq	r9, #0				@ was preempt count = 0
+	teq	r8, #0				@ was preempt count = 0
 	ldreq	r6, .LCirq_stat
 	movne	pc, lr				@ no
 	ldr	r0, [r6, #4]			@ local_irq_count
@@ -169,9 +208,9 @@ svc_preempt:
 	adds	r0, r0, r1
 	movne	pc, lr
 	mov	r7, #0				@ preempt_schedule_irq
-	str	r7, [r8, #TI_PREEMPT]		@ expects preempt_count == 0
+	str	r7, [tsk, #TI_PREEMPT]		@ expects preempt_count == 0
 1:	bl	preempt_schedule_irq		@ irq en/disable is done inside
-	ldr	r0, [r8, #TI_FLAGS]		@ get new tasks TI_FLAGS
+	ldr	r0, [tsk, #TI_FLAGS]		@ get new tasks TI_FLAGS
 	tst	r0, #_TIF_NEED_RESCHED
 	beq	preempt_return			@ go again
 	b	1b
@@ -179,7 +218,7 @@ svc_preempt:
 
 	.align	5
 __und_svc:
-	svc_entry und
+	svc_entry
 
 	@
 	@ call emulation code, which returns using r9 if it has emulated
@@ -209,7 +248,7 @@ __und_svc:
 
 	.align	5
 __pabt_svc:
-	svc_entry abt
+	svc_entry
 
 	@
 	@ re-enable interrupts if appropriate
@@ -242,12 +281,8 @@ __pabt_svc:
 	ldmia	sp, {r0 - pc}^			@ load r0 - pc, cpsr
 
 	.align	5
-.LCirq:
-	.word	__temp_irq
-.LCund:
-	.word	__temp_und
-.LCabt:
-	.word	__temp_abt
+.LCcralign:
+	.word	cr_alignment
 #ifdef MULTI_ABORT
 .LCprocfns:
 	.word	processor
@@ -262,12 +297,16 @@ __pabt_svc:
 /*
  * User mode handlers
  */
-	.macro	usr_entry, sym
-	sub	sp, sp, #S_FRAME_SIZE		@ Allocate frame size in one go
-	stmia	sp, {r0 - r12}			@ save r0 - r12
-	ldr	r7, .LC\sym
-	add	r5, sp, #S_PC
-	ldmia	r7, {r2 - r4}			@ Get USR pc, cpsr
+	.macro	usr_entry
+	sub	sp, sp, #S_FRAME_SIZE
+	stmib	sp, {r1 - r12}
+
+	ldmia	r0, {r1 - r3}
+	add	r0, sp, #S_PC		@ here for interlock avoidance
+	mov	r4, #-1			@  ""  ""     ""        ""
+
+	str	r1, [sp]		@ save the "real" r0 copied
+					@ from the exception stack
 
 #if __LINUX_ARM_ARCH__ < 6 && !defined(CONFIG_NEEDS_SYSCALL_FOR_CMPXCHG)
 	@ make sure our user space atomic helper is aborted
@@ -284,13 +323,13 @@ __pabt_svc:
 	@
 	@ Also, separately save sp_usr and lr_usr
 	@
-	stmia	r5, {r2 - r4}
-	stmdb	r5, {sp, lr}^
+	stmia	r0, {r2 - r4}
+	stmdb	r0, {sp, lr}^
 
 	@
 	@ Enable the alignment trap while in kernel mode
 	@
-	alignment_trap r7, r0, __temp_\sym
+	alignment_trap r0
 
 	@
 	@ Clear FP to mark the first stack frame
@@ -300,7 +339,7 @@ __pabt_svc:
 
 	.align	5
 __dabt_usr:
-	usr_entry abt
+	usr_entry
 
 	@
 	@ Call the processor-specific abort handler:
@@ -329,30 +368,23 @@ __dabt_usr:
 
 	.align	5
 __irq_usr:
-	usr_entry irq
+	usr_entry
 
+	get_thread_info tsk
 #ifdef CONFIG_PREEMPT
-	get_thread_info r8
-	ldr	r9, [r8, #TI_PREEMPT]		@ get preempt count
-	add	r7, r9, #1			@ increment it
-	str	r7, [r8, #TI_PREEMPT]
+	ldr	r8, [tsk, #TI_PREEMPT]		@ get preempt count
+	add	r7, r8, #1			@ increment it
+	str	r7, [tsk, #TI_PREEMPT]
 #endif
-1:	get_irqnr_and_base r0, r6, r5, lr
-	movne	r1, sp
-	adrne	lr, 1b
-	@
-	@ routine called with r0 = irq number, r1 = struct pt_regs *
-	@
-	bne	asm_do_IRQ
+
+	irq_handler
 #ifdef CONFIG_PREEMPT
-	ldr	r0, [r8, #TI_PREEMPT]
+	ldr	r0, [tsk, #TI_PREEMPT]
+	str	r8, [tsk, #TI_PREEMPT]
 	teq	r0, r7
-	str	r9, [r8, #TI_PREEMPT]
 	strne	r0, [r0, -r0]
-	mov	tsk, r8
-#else
-	get_thread_info tsk
 #endif
+
 	mov	why, #0
 	b	ret_to_user
 
@@ -360,7 +392,7 @@ __irq_usr:
 
 	.align	5
 __und_usr:
-	usr_entry und
+	usr_entry
 
 	tst	r3, #PSR_T_BIT			@ Thumb mode?
 	bne	fpundefinstr			@ ignore FP
@@ -476,7 +508,7 @@ fpundefinstr:
 
 	.align	5
 __pabt_usr:
-	usr_entry abt
+	usr_entry
 
 	enable_irq				@ Enable interrupts
 	mov	r0, r2				@ address (pc)
@@ -741,29 +773,41 @@ __kuser_helper_end:
  *
  * Common stub entry macro:
  *   Enter in IRQ mode, spsr = SVC/USR CPSR, lr = SVC/USR PC
+ *
+ * SP points to a minimal amount of processor-private memory, the address
+ * of which is copied into r0 for the mode specific abort handler.
  */
-	.macro	vector_stub, name, sym, correction=0
+	.macro	vector_stub, name, correction=0
 	.align	5
 
 vector_\name:
-	ldr	r13, .LCs\sym
 	.if \correction
 	sub	lr, lr, #\correction
 	.endif
-	str	lr, [r13]			@ save lr_IRQ
+
+	@
+	@ Save r0, lr_<exception> (parent PC) and spsr_<exception>
+	@ (parent CPSR)
+	@
+	stmia	sp, {r0, lr}		@ save r0, lr
 	mrs	lr, spsr
-	str	lr, [r13, #4]			@ save spsr_IRQ
+	str	lr, [sp, #8]		@ save spsr
+
 	@
-	@ now branch to the relevant MODE handling routine
+	@ Prepare for SVC32 mode.  IRQs remain disabled.
 	@
-	mrs	r13, cpsr
-	bic	r13, r13, #MODE_MASK
-	orr	r13, r13, #SVC_MODE
-	msr	spsr_cxsf, r13			@ switch to SVC_32 mode
+	mrs	r0, cpsr
+	bic	r0, r0, #MODE_MASK
+	orr	r0, r0, #SVC_MODE
+	msr	spsr_cxsf, r0
 
-	and	lr, lr, #15
+	@
+	@ the branch table must immediately follow this code
+	@
+	mov	r0, sp
+	and	lr, lr, #0x0f
 	ldr	lr, [pc, lr, lsl #2]
-	movs	pc, lr				@ Changes mode and branches
+	movs	pc, lr				@ branch to handler in SVC mode
 	.endm
 
 	.globl	__stubs_start
@@ -771,7 +815,7 @@ __stubs_start:
 /*
  * Interrupt dispatcher
  */
-	vector_stub	irq, irq, 4
+	vector_stub	irq, 4
 
 	.long	__irq_usr			@  0  (USR_26 / USR_32)
 	.long	__irq_invalid			@  1  (FIQ_26 / FIQ_32)
@@ -794,7 +838,7 @@ __stubs_start:
  * Data abort dispatcher
  * Enter in ABT mode, spsr = USR CPSR, lr = USR PC
  */
-	vector_stub	dabt, abt, 8
+	vector_stub	dabt, 8
 
 	.long	__dabt_usr			@  0  (USR_26 / USR_32)
 	.long	__dabt_invalid			@  1  (FIQ_26 / FIQ_32)
@@ -817,7 +861,7 @@ __stubs_start:
  * Prefetch abort dispatcher
  * Enter in ABT mode, spsr = USR CPSR, lr = USR PC
  */
-	vector_stub	pabt, abt, 4
+	vector_stub	pabt, 4
 
 	.long	__pabt_usr			@  0  (USR_26 / USR_32)
 	.long	__pabt_invalid			@  1  (FIQ_26 / FIQ_32)
@@ -840,7 +884,7 @@ __stubs_start:
  * Undef instr entry dispatcher
  * Enter in UND mode, spsr = SVC/USR CPSR, lr = SVC/USR PC
  */
-	vector_stub	und, und
+	vector_stub	und
 
 	.long	__und_usr			@  0  (USR_26 / USR_32)
 	.long	__und_invalid			@  1  (FIQ_26 / FIQ_32)
@@ -894,13 +938,6 @@ vector_addrexcptn:
 .LCvswi:
 	.word	vector_swi
 
-.LCsirq:
-	.word	__temp_irq
-.LCsund:
-	.word	__temp_und
-.LCsabt:
-	.word	__temp_abt
-
 	.globl	__stubs_end
 __stubs_end:
 
@@ -922,23 +959,6 @@ __vectors_end:
 
 	.data
 
-/*
- * Do not reorder these, and do not insert extra data between...
- */
-
-__temp_irq:
-	.word	0				@ saved lr_irq
-	.word	0				@ saved spsr_irq
-	.word	-1				@ old_r0
-__temp_und:
-	.word	0				@ Saved lr_und
-	.word	0				@ Saved spsr_und
-	.word	-1				@ old_r0
-__temp_abt:
-	.word	0				@ Saved lr_abt
-	.word	0				@ Saved spsr_abt
-	.word	-1				@ old_r0
-
 	.globl	cr_alignment
 	.globl	cr_no_alignment
 cr_alignment:
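
The hunks above replace the fixed __temp_irq/__temp_und/__temp_abt save areas with a small per-mode, per-CPU stack: vector_stub now writes a three-word record through sp and hands its address to svc_entry/usr_entry in r0. A hedged C view of that record, with illustrative names and the layout taken from the stmia/str sequence above:

	#include <stdint.h>

	/*
	 * What vector_stub leaves at sp:
	 *   stmia sp, {r0, lr}  ->  word 0 = r0, word 1 = lr_<exception>
	 *   str   lr, [sp, #8]  ->  word 2 = spsr_<exception>
	 * svc_entry/usr_entry then read it back with "ldmia r0, {r1 - r3}".
	 */
	struct vector_record_sketch {
		uint32_t r0;	/* caller's r0, freed so r0 can carry &record */
		uint32_t pc;	/* lr_<exception>: parent-mode PC */
		uint32_t cpsr;	/* spsr_<exception>: parent-mode PSR */
	};

One such record exists per mode per CPU: exactly the u32 irq[3], abt[3] and und[3] members of the struct stack added to setup.c below.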
diff --git a/arch/arm/kernel/entry-header.S b/arch/arm/kernel/entry-header.S
index a3d40a0e2b04..afef21273963 100644
--- a/arch/arm/kernel/entry-header.S
+++ b/arch/arm/kernel/entry-header.S
@@ -59,11 +59,10 @@
 	mov	\rd, \rd, lsl #13
 	.endm
 
-	.macro	alignment_trap, rbase, rtemp, sym
+	.macro	alignment_trap, rtemp
 #ifdef CONFIG_ALIGNMENT_TRAP
-#define OFF_CR_ALIGNMENT(x)	cr_alignment - x
-
-	ldr	\rtemp, [\rbase, #OFF_CR_ALIGNMENT(\sym)]
+	ldr	\rtemp, .LCcralign
+	ldr	\rtemp, [\rtemp]
 	mcr	p15, 0, \rtemp, c1, c0
 #endif
 	.endm
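
In C terms, the reworked alignment_trap simply reloads the cached control-register value into CP15 c1. A minimal sketch, assuming only the cr_alignment variable kept in .data above (the function name is illustrative, not kernel API):

	extern unsigned long cr_alignment;	/* kept in .data, see entry-armv.S */

	static inline void alignment_trap_sketch(void)
	{
		/* ldr rtemp, .LCcralign ; ldr rtemp, [rtemp] */
		unsigned long cr = cr_alignment;
		/* mcr p15, 0, rtemp, c1, c0 -- rewrite the control register */
		__asm__ volatile("mcr p15, 0, %0, c1, c0" : : "r" (cr));
	}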
diff --git a/arch/arm/kernel/setup.c b/arch/arm/kernel/setup.c
index c2a7da3ac0f1..7ecdda3f1253 100644
--- a/arch/arm/kernel/setup.c
+++ b/arch/arm/kernel/setup.c
@@ -92,6 +92,14 @@ struct cpu_user_fns cpu_user;
 struct cpu_cache_fns cpu_cache;
 #endif
 
+struct stack {
+	u32 irq[3];
+	u32 abt[3];
+	u32 und[3];
+} ____cacheline_aligned;
+
+static struct stack stacks[NR_CPUS];
+
 char elf_platform[ELF_PLATFORM_SIZE];
 EXPORT_SYMBOL(elf_platform);
 
@@ -307,8 +315,6 @@ static void __init setup_processor(void)
 	       cpu_name, processor_id, (int)processor_id & 15,
 	       proc_arch[cpu_architecture()]);
 
-	dump_cpu_info(smp_processor_id());
-
 	sprintf(system_utsname.machine, "%s%c", list->arch_name, ENDIANNESS);
 	sprintf(elf_platform, "%s%c", list->elf_name, ENDIANNESS);
 	elf_hwcap = list->elf_hwcap;
@@ -316,6 +322,46 @@ static void __init setup_processor(void)
 	cpu_proc_init();
 }
 
+/*
+ * cpu_init - initialise one CPU.
+ *
+ * cpu_init dumps the cache information, initialises SMP specific
+ * information, and sets up the per-CPU stacks.
+ */
+void __init cpu_init(void)
+{
+	unsigned int cpu = smp_processor_id();
+	struct stack *stk = &stacks[cpu];
+
+	if (cpu >= NR_CPUS) {
+		printk(KERN_CRIT "CPU%u: bad primary CPU number\n", cpu);
+		BUG();
+	}
+
+	dump_cpu_info(cpu);
+
+	/*
+	 * setup stacks for re-entrant exception handlers
+	 */
+	__asm__ (
+	"msr	cpsr_c, %1\n\t"
+	"add	sp, %0, %2\n\t"
+	"msr	cpsr_c, %3\n\t"
+	"add	sp, %0, %4\n\t"
+	"msr	cpsr_c, %5\n\t"
+	"add	sp, %0, %6\n\t"
+	"msr	cpsr_c, %7"
+	    :
+	    : "r" (stk),
+	      "I" (PSR_F_BIT | PSR_I_BIT | IRQ_MODE),
+	      "I" (offsetof(struct stack, irq[0])),
+	      "I" (PSR_F_BIT | PSR_I_BIT | ABT_MODE),
+	      "I" (offsetof(struct stack, abt[0])),
+	      "I" (PSR_F_BIT | PSR_I_BIT | UND_MODE),
+	      "I" (offsetof(struct stack, und[0])),
+	      "I" (PSR_F_BIT | PSR_I_BIT | SVC_MODE));
+}
+
 static struct machine_desc * __init setup_machine(unsigned int nr)
 {
 	struct machine_desc *list;
@@ -715,6 +761,8 @@ void __init setup_arch(char **cmdline_p)
 	paging_init(&meminfo, mdesc);
 	request_standard_resources(&meminfo, mdesc);
 
+	cpu_init();
+
 	/*
 	 * Set up various architecture-specific pointers
 	 */
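
With cpu_init() made per-CPU, each processor can set up its own IRQ/ABT/UND stacks: the boot CPU calls it from setup_arch() as above, and an SMP secondary would call it from its own C entry point before enabling interrupts. A hedged sketch of such a caller; secondary_start_kernel_sketch stands in for the real arch/arm/kernel/smp.c bring-up path and is illustrative only:

	extern void cpu_init(void);

	void secondary_start_kernel_sketch(void)
	{
		cpu_init();	/* per-CPU exception stacks before IRQs are enabled */
		/* remainder of bring-up: calibrate, mark CPU online, cpu_idle() */
	}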