Diffstat (limited to 'arch/arm/kernel/entry-armv.S')

 arch/arm/kernel/entry-armv.S | 272 ++++++++++++++++++++++++-----------------
 1 file changed, 149 insertions(+), 123 deletions(-)
diff --git a/arch/arm/kernel/entry-armv.S b/arch/arm/kernel/entry-armv.S
index 4eb36155dc93..39a6c1b0b9a3 100644
--- a/arch/arm/kernel/entry-armv.S
+++ b/arch/arm/kernel/entry-armv.S
@@ -24,48 +24,91 @@
 #include "entry-header.S"
 
 /*
+ * Interrupt handling.  Preserves r7, r8, r9
+ */
+	.macro	irq_handler
+1:	get_irqnr_and_base r0, r6, r5, lr
+	movne	r1, sp
+	@
+	@ routine called with r0 = irq number, r1 = struct pt_regs *
+	@
+	adrne	lr, 1b
+	bne	asm_do_IRQ
+
+#ifdef CONFIG_SMP
+	/*
+	 * XXX
+	 *
+	 * this macro assumes that irqstat (r6) and base (r5) are
+	 * preserved from get_irqnr_and_base above
+	 */
+	test_for_ipi r0, r6, r5, lr
+	movne	r0, sp
+	adrne	lr, 1b
+	bne	do_IPI
+#endif
+
+	.endm
+
+/*
  * Invalid mode handlers
  */
-	.macro	inv_entry, sym, reason
-	sub	sp, sp, #S_FRAME_SIZE		@ Allocate frame size in one go
-	stmia	sp, {r0 - lr}			@ Save XXX r0 - lr
-	ldr	r4, .LC\sym
+	.macro	inv_entry, reason
+	sub	sp, sp, #S_FRAME_SIZE
+	stmib	sp, {r1 - lr}
 	mov	r1, #\reason
 	.endm
 
 __pabt_invalid:
-	inv_entry abt, BAD_PREFETCH
-	b	1f
+	inv_entry BAD_PREFETCH
+	b	common_invalid
 
 __dabt_invalid:
-	inv_entry abt, BAD_DATA
-	b	1f
+	inv_entry BAD_DATA
+	b	common_invalid
 
 __irq_invalid:
-	inv_entry irq, BAD_IRQ
-	b	1f
+	inv_entry BAD_IRQ
+	b	common_invalid
 
 __und_invalid:
-	inv_entry und, BAD_UNDEFINSTR
+	inv_entry BAD_UNDEFINSTR
+
+	@
+	@ XXX fall through to common_invalid
+	@
+
+@
+@ common_invalid - generic code for failed exception (re-entrant version of handlers)
+@
+common_invalid:
+	zero_fp
+
+	ldmia	r0, {r4 - r6}
+	add	r0, sp, #S_PC		@ here for interlock avoidance
+	mov	r7, #-1			@  ""   ""    ""        ""
+	str	r4, [sp]		@ save preserved r0
+	stmia	r0, {r5 - r7}		@ lr_<exception>,
+					@ cpsr_<exception>, "old_r0"
 
-1:	zero_fp
-	ldmia	r4, {r5 - r7}			@ Get XXX pc, cpsr, old_r0
-	add	r4, sp, #S_PC
-	stmia	r4, {r5 - r7}			@ Save XXX pc, cpsr, old_r0
 	mov	r0, sp
-	and	r2, r6, #31			@ int mode
+	and	r2, r6, #0x1f
 	b	bad_mode
 
 /*
  * SVC mode handlers
  */
-	.macro	svc_entry, sym
+	.macro	svc_entry
 	sub	sp, sp, #S_FRAME_SIZE
-	stmia	sp, {r0 - r12}			@ save r0 - r12
-	ldr	r2, .LC\sym
-	add	r0, sp, #S_FRAME_SIZE
-	ldmia	r2, {r2 - r4}			@ get pc, cpsr
-	add	r5, sp, #S_SP
+	stmib	sp, {r1 - r12}
+
+	ldmia	r0, {r1 - r3}
+	add	r5, sp, #S_SP		@ here for interlock avoidance
+	mov	r4, #-1			@  ""  ""     ""        ""
+	add	r0, sp, #S_FRAME_SIZE	@  ""  ""     ""        ""
+	str	r1, [sp]		@ save the "real" r0 copied
+					@ from the exception stack
+
 	mov	r1, lr
 
 	@
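
Note on the reworked entry scheme: svc_entry and usr_entry no longer load a
fixed __temp_* scratch address; the vector stubs (changed later in this patch)
leave r0 pointing at a small per-mode exception stack holding the original r0,
lr_<exception> and spsr_<exception>, which is what the "ldmia r0, {r1 - r3}"
above picks up. A minimal C sketch of that three-word frame, matching the
stmia/str order in the reworked vector_stub below (the struct name is
illustrative, not from the kernel):

    /* Illustrative layout of the per-mode frame passed via r0; the field
     * order matches "stmia sp, {r0, lr}" followed by "str lr, [sp, #8]"
     * in the reworked vector_stub. */
    struct exc_frame {
            unsigned long r0;       /* interrupted context's r0 */
            unsigned long lr;       /* lr_<exception>: parent PC */
            unsigned long spsr;     /* spsr_<exception>: parent CPSR */
    };
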
@@ -82,7 +125,7 @@ __und_invalid:
 
 	.align	5
 __dabt_svc:
-	svc_entry abt
+	svc_entry
 
 	@
 	@ get ready to re-enable interrupts if appropriate
@@ -129,28 +172,24 @@ __dabt_svc:
 
 	.align	5
 __irq_svc:
-	svc_entry irq
+	svc_entry
+
 #ifdef CONFIG_PREEMPT
-	get_thread_info r8
-	ldr	r9, [r8, #TI_PREEMPT]		@ get preempt count
-	add	r7, r9, #1			@ increment it
-	str	r7, [r8, #TI_PREEMPT]
+	get_thread_info tsk
+	ldr	r8, [tsk, #TI_PREEMPT]		@ get preempt count
+	add	r7, r8, #1			@ increment it
+	str	r7, [tsk, #TI_PREEMPT]
 #endif
-1:	get_irqnr_and_base r0, r6, r5, lr
-	movne	r1, sp
-	@
-	@ routine called with r0 = irq number, r1 = struct pt_regs *
-	@
-	adrne	lr, 1b
-	bne	asm_do_IRQ
+
+	irq_handler
 #ifdef CONFIG_PREEMPT
-	ldr	r0, [r8, #TI_FLAGS]		@ get flags
+	ldr	r0, [tsk, #TI_FLAGS]		@ get flags
 	tst	r0, #_TIF_NEED_RESCHED
 	blne	svc_preempt
 preempt_return:
-	ldr	r0, [r8, #TI_PREEMPT]		@ read preempt value
+	ldr	r0, [tsk, #TI_PREEMPT]		@ read preempt value
+	str	r8, [tsk, #TI_PREEMPT]		@ restore preempt count
 	teq	r0, r7
-	str	r9, [r8, #TI_PREEMPT]		@ restore preempt count
 	strne	r0, [r0, -r0]			@ bug()
 #endif
 	ldr	r0, [sp, #S_PSR]		@ irqs are already disabled
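
The preempt-count bookkeeping wrapped around irq_handler is compact but
subtle; a rough C rendering of the sequence above (the names are stand-ins
for the tsk register, the macro and the deliberately faulting store, not
kernel API):

    /* Rough C equivalent of the CONFIG_PREEMPT bracket above. */
    struct thread_info { int preempt_count; };
    extern void irq_handler_c(void);    /* models the irq_handler macro */
    extern void BUG(void);              /* models strne r0, [r0, -r0] */

    static void irq_svc_preempt_bracket(struct thread_info *ti)
    {
            int old = ti->preempt_count;    /* ldr r8, [tsk, #TI_PREEMPT] */
            ti->preempt_count = old + 1;    /* add r7, r8, #1; str r7 */

            irq_handler_c();                /* handle pending IRQs (and IPIs) */

            int now = ti->preempt_count;    /* ldr r0, [tsk, #TI_PREEMPT] */
            ti->preempt_count = old;        /* str r8: restore preempt count */
            if (now != old + 1)             /* teq r0, r7 */
                    BUG();                  /* count was corrupted under us */
    }
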
@@ -161,7 +200,7 @@ preempt_return:
 
 #ifdef CONFIG_PREEMPT
 svc_preempt:
-	teq	r9, #0				@ was preempt count = 0
+	teq	r8, #0				@ was preempt count = 0
 	ldreq	r6, .LCirq_stat
 	movne	pc, lr				@ no
 	ldr	r0, [r6, #4]			@ local_irq_count
@@ -169,9 +208,9 @@ svc_preempt:
 	adds	r0, r0, r1
 	movne	pc, lr
 	mov	r7, #0				@ preempt_schedule_irq
-	str	r7, [r8, #TI_PREEMPT]		@ expects preempt_count == 0
+	str	r7, [tsk, #TI_PREEMPT]		@ expects preempt_count == 0
 1:	bl	preempt_schedule_irq		@ irq en/disable is done inside
-	ldr	r0, [r8, #TI_FLAGS]		@ get new tasks TI_FLAGS
+	ldr	r0, [tsk, #TI_FLAGS]		@ get new tasks TI_FLAGS
 	tst	r0, #_TIF_NEED_RESCHED
 	beq	preempt_return			@ go again
 	b	1b
@@ -179,7 +218,7 @@ svc_preempt:
 
 	.align	5
 __und_svc:
-	svc_entry und
+	svc_entry
 
 	@
 	@ call emulation code, which returns using r9 if it has emulated
@@ -209,7 +248,7 @@ __und_svc:
 
 	.align	5
 __pabt_svc:
-	svc_entry abt
+	svc_entry
 
 	@
 	@ re-enable interrupts if appropriate
@@ -242,12 +281,8 @@ __pabt_svc:
 	ldmia	sp, {r0 - pc}^			@ load r0 - pc, cpsr
 
 	.align	5
-.LCirq:
-	.word	__temp_irq
-.LCund:
-	.word	__temp_und
-.LCabt:
-	.word	__temp_abt
+.LCcralign:
+	.word	cr_alignment
 #ifdef MULTI_ABORT
 .LCprocfns:
 	.word	processor
@@ -262,14 +297,18 @@ __pabt_svc:
 /*
  * User mode handlers
  */
-	.macro	usr_entry, sym
-	sub	sp, sp, #S_FRAME_SIZE		@ Allocate frame size in one go
-	stmia	sp, {r0 - r12}			@ save r0 - r12
-	ldr	r7, .LC\sym
-	add	r5, sp, #S_PC
-	ldmia	r7, {r2 - r4}			@ Get USR pc, cpsr
-
-#if __LINUX_ARM_ARCH__ < 6
+	.macro	usr_entry
+	sub	sp, sp, #S_FRAME_SIZE
+	stmib	sp, {r1 - r12}
+
+	ldmia	r0, {r1 - r3}
+	add	r0, sp, #S_PC		@ here for interlock avoidance
+	mov	r4, #-1			@  ""  ""     ""        ""
+
+	str	r1, [sp]		@ save the "real" r0 copied
+					@ from the exception stack
+
+#if __LINUX_ARM_ARCH__ < 6 && !defined(CONFIG_NEEDS_SYSCALL_FOR_CMPXCHG)
 	@ make sure our user space atomic helper is aborted
 	cmp	r2, #VIRT_OFFSET
 	bichs	r3, r3, #PSR_Z_BIT
@@ -284,13 +323,13 @@ __pabt_svc:
 	@
 	@ Also, separately save sp_usr and lr_usr
 	@
-	stmia	r5, {r2 - r4}
-	stmdb	r5, {sp, lr}^
+	stmia	r0, {r2 - r4}
+	stmdb	r0, {sp, lr}^
 
 	@
 	@ Enable the alignment trap while in kernel mode
 	@
-	alignment_trap r7, r0, __temp_\sym
+	alignment_trap r0
 
 	@
 	@ Clear FP to mark the first stack frame
@@ -300,7 +339,7 @@ __pabt_svc:
 
 	.align	5
 __dabt_usr:
-	usr_entry abt
+	usr_entry
 
 	@
 	@ Call the processor-specific abort handler:
@@ -329,30 +368,23 @@ __dabt_usr:
 
 	.align	5
 __irq_usr:
-	usr_entry irq
+	usr_entry
 
+	get_thread_info tsk
 #ifdef CONFIG_PREEMPT
-	get_thread_info r8
-	ldr	r9, [r8, #TI_PREEMPT]		@ get preempt count
-	add	r7, r9, #1			@ increment it
-	str	r7, [r8, #TI_PREEMPT]
+	ldr	r8, [tsk, #TI_PREEMPT]		@ get preempt count
+	add	r7, r8, #1			@ increment it
+	str	r7, [tsk, #TI_PREEMPT]
 #endif
-1:	get_irqnr_and_base r0, r6, r5, lr
-	movne	r1, sp
-	adrne	lr, 1b
-	@
-	@ routine called with r0 = irq number, r1 = struct pt_regs *
-	@
-	bne	asm_do_IRQ
+
+	irq_handler
 #ifdef CONFIG_PREEMPT
-	ldr	r0, [r8, #TI_PREEMPT]
+	ldr	r0, [tsk, #TI_PREEMPT]
+	str	r8, [tsk, #TI_PREEMPT]
 	teq	r0, r7
-	str	r9, [r8, #TI_PREEMPT]
 	strne	r0, [r0, -r0]
-	mov	tsk, r8
-#else
-	get_thread_info tsk
 #endif
+
 	mov	why, #0
 	b	ret_to_user
 
@@ -360,7 +392,7 @@ __irq_usr:
 
 	.align	5
 __und_usr:
-	usr_entry und
+	usr_entry
 
 	tst	r3, #PSR_T_BIT			@ Thumb mode?
 	bne	fpundefinstr			@ ignore FP
@@ -476,7 +508,7 @@ fpundefinstr:
 
 	.align	5
 __pabt_usr:
-	usr_entry abt
+	usr_entry
 
 	enable_irq			@ Enable interrupts
 	mov	r0, r2			@ address (pc)
@@ -616,11 +648,17 @@ __kuser_helper_start:
 
 __kuser_cmpxchg:				@ 0xffff0fc0
 
-#if __LINUX_ARM_ARCH__ < 6
+#if defined(CONFIG_NEEDS_SYSCALL_FOR_CMPXCHG)
 
-#ifdef CONFIG_SMP  /* sanity check */
-#error "CONFIG_SMP on a machine supporting pre-ARMv6 processors?"
-#endif
+	/*
+	 * Poor you.  No fast solution possible...
+	 * The kernel itself must perform the operation.
+	 * A special ghost syscall is used for that (see traps.c).
+	 */
+	swi	#0x9ffff0
+	mov	pc, lr
+
+#elif __LINUX_ARM_ARCH__ < 6
 
 	/*
 	 * Theory of operation:
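
Seen from user space, this change is invisible: the helper keeps its fixed
address in the vector page, and a caller cannot tell whether the ghost
syscall or the load-exclusive path did the work. A sketch of a caller,
assuming the helper's convention of r0 = oldval, r1 = newval, r2 = ptr and
a zero return on success (the wrapper names are illustrative):

    /* User-space sketch: the fixed vector-page address is the ABI. */
    typedef int (kuser_cmpxchg_t)(int oldval, int newval, volatile int *ptr);
    #define kuser_cmpxchg (*(kuser_cmpxchg_t *)0xffff0fc0)

    static int atomic_add_one(volatile int *counter)
    {
            int old;
            do {
                    old = *counter;         /* retry if someone races us */
            } while (kuser_cmpxchg(old, old + 1, counter) != 0);
            return old + 1;
    }
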
@@ -735,29 +773,41 @@ __kuser_helper_end:
  *
  * Common stub entry macro:
  *   Enter in IRQ mode, spsr = SVC/USR CPSR, lr = SVC/USR PC
+ *
+ * SP points to a minimal amount of processor-private memory, the address
+ * of which is copied into r0 for the mode specific abort handler.
  */
-	.macro	vector_stub, name, sym, correction=0
+	.macro	vector_stub, name, correction=0
 	.align	5
 
 vector_\name:
-	ldr	r13, .LCs\sym
 	.if \correction
 	sub	lr, lr, #\correction
 	.endif
-	str	lr, [r13]			@ save lr_IRQ
+
+	@
+	@ Save r0, lr_<exception> (parent PC) and spsr_<exception>
+	@ (parent CPSR)
+	@
+	stmia	sp, {r0, lr}		@ save r0, lr
 	mrs	lr, spsr
-	str	lr, [r13, #4]			@ save spsr_IRQ
+	str	lr, [sp, #8]		@ save spsr
+
 	@
-	@ now branch to the relevant MODE handling routine
+	@ Prepare for SVC32 mode.  IRQs remain disabled.
 	@
-	mrs	r13, cpsr
-	bic	r13, r13, #MODE_MASK
-	orr	r13, r13, #SVC_MODE
-	msr	spsr_cxsf, r13			@ switch to SVC_32 mode
+	mrs	r0, cpsr
+	bic	r0, r0, #MODE_MASK
+	orr	r0, r0, #SVC_MODE
+	msr	spsr_cxsf, r0
 
-	and	lr, lr, #15
+	@
+	@ the branch table must immediately follow this code
+	@
+	mov	r0, sp
+	and	lr, lr, #0x0f
 	ldr	lr, [pc, lr, lsl #2]
-	movs	pc, lr			@ Changes mode and branches
+	movs	pc, lr			@ branch to handler in SVC mode
 	.endm
 
 	.globl	__stubs_start
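
The tail of the stub is a 16-way dispatch: the parent mode bits left in lr
index the word table that must directly follow "movs pc, lr". Conceptually
(illustrative C model, not kernel code):

    /* Illustrative model of the stub's mode dispatch. */
    typedef void (*exc_handler_t)(void);

    static exc_handler_t pick_handler(unsigned long spsr,
                                      exc_handler_t table[16])
    {
            /* and lr, lr, #0x0f: the mode bits of the interrupted
             * context (USR=0, FIQ=1, IRQ=2, SVC=3, ...) select the
             * entry; ldr lr, [pc, lr, lsl #2] fetches it from the
             * table placed immediately after the macro body. */
            return table[spsr & 0x0f];
    }
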
@@ -765,7 +815,7 @@ __stubs_start:
 /*
  * Interrupt dispatcher
  */
-	vector_stub	irq, irq, 4
+	vector_stub	irq, 4
 
 	.long	__irq_usr			@  0  (USR_26 / USR_32)
 	.long	__irq_invalid			@  1  (FIQ_26 / FIQ_32)
@@ -788,7 +838,7 @@ __stubs_start:
  * Data abort dispatcher
  * Enter in ABT mode, spsr = USR CPSR, lr = USR PC
  */
-	vector_stub	dabt, abt, 8
+	vector_stub	dabt, 8
 
 	.long	__dabt_usr			@  0  (USR_26 / USR_32)
 	.long	__dabt_invalid			@  1  (FIQ_26 / FIQ_32)
@@ -811,7 +861,7 @@ __stubs_start:
  * Prefetch abort dispatcher
  * Enter in ABT mode, spsr = USR CPSR, lr = USR PC
  */
-	vector_stub	pabt, abt, 4
+	vector_stub	pabt, 4
 
 	.long	__pabt_usr			@  0 (USR_26 / USR_32)
 	.long	__pabt_invalid			@  1 (FIQ_26 / FIQ_32)
@@ -834,7 +884,7 @@ __stubs_start:
  * Undef instr entry dispatcher
  * Enter in UND mode, spsr = SVC/USR CPSR, lr = SVC/USR PC
  */
-	vector_stub	und, und
+	vector_stub	und
 
 	.long	__und_usr			@  0 (USR_26 / USR_32)
 	.long	__und_invalid			@  1 (FIQ_26 / FIQ_32)
@@ -888,13 +938,6 @@ vector_addrexcptn:
 .LCvswi:
 	.word	vector_swi
 
-.LCsirq:
-	.word	__temp_irq
-.LCsund:
-	.word	__temp_und
-.LCsabt:
-	.word	__temp_abt
-
 	.globl	__stubs_end
 __stubs_end:
 
@@ -916,23 +959,6 @@ __vectors_end:
 
 	.data
 
-/*
- * Do not reorder these, and do not insert extra data between...
- */
-
-__temp_irq:
-	.word	0				@ saved lr_irq
-	.word	0				@ saved spsr_irq
-	.word	-1				@ old_r0
-__temp_und:
-	.word	0				@ Saved lr_und
-	.word	0				@ Saved spsr_und
-	.word	-1				@ old_r0
-__temp_abt:
-	.word	0				@ Saved lr_abt
-	.word	0				@ Saved spsr_abt
-	.word	-1				@ old_r0
-
 	.globl	cr_alignment
 	.globl	cr_no_alignment
 cr_alignment: