Diffstat (limited to 'arch/arm/kernel/entry-armv.S')
-rw-r--r--  arch/arm/kernel/entry-armv.S | 179
1 file changed, 106 insertions(+), 73 deletions(-)
diff --git a/arch/arm/kernel/entry-armv.S b/arch/arm/kernel/entry-armv.S
index fc8af43c5000..3d727a8a23bc 100644
--- a/arch/arm/kernel/entry-armv.S
+++ b/arch/arm/kernel/entry-armv.S
@@ -34,7 +34,7 @@
         @
         @ routine called with r0 = irq number, r1 = struct pt_regs *
         @
-        adrne lr, 1b
+        adrne lr, BSYM(1b)
         bne asm_do_IRQ
 
 #ifdef CONFIG_SMP
@@ -46,13 +46,13 @@
         */
         test_for_ipi r0, r6, r5, lr
         movne r0, sp
-        adrne lr, 1b
+        adrne lr, BSYM(1b)
         bne do_IPI
 
 #ifdef CONFIG_LOCAL_TIMERS
         test_for_ltirq r0, r6, r5, lr
         movne r0, sp
-        adrne lr, 1b
+        adrne lr, BSYM(1b)
         bne do_local_timer
 #endif
 #endif
@@ -70,7 +70,10 @@
         */
         .macro inv_entry, reason
         sub sp, sp, #S_FRAME_SIZE
-        stmib sp, {r1 - lr}
+ARM( stmib sp, {r1 - lr} )
+THUMB( stmia sp, {r0 - r12} )
+THUMB( str sp, [sp, #S_SP] )
+THUMB( str lr, [sp, #S_LR] )
         mov r1, #\reason
         .endm
 
@@ -126,17 +129,24 @@ ENDPROC(__und_invalid)
         .macro svc_entry, stack_hole=0
 UNWIND(.fnstart )
 UNWIND(.save {r0 - pc} )
-        sub sp, sp, #(S_FRAME_SIZE + \stack_hole)
+        sub sp, sp, #(S_FRAME_SIZE + \stack_hole - 4)
+#ifdef CONFIG_THUMB2_KERNEL
+SPFIX( str r0, [sp] ) @ temporarily saved
+SPFIX( mov r0, sp )
+SPFIX( tst r0, #4 ) @ test original stack alignment
+SPFIX( ldr r0, [sp] ) @ restored
+#else
 SPFIX( tst sp, #4 )
-SPFIX( bicne sp, sp, #4 )
-        stmib sp, {r1 - r12}
+#endif
+SPFIX( subeq sp, sp, #4 )
+        stmia sp, {r1 - r12}
 
         ldmia r0, {r1 - r3}
-        add r5, sp, #S_SP @ here for interlock avoidance
+        add r5, sp, #S_SP - 4 @ here for interlock avoidance
         mov r4, #-1 @ "" "" "" ""
-        add r0, sp, #(S_FRAME_SIZE + \stack_hole)
-SPFIX( addne r0, r0, #4 )
-        str r1, [sp] @ save the "real" r0 copied
+        add r0, sp, #(S_FRAME_SIZE + \stack_hole - 4)
+SPFIX( addeq r0, r0, #4 )
+        str r1, [sp, #-4]! @ save the "real" r0 copied
                                         @ from the exception stack
 
         mov r1, lr
@@ -151,6 +161,8 @@ ENDPROC(__und_invalid)
         @ r4 - orig_r0 (see pt_regs definition in ptrace.h)
         @
         stmia r5, {r0 - r4}
+
+        asm_trace_hardirqs_off
         .endm
 
         .align 5
@@ -196,9 +208,8 @@ __dabt_svc:
         @
         @ restore SPSR and restart the instruction
         @
-        ldr r0, [sp, #S_PSR]
-        msr spsr_cxsf, r0
-        ldmia sp, {r0 - pc}^ @ load r0 - pc, cpsr
+        ldr r2, [sp, #S_PSR]
+        svc_exit r2 @ return from exception
 UNWIND(.fnend )
 ENDPROC(__dabt_svc)
 
@@ -206,9 +217,6 @@ ENDPROC(__dabt_svc)
 __irq_svc:
         svc_entry
 
-#ifdef CONFIG_TRACE_IRQFLAGS
-        bl trace_hardirqs_off
-#endif
 #ifdef CONFIG_PREEMPT
         get_thread_info tsk
         ldr r8, [tsk, #TI_PREEMPT] @ get preempt count
@@ -225,13 +233,12 @@ __irq_svc:
         tst r0, #_TIF_NEED_RESCHED
         blne svc_preempt
 #endif
-        ldr r0, [sp, #S_PSR] @ irqs are already disabled
-        msr spsr_cxsf, r0
+        ldr r4, [sp, #S_PSR] @ irqs are already disabled
 #ifdef CONFIG_TRACE_IRQFLAGS
-        tst r0, #PSR_I_BIT
+        tst r4, #PSR_I_BIT
         bleq trace_hardirqs_on
 #endif
-        ldmia sp, {r0 - pc}^ @ load r0 - pc, cpsr
+        svc_exit r4 @ return from exception
 UNWIND(.fnend )
 ENDPROC(__irq_svc)
 
@@ -266,7 +273,7 @@ __und_svc:
         @ r0 - instruction
         @
         ldr r0, [r2, #-4]
-        adr r9, 1f
+        adr r9, BSYM(1f)
         bl call_fpe
 
         mov r0, sp @ struct pt_regs *regs
@@ -280,9 +287,8 @@ __und_svc:
         @
         @ restore SPSR and restart the instruction
         @
-        ldr lr, [sp, #S_PSR] @ Get SVC cpsr
-        msr spsr_cxsf, lr
-        ldmia sp, {r0 - pc}^ @ Restore SVC registers
+        ldr r2, [sp, #S_PSR] @ Get SVC cpsr
+        svc_exit r2 @ return from exception
 UNWIND(.fnend )
 ENDPROC(__und_svc)
 
@@ -323,9 +329,8 @@ __pabt_svc:
         @
         @ restore SPSR and restart the instruction
         @
-        ldr r0, [sp, #S_PSR]
-        msr spsr_cxsf, r0
-        ldmia sp, {r0 - pc}^ @ load r0 - pc, cpsr
+        ldr r2, [sp, #S_PSR]
+        svc_exit r2 @ return from exception
 UNWIND(.fnend )
 ENDPROC(__pabt_svc)
 
@@ -353,7 +358,8 @@ ENDPROC(__pabt_svc)
 UNWIND(.fnstart )
 UNWIND(.cantunwind ) @ don't unwind the user space
         sub sp, sp, #S_FRAME_SIZE
-        stmib sp, {r1 - r12}
+ARM( stmib sp, {r1 - r12} )
+THUMB( stmia sp, {r0 - r12} )
 
         ldmia r0, {r1 - r3}
         add r0, sp, #S_PC @ here for interlock avoidance
@@ -372,7 +378,8 @@ ENDPROC(__pabt_svc)
         @ Also, separately save sp_usr and lr_usr
         @
         stmia r0, {r2 - r4}
-        stmdb r0, {sp, lr}^
+ARM( stmdb r0, {sp, lr}^ )
+THUMB( store_user_sp_lr r0, r1, S_SP - S_PC )
 
         @
         @ Enable the alignment trap while in kernel mode
@@ -383,6 +390,8 @@ ENDPROC(__pabt_svc)
         @ Clear FP to mark the first stack frame
         @
         zero_fp
+
+        asm_trace_hardirqs_off
         .endm
 
         .macro kuser_cmpxchg_check
@@ -427,7 +436,7 @@ __dabt_usr:
         @
         enable_irq
         mov r2, sp
-        adr lr, ret_from_exception
+        adr lr, BSYM(ret_from_exception)
         b do_DataAbort
 UNWIND(.fnend )
 ENDPROC(__dabt_usr)
@@ -437,9 +446,6 @@ __irq_usr:
         usr_entry
         kuser_cmpxchg_check
 
-#ifdef CONFIG_TRACE_IRQFLAGS
-        bl trace_hardirqs_off
-#endif
         get_thread_info tsk
 #ifdef CONFIG_PREEMPT
         ldr r8, [tsk, #TI_PREEMPT] @ get preempt count
@@ -452,7 +458,9 @@ __irq_usr:
         ldr r0, [tsk, #TI_PREEMPT]
         str r8, [tsk, #TI_PREEMPT]
         teq r0, r7
-        strne r0, [r0, -r0]
+ARM( strne r0, [r0, -r0] )
+THUMB( movne r0, #0 )
+THUMB( strne r0, [r0] )
 #endif
 #ifdef CONFIG_TRACE_IRQFLAGS
         bl trace_hardirqs_on
@@ -476,9 +484,10 @@ __und_usr:
         @
         @ r0 - instruction
         @
-        adr r9, ret_from_exception
-        adr lr, __und_usr_unknown
+        adr r9, BSYM(ret_from_exception)
+        adr lr, BSYM(__und_usr_unknown)
         tst r3, #PSR_T_BIT @ Thumb mode?
+        itet eq @ explicit IT needed for the 1f label
         subeq r4, r2, #4 @ ARM instr at LR - 4
         subne r4, r2, #2 @ Thumb instr at LR - 2
 1:      ldreqt r0, [r4]
@@ -488,7 +497,10 @@ __und_usr:
         beq call_fpe
         @ Thumb instruction
 #if __LINUX_ARM_ARCH__ >= 7
-2:      ldrht r5, [r4], #2
+2:
+ARM( ldrht r5, [r4], #2 )
+THUMB( ldrht r5, [r4] )
+THUMB( add r4, r4, #2 )
         and r0, r5, #0xf800 @ mask bits 111x x... .... ....
         cmp r0, #0xe800 @ 32bit instruction if xx != 0
         blo __und_usr_unknown
@@ -577,9 +589,11 @@ call_fpe:
         moveq pc, lr
         get_thread_info r10 @ get current thread
         and r8, r0, #0x00000f00 @ mask out CP number
+THUMB( lsr r8, r8, #8 )
         mov r7, #1
         add r6, r10, #TI_USED_CP
-        strb r7, [r6, r8, lsr #8] @ set appropriate used_cp[]
+ARM( strb r7, [r6, r8, lsr #8] ) @ set appropriate used_cp[]
+THUMB( strb r7, [r6, r8] ) @ set appropriate used_cp[]
 #ifdef CONFIG_IWMMXT
         @ Test if we need to give access to iWMMXt coprocessors
         ldr r5, [r10, #TI_FLAGS]
@@ -587,36 +601,38 @@ call_fpe:
         movcss r7, r5, lsr #(TIF_USING_IWMMXT + 1)
         bcs iwmmxt_task_enable
 #endif
-        add pc, pc, r8, lsr #6
-        mov r0, r0
+ARM( add pc, pc, r8, lsr #6 )
+THUMB( lsl r8, r8, #2 )
+THUMB( add pc, r8 )
+        nop
 
-        mov pc, lr @ CP#0
-        b do_fpe @ CP#1 (FPE)
-        b do_fpe @ CP#2 (FPE)
-        mov pc, lr @ CP#3
+        W(mov) pc, lr @ CP#0
+        W(b) do_fpe @ CP#1 (FPE)
+        W(b) do_fpe @ CP#2 (FPE)
+        W(mov) pc, lr @ CP#3
 #ifdef CONFIG_CRUNCH
         b crunch_task_enable @ CP#4 (MaverickCrunch)
         b crunch_task_enable @ CP#5 (MaverickCrunch)
         b crunch_task_enable @ CP#6 (MaverickCrunch)
 #else
-        mov pc, lr @ CP#4
-        mov pc, lr @ CP#5
-        mov pc, lr @ CP#6
+        W(mov) pc, lr @ CP#4
+        W(mov) pc, lr @ CP#5
+        W(mov) pc, lr @ CP#6
 #endif
-        mov pc, lr @ CP#7
-        mov pc, lr @ CP#8
-        mov pc, lr @ CP#9
+        W(mov) pc, lr @ CP#7
+        W(mov) pc, lr @ CP#8
+        W(mov) pc, lr @ CP#9
 #ifdef CONFIG_VFP
-        b do_vfp @ CP#10 (VFP)
-        b do_vfp @ CP#11 (VFP)
+        W(b) do_vfp @ CP#10 (VFP)
+        W(b) do_vfp @ CP#11 (VFP)
 #else
-        mov pc, lr @ CP#10 (VFP)
-        mov pc, lr @ CP#11 (VFP)
+        W(mov) pc, lr @ CP#10 (VFP)
+        W(mov) pc, lr @ CP#11 (VFP)
 #endif
-        mov pc, lr @ CP#12
-        mov pc, lr @ CP#13
-        mov pc, lr @ CP#14 (Debug)
-        mov pc, lr @ CP#15 (Control)
+        W(mov) pc, lr @ CP#12
+        W(mov) pc, lr @ CP#13
+        W(mov) pc, lr @ CP#14 (Debug)
+        W(mov) pc, lr @ CP#15 (Control)
 
 #ifdef CONFIG_NEON
         .align 6
@@ -667,7 +683,7 @@ no_fp: mov pc, lr
 __und_usr_unknown:
         enable_irq
         mov r0, sp
-        adr lr, ret_from_exception
+        adr lr, BSYM(ret_from_exception)
         b do_undefinstr
 ENDPROC(__und_usr_unknown)
 
@@ -711,7 +727,10 @@ ENTRY(__switch_to)
 UNWIND(.cantunwind )
         add ip, r1, #TI_CPU_SAVE
         ldr r3, [r2, #TI_TP_VALUE]
-        stmia ip!, {r4 - sl, fp, sp, lr} @ Store most regs on stack
+ARM( stmia ip!, {r4 - sl, fp, sp, lr} ) @ Store most regs on stack
+THUMB( stmia ip!, {r4 - sl, fp} ) @ Store most regs on stack
+THUMB( str sp, [ip], #4 )
+THUMB( str lr, [ip], #4 )
 #ifdef CONFIG_MMU
         ldr r6, [r2, #TI_CPU_DOMAIN]
 #endif
@@ -736,8 +755,12 @@ ENTRY(__switch_to)
         ldr r0, =thread_notify_head
         mov r1, #THREAD_NOTIFY_SWITCH
         bl atomic_notifier_call_chain
+THUMB( mov ip, r4 )
         mov r0, r5
-        ldmia r4, {r4 - sl, fp, sp, pc} @ Load all regs saved previously
+ARM( ldmia r4, {r4 - sl, fp, sp, pc} ) @ Load all regs saved previously
+THUMB( ldmia ip!, {r4 - sl, fp} ) @ Load all regs saved previously
+THUMB( ldr sp, [ip], #4 )
+THUMB( ldr pc, [ip] )
 UNWIND(.fnend )
 ENDPROC(__switch_to)
 
@@ -772,6 +795,7 @@ ENDPROC(__switch_to)
  * if your compiled code is not going to use the new instructions for other
  * purpose.
  */
+THUMB( .arm )
 
         .macro usr_ret, reg
 #ifdef CONFIG_ARM_THUMB
@@ -1020,6 +1044,7 @@ __kuser_helper_version: @ 0xffff0ffc
         .globl __kuser_helper_end
 __kuser_helper_end:
 
+THUMB( .thumb )
 
 /*
  * Vector stubs.
@@ -1054,17 +1079,23 @@ vector_\name:
         @ Prepare for SVC32 mode. IRQs remain disabled.
         @
         mrs r0, cpsr
-        eor r0, r0, #(\mode ^ SVC_MODE)
+        eor r0, r0, #(\mode ^ SVC_MODE | PSR_ISETSTATE)
         msr spsr_cxsf, r0
 
         @
         @ the branch table must immediately follow this code
         @
         and lr, lr, #0x0f
+THUMB( adr r0, 1f )
+THUMB( ldr lr, [r0, lr, lsl #2] )
         mov r0, sp
-        ldr lr, [pc, lr, lsl #2]
+ARM( ldr lr, [pc, lr, lsl #2] )
         movs pc, lr @ branch to handler in SVC mode
 ENDPROC(vector_\name)
+
+        .align 2
+        @ handler addresses follow this label
+1:
         .endm
 
         .globl __stubs_start
@@ -1202,14 +1233,16 @@ __stubs_end:
 
         .globl __vectors_start
 __vectors_start:
-        swi SYS_ERROR0
-        b vector_und + stubs_offset
-        ldr pc, .LCvswi + stubs_offset
-        b vector_pabt + stubs_offset
-        b vector_dabt + stubs_offset
-        b vector_addrexcptn + stubs_offset
-        b vector_irq + stubs_offset
-        b vector_fiq + stubs_offset
+ARM( swi SYS_ERROR0 )
+THUMB( svc #0 )
+THUMB( nop )
+        W(b) vector_und + stubs_offset
+        W(ldr) pc, .LCvswi + stubs_offset
+        W(b) vector_pabt + stubs_offset
+        W(b) vector_dabt + stubs_offset
+        W(b) vector_addrexcptn + stubs_offset
+        W(b) vector_irq + stubs_offset
+        W(b) vector_fiq + stubs_offset
 
         .globl __vectors_end
 __vectors_end:
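
For reference, the ARM()/THUMB(), W() and BSYM() wrappers used throughout this patch come from the ARM/Thumb-2 unified assembly support (arch/arm/include/asm/unified.h). The sketch below shows roughly how they behave when CONFIG_THUMB2_KERNEL is set; it is an illustration written for this page, not text taken from the commit, so the exact definitions in the tree may differ slightly.

/* Illustrative sketch only -- approximate behaviour of the unified-assembly
 * helpers seen in the diff above; not copied from this commit. */
#ifdef CONFIG_THUMB2_KERNEL
#define ARM(x...)               /* ARM-only lines are dropped from a Thumb-2 build */
#define THUMB(x...)     x       /* Thumb-2-only lines are emitted */
#define W(instr)        instr.w /* force the 32-bit (wide) encoding */
#define BSYM(sym)       sym + 1 /* set bit 0 so the address marks a Thumb target */
#else
#define ARM(x...)       x       /* ARM-only lines pass through unchanged */
#define THUMB(x...)             /* Thumb-2-only lines are dropped */
#define W(instr)        instr
#define BSYM(sym)       sym
#endif

With these definitions, changes such as "adrne lr, BSYM(1b)" keep bit 0 set in addresses that are later used as return targets on a Thumb-2 kernel, and the W() forms keep the vector and coprocessor branch tables at a fixed 4-byte entry size regardless of instruction set.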