author		Nicolas Pitre <nico@cam.org>		2005-11-06 09:42:37 -0500
committer	Russell King <rmk+kernel@arm.linux.org.uk>	2005-11-06 09:42:37 -0500
commit		b7ec479553b8755dd95ee988a957cbf2aef351dc (patch)
tree		b2066a113c7e0bb1546564038c692f6f423315bf /arch/arm
parent		7240f1f183f085f6b7af44ec274b5b6123dfdead (diff)
[ARM] 3115/1: small optimizations to exception vector entry code
Patch from Nicolas Pitre

Since we know the value of cpsr on entry, we can replace the bic+orr
with a single eor. Also remove a possible result delay (at least on
XScale).

Signed-off-by: Nicolas Pitre <nico@cam.org>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
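The point of the eor is that each vector stub is entered in a fixed,
known exception mode, so clearing the mode field and then setting
SVC_MODE can be folded into one exclusive-or with the assembly-time
constant (\mode ^ SVC_MODE); bits outside the mode field are untouched
because the constant has none set there. A minimal standalone C sketch
(not part of the patch; it only assumes the standard ARM mode
encodings used by the kernel headers) that checks this equivalence:

	#include <assert.h>
	#include <stdint.h>
	#include <stdio.h>

	#define MODE_MASK 0x1f
	#define SVC_MODE  0x13
	#define IRQ_MODE  0x12
	#define ABT_MODE  0x17
	#define UND_MODE  0x1b

	/* old sequence: bic r0, r0, #MODE_MASK ; orr r0, r0, #SVC_MODE */
	static uint32_t old_way(uint32_t cpsr)
	{
		return (cpsr & ~MODE_MASK) | SVC_MODE;
	}

	/* new sequence: eor r0, r0, #(\mode ^ SVC_MODE) */
	static uint32_t new_way(uint32_t cpsr, uint32_t mode)
	{
		return cpsr ^ (mode ^ SVC_MODE);
	}

	int main(void)
	{
		uint32_t modes[] = { IRQ_MODE, ABT_MODE, UND_MODE };

		/* sweep the other cpsr bits; the mode bits equal the entry mode */
		for (int i = 0; i < 3; i++)
			for (uint32_t flags = 0; flags < 0x1000; flags++) {
				uint32_t cpsr = (flags << 5) | modes[i];
				assert(old_way(cpsr) == new_way(cpsr, modes[i]));
			}

		printf("eor matches bic+orr for all tested cpsr values\n");
		return 0;
	}

The same reasoning is why the macro now takes the entry mode as an
explicit parameter at each vector_stub invocation below.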
Diffstat (limited to 'arch/arm')
-rw-r--r--	arch/arm/kernel/entry-armv.S	15
1 file changed, 7 insertions(+), 8 deletions(-)
diff --git a/arch/arm/kernel/entry-armv.S b/arch/arm/kernel/entry-armv.S
index be439cab92c6..a511ec5b11a3 100644
--- a/arch/arm/kernel/entry-armv.S
+++ b/arch/arm/kernel/entry-armv.S
@@ -785,7 +785,7 @@ __kuser_helper_end:
  * SP points to a minimal amount of processor-private memory, the address
  * of which is copied into r0 for the mode specific abort handler.
  */
-	.macro	vector_stub, name, correction=0
+	.macro	vector_stub, name, mode, correction=0
 	.align	5
 
 vector_\name:
@@ -805,15 +805,14 @@ vector_\name:
 	@ Prepare for SVC32 mode.  IRQs remain disabled.
 	@
 	mrs	r0, cpsr
-	bic	r0, r0, #MODE_MASK
-	orr	r0, r0, #SVC_MODE
+	eor	r0, r0, #(\mode ^ SVC_MODE)
 	msr	spsr_cxsf, r0
 
 	@
 	@ the branch table must immediately follow this code
 	@
-	mov	r0, sp
 	and	lr, lr, #0x0f
+	mov	r0, sp
 	ldr	lr, [pc, lr, lsl #2]
 	movs	pc, lr			@ branch to handler in SVC mode
 	.endm
@@ -823,7 +822,7 @@ __stubs_start:
 /*
  * Interrupt dispatcher
  */
-	vector_stub	irq, 4
+	vector_stub	irq, IRQ_MODE, 4
 
 	.long	__irq_usr			@  0  (USR_26 / USR_32)
 	.long	__irq_invalid			@  1  (FIQ_26 / FIQ_32)
@@ -846,7 +845,7 @@ __stubs_start:
  * Data abort dispatcher
  * Enter in ABT mode, spsr = USR CPSR, lr = USR PC
  */
-	vector_stub	dabt, 8
+	vector_stub	dabt, ABT_MODE, 8
 
 	.long	__dabt_usr			@  0  (USR_26 / USR_32)
 	.long	__dabt_invalid			@  1  (FIQ_26 / FIQ_32)
@@ -869,7 +868,7 @@ __stubs_start:
  * Prefetch abort dispatcher
  * Enter in ABT mode, spsr = USR CPSR, lr = USR PC
  */
-	vector_stub	pabt, 4
+	vector_stub	pabt, ABT_MODE, 4
 
 	.long	__pabt_usr			@  0  (USR_26 / USR_32)
 	.long	__pabt_invalid			@  1  (FIQ_26 / FIQ_32)
@@ -892,7 +891,7 @@ __stubs_start:
  * Undef instr entry dispatcher
  * Enter in UND mode, spsr = SVC/USR CPSR, lr = SVC/USR PC
  */
-	vector_stub	und
+	vector_stub	und, UND_MODE
 
 	.long	__und_usr			@  0  (USR_26 / USR_32)
 	.long	__und_invalid			@  1  (FIQ_26 / FIQ_32)