path: root/arch/arm/kernel/entry-header.S
author		Catalin Marinas <catalin.marinas@arm.com>	2009-09-18 18:27:05 -0400
committer	Catalin Marinas <catalin.marinas@arm.com>	2009-09-18 18:30:11 -0400
commit		200b812d0084f800bc52465e273b118ff5f8141f (patch)
tree		6dbef78960ad7b83a76df064751275913583068c /arch/arm/kernel/entry-header.S
parent		df58bee21ed218cb7dfb561a590b1bd2a99531cf (diff)
Clear the exclusive monitor when returning from an exception
The patch adds a CLREX or dummy STREX to the exception return path. This
is needed because several atomic/locking operations use a pair of
LDREX/STREXEQ and the EQ condition may not always be satisfied. This
would leave the exclusive monitor status set and may cause problems with
atomic/locking operations in the interrupted code.

With this patch, the atomic_set() operation can be a simple STR
instruction (on SMP systems, the global exclusive monitor is cleared by
STR anyway). Clearing the exclusive monitor during context switch is no
longer needed as this is handled by the exception return path anyway.

Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
Reported-by: Jamie Lokier <jamie@shareable.org>
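For context, a minimal sketch of the LDREX/STREXEQ pattern the message
refers to (illustrative only, not code from this patch; the register
assignments and label are hypothetical):

	@ A cmpxchg-style sequence built on LDREX/STREXEQ.
	@ Assumed: r0 = address, r1 = expected value, r2 = new value.
1:	ldrex	r3, [r0]		@ load-exclusive: sets the monitor
	cmp	r3, r1			@ does memory hold the expected value?
	strexeq	r3, r2, [r0]		@ store only if EQ; the store clears the monitor
	@ If the CMP fails, STREXEQ never executes and the exclusive
	@ monitor is left set; a later, unrelated STREX in the
	@ interrupted code could then succeed when it should fail.

The patch therefore clears the monitor on every exception return: CLREX
where the instruction exists (CONFIG_CPU_32v6K and the Thumb-2 kernel),
and a dummy STREX to [sp] on plain ARMv6, which lacks CLREX. Since the
dummy STREX may actually write r2 to [sp], the svc_exit path loads r0
from the stack first and then restores the remaining registers with
LDMIB from sp + 4.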
Diffstat (limited to 'arch/arm/kernel/entry-header.S')
-rw-r--r--	arch/arm/kernel/entry-header.S	| 14 ++++++++++++++
1 file changed, 14 insertions(+), 0 deletions(-)
diff --git a/arch/arm/kernel/entry-header.S b/arch/arm/kernel/entry-header.S
index a4eaf4f920c5..e17e3c30d957 100644
--- a/arch/arm/kernel/entry-header.S
+++ b/arch/arm/kernel/entry-header.S
@@ -76,13 +76,25 @@
 #ifndef CONFIG_THUMB2_KERNEL
 	.macro	svc_exit, rpsr
 	msr	spsr_cxsf, \rpsr
+#if defined(CONFIG_CPU_32v6K)
+	clrex					@ clear the exclusive monitor
 	ldmia	sp, {r0 - pc}^			@ load r0 - pc, cpsr
+#elif defined (CONFIG_CPU_V6)
+	ldr	r0, [sp]
+	strex	r1, r2, [sp]			@ clear the exclusive monitor
+	ldmib	sp, {r1 - pc}^			@ load r1 - pc, cpsr
+#endif
 	.endm
 
 	.macro	restore_user_regs, fast = 0, offset = 0
 	ldr	r1, [sp, #\offset + S_PSR]	@ get calling cpsr
 	ldr	lr, [sp, #\offset + S_PC]!	@ get pc
 	msr	spsr_cxsf, r1			@ save in spsr_svc
+#if defined(CONFIG_CPU_32v6K)
+	clrex					@ clear the exclusive monitor
+#elif defined (CONFIG_CPU_V6)
+	strex	r1, r2, [sp]			@ clear the exclusive monitor
+#endif
 	.if	\fast
 	ldmdb	sp, {r1 - lr}^			@ get calling r1 - lr
 	.else
@@ -98,6 +110,7 @@
 	.endm
 #else	/* CONFIG_THUMB2_KERNEL */
 	.macro	svc_exit, rpsr
+	clrex					@ clear the exclusive monitor
 	ldr	r0, [sp, #S_SP]			@ top of the stack
 	ldr	r1, [sp, #S_PC]			@ return address
 	tst	r0, #4				@ orig stack 8-byte aligned?
@@ -110,6 +123,7 @@
 	.endm
 
 	.macro	restore_user_regs, fast = 0, offset = 0
+	clrex					@ clear the exclusive monitor
 	mov	r2, sp
 	load_user_sp_lr r2, r3, \offset + S_SP	@ calling sp, lr
 	ldr	r1, [sp, #\offset + S_PSR]	@ get calling cpsr