Diffstat (limited to 'arch/arm/kernel')
-rw-r--r--  arch/arm/kernel/entry-armv.S   |  7 -------
-rw-r--r--  arch/arm/kernel/entry-header.S | 14 ++++++++++++++
2 files changed, 14 insertions, 7 deletions
diff --git a/arch/arm/kernel/entry-armv.S b/arch/arm/kernel/entry-armv.S
index 3d727a8a23bc..a332bc7225bf 100644
--- a/arch/arm/kernel/entry-armv.S
+++ b/arch/arm/kernel/entry-armv.S
@@ -734,13 +734,6 @@ ENTRY(__switch_to)
 #ifdef CONFIG_MMU
 	ldr	r6, [r2, #TI_CPU_DOMAIN]
 #endif
-#if __LINUX_ARM_ARCH__ >= 6
-#ifdef CONFIG_CPU_32v6K
-	clrex
-#else
-	strex	r5, r4, [ip]			@ Clear exclusive monitor
-#endif
-#endif
 #if defined(CONFIG_HAS_TLS_REG)
 	mcr	p15, 0, r3, c13, c0, 3		@ set TLS register
 #elif !defined(CONFIG_TLS_REG_EMUL)
diff --git a/arch/arm/kernel/entry-header.S b/arch/arm/kernel/entry-header.S
index a4eaf4f920c5..e17e3c30d957 100644
--- a/arch/arm/kernel/entry-header.S
+++ b/arch/arm/kernel/entry-header.S
@@ -76,13 +76,25 @@
 #ifndef CONFIG_THUMB2_KERNEL
 	.macro	svc_exit, rpsr
 	msr	spsr_cxsf, \rpsr
+#if defined(CONFIG_CPU_32v6K)
+	clrex					@ clear the exclusive monitor
 	ldmia	sp, {r0 - pc}^			@ load r0 - pc, cpsr
+#elif defined (CONFIG_CPU_V6)
+	ldr	r0, [sp]
+	strex	r1, r2, [sp]			@ clear the exclusive monitor
+	ldmib	sp, {r1 - pc}^			@ load r1 - pc, cpsr
+#endif
 	.endm
 
 	.macro	restore_user_regs, fast = 0, offset = 0
 	ldr	r1, [sp, #\offset + S_PSR]	@ get calling cpsr
 	ldr	lr, [sp, #\offset + S_PC]!	@ get pc
 	msr	spsr_cxsf, r1			@ save in spsr_svc
+#if defined(CONFIG_CPU_32v6K)
+	clrex					@ clear the exclusive monitor
+#elif defined (CONFIG_CPU_V6)
+	strex	r1, r2, [sp]			@ clear the exclusive monitor
+#endif
 	.if	\fast
 	ldmdb	sp, {r1 - lr}^			@ get calling r1 - lr
 	.else
@@ -98,6 +110,7 @@
 	.endm
 #else	/* CONFIG_THUMB2_KERNEL */
 	.macro	svc_exit, rpsr
+	clrex					@ clear the exclusive monitor
 	ldr	r0, [sp, #S_SP]			@ top of the stack
 	ldr	r1, [sp, #S_PC]			@ return address
 	tst	r0, #4				@ orig stack 8-byte aligned?
@@ -110,6 +123,7 @@
 	.endm
 
 	.macro	restore_user_regs, fast = 0, offset = 0
+	clrex					@ clear the exclusive monitor
 	mov	r2, sp
 	load_user_sp_lr r2, r3, \offset + S_SP	@ calling sp, lr
 	ldr	r1, [sp, #\offset + S_PSR]	@ get calling cpsr
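
Background note (not part of the patch): the code moved here manages the ARMv6+ local exclusive monitor that backs ldrex/strex. clrex clears it directly on CPUs with the v6K extensions; plain ARMv6 (CONFIG_CPU_V6) has no clrex, so a dummy strex to a harmless location is used instead. As a purely illustrative sketch, assuming r1 points to a word-aligned counter, a typical sequence that relies on this monitor state looks like:

	@ Illustrative sketch only, not taken from the kernel: an atomic
	@ increment built on the exclusive monitor.
1:	ldrex	r0, [r1]		@ load the counter, set the monitor
	add	r0, r0, #1		@ compute the new value
	strex	r2, r0, [r1]		@ try to store it; r2 = 0 on success
	teq	r2, #0			@ did the exclusive store succeed?
	bne	1b			@ no - retry the whole sequence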