diff options
Diffstat (limited to 'arch/arm/kernel/entry-header.S')
-rw-r--r-- | arch/arm/kernel/entry-header.S | 124 |
1 file changed, 124 insertions, 0 deletions
diff --git a/arch/arm/kernel/entry-header.S b/arch/arm/kernel/entry-header.S
index 160f3376ba6d..de23a9beed13 100644
--- a/arch/arm/kernel/entry-header.S
+++ b/arch/arm/kernel/entry-header.S
@@ -5,6 +5,7 @@ | |||
5 | #include <asm/asm-offsets.h> | 5 | #include <asm/asm-offsets.h> |
6 | #include <asm/errno.h> | 6 | #include <asm/errno.h> |
7 | #include <asm/thread_info.h> | 7 | #include <asm/thread_info.h> |
8 | #include <asm/v7m.h> | ||
8 | 9 | ||
9 | @ Bad Abort numbers | 10 | @ Bad Abort numbers |
10 | @ ----------------- | 11 | @ ----------------- |
@@ -44,6 +45,116 @@ | |||
44 | #endif | 45 | #endif |
45 | .endm | 46 | .endm |
46 | 47 | ||
48 | #ifdef CONFIG_CPU_V7M | ||
49 | /* | ||
50 | * ARMv7-M exception entry/exit macros. | ||
51 | * | ||
52 | * xPSR, ReturnAddress(), LR (R14), R12, R3, R2, R1, and R0 are | ||
53 | * automatically saved on the current stack (32 bytes) before | ||
54 | * switching to the exception stack (SP_main). | ||
55 | * | ||
56 | * If exception is taken while in user mode, SP_main is | ||
57 | * empty. Otherwise, SP_main is aligned to 64 bit automatically | ||
58 | * (CCR.STKALIGN set). | ||
59 | * | ||
60 | * Linux assumes that the interrupts are disabled when entering an | ||
61 | * exception handler and it may BUG if this is not the case. Interrupts | ||
62 | * are disabled during entry and reenabled in the exit macro. | ||
63 | * | ||
64 | * v7m_exception_slow_exit is used when returning from SVC or PendSV. | ||
65 | * When returning to kernel mode, we don't return from exception. | ||
66 | */ | ||
67 | .macro v7m_exception_entry | ||
68 | @ determine the location of the registers saved by the core during | ||
69 | @ exception entry. Depending on the mode the cpu was in when the | ||
70 | @ exception happened that is either on the main or the process stack. | ||
71 | @ Bit 2 of EXC_RETURN stored in the lr register specifies which stack | ||
72 | @ was used. | ||
73 | tst lr, #EXC_RET_STACK_MASK | ||
74 | mrsne r12, psp | ||
75 | moveq r12, sp | ||
76 | |||
77 | @ we cannot rely on r0-r3 and r12 matching the value saved in the | ||
78 | @ exception frame because of tail-chaining. So these have to be | ||
79 | @ reloaded. | ||
80 | ldmia r12!, {r0-r3} | ||
81 | |||
82 | @ Linux expects to have irqs off. Do it here before taking stack space | ||
83 | cpsid i | ||
84 | |||
85 | sub sp, #S_FRAME_SIZE-S_IP | ||
86 | stmdb sp!, {r0-r11} | ||
87 | |||
88 | @ load saved r12, lr, return address and xPSR. | ||
89 | @ r0-r7 are used for signals and never touched from now on. Clobbering | ||
90 | @ r8-r12 is OK. | ||
91 | mov r9, r12 | ||
92 | ldmia r9!, {r8, r10-r12} | ||
93 | |||
94 | @ calculate the original stack pointer value. | ||
95 | @ r9 currently points to the memory location just above the auto saved | ||
96 | @ xPSR. | ||
97 | @ The cpu might automatically 8-byte align the stack. Bit 9 | ||
98 | @ of the saved xPSR specifies if stack aligning took place. In this case | ||
99 | @ another 32-bit value is included in the stack. | ||
100 | |||
101 | tst r12, V7M_xPSR_FRAMEPTRALIGN | ||
102 | addne r9, r9, #4 | ||
103 | |||
104 | @ store saved r12 using str to have a register to hold the base for stm | ||
105 | str r8, [sp, #S_IP] | ||
106 | add r8, sp, #S_SP | ||
107 | @ store r13-r15, xPSR | ||
108 | stmia r8!, {r9-r12} | ||
109 | @ store old_r0 | ||
110 | str r0, [r8] | ||
111 | .endm | ||
112 | |||
113 | /* | ||
114 | * PENDSV and SVCALL are configured to have the same exception | ||
115 | * priorities. As a kernel thread runs at SVCALL execution priority it | ||
116 | * can never be preempted and so we will never have to return to a | ||
117 | * kernel thread here. | ||
118 | */ | ||
119 | .macro v7m_exception_slow_exit ret_r0 | ||
120 | cpsid i | ||
121 | ldr lr, =EXC_RET_THREADMODE_PROCESSSTACK | ||
122 | |||
123 | @ read original r12, sp, lr, pc and xPSR | ||
124 | add r12, sp, #S_IP | ||
125 | ldmia r12, {r1-r5} | ||
126 | |||
127 | @ an exception frame is always 8-byte aligned. To tell the hardware if | ||
128 | @ the sp to be restored is aligned or not set bit 9 of the saved xPSR | ||
129 | @ accordingly. | ||
130 | tst r2, #4 | ||
131 | subne r2, r2, #4 | ||
132 | orrne r5, V7M_xPSR_FRAMEPTRALIGN | ||
133 | biceq r5, V7M_xPSR_FRAMEPTRALIGN | ||
134 | |||
135 | @ write basic exception frame | ||
136 | stmdb r2!, {r1, r3-r5} | ||
137 | ldmia sp, {r1, r3-r5} | ||
138 | .if \ret_r0 | ||
139 | stmdb r2!, {r0, r3-r5} | ||
140 | .else | ||
141 | stmdb r2!, {r1, r3-r5} | ||
142 | .endif | ||
143 | |||
144 | @ restore process sp | ||
145 | msr psp, r2 | ||
146 | |||
147 | @ restore original r4-r11 | ||
148 | ldmia sp!, {r0-r11} | ||
149 | |||
150 | @ restore main sp | ||
151 | add sp, sp, #S_FRAME_SIZE-S_IP | ||
152 | |||
153 | cpsie i | ||
154 | bx lr | ||
155 | .endm | ||
156 | #endif /* CONFIG_CPU_V7M */ | ||
157 | |||
47 | @ | 158 | @ |
48 | @ Store/load the USER SP and LR registers by switching to the SYS | 159 | @ Store/load the USER SP and LR registers by switching to the SYS |
49 | @ mode. Useful in Thumb-2 mode where "stm/ldm rd, {sp, lr}^" is not | 160 | @ mode. Useful in Thumb-2 mode where "stm/ldm rd, {sp, lr}^" is not |
@@ -165,6 +276,18 @@ | |||
165 | rfeia sp! | 276 | rfeia sp! |
166 | .endm | 277 | .endm |
167 | 278 | ||
279 | #ifdef CONFIG_CPU_V7M | ||
280 | /* | ||
281 | * Note we don't need to do clrex here as clearing the local monitor is | ||
282 | * part of each exception entry and exit sequence. | ||
283 | */ | ||
284 | .macro restore_user_regs, fast = 0, offset = 0 | ||
285 | .if \offset | ||
286 | add sp, #\offset | ||
287 | .endif | ||
288 | v7m_exception_slow_exit ret_r0 = \fast | ||
289 | .endm | ||
290 | #else /* ifdef CONFIG_CPU_V7M */ | ||
168 | .macro restore_user_regs, fast = 0, offset = 0 | 291 | .macro restore_user_regs, fast = 0, offset = 0 |
169 | clrex @ clear the exclusive monitor | 292 | clrex @ clear the exclusive monitor |
170 | mov r2, sp | 293 | mov r2, sp |
@@ -181,6 +304,7 @@ | |||
181 | add sp, sp, #S_FRAME_SIZE - S_SP | 304 | add sp, sp, #S_FRAME_SIZE - S_SP |
182 | movs pc, lr @ return & move spsr_svc into cpsr | 305 | movs pc, lr @ return & move spsr_svc into cpsr |
183 | .endm | 306 | .endm |
307 | #endif /* ifdef CONFIG_CPU_V7M / else */ | ||
184 | 308 | ||
185 | .macro get_thread_info, rd | 309 | .macro get_thread_info, rd |
186 | mov \rd, sp | 310 | mov \rd, sp |