-rw-r--r--	arch/arm/kernel/entry-armv.S | 81
1 file changed, 43 insertions(+), 38 deletions(-)
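This patch rejigs register allocation in the exception entry handlers: the aborted context's pc and psr, previously carried in r2 and r3, are now kept in r4 and r5 throughout svc_entry and usr_entry, with orig_r0 moving from r4 to r6 and svc_entry's scratch registers shifting to match (sp_svc/lr_svc from r0/r1 to r2/r3, the interlock-avoidance pointer from r5 to r7). Callees not updated here that still expect the old convention, such as the processor-specific abort handlers and the undefined-instruction emulation path behind call_fpe, get explicit mov shims, and kuser_cmpxchg_fixup now takes the interrupted pc in r4.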
diff --git a/arch/arm/kernel/entry-armv.S b/arch/arm/kernel/entry-armv.S
index fd42e667a81b..582bb2310446 100644
--- a/arch/arm/kernel/entry-armv.S
+++ b/arch/arm/kernel/entry-armv.S
@@ -45,7 +45,7 @@
 	.endm
 
 	.macro	pabt_helper
-	mov	r0, r2			@ pass address of aborted instruction.
+	mov	r0, r4			@ pass address of aborted instruction.
 #ifdef MULTI_PABORT
 	ldr	ip, .LCprocfns
 	mov	lr, pc
@@ -56,6 +56,8 @@
 	.endm
 
 	.macro	dabt_helper
+	mov	r2, r4
+	mov	r3, r5
 
 	@
 	@ Call the processor-specific abort handler:
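The two copies at the top of dabt_helper preserve the helper's outward interface: the entry code now keeps the aborted pc and cpsr in r4 and r5, but the processor-specific abort handler invoked at the end of this macro is, by all appearances in the surrounding code, still called with them in r2 and r3.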
@@ -157,26 +159,26 @@ ENDPROC(__und_invalid)
 SPFIX(	subeq	sp, sp, #4	)
 	stmia	sp, {r1 - r12}
 
-	ldmia	r0, {r1 - r3}
-	add	r5, sp, #S_SP - 4	@ here for interlock avoidance
-	mov	r4, #-1			@  ""  ""      ""       ""
-	add	r0, sp, #(S_FRAME_SIZE + \stack_hole - 4)
-SPFIX(	addeq	r0, r0, #4	)
-	str	r1, [sp, #-4]!		@ save the "real" r0 copied
+	ldmia	r0, {r3 - r5}
+	add	r7, sp, #S_SP - 4	@ here for interlock avoidance
+	mov	r6, #-1			@  ""  ""      ""       ""
+	add	r2, sp, #(S_FRAME_SIZE + \stack_hole - 4)
+SPFIX(	addeq	r2, r2, #4	)
+	str	r3, [sp, #-4]!		@ save the "real" r0 copied
 					@ from the exception stack
 
-	mov	r1, lr
+	mov	r3, lr
 
 	@
 	@ We are now ready to fill in the remaining blanks on the stack:
 	@
-	@  r0 - sp_svc
-	@  r1 - lr_svc
-	@  r2 - lr_<exception>, already fixed up for correct return/restart
-	@  r3 - spsr_<exception>
-	@  r4 - orig_r0 (see pt_regs definition in ptrace.h)
+	@  r2 - sp_svc
+	@  r3 - lr_svc
+	@  r4 - lr_<exception>, already fixed up for correct return/restart
+	@  r5 - spsr_<exception>
+	@  r6 - orig_r0 (see pt_regs definition in ptrace.h)
 	@
-	stmia	r5, {r0 - r4}
+	stmia	r7, {r2 - r6}
 	.endm
 
 	.align	5
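A note on the rewritten block above: stmia stores its register list lowest-numbered-first at ascending addresses, and the five pt_regs slots for sp, lr, pc, cpsr and orig_r0 are contiguous, so the single stmia r7, {r2 - r6} completes the frame. r7 is computed early ("here for interlock avoidance") while sp still sits one word above the frame base, hence the #S_SP - 4. A minimal user-space sketch of that layout, assuming the uregs[18] form of struct pt_regs from arch/arm/include/asm/ptrace.h and the S_* offset names from asm-offsets:

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* uregs is unsigned long in the kernel; uint32_t here keeps the
 * offsets faithful to 32-bit ARM even when built on a 64-bit host. */
struct pt_regs {
	uint32_t uregs[18];		/* r0..r15, cpsr, orig_r0 */
};

int main(void)
{
	/* r2=sp_svc r3=lr_svc r4=pc r5=psr r6=orig_r0 land in order,
	 * starting at the S_SP slot (uregs[13]). */
	printf("S_SP=%zu S_LR=%zu S_PC=%zu S_PSR=%zu S_OLD_R0=%zu\n",
	       offsetof(struct pt_regs, uregs[13]),
	       offsetof(struct pt_regs, uregs[14]),
	       offsetof(struct pt_regs, uregs[15]),
	       offsetof(struct pt_regs, uregs[16]),
	       offsetof(struct pt_regs, uregs[17]));
	return 0;
}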
@@ -187,7 +189,7 @@ __dabt_svc:
 	@ get ready to re-enable interrupts if appropriate
 	@
 	mrs	r9, cpsr
-	tst	r3, #PSR_I_BIT
+	tst	r5, #PSR_I_BIT
 	biceq	r9, r9, #PSR_I_BIT
 
 	dabt_helper
@@ -208,8 +210,8 @@ __dabt_svc:
 	@
 	@ restore SPSR and restart the instruction
 	@
-	ldr	r2, [sp, #S_PSR]
-	svc_exit r2				@ return from exception
+	ldr	r5, [sp, #S_PSR]
+	svc_exit r5				@ return from exception
  UNWIND(.fnend		)
 ENDPROC(__dabt_svc)
 
@@ -232,13 +234,13 @@ __irq_svc:
 	tst	r0, #_TIF_NEED_RESCHED
 	blne	svc_preempt
 #endif
-	ldr	r4, [sp, #S_PSR]	@ irqs are already disabled
+	ldr	r5, [sp, #S_PSR]
 #ifdef CONFIG_TRACE_IRQFLAGS
 	@ The parent context IRQs must have been enabled to get here in
 	@ the first place, so there's no point checking the PSR I bit.
 	bl	trace_hardirqs_on
 #endif
-	svc_exit r4				@ return from exception
+	svc_exit r5				@ return from exception
  UNWIND(.fnend		)
 ENDPROC(__irq_svc)
 
@@ -273,15 +275,16 @@ __und_svc:
 	@  r0 - instruction
 	@
 #ifndef CONFIG_THUMB2_KERNEL
-	ldr	r0, [r2, #-4]
+	ldr	r0, [r4, #-4]
 #else
-	ldrh	r0, [r2, #-2]		@ Thumb instruction at LR - 2
+	ldrh	r0, [r4, #-2]		@ Thumb instruction at LR - 2
 	and	r9, r0, #0xf800
 	cmp	r9, #0xe800		@ 32-bit instruction if xx >= 0
-	ldrhhs	r9, [r2]		@ bottom 16 bits
+	ldrhhs	r9, [r4]		@ bottom 16 bits
 	orrhs	r0, r9, r0, lsl #16
 #endif
 	adr	r9, BSYM(1f)
+	mov	r2, r4
 	bl	call_fpe
 
 	mov	r0, sp				@ struct pt_regs *regs
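Note the mov r2, r4 added just before bl call_fpe: __und_svc itself now reads the trapped instruction through r4, but call_fpe and the emulation code it dispatches to are untouched by this patch and evidently still expect the address in r2.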
@@ -295,8 +298,8 @@ __und_svc:
 	@
 	@ restore SPSR and restart the instruction
 	@
-	ldr	r2, [sp, #S_PSR]		@ Get SVC cpsr
-	svc_exit r2				@ return from exception
+	ldr	r5, [sp, #S_PSR]		@ Get SVC cpsr
+	svc_exit r5				@ return from exception
  UNWIND(.fnend		)
 ENDPROC(__und_svc)
 
@@ -308,7 +311,7 @@ __pabt_svc:
 	@ re-enable interrupts if appropriate
 	@
 	mrs	r9, cpsr
-	tst	r3, #PSR_I_BIT
+	tst	r5, #PSR_I_BIT
 	biceq	r9, r9, #PSR_I_BIT
 
 	pabt_helper
@@ -325,8 +328,8 @@ __pabt_svc:
 	@
 	@ restore SPSR and restart the instruction
 	@
-	ldr	r2, [sp, #S_PSR]
-	svc_exit r2				@ return from exception
+	ldr	r5, [sp, #S_PSR]
+	svc_exit r5				@ return from exception
  UNWIND(.fnend		)
 ENDPROC(__pabt_svc)
 
@@ -357,23 +360,23 @@ ENDPROC(__pabt_svc)
 ARM(	stmib	sp, {r1 - r12}	)
 THUMB(	stmia	sp, {r0 - r12}	)
 
-	ldmia	r0, {r1 - r3}
+	ldmia	r0, {r3 - r5}
 	add	r0, sp, #S_PC		@ here for interlock avoidance
-	mov	r4, #-1			@  ""  ""     ""        ""
+	mov	r6, #-1			@  ""  ""     ""        ""
 
-	str	r1, [sp]		@ save the "real" r0 copied
+	str	r3, [sp]		@ save the "real" r0 copied
 					@ from the exception stack
 
 	@
 	@ We are now ready to fill in the remaining blanks on the stack:
 	@
-	@  r2 - lr_<exception>, already fixed up for correct return/restart
-	@  r3 - spsr_<exception>
-	@  r4 - orig_r0 (see pt_regs definition in ptrace.h)
+	@  r4 - lr_<exception>, already fixed up for correct return/restart
+	@  r5 - spsr_<exception>
+	@  r6 - orig_r0 (see pt_regs definition in ptrace.h)
 	@
 	@ Also, separately save sp_usr and lr_usr
 	@
-	stmia	r0, {r2 - r4}
+	stmia	r0, {r4 - r6}
 ARM(	stmdb	r0, {sp, lr}^	)
 THUMB(	store_user_sp_lr r0, r1, S_SP - S_PC	)
 
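usr_entry ends the same way as svc_entry, with r0 pointing at the S_PC slot so stmia r0, {r4 - r6} drops pc, cpsr and orig_r0 into the adjacent pt_regs words (see the layout sketch earlier); sp_usr and lr_usr need the separate stmdb r0, {sp, lr}^ because the ^ form is what lets SVC mode store the user-mode banked copies of sp and lr.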
@@ -397,7 +400,7 @@ ENDPROC(__pabt_svc)
 	@ if it was interrupted in a critical region.  Here we
 	@ perform a quick test inline since it should be false
 	@ 99.9999% of the time.  The rest is done out of line.
-	cmp	r2, #TASK_SIZE
+	cmp	r4, #TASK_SIZE
 	blhs	kuser_cmpxchg_fixup
 #endif
 #endif
@@ -441,6 +444,8 @@ ENDPROC(__irq_usr)
 	.align	5
 __und_usr:
 	usr_entry
+	mov	r2, r4
+	mov	r3, r5
 
 	@
 	@ fall through to the emulation code, which returns using r9 if
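The same shim as in __und_svc appears here: usr_entry now leaves the aborted pc and psr in r4 and r5, and __und_usr copies them back into r2 and r3 for the emulation code it falls through to.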
@@ -894,13 +899,13 @@ __kuser_cmpxchg:			@ 0xffff0fc0
 	.text
 kuser_cmpxchg_fixup:
 	@ Called from kuser_cmpxchg_check macro.
-	@ r2 = address of interrupted insn (must be preserved).
+	@ r4 = address of interrupted insn (must be preserved).
 	@ sp = saved regs. r7 and r8 are clobbered.
 	@ 1b = first critical insn, 2b = last critical insn.
-	@ If r2 >= 1b and r2 <= 2b then saved pc_usr is set to 1b.
+	@ If r4 >= 1b and r4 <= 2b then saved pc_usr is set to 1b.
 	mov	r7, #0xffff0fff
 	sub	r7, r7, #(0xffff0fff - (0xffff0fc0 + (1b - __kuser_cmpxchg)))
-	subs	r8, r2, r7
+	subs	r8, r4, r7
 	rsbcss	r8, r8, #(2b - 1b)
 	strcs	r7, [sp, #S_PC]
 	mov	pc, lr
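The subs/rsbcss/strcs triple above is a branchless unsigned range check: subs r8, r4, r7 leaves carry set only if r4 >= 1b, and rsbcss (which executes only while carry is set) keeps carry only if the offset also fits within 2b - 1b, so the strcs fires exactly when 1b <= r4 <= 2b. The test collapses to a single unsigned comparison in C; a small sketch, with hypothetical values for the 1b address and region length:

#include <stdint.h>
#include <stdio.h>

/* "start <= pc && pc <= start + len" as one unsigned compare: if pc
 * is below start, pc - start wraps to a huge value and fails, which
 * mirrors the carry-clear path through subs/rsbcss above. */
static int in_critical_region(uint32_t pc, uint32_t start, uint32_t len)
{
	return (uint32_t)(pc - start) <= len;
}

int main(void)
{
	uint32_t start = 0xffff0fc4;	/* hypothetical address of 1b */
	uint32_t len = 0x20;		/* hypothetical 2b - 1b       */

	printf("%d %d\n",
	       in_critical_region(0xffff0fc8, start, len),	/* inside: 1 */
	       in_critical_region(0xffff0fb0, start, len));	/* before: 0 */
	return 0;
}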