Diffstat (limited to 'arch/arm/kernel/entry-armv.S')
-rw-r--r--  arch/arm/kernel/entry-armv.S  111
1 file changed, 72 insertions, 39 deletions
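The patch below factors the PC fixup for undefined-instruction faults into a shared __und_fault helper: each caller loads r1 with the number of bytes by which the saved PC is ahead of the faulting instruction (4 for ARM and 32-bit Thumb encodings, 2 for 16-bit Thumb), and __und_fault rewinds the saved PC before branching to do_undefinstr. As a rough illustration only, and not part of the patch, the C sketch below mirrors that convention together with the "first halfword >= 0xe800" test the patch uses to spot 32-bit Thumb-2 encodings; struct pt_regs, ARM_pc and do_undefinstr() are the kernel's own names, while und_fault_sketch() and thumb32_first_half() are hypothetical helpers invented for this example.

#include <linux/types.h>
#include <linux/linkage.h>
#include <asm/ptrace.h>				/* struct pt_regs, ARM_pc */

asmlinkage void do_undefinstr(struct pt_regs *regs);	/* the kernel's C handler */

/* Hypothetical stand-in for __und_fault: rewind the saved PC, then hand off. */
static void und_fault_sketch(struct pt_regs *regs, unsigned int correction)
{
	/*
	 * The exception leaves the saved PC pointing past the instruction
	 * that trapped: 4 bytes past for ARM or 32-bit Thumb, 2 bytes past
	 * for 16-bit Thumb.  Wind it back so the C handler sees the address
	 * of the instruction that actually faulted.
	 */
	regs->ARM_pc -= correction;
	do_undefinstr(regs);
}

/*
 * Hypothetical mirror of the "cmp r0, #0xe800" checks in the patch: a first
 * Thumb halfword of 0xe800 or above starts a 32-bit Thumb-2 encoding, so the
 * second halfword must be fetched and merged before the instruction can be
 * decoded or emulated.
 */
static bool thumb32_first_half(u16 first_halfword)
{
	return first_halfword >= 0xe800;
}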
diff --git a/arch/arm/kernel/entry-armv.S b/arch/arm/kernel/entry-armv.S
index 0d1851ca6eb9..0f82098c9bfe 100644
--- a/arch/arm/kernel/entry-armv.S
+++ b/arch/arm/kernel/entry-armv.S
@@ -244,6 +244,19 @@ svc_preempt:
 	b	1b
 #endif
 
+__und_fault:
+	@ Correct the PC such that it is pointing at the instruction
+	@ which caused the fault.  If the faulting instruction was ARM
+	@ the PC will be pointing at the next instruction, and have to
+	@ subtract 4.  Otherwise, it is Thumb, and the PC will be
+	@ pointing at the second half of the Thumb instruction.  We
+	@ have to subtract 2.
+	ldr	r2, [r0, #S_PC]
+	sub	r2, r2, r1
+	str	r2, [r0, #S_PC]
+	b	do_undefinstr
+ENDPROC(__und_fault)
+
 	.align	5
 __und_svc:
 #ifdef CONFIG_KPROBES
@@ -261,25 +274,32 @@ __und_svc:
 	@
 	@ r0 - instruction
 	@
 #ifndef CONFIG_THUMB2_KERNEL
 	ldr	r0, [r4, #-4]
 #else
+	mov	r1, #2
 	ldrh	r0, [r4, #-2]			@ Thumb instruction at LR - 2
 	cmp	r0, #0xe800			@ 32-bit instruction if xx >= 0
-	ldrhhs	r9, [r4]			@ bottom 16 bits
-	orrhs	r0, r9, r0, lsl #16
+	blo	__und_svc_fault
+	ldrh	r9, [r4]			@ bottom 16 bits
+	add	r4, r4, #2
+	str	r4, [sp, #S_PC]
+	orr	r0, r9, r0, lsl #16
 #endif
-	adr	r9, BSYM(1f)
+	adr	r9, BSYM(__und_svc_finish)
 	mov	r2, r4
 	bl	call_fpe
 
+	mov	r1, #4				@ PC correction to apply
+__und_svc_fault:
 	mov	r0, sp				@ struct pt_regs *regs
-	bl	do_undefinstr
+	bl	__und_fault
 
 	@
 	@ IRQs off again before pulling preserved data off the stack
 	@
-1:	disable_irq_notrace
+__und_svc_finish:
+	disable_irq_notrace
 
 	@
 	@ restore SPSR and restart the instruction
@@ -423,25 +443,33 @@ __und_usr:
 	mov	r2, r4
 	mov	r3, r5
 
+	@ r2 = regs->ARM_pc, which is either 2 or 4 bytes ahead of the
+	@      faulting instruction depending on Thumb mode.
+	@ r3 = regs->ARM_cpsr
 	@
-	@ fall through to the emulation code, which returns using r9 if
-	@ it has emulated the instruction, or the more conventional lr
-	@ if we are to treat this as a real undefined instruction
-	@
-	@ r0 - instruction
+	@ The emulation code returns using r9 if it has emulated the
+	@ instruction, or the more conventional lr if we are to treat
+	@ this as a real undefined instruction
 	@
 	adr	r9, BSYM(ret_from_exception)
-	adr	lr, BSYM(__und_usr_unknown)
+
 	tst	r3, #PSR_T_BIT			@ Thumb mode?
-	itet	eq				@ explicit IT needed for the 1f label
-	subeq	r4, r2, #4			@ ARM instr at LR - 4
-	subne	r4, r2, #2			@ Thumb instr at LR - 2
-1:	ldreqt	r0, [r4]
+	bne	__und_usr_thumb
+	sub	r4, r2, #4			@ ARM instr at LR - 4
+1:	ldrt	r0, [r4]
 #ifdef CONFIG_CPU_ENDIAN_BE8
-	reveq	r0, r0				@ little endian instruction
+	rev	r0, r0				@ little endian instruction
 #endif
-	beq	call_fpe
+	@ r0 = 32-bit ARM instruction which caused the exception
+	@ r2 = PC value for the following instruction (:= regs->ARM_pc)
+	@ r4 = PC value for the faulting instruction
+	@ lr = 32-bit undefined instruction function
+	adr	lr, BSYM(__und_usr_fault_32)
+	b	call_fpe
+
+__und_usr_thumb:
 	@ Thumb instruction
+	sub	r4, r2, #2			@ First half of thumb instr at LR - 2
 #if CONFIG_ARM_THUMB && __LINUX_ARM_ARCH__ >= 6 && CONFIG_CPU_V7
 /*
  * Thumb-2 instruction handling.  Note that because pre-v6 and >= v6 platforms
@@ -455,7 +483,7 @@ __und_usr:
 	ldr	r5, .LCcpu_architecture
 	ldr	r5, [r5]
 	cmp	r5, #CPU_ARCH_ARMv7
-	blo	__und_usr_unknown
+	blo	__und_usr_fault_16		@ 16bit undefined instruction
 /*
  * The following code won't get run unless the running CPU really is v7, so
  * coding round the lack of ldrht on older arches is pointless.  Temporarily
@@ -463,15 +491,18 @@ __und_usr:
  */
 	.arch	armv6t2
 #endif
-2:
- ARM(	ldrht	r5, [r4], #2	)
- THUMB(	ldrht	r5, [r4]	)
- THUMB(	add	r4, r4, #2	)
+2:	ldrht	r5, [r4]
 	cmp	r5, #0xe800			@ 32bit instruction if xx != 0
-	blo	__und_usr_unknown
-3:	ldrht	r0, [r4]
+	blo	__und_usr_fault_16		@ 16bit undefined instruction
+3:	ldrht	r0, [r2]
 	add	r2, r2, #2			@ r2 is PC + 2, make it PC + 4
+	str	r2, [sp, #S_PC]			@ it's a 2x16bit instr, update
 	orr	r0, r0, r5, lsl #16
+	adr	lr, BSYM(__und_usr_fault_32)
+	@ r0 = the two 16-bit Thumb instructions which caused the exception
+	@ r2 = PC value for the following Thumb instruction (:= regs->ARM_pc)
+	@ r4 = PC value for the first 16-bit Thumb instruction
+	@ lr = 32bit undefined instruction function
 
 #if __LINUX_ARM_ARCH__ < 7
 /* If the target arch was overridden, change it back: */
@@ -482,17 +513,13 @@ __und_usr:
 #endif
 #endif /* __LINUX_ARM_ARCH__ < 7 */
 #else /* !(CONFIG_ARM_THUMB && __LINUX_ARM_ARCH__ >= 6 && CONFIG_CPU_V7) */
-	b	__und_usr_unknown
+	b	__und_usr_fault_16
 #endif
- UNWIND(.fnend		)
+ UNWIND(.fnend)
 ENDPROC(__und_usr)
 
-	@
-	@ fallthrough to call_fpe
-	@
-
 /*
- * The out of line fixup for the ldrt above.
+ * The out of line fixup for the ldrt instructions above.
  */
 	.pushsection .fixup, "ax"
 	.align	2
@@ -524,11 +551,12 @@ ENDPROC(__und_usr)
  * NEON handler code.
  *
  * Emulators may wish to make use of the following registers:
- *  r0  = instruction opcode.
- *  r2  = PC+4
+ *  r0  = instruction opcode (32-bit ARM or two 16-bit Thumb)
+ *  r2  = PC value to resume execution after successful emulation
  *  r9  = normal "successful" return address
- *  r10 = this threads thread_info structure.
+ *  r10 = this threads thread_info structure
  *  lr  = unrecognised instruction return address
+ * IRQs disabled, FIQs enabled.
  */
 	@
 	@ Fall-through from Thumb-2 __und_usr
@@ -659,12 +687,17 @@ ENTRY(no_fp)
 	mov	pc, lr
 ENDPROC(no_fp)
 
-__und_usr_unknown:
-	enable_irq
+__und_usr_fault_32:
+	mov	r1, #4
+	b	1f
+__und_usr_fault_16:
+	mov	r1, #2
+1:	enable_irq
 	mov	r0, sp
 	adr	lr, BSYM(ret_from_exception)
-	b	do_undefinstr
-ENDPROC(__und_usr_unknown)
+	b	__und_fault
+ENDPROC(__und_usr_fault_32)
+ENDPROC(__und_usr_fault_16)
 
 	.align	5
 __pabt_usr: