Diffstat (limited to 'arch/arm/kernel/entry-armv.S')

 arch/arm/kernel/entry-armv.S | 16 ++++++++++++++++
 1 file changed, 16 insertions(+), 0 deletions(-)
diff --git a/arch/arm/kernel/entry-armv.S b/arch/arm/kernel/entry-armv.S
index 617e509d60df..77b047475539 100644
--- a/arch/arm/kernel/entry-armv.S
+++ b/arch/arm/kernel/entry-armv.S
@@ -76,14 +76,17 @@
 __pabt_invalid:
 	inv_entry BAD_PREFETCH
 	b	common_invalid
+ENDPROC(__pabt_invalid)
 
 __dabt_invalid:
 	inv_entry BAD_DATA
 	b	common_invalid
+ENDPROC(__dabt_invalid)
 
 __irq_invalid:
 	inv_entry BAD_IRQ
 	b	common_invalid
+ENDPROC(__irq_invalid)
 
 __und_invalid:
 	inv_entry BAD_UNDEFINSTR
@@ -107,6 +110,7 @@ common_invalid:
 
 	mov	r0, sp
 	b	bad_mode
+ENDPROC(__und_invalid)
 
 /*
  * SVC mode handlers
@@ -192,6 +196,7 @@ __dabt_svc:
 	ldr	r0, [sp, #S_PSR]
 	msr	spsr_cxsf, r0
 	ldmia	sp, {r0 - pc}^			@ load r0 - pc, cpsr
+ENDPROC(__dabt_svc)
 
 	.align	5
 __irq_svc:
@@ -223,6 +228,7 @@ __irq_svc:
 	bleq	trace_hardirqs_on
 #endif
 	ldmia	sp, {r0 - pc}^			@ load r0 - pc, cpsr
+ENDPROC(__irq_svc)
 
 	.ltorg
 
@@ -272,6 +278,7 @@ __und_svc:
 	ldr	lr, [sp, #S_PSR]		@ Get SVC cpsr
 	msr	spsr_cxsf, lr
 	ldmia	sp, {r0 - pc}^			@ Restore SVC registers
+ENDPROC(__und_svc)
 
 	.align	5
 __pabt_svc:
@@ -313,6 +320,7 @@ __pabt_svc:
 	ldr	r0, [sp, #S_PSR]
 	msr	spsr_cxsf, r0
 	ldmia	sp, {r0 - pc}^			@ load r0 - pc, cpsr
+ENDPROC(__pabt_svc)
 
 	.align	5
 .LCcralign:
@@ -412,6 +420,7 @@ __dabt_usr:
 	mov	r2, sp
 	adr	lr, ret_from_exception
 	b	do_DataAbort
+ENDPROC(__dabt_usr)
 
 	.align	5
 __irq_usr:
@@ -441,6 +450,7 @@ __irq_usr:
 
 	mov	why, #0
 	b	ret_to_user
+ENDPROC(__irq_usr)
 
 	.ltorg
 
@@ -474,6 +484,7 @@ __und_usr:
 #else
 	b	__und_usr_unknown
 #endif
+ENDPROC(__und_usr)
 
 	@
 	@ fallthrough to call_fpe
@@ -642,6 +653,7 @@ __und_usr_unknown:
 	mov	r0, sp
 	adr	lr, ret_from_exception
 	b	do_undefinstr
+ENDPROC(__und_usr_unknown)
 
 	.align	5
 __pabt_usr:
@@ -666,6 +678,8 @@ ENTRY(ret_from_exception)
 	get_thread_info tsk
 	mov	why, #0
 	b	ret_to_user
+ENDPROC(__pabt_usr)
+ENDPROC(ret_from_exception)
 
 /*
  * Register switch for ARMv3 and ARMv4 processors
@@ -702,6 +716,7 @@ ENTRY(__switch_to)
 	bl	atomic_notifier_call_chain
 	mov	r0, r5
 	ldmia	r4, {r4 - sl, fp, sp, pc}	@ Load all regs saved previously
+ENDPROC(__switch_to)
 
 	__INIT
 
@@ -1029,6 +1044,7 @@ vector_\name:
 	mov	r0, sp
 	ldr	lr, [pc, lr, lsl #2]
 	movs	pc, lr			@ branch to handler in SVC mode
+ENDPROC(vector_\name)
 	.endm
 
 	.globl	__stubs_start
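
For reference, ENDPROC() is an assembler macro defined in include/linux/linkage.h. As a rough sketch only (the exact macro body varies between kernel versions, so treat the expansion below as an assumption rather than the literal definition), each annotation added by this patch expands to roughly:

	@ ENDPROC(__dabt_svc), approximately:
	.type __dabt_svc, %function		@ mark the symbol as STT_FUNC
	.size __dabt_svc, . - __dabt_svc	@ record the handler's size

Bracketing each handler this way leaves proper function type and size information in the ELF symbol table for the linker and debugging tools to use; it emits directives only and does not change the generated instructions.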