author     Uwe Kleine-König <u.kleine-koenig@pengutronix.de>   2009-03-04 05:48:46 -0500
committer  Russell King <rmk+kernel@arm.linux.org.uk>          2009-03-05 08:47:15 -0500
commit     d4cc510c61b050ef38e842a12feef71c56d7cf81 (patch)
tree       12b3bcee76dd5252c9c6efe3ffc83d34a75b77ca /arch/arm/kernel
parent     843e22b056c07d39b8f97935173dab879abc613b (diff)
[ARM] 5418/1: restore lr before leaving mcount
gcc seems to expect that lr isn't clobbered by mcount, because for a
function starting with:

	static int func(void)
	{
		void *ra = __builtin_return_address(0);

		printk(KERN_EMERG "__builtin_return_address(0) = %pS\n", ra)

		...

the following assembler is generated by gcc 4.3.2:

	   0:	e1a0c00d	mov	ip, sp
	   4:	e92dd810	push	{r4, fp, ip, lr, pc}
	   8:	e24cb004	sub	fp, ip, #4	; 0x4
	   c:	ebfffffe	bl	0 <mcount>
	  10:	e59f0034	ldr	r0, [pc, #52]
	  14:	e1a0100e	mov	r1, lr
	  18:	ebfffffe	bl	0 <printk>

Without this patch obviously __builtin_return_address(0) yields
func+0x10 instead of the return address of the caller.

Note this patch fixes a similar issue for the routines used with
dynamic ftrace even though this isn't currently selectable for ARM.

Cc: Abhishek Sagar <sagar.abhishek@gmail.com>
Cc: Steven Rostedt <rostedt@goodmis.org>
Cc: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Uwe Kleine-König <u.kleine-koenig@pengutronix.de>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
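For context, the fragment quoted in the message can be written out as a
complete, compile-only reproducer. This is an illustrative sketch and not
part of the patch (the wrapping caller is invented for the example); built
with -pg (CONFIG_FUNCTION_TRACER), gcc emits the mcount call right after
the prologue shown in the disassembly, and __builtin_return_address(0) is
read straight from lr:

	/* Illustrative reproducer, not part of the patch.  If mcount
	 * returns with lr clobbered, the "mov r1, lr" that implements
	 * __builtin_return_address(0) picks up an address inside func
	 * instead of the caller's return address.
	 */
	#include <linux/kernel.h>

	static noinline int func(void)
	{
		void *ra = __builtin_return_address(0);

		printk(KERN_EMERG "__builtin_return_address(0) = %pS\n", ra);
		return 0;
	}

	/* Any caller will do; with the fix applied, the line above
	 * reports caller+offset rather than func+0x10. */
	int caller(void)
	{
		return func();
	}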
Diffstat (limited to 'arch/arm/kernel')
-rw-r--r--  arch/arm/kernel/entry-common.S | 4 ++++
1 file changed, 4 insertions(+), 0 deletions(-)
diff --git a/arch/arm/kernel/entry-common.S b/arch/arm/kernel/entry-common.S
index 49a6ba926c2b..159d0416f270 100644
--- a/arch/arm/kernel/entry-common.S
+++ b/arch/arm/kernel/entry-common.S
@@ -111,6 +111,7 @@ ENTRY(mcount)
 	.globl mcount_call
 mcount_call:
 	bl ftrace_stub
+	ldr lr, [fp, #-4]			@ restore lr
 	ldmia sp!, {r0-r3, pc}
 
 ENTRY(ftrace_caller)
@@ -122,6 +123,7 @@ ENTRY(ftrace_caller)
 	.globl ftrace_call
 ftrace_call:
 	bl ftrace_stub
+	ldr lr, [fp, #-4]			@ restore lr
 	ldmia sp!, {r0-r3, pc}
 
 #else
@@ -133,6 +135,7 @@ ENTRY(mcount)
 	adr r0, ftrace_stub
 	cmp r0, r2
 	bne trace
+	ldr lr, [fp, #-4]			@ restore lr
 	ldmia sp!, {r0-r3, pc}
 
 trace:
@@ -141,6 +144,7 @@ trace:
 	sub r0, r0, #MCOUNT_INSN_SIZE
 	mov lr, pc
 	mov pc, r2
+	mov lr, r1				@ restore lr
 	ldmia sp!, {r0-r3, pc}
 
 #endif /* CONFIG_DYNAMIC_FTRACE */
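For readers coming from the generic ftrace side, here is a minimal sketch
(assuming the ftrace_ops API as it looked around 2.6.29; my_trace_func and
my_ops are invented names) of what sits on the other end of the indirect
call in the last hunk: mcount passes the instrumented function's address
in r0 (ip) and its caller's return address in r1 (parent_ip), and the new
"mov lr, r1" puts that same value back into lr after the callback returns.

	/* Sketch only, assuming the two-argument callback signature of
	 * that era: the callback receives ip (r0 from mcount, adjusted by
	 * MCOUNT_INSN_SIZE) and parent_ip (r1, the traced function's saved
	 * lr).  The patch guarantees that this parent_ip value is back in
	 * lr when the traced function resumes.
	 */
	#include <linux/ftrace.h>
	#include <linux/kernel.h>

	static void notrace my_trace_func(unsigned long ip,
					  unsigned long parent_ip)
	{
		pr_info("traced %pS, called from %pS\n",
			(void *)ip, (void *)parent_ip);
	}

	static struct ftrace_ops my_ops = {
		.func = my_trace_func,
	};

	/* e.g. from a module init: register_ftrace_function(&my_ops); */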