author	Wang Nan <wangnan0@huawei.com>	2015-01-05 06:34:47 -0500
committer	Jon Medhurst <tixy@linaro.org>	2015-01-14 07:24:52 -0500
commit	bfc9657d752c47d59dc0bab85ebdc19cf60100dd (patch)
tree	8fc4941e1571dd9b0d7d91703ce4ff67580984c4 /arch/arm
parent	28a1899db30a9325498aef114055506286dc8010 (diff)
ARM: optprobes: execute instruction during restoring if possible.
This patch removes software emulation or simulation for most probed instructions. If the instruction doesn't use PC-relative addressing, it is translated into the following instructions in the restore area of the code template:

    ldmia sp, {r0 - r14}    @ restore all registers except PC
    <probed instruction>    @ execute the probed instruction directly
    b next_insn             @ branch to the next instruction

Signed-off-by: Wang Nan <wangnan0@huawei.com>
Reviewed-by: Masami Hiramatsu <masami.hiramatsu.pt@hitachi.com>
Signed-off-by: Jon Medhurst <tixy@linaro.org>
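As a concrete illustration of that translation, the hypothetical snippet below shows the three values that end up in the restore slots for a probe that qualifies. The probed_opcode and branch_back values are made up for the example; in the kernel they are filled in by arch_prepare_optimized_kprobe() in the diff below.

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	/* Made-up example values for illustration: */
	uint32_t probed_opcode = 0xe0811002;	/* "add r1, r1, r2" */
	uint32_t branch_back   = 0xea000000;	/* placeholder "b" back to the next insn */

	/* What the three restore slots of the per-probe template copy hold
	 * once patching succeeds (see arch_prepare_optimized_kprobe() below): */
	uint32_t restore_slots[3] = {
		0xe89d7fff,	/* ldmia sp, {r0 - r14}: restore everything but PC */
		probed_opcode,	/* the original probed instruction, copied verbatim */
		branch_back,	/* b <probe address + 4>, built by arm_gen_branch() */
	};

	for (int i = 0; i < 3; i++)
		printf("slot %d: 0x%08x\n", i, (unsigned)restore_slots[i]);
	return 0;
}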
Diffstat (limited to 'arch/arm')
-rw-r--r--	arch/arm/include/asm/kprobes.h	3
-rw-r--r--	arch/arm/include/asm/probes.h	1
-rw-r--r--	arch/arm/probes/kprobes/opt-arm.c	52
3 files changed, 54 insertions(+), 2 deletions(-)
diff --git a/arch/arm/include/asm/kprobes.h b/arch/arm/include/asm/kprobes.h
index 50ff3bc7928e..3ea9be559726 100644
--- a/arch/arm/include/asm/kprobes.h
+++ b/arch/arm/include/asm/kprobes.h
@@ -57,6 +57,9 @@ extern __visible kprobe_opcode_t optprobe_template_call;
 extern __visible kprobe_opcode_t optprobe_template_end;
 extern __visible kprobe_opcode_t optprobe_template_sub_sp;
 extern __visible kprobe_opcode_t optprobe_template_add_sp;
+extern __visible kprobe_opcode_t optprobe_template_restore_begin;
+extern __visible kprobe_opcode_t optprobe_template_restore_orig_insn;
+extern __visible kprobe_opcode_t optprobe_template_restore_end;
 
 #define MAX_OPTIMIZED_LENGTH	4
 #define MAX_OPTINSN_SIZE \
diff --git a/arch/arm/include/asm/probes.h b/arch/arm/include/asm/probes.h
index b668e60f759c..1e5b9bb92270 100644
--- a/arch/arm/include/asm/probes.h
+++ b/arch/arm/include/asm/probes.h
@@ -42,6 +42,7 @@ struct arch_probes_insn {
 	probes_insn_fn_t	*insn_fn;
 	int			stack_space;
 	unsigned long		register_usage_flags;
+	bool			kprobe_direct_exec;
 };
 
 #endif /* __ASSEMBLY__ */
diff --git a/arch/arm/probes/kprobes/opt-arm.c b/arch/arm/probes/kprobes/opt-arm.c
index 13d5232118df..bcdecc25461b 100644
--- a/arch/arm/probes/kprobes/opt-arm.c
+++ b/arch/arm/probes/kprobes/opt-arm.c
@@ -32,6 +32,14 @@
 #include "core.h"
 
 /*
+ * See register_usage_flags. If the probed instruction doesn't use PC,
+ * we can copy it into template and have it executed directly without
+ * simulation or emulation.
+ */
+#define ARM_REG_PC	15
+#define can_kprobe_direct_exec(m)	(!test_bit(ARM_REG_PC, &(m)))
+
+/*
  * NOTE: the first sub and add instruction will be modified according
  * to the stack cost of the instruction.
  */
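The can_kprobe_direct_exec() check above only asks whether the decoder recorded the PC (r15) in register_usage_flags. A minimal, self-contained sketch of that idea follows; it uses a plain shift-and-mask in place of the kernel's test_bit(), and the flag values for the two example instructions are made up for illustration.

#include <stdbool.h>
#include <stdio.h>

#define ARM_REG_PC 15

/* Same spirit as can_kprobe_direct_exec(): the probed instruction may be
 * copied into the template and executed natively only if it never touches
 * the PC. */
static bool can_direct_exec(unsigned long register_usage_flags)
{
	return !((register_usage_flags >> ARM_REG_PC) & 1UL);
}

int main(void)
{
	/* "add r1, r2, r3": the decoder would flag r1, r2 and r3 only */
	unsigned long add_flags = (1UL << 1) | (1UL << 2) | (1UL << 3);
	/* "ldr r0, [pc, #4]": PC-relative, so bit 15 is also set */
	unsigned long ldr_flags = (1UL << 0) | (1UL << ARM_REG_PC);

	printf("add r1, r2, r3   -> direct exec? %d\n", can_direct_exec(add_flags));
	printf("ldr r0, [pc, #4] -> direct exec? %d\n", can_direct_exec(ldr_flags));
	return 0;
}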
@@ -71,7 +79,15 @@ asm (
71 " orrne r2, #1\n" 79 " orrne r2, #1\n"
72 " strne r2, [sp, #60] @ set bit0 of PC for thumb\n" 80 " strne r2, [sp, #60] @ set bit0 of PC for thumb\n"
73 " msr cpsr_cxsf, r1\n" 81 " msr cpsr_cxsf, r1\n"
82 ".global optprobe_template_restore_begin\n"
83 "optprobe_template_restore_begin:\n"
74 " ldmia sp, {r0 - r15}\n" 84 " ldmia sp, {r0 - r15}\n"
85 ".global optprobe_template_restore_orig_insn\n"
86 "optprobe_template_restore_orig_insn:\n"
87 " nop\n"
88 ".global optprobe_template_restore_end\n"
89 "optprobe_template_restore_end:\n"
90 " nop\n"
75 ".global optprobe_template_val\n" 91 ".global optprobe_template_val\n"
76 "optprobe_template_val:\n" 92 "optprobe_template_val:\n"
77 "1: .long 0\n" 93 "1: .long 0\n"
@@ -91,6 +107,12 @@ asm (
 	((unsigned long *)&optprobe_template_add_sp - (unsigned long *)&optprobe_template_entry)
 #define TMPL_SUB_SP \
 	((unsigned long *)&optprobe_template_sub_sp - (unsigned long *)&optprobe_template_entry)
+#define TMPL_RESTORE_BEGIN \
+	((unsigned long *)&optprobe_template_restore_begin - (unsigned long *)&optprobe_template_entry)
+#define TMPL_RESTORE_ORIGN_INSN \
+	((unsigned long *)&optprobe_template_restore_orig_insn - (unsigned long *)&optprobe_template_entry)
+#define TMPL_RESTORE_END \
+	((unsigned long *)&optprobe_template_restore_end - (unsigned long *)&optprobe_template_entry)
 
 /*
  * ARM can always optimize an instruction when using ARM ISA, except
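These TMPL_* constants are word offsets: both symbol addresses are cast to unsigned long *, and on 32-bit ARM sizeof(unsigned long) equals the 4-byte instruction size, so the pointer difference can index the copied template buffer directly. The standalone sketch below demonstrates the same trick; it uses uint32_t pointers so it behaves identically on any host, and the buffer and slot position are invented for the example.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Stand-in for the assembled template; pretend slot 6 is where
 * optprobe_template_restore_end lands. */
static uint32_t template_buf[8];

#define SLOT_RESTORE_END \
	((uint32_t *)&template_buf[6] - (uint32_t *)&template_buf[0])

int main(void)
{
	uint32_t code[8] = { 0 };

	/* Patch that slot in a fresh copy, just as code[TMPL_RESTORE_END]
	 * is written in arch_prepare_optimized_kprobe(). */
	code[SLOT_RESTORE_END] = 0xe320f000;	/* ARM "nop" encoding */

	printf("restore_end slot index = %td\n", (ptrdiff_t)SLOT_RESTORE_END);
	return 0;
}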
@@ -160,8 +182,12 @@ optimized_callback(struct optimized_kprobe *op, struct pt_regs *regs)
 		__this_cpu_write(current_kprobe, NULL);
 	}
 
-	/* In each case, we must singlestep the replaced instruction. */
-	op->kp.ainsn.insn_singlestep(p->opcode, &p->ainsn, regs);
+	/*
+	 * We singlestep the replaced instruction only when it can't be
+	 * executed directly during restore.
+	 */
+	if (!p->ainsn.kprobe_direct_exec)
+		op->kp.ainsn.insn_singlestep(p->opcode, &p->ainsn, regs);
 
 	local_irq_restore(flags);
 }
@@ -243,6 +269,28 @@ int arch_prepare_optimized_kprobe(struct optimized_kprobe *op, struct kprobe *or
 	val = (unsigned long)optimized_callback;
 	code[TMPL_CALL_IDX] = val;
 
+	/* If possible, copy insn and have it executed during restore */
+	orig->ainsn.kprobe_direct_exec = false;
+	if (can_kprobe_direct_exec(orig->ainsn.register_usage_flags)) {
+		kprobe_opcode_t final_branch = arm_gen_branch(
+				(unsigned long)(&code[TMPL_RESTORE_END]),
+				(unsigned long)(op->kp.addr) + 4);
+		if (final_branch != 0) {
+			/*
+			 * Replace original 'ldmia sp, {r0 - r15}' with
+			 * 'ldmia {r0 - r14}', restore all registers except pc.
+			 */
+			code[TMPL_RESTORE_BEGIN] = __opcode_to_mem_arm(0xe89d7fff);
+
+			/* The original probed instruction */
+			code[TMPL_RESTORE_ORIGN_INSN] = __opcode_to_mem_arm(orig->opcode);
+
+			/* Jump back to next instruction */
+			code[TMPL_RESTORE_END] = __opcode_to_mem_arm(final_branch);
+			orig->ainsn.kprobe_direct_exec = true;
+		}
+	}
+
 	flush_icache_range((unsigned long)code,
 			   (unsigned long)(&code[TMPL_END_IDX]));
 
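The if (final_branch != 0) guard exists because, as the check implies, arm_gen_branch() yields 0 when it cannot encode the jump: an ARM B instruction carries only a signed 24-bit word offset relative to PC + 8, i.e. roughly a ±32 MB reach. The sketch below is a simplified, self-contained model of that constraint, not the kernel helper itself.

#include <stdint.h>
#include <stdio.h>

/* Simplified model of encoding "b <target>" for an instruction at <pc> in
 * ARM state.  The offset is stored as a signed 24-bit count of words,
 * relative to pc + 8, so the reachable window is roughly +/-32 MB.
 * Returns 0 when the target is out of range, mirroring the
 * final_branch != 0 check in the patch above. */
static uint32_t gen_branch(uint32_t pc, uint32_t target)
{
	int64_t offset = (int64_t)target - ((int64_t)pc + 8);

	if (offset < -(1 << 25) || offset >= (1 << 25) || (offset & 3))
		return 0;

	return 0xea000000u | (((uint32_t)offset >> 2) & 0x00ffffffu);
}

int main(void)
{
	printf("near: 0x%08x\n", (unsigned)gen_branch(0x8000, 0x9000));     /* encodable   */
	printf("far : 0x%08x\n", (unsigned)gen_branch(0x8000, 0x08000000)); /* 0: too far  */
	return 0;
}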