diff options
| -rw-r--r-- | arch/x86/kernel/kprobes/opt.c | 23 |
1 file changed, 22 insertions(+), 1 deletion(-)
diff --git a/arch/x86/kernel/kprobes/opt.c b/arch/x86/kernel/kprobes/opt.c
index 4f98aad38237..3668f28cf5fc 100644
--- a/arch/x86/kernel/kprobes/opt.c
+++ b/arch/x86/kernel/kprobes/opt.c
| @@ -40,6 +40,7 @@ | |||
| 40 | #include <asm/debugreg.h> | 40 | #include <asm/debugreg.h> |
| 41 | #include <asm/set_memory.h> | 41 | #include <asm/set_memory.h> |
| 42 | #include <asm/sections.h> | 42 | #include <asm/sections.h> |
| 43 | #include <asm/nospec-branch.h> | ||
| 43 | 44 | ||
| 44 | #include "common.h" | 45 | #include "common.h" |
| 45 | 46 | ||
| @@ -205,7 +206,7 @@ static int copy_optimized_instructions(u8 *dest, u8 *src) | |||
| 205 | } | 206 | } |
| 206 | 207 | ||
| 207 | /* Check whether insn is indirect jump */ | 208 | /* Check whether insn is indirect jump */ |
| 208 | static int insn_is_indirect_jump(struct insn *insn) | 209 | static int __insn_is_indirect_jump(struct insn *insn) |
| 209 | { | 210 | { |
| 210 | return ((insn->opcode.bytes[0] == 0xff && | 211 | return ((insn->opcode.bytes[0] == 0xff && |
| 211 | (X86_MODRM_REG(insn->modrm.value) & 6) == 4) || /* Jump */ | 212 | (X86_MODRM_REG(insn->modrm.value) & 6) == 4) || /* Jump */ |
| @@ -239,6 +240,26 @@ static int insn_jump_into_range(struct insn *insn, unsigned long start, int len) | |||
| 239 | return (start <= target && target <= start + len); | 240 | return (start <= target && target <= start + len); |
| 240 | } | 241 | } |
| 241 | 242 | ||
| 243 | static int insn_is_indirect_jump(struct insn *insn) | ||
| 244 | { | ||
| 245 | int ret = __insn_is_indirect_jump(insn); | ||
| 246 | |||
| 247 | #ifdef CONFIG_RETPOLINE | ||
| 248 | /* | ||
| 249 | * Jump to x86_indirect_thunk_* is treated as an indirect jump. | ||
| 250 | * Note that even with CONFIG_RETPOLINE=y, the kernel compiled with | ||
| 251 | * older gcc may use indirect jump. So we add this check instead of | ||
| 252 | * replace indirect-jump check. | ||
| 253 | */ | ||
| 254 | if (!ret) | ||
| 255 | ret = insn_jump_into_range(insn, | ||
| 256 | (unsigned long)__indirect_thunk_start, | ||
| 257 | (unsigned long)__indirect_thunk_end - | ||
| 258 | (unsigned long)__indirect_thunk_start); | ||
| 259 | #endif | ||
| 260 | return ret; | ||
| 261 | } | ||
| 262 | |||
| 242 | /* Decode whole function to ensure any instructions don't jump into target */ | 263 | /* Decode whole function to ensure any instructions don't jump into target */ |
| 243 | static int can_optimize(unsigned long paddr) | 264 | static int can_optimize(unsigned long paddr) |
| 244 | { | 265 | { |
