author      Andy Lutomirski <luto@mit.edu>          2011-07-13 09:24:10 -0400
committer   H. Peter Anvin <hpa@linux.intel.com>    2011-07-13 14:22:56 -0400
commit      59e97e4d6fbcd5b74a94cb48bcbfc6f8478a5e93 (patch)
tree        015fd8e63e1fcd8fdf7a066bd2e09a5636a14449 /arch/x86
parent      c9712944b2a12373cb6ff8059afcfb7e826a6c54 (diff)
x86: Make alternative instruction pointers relative
This saves a few bytes on x86-64 and means that future patches can
apply alternatives to unrelocated code.
Signed-off-by: Andy Lutomirski <luto@mit.edu>
Link: http://lkml.kernel.org/r/ff64a6b9a1a3860ca4a7b8b6dc7b4754f9491cd7.1310563276.git.luto@mit.edu
Signed-off-by: H. Peter Anvin <hpa@linux.intel.com>
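The patch changes each .altinstructions entry from two absolute pointers (8 bytes each on x86-64) to two self-relative s32 offsets. A minimal stand-alone sketch of that encoding, using hypothetical names (rel_ptr, rel_ptr_resolve) rather than the kernel's types, shows how an offset stored as "target minus the address of the field" is turned back into a pointer; this is the same arithmetic apply_alternatives() uses in the diff below:

    #include <stdint.h>
    #include <stdio.h>

    /*
     * Hypothetical model of a self-relative pointer, mirroring the new
     * struct alt_instr layout: the field holds "target - &field" as an
     * s32 instead of a full 64-bit pointer.
     */
    struct rel_ptr {
            int32_t offset;         /* target address minus &offset */
    };

    static void rel_ptr_set(struct rel_ptr *p, const void *target)
    {
            p->offset = (int32_t)((const uint8_t *)target -
                                  (uint8_t *)&p->offset);
    }

    static void *rel_ptr_resolve(struct rel_ptr *p)
    {
            /* Same arithmetic as apply_alternatives():
             * instr = (u8 *)&a->instr_offset + a->instr_offset */
            return (uint8_t *)&p->offset + p->offset;
    }

    /* Keep the "code" and the table entry adjacent so the 32-bit offset
     * is guaranteed to be representable in this demo. */
    static struct {
            uint8_t insn[5];        /* stand-in for an original instruction */
            struct rel_ptr entry;
    } demo;

    int main(void)
    {
            rel_ptr_set(&demo.entry, demo.insn);
            printf("round trip ok: %d\n",
                   rel_ptr_resolve(&demo.entry) == (void *)demo.insn);
            return 0;
    }

Because the offset is relative, a table entry stays valid wherever the image happens to sit, which is what allows later patches to apply alternatives to code that has not been relocated yet.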
Diffstat (limited to 'arch/x86')
-rw-r--r--  arch/x86/include/asm/alternative-asm.h  |  4
-rw-r--r--  arch/x86/include/asm/alternative.h      |  8
-rw-r--r--  arch/x86/include/asm/cpufeature.h       |  8
-rw-r--r--  arch/x86/kernel/alternative.c           | 21
-rw-r--r--  arch/x86/lib/copy_page_64.S             |  9
-rw-r--r--  arch/x86/lib/memmove_64.S               | 11
6 files changed, 31 insertions(+), 30 deletions(-)
diff --git a/arch/x86/include/asm/alternative-asm.h b/arch/x86/include/asm/alternative-asm.h
index 94d420b360d1..4554cc6fb96a 100644
--- a/arch/x86/include/asm/alternative-asm.h
+++ b/arch/x86/include/asm/alternative-asm.h
@@ -17,8 +17,8 @@
 
 .macro altinstruction_entry orig alt feature orig_len alt_len
 	.align 8
-	.quad \orig
-	.quad \alt
+	.long \orig - .
+	.long \alt - .
 	.word \feature
 	.byte \orig_len
 	.byte \alt_len
diff --git a/arch/x86/include/asm/alternative.h b/arch/x86/include/asm/alternative.h
index bf535f947e8c..23fb6d79f209 100644
--- a/arch/x86/include/asm/alternative.h
+++ b/arch/x86/include/asm/alternative.h
@@ -43,8 +43,8 @@
 #endif
 
 struct alt_instr {
-	u8 *instr;		/* original instruction */
-	u8 *replacement;
+	s32 instr_offset;	/* original instruction */
+	s32 repl_offset;	/* offset to replacement instruction */
 	u16 cpuid;		/* cpuid bit set for replacement */
 	u8  instrlen;		/* length of original instruction */
 	u8  replacementlen;	/* length of new instruction, <= instrlen */
@@ -84,8 +84,8 @@ static inline int alternatives_text_reserved(void *start, void *end)
 	"661:\n\t" oldinstr "\n662:\n"				\
 	".section .altinstructions,\"a\"\n"			\
 	_ASM_ALIGN "\n"						\
-	_ASM_PTR "661b\n"		/* label           */	\
-	_ASM_PTR "663f\n"		/* new instruction */	\
+	" .long 661b - .\n"		/* label           */	\
+	" .long 663f - .\n"		/* new instruction */	\
 	" .word " __stringify(feature) "\n" /* feature bit     */	\
 	" .byte 662b-661b\n"		/* sourcelen       */	\
 	" .byte 664f-663f\n"		/* replacementlen  */	\
diff --git a/arch/x86/include/asm/cpufeature.h b/arch/x86/include/asm/cpufeature.h
index 71cc3800712c..9929b35929ff 100644
--- a/arch/x86/include/asm/cpufeature.h
+++ b/arch/x86/include/asm/cpufeature.h
@@ -331,8 +331,8 @@ static __always_inline __pure bool __static_cpu_has(u16 bit)
 			 "2:\n"
 			 ".section .altinstructions,\"a\"\n"
 			 _ASM_ALIGN "\n"
-			 _ASM_PTR "1b\n"
-			 _ASM_PTR "0\n"		/* no replacement */
+			 " .long 1b - .\n"
+			 " .long 0\n"		/* no replacement */
 			 " .word %P0\n"		/* feature bit */
 			 " .byte 2b - 1b\n"	/* source len */
 			 " .byte 0\n"		/* replacement len */
@@ -349,8 +349,8 @@ static __always_inline __pure bool __static_cpu_has(u16 bit)
 			     "2:\n"
 			     ".section .altinstructions,\"a\"\n"
 			     _ASM_ALIGN "\n"
-			     _ASM_PTR "1b\n"
-			     _ASM_PTR "3f\n"
+			     " .long 1b - .\n"
+			     " .long 3f - .\n"
 			     " .word %P1\n"		/* feature bit */
 			     " .byte 2b - 1b\n"		/* source len */
 			     " .byte 4f - 3f\n"		/* replacement len */
diff --git a/arch/x86/kernel/alternative.c b/arch/x86/kernel/alternative.c
index a81f2d52f869..ddb207bb5f91 100644
--- a/arch/x86/kernel/alternative.c
+++ b/arch/x86/kernel/alternative.c
@@ -263,6 +263,7 @@ void __init_or_module apply_alternatives(struct alt_instr *start,
 					 struct alt_instr *end)
 {
 	struct alt_instr *a;
+	u8 *instr, *replacement;
 	u8 insnbuf[MAX_PATCH_LEN];
 
 	DPRINTK("%s: alt table %p -> %p\n", __func__, start, end);
@@ -276,25 +277,29 @@ void __init_or_module apply_alternatives(struct alt_instr *start,
 	 * order.
 	 */
 	for (a = start; a < end; a++) {
-		u8 *instr = a->instr;
+		instr = (u8 *)&a->instr_offset + a->instr_offset;
+		replacement = (u8 *)&a->repl_offset + a->repl_offset;
 		BUG_ON(a->replacementlen > a->instrlen);
 		BUG_ON(a->instrlen > sizeof(insnbuf));
 		BUG_ON(a->cpuid >= NCAPINTS*32);
 		if (!boot_cpu_has(a->cpuid))
 			continue;
+
+		memcpy(insnbuf, replacement, a->replacementlen);
+
+		/* 0xe8 is a relative jump; fix the offset. */
+		if (*insnbuf == 0xe8 && a->replacementlen == 5)
+			*(s32 *)(insnbuf + 1) += replacement - instr;
+
+		add_nops(insnbuf + a->replacementlen,
+			 a->instrlen - a->replacementlen);
+
 #ifdef CONFIG_X86_64
 		/* vsyscall code is not mapped yet. resolve it manually. */
 		if (instr >= (u8 *)VSYSCALL_START && instr < (u8*)VSYSCALL_END) {
 			instr = __va(instr - (u8*)VSYSCALL_START + (u8*)__pa_symbol(&__vsyscall_0));
-			DPRINTK("%s: vsyscall fixup: %p => %p\n",
-				__func__, a->instr, instr);
 		}
 #endif
-		memcpy(insnbuf, a->replacement, a->replacementlen);
-		if (*insnbuf == 0xe8 && a->replacementlen == 5)
-			*(s32 *)(insnbuf + 1) += a->replacement - a->instr;
-		add_nops(insnbuf + a->replacementlen,
-			 a->instrlen - a->replacementlen);
 		text_poke_early(instr, insnbuf, a->instrlen);
 	}
 }
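One subtlety in the reworked loop above: if a 5-byte replacement starts with 0xe8 (a direct near CALL whose rel32 operand is encoded relative to the instruction's own address), copying its bytes to the original location would make the call land somewhere else. The line `*(s32 *)(insnbuf + 1) += replacement - instr;` compensates by the distance the instruction moves. A small stand-alone sketch of that adjustment, using a hypothetical helper and buffer names rather than the kernel's insnbuf:

    #include <stdint.h>
    #include <string.h>

    /*
     * Hypothetical model of the rel32 fixup in apply_alternatives().
     * A CALL rel32 encodes "target - (insn address + 5)", so when the
     * 5-byte instruction is copied from src to dst, rel32 must grow by
     * (src - dst) for the call to keep reaching the same target.
     */
    static void fixup_call_rel32(uint8_t buf[5], const uint8_t *src,
                                 const uint8_t *dst)
    {
            int32_t rel;

            if (buf[0] != 0xe8)     /* only direct near CALL is handled */
                    return;

            memcpy(&rel, buf + 1, sizeof(rel));  /* unaligned-safe read */
            rel += (int32_t)(src - dst);         /* assumes the distance fits in s32 */
            memcpy(buf + 1, &rel, sizeof(rel));
    }

The kernel does the equivalent in place on insnbuf before text_poke_early() writes the buffer over the original instruction; with the kernel code model, the distances involved fit in an s32.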
diff --git a/arch/x86/lib/copy_page_64.S b/arch/x86/lib/copy_page_64.S
index 6fec2d1cebe1..01c805ba5359 100644
--- a/arch/x86/lib/copy_page_64.S
+++ b/arch/x86/lib/copy_page_64.S
@@ -2,6 +2,7 @@
 
 #include <linux/linkage.h>
 #include <asm/dwarf2.h>
+#include <asm/alternative-asm.h>
 
 	ALIGN
 copy_page_c:
@@ -110,10 +111,6 @@ ENDPROC(copy_page)
 2:
 	.previous
 	.section .altinstructions,"a"
-	.align 8
-	.quad copy_page
-	.quad 1b
-	.word X86_FEATURE_REP_GOOD
-	.byte .Lcopy_page_end - copy_page
-	.byte 2b - 1b
+	altinstruction_entry copy_page, 1b, X86_FEATURE_REP_GOOD,	\
+		.Lcopy_page_end-copy_page, 2b-1b
 	.previous
diff --git a/arch/x86/lib/memmove_64.S b/arch/x86/lib/memmove_64.S
index d0ec9c2936d7..ee164610ec46 100644
--- a/arch/x86/lib/memmove_64.S
+++ b/arch/x86/lib/memmove_64.S
@@ -9,6 +9,7 @@
 #include <linux/linkage.h>
 #include <asm/dwarf2.h>
 #include <asm/cpufeature.h>
+#include <asm/alternative-asm.h>
 
 #undef memmove
 
@@ -214,11 +215,9 @@ ENTRY(memmove)
 	.previous
 
 	.section .altinstructions,"a"
-	.align 8
-	.quad .Lmemmove_begin_forward
-	.quad .Lmemmove_begin_forward_efs
-	.word X86_FEATURE_ERMS
-	.byte .Lmemmove_end_forward-.Lmemmove_begin_forward
-	.byte .Lmemmove_end_forward_efs-.Lmemmove_begin_forward_efs
+	altinstruction_entry .Lmemmove_begin_forward,		\
+		.Lmemmove_begin_forward_efs,X86_FEATURE_ERMS,	\
+		.Lmemmove_end_forward-.Lmemmove_begin_forward,	\
+		.Lmemmove_end_forward_efs-.Lmemmove_begin_forward_efs
 	.previous
ENDPROC(memmove)