diff options
author | H. Peter Anvin <hpa@linux.intel.com> | 2010-06-09 20:10:43 -0400 |
---|---|---|
committer | H. Peter Anvin <hpa@zytor.com> | 2010-07-07 13:36:28 -0400 |
commit | 83a7a2ad2a9173dcabc05df0f01d1d85b7ba1c2c (patch) | |
tree | 06dfa61fa406d35694d9dfc1c9c4153ead231b78 /arch/x86 | |
parent | 140236b4b1c749c9b795ea3d11558a0eb5a3a080 (diff) |
x86, alternatives: Use 16-bit numbers for cpufeature index
We already have cpufeature indices above 255, so use a 16-bit number
for the alternatives index. This consumes a padding field and so
doesn't add any size, but it means that abusing the padding field to
create assembly errors on overflow no longer works. We can retain the
test simply by redirecting it to the .discard section, however.
[ v3: updated to include open-coded locations ]
Signed-off-by: H. Peter Anvin <hpa@linux.intel.com>
LKML-Reference: <tip-f88731e3068f9d1392ba71cc9f50f035d26a0d4f@git.kernel.org>
Signed-off-by: H. Peter Anvin <hpa@zytor.com>
Diffstat (limited to 'arch/x86')
-rw-r--r-- | arch/x86/include/asm/alternative.h | 7 | ||||
-rw-r--r-- | arch/x86/include/asm/cpufeature.h | 14 | ||||
-rw-r--r-- | arch/x86/kernel/entry_32.S | 2 | ||||
-rw-r--r-- | arch/x86/lib/clear_page_64.S | 2 | ||||
-rw-r--r-- | arch/x86/lib/copy_page_64.S | 2 | ||||
-rw-r--r-- | arch/x86/lib/memcpy_64.S | 2 | ||||
-rw-r--r-- | arch/x86/lib/memset_64.S | 2 |
7 files changed, 17 insertions, 14 deletions
diff --git a/arch/x86/include/asm/alternative.h b/arch/x86/include/asm/alternative.h index 03b6bb5394a0..bc6abb7bc7ee 100644 --- a/arch/x86/include/asm/alternative.h +++ b/arch/x86/include/asm/alternative.h | |||
@@ -45,10 +45,9 @@ | |||
45 | struct alt_instr { | 45 | struct alt_instr { |
46 | u8 *instr; /* original instruction */ | 46 | u8 *instr; /* original instruction */ |
47 | u8 *replacement; | 47 | u8 *replacement; |
48 | u8 cpuid; /* cpuid bit set for replacement */ | 48 | u16 cpuid; /* cpuid bit set for replacement */ |
49 | u8 instrlen; /* length of original instruction */ | 49 | u8 instrlen; /* length of original instruction */ |
50 | u8 replacementlen; /* length of new instruction, <= instrlen */ | 50 | u8 replacementlen; /* length of new instruction, <= instrlen */ |
51 | u8 pad1; | ||
52 | #ifdef CONFIG_X86_64 | 51 | #ifdef CONFIG_X86_64 |
53 | u32 pad2; | 52 | u32 pad2; |
54 | #endif | 53 | #endif |
@@ -86,9 +85,11 @@ static inline int alternatives_text_reserved(void *start, void *end) | |||
86 | _ASM_ALIGN "\n" \ | 85 | _ASM_ALIGN "\n" \ |
87 | _ASM_PTR "661b\n" /* label */ \ | 86 | _ASM_PTR "661b\n" /* label */ \ |
88 | _ASM_PTR "663f\n" /* new instruction */ \ | 87 | _ASM_PTR "663f\n" /* new instruction */ \ |
89 | " .byte " __stringify(feature) "\n" /* feature bit */ \ | 88 | " .word " __stringify(feature) "\n" /* feature bit */ \ |
90 | " .byte 662b-661b\n" /* sourcelen */ \ | 89 | " .byte 662b-661b\n" /* sourcelen */ \ |
91 | " .byte 664f-663f\n" /* replacementlen */ \ | 90 | " .byte 664f-663f\n" /* replacementlen */ \ |
91 | ".previous\n" \ | ||
92 | ".section .discard,\"aw\",@progbits\n" \ | ||
92 | " .byte 0xff + (664f-663f) - (662b-661b)\n" /* rlen <= slen */ \ | 93 | " .byte 0xff + (664f-663f) - (662b-661b)\n" /* rlen <= slen */ \ |
93 | ".previous\n" \ | 94 | ".previous\n" \ |
94 | ".section .altinstr_replacement, \"ax\"\n" \ | 95 | ".section .altinstr_replacement, \"ax\"\n" \ |
diff --git a/arch/x86/include/asm/cpufeature.h b/arch/x86/include/asm/cpufeature.h index 468145914389..e8b88967de35 100644 --- a/arch/x86/include/asm/cpufeature.h +++ b/arch/x86/include/asm/cpufeature.h | |||
@@ -291,7 +291,7 @@ extern const char * const x86_power_flags[32]; | |||
291 | * patch the target code for additional performance. | 291 | * patch the target code for additional performance. |
292 | * | 292 | * |
293 | */ | 293 | */ |
294 | static __always_inline __pure bool __static_cpu_has(u8 bit) | 294 | static __always_inline __pure bool __static_cpu_has(u16 bit) |
295 | { | 295 | { |
296 | #if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5) | 296 | #if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5) |
297 | asm goto("1: jmp %l[t_no]\n" | 297 | asm goto("1: jmp %l[t_no]\n" |
@@ -300,11 +300,11 @@ static __always_inline __pure bool __static_cpu_has(u8 bit) | |||
300 | _ASM_ALIGN "\n" | 300 | _ASM_ALIGN "\n" |
301 | _ASM_PTR "1b\n" | 301 | _ASM_PTR "1b\n" |
302 | _ASM_PTR "0\n" /* no replacement */ | 302 | _ASM_PTR "0\n" /* no replacement */ |
303 | " .byte %P0\n" /* feature bit */ | 303 | " .word %P0\n" /* feature bit */ |
304 | " .byte 2b - 1b\n" /* source len */ | 304 | " .byte 2b - 1b\n" /* source len */ |
305 | " .byte 0\n" /* replacement len */ | 305 | " .byte 0\n" /* replacement len */ |
306 | " .byte 0xff + 0 - (2b-1b)\n" /* padding */ | ||
307 | ".previous\n" | 306 | ".previous\n" |
307 | /* skipping size check since replacement size = 0 */ | ||
308 | : : "i" (bit) : : t_no); | 308 | : : "i" (bit) : : t_no); |
309 | return true; | 309 | return true; |
310 | t_no: | 310 | t_no: |
@@ -318,10 +318,12 @@ static __always_inline __pure bool __static_cpu_has(u8 bit) | |||
318 | _ASM_ALIGN "\n" | 318 | _ASM_ALIGN "\n" |
319 | _ASM_PTR "1b\n" | 319 | _ASM_PTR "1b\n" |
320 | _ASM_PTR "3f\n" | 320 | _ASM_PTR "3f\n" |
321 | " .byte %P1\n" /* feature bit */ | 321 | " .word %P1\n" /* feature bit */ |
322 | " .byte 2b - 1b\n" /* source len */ | 322 | " .byte 2b - 1b\n" /* source len */ |
323 | " .byte 4f - 3f\n" /* replacement len */ | 323 | " .byte 4f - 3f\n" /* replacement len */ |
324 | " .byte 0xff + (4f-3f) - (2b-1b)\n" /* padding */ | 324 | ".previous\n" |
325 | ".section .discard,\"aw\",@progbits\n" | ||
326 | " .byte 0xff + (4f-3f) - (2b-1b)\n" /* size check */ | ||
325 | ".previous\n" | 327 | ".previous\n" |
326 | ".section .altinstr_replacement,\"ax\"\n" | 328 | ".section .altinstr_replacement,\"ax\"\n" |
327 | "3: movb $1,%0\n" | 329 | "3: movb $1,%0\n" |
@@ -337,7 +339,7 @@ static __always_inline __pure bool __static_cpu_has(u8 bit) | |||
337 | ( \ | 339 | ( \ |
338 | __builtin_constant_p(boot_cpu_has(bit)) ? \ | 340 | __builtin_constant_p(boot_cpu_has(bit)) ? \ |
339 | boot_cpu_has(bit) : \ | 341 | boot_cpu_has(bit) : \ |
340 | (__builtin_constant_p(bit) && !((bit) & ~0xff)) ? \ | 342 | __builtin_constant_p(bit) ? \ |
341 | __static_cpu_has(bit) : \ | 343 | __static_cpu_has(bit) : \ |
342 | boot_cpu_has(bit) \ | 344 | boot_cpu_has(bit) \ |
343 | ) | 345 | ) |
diff --git a/arch/x86/kernel/entry_32.S b/arch/x86/kernel/entry_32.S index cd49141cf153..7862cf510ea9 100644 --- a/arch/x86/kernel/entry_32.S +++ b/arch/x86/kernel/entry_32.S | |||
@@ -914,7 +914,7 @@ ENTRY(simd_coprocessor_error) | |||
914 | .balign 4 | 914 | .balign 4 |
915 | .long 661b | 915 | .long 661b |
916 | .long 663f | 916 | .long 663f |
917 | .byte X86_FEATURE_XMM | 917 | .word X86_FEATURE_XMM |
918 | .byte 662b-661b | 918 | .byte 662b-661b |
919 | .byte 664f-663f | 919 | .byte 664f-663f |
920 | .previous | 920 | .previous |
diff --git a/arch/x86/lib/clear_page_64.S b/arch/x86/lib/clear_page_64.S index ebeafcce04a9..aa4326bfb24a 100644 --- a/arch/x86/lib/clear_page_64.S +++ b/arch/x86/lib/clear_page_64.S | |||
@@ -52,7 +52,7 @@ ENDPROC(clear_page) | |||
52 | .align 8 | 52 | .align 8 |
53 | .quad clear_page | 53 | .quad clear_page |
54 | .quad 1b | 54 | .quad 1b |
55 | .byte X86_FEATURE_REP_GOOD | 55 | .word X86_FEATURE_REP_GOOD |
56 | .byte .Lclear_page_end - clear_page | 56 | .byte .Lclear_page_end - clear_page |
57 | .byte 2b - 1b | 57 | .byte 2b - 1b |
58 | .previous | 58 | .previous |
diff --git a/arch/x86/lib/copy_page_64.S b/arch/x86/lib/copy_page_64.S index 727a5d46d2fc..6fec2d1cebe1 100644 --- a/arch/x86/lib/copy_page_64.S +++ b/arch/x86/lib/copy_page_64.S | |||
@@ -113,7 +113,7 @@ ENDPROC(copy_page) | |||
113 | .align 8 | 113 | .align 8 |
114 | .quad copy_page | 114 | .quad copy_page |
115 | .quad 1b | 115 | .quad 1b |
116 | .byte X86_FEATURE_REP_GOOD | 116 | .word X86_FEATURE_REP_GOOD |
117 | .byte .Lcopy_page_end - copy_page | 117 | .byte .Lcopy_page_end - copy_page |
118 | .byte 2b - 1b | 118 | .byte 2b - 1b |
119 | .previous | 119 | .previous |
diff --git a/arch/x86/lib/memcpy_64.S b/arch/x86/lib/memcpy_64.S index f82e884928af..bcbcd1e0f7d5 100644 --- a/arch/x86/lib/memcpy_64.S +++ b/arch/x86/lib/memcpy_64.S | |||
@@ -131,7 +131,7 @@ ENDPROC(__memcpy) | |||
131 | .align 8 | 131 | .align 8 |
132 | .quad memcpy | 132 | .quad memcpy |
133 | .quad .Lmemcpy_c | 133 | .quad .Lmemcpy_c |
134 | .byte X86_FEATURE_REP_GOOD | 134 | .word X86_FEATURE_REP_GOOD |
135 | 135 | ||
136 | /* | 136 | /* |
137 | * Replace only beginning, memcpy is used to apply alternatives, | 137 | * Replace only beginning, memcpy is used to apply alternatives, |
diff --git a/arch/x86/lib/memset_64.S b/arch/x86/lib/memset_64.S index e88d3b81644a..09d344269652 100644 --- a/arch/x86/lib/memset_64.S +++ b/arch/x86/lib/memset_64.S | |||
@@ -121,7 +121,7 @@ ENDPROC(__memset) | |||
121 | .align 8 | 121 | .align 8 |
122 | .quad memset | 122 | .quad memset |
123 | .quad .Lmemset_c | 123 | .quad .Lmemset_c |
124 | .byte X86_FEATURE_REP_GOOD | 124 | .word X86_FEATURE_REP_GOOD |
125 | .byte .Lfinal - memset | 125 | .byte .Lfinal - memset |
126 | .byte .Lmemset_e - .Lmemset_c | 126 | .byte .Lmemset_e - .Lmemset_c |
127 | .previous | 127 | .previous |