aboutsummaryrefslogtreecommitdiffstats
path: root/arch/x86/include/asm
diff options
context:
space:
mode:
authorH. Peter Anvin <hpa@linux.intel.com>2010-06-09 20:10:43 -0400
committerH. Peter Anvin <hpa@zytor.com>2010-07-07 13:36:28 -0400
commit83a7a2ad2a9173dcabc05df0f01d1d85b7ba1c2c (patch)
tree06dfa61fa406d35694d9dfc1c9c4153ead231b78 /arch/x86/include/asm
parent140236b4b1c749c9b795ea3d11558a0eb5a3a080 (diff)
x86, alternatives: Use 16-bit numbers for cpufeature index
We already have cpufeature indices above 255, so use a 16-bit number for the alternatives index. This consumes a padding field and so doesn't add any size, but it means that abusing the padding field to create assembly errors on overflow no longer works. We can retain the test simply by redirecting it to the .discard section, however. [ v3: updated to include open-coded locations ] Signed-off-by: H. Peter Anvin <hpa@linux.intel.com> LKML-Reference: <tip-f88731e3068f9d1392ba71cc9f50f035d26a0d4f@git.kernel.org> Signed-off-by: H. Peter Anvin <hpa@zytor.com>
Diffstat (limited to 'arch/x86/include/asm')
-rw-r--r--arch/x86/include/asm/alternative.h7
-rw-r--r--arch/x86/include/asm/cpufeature.h14
2 files changed, 12 insertions, 9 deletions
diff --git a/arch/x86/include/asm/alternative.h b/arch/x86/include/asm/alternative.h
index 03b6bb5394a0..bc6abb7bc7ee 100644
--- a/arch/x86/include/asm/alternative.h
+++ b/arch/x86/include/asm/alternative.h
@@ -45,10 +45,9 @@
45struct alt_instr { 45struct alt_instr {
46 u8 *instr; /* original instruction */ 46 u8 *instr; /* original instruction */
47 u8 *replacement; 47 u8 *replacement;
48 u8 cpuid; /* cpuid bit set for replacement */ 48 u16 cpuid; /* cpuid bit set for replacement */
49 u8 instrlen; /* length of original instruction */ 49 u8 instrlen; /* length of original instruction */
50 u8 replacementlen; /* length of new instruction, <= instrlen */ 50 u8 replacementlen; /* length of new instruction, <= instrlen */
51 u8 pad1;
52#ifdef CONFIG_X86_64 51#ifdef CONFIG_X86_64
53 u32 pad2; 52 u32 pad2;
54#endif 53#endif
@@ -86,9 +85,11 @@ static inline int alternatives_text_reserved(void *start, void *end)
86 _ASM_ALIGN "\n" \ 85 _ASM_ALIGN "\n" \
87 _ASM_PTR "661b\n" /* label */ \ 86 _ASM_PTR "661b\n" /* label */ \
88 _ASM_PTR "663f\n" /* new instruction */ \ 87 _ASM_PTR "663f\n" /* new instruction */ \
89 " .byte " __stringify(feature) "\n" /* feature bit */ \ 88 " .word " __stringify(feature) "\n" /* feature bit */ \
90 " .byte 662b-661b\n" /* sourcelen */ \ 89 " .byte 662b-661b\n" /* sourcelen */ \
91 " .byte 664f-663f\n" /* replacementlen */ \ 90 " .byte 664f-663f\n" /* replacementlen */ \
91 ".previous\n" \
92 ".section .discard,\"aw\",@progbits\n" \
92 " .byte 0xff + (664f-663f) - (662b-661b)\n" /* rlen <= slen */ \ 93 " .byte 0xff + (664f-663f) - (662b-661b)\n" /* rlen <= slen */ \
93 ".previous\n" \ 94 ".previous\n" \
94 ".section .altinstr_replacement, \"ax\"\n" \ 95 ".section .altinstr_replacement, \"ax\"\n" \
diff --git a/arch/x86/include/asm/cpufeature.h b/arch/x86/include/asm/cpufeature.h
index 468145914389..e8b88967de35 100644
--- a/arch/x86/include/asm/cpufeature.h
+++ b/arch/x86/include/asm/cpufeature.h
@@ -291,7 +291,7 @@ extern const char * const x86_power_flags[32];
291 * patch the target code for additional performance. 291 * patch the target code for additional performance.
292 * 292 *
293 */ 293 */
294static __always_inline __pure bool __static_cpu_has(u8 bit) 294static __always_inline __pure bool __static_cpu_has(u16 bit)
295{ 295{
296#if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5) 296#if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
297 asm goto("1: jmp %l[t_no]\n" 297 asm goto("1: jmp %l[t_no]\n"
@@ -300,11 +300,11 @@ static __always_inline __pure bool __static_cpu_has(u8 bit)
300 _ASM_ALIGN "\n" 300 _ASM_ALIGN "\n"
301 _ASM_PTR "1b\n" 301 _ASM_PTR "1b\n"
302 _ASM_PTR "0\n" /* no replacement */ 302 _ASM_PTR "0\n" /* no replacement */
303 " .byte %P0\n" /* feature bit */ 303 " .word %P0\n" /* feature bit */
304 " .byte 2b - 1b\n" /* source len */ 304 " .byte 2b - 1b\n" /* source len */
305 " .byte 0\n" /* replacement len */ 305 " .byte 0\n" /* replacement len */
306 " .byte 0xff + 0 - (2b-1b)\n" /* padding */
307 ".previous\n" 306 ".previous\n"
307 /* skipping size check since replacement size = 0 */
308 : : "i" (bit) : : t_no); 308 : : "i" (bit) : : t_no);
309 return true; 309 return true;
310 t_no: 310 t_no:
@@ -318,10 +318,12 @@ static __always_inline __pure bool __static_cpu_has(u8 bit)
318 _ASM_ALIGN "\n" 318 _ASM_ALIGN "\n"
319 _ASM_PTR "1b\n" 319 _ASM_PTR "1b\n"
320 _ASM_PTR "3f\n" 320 _ASM_PTR "3f\n"
321 " .byte %P1\n" /* feature bit */ 321 " .word %P1\n" /* feature bit */
322 " .byte 2b - 1b\n" /* source len */ 322 " .byte 2b - 1b\n" /* source len */
323 " .byte 4f - 3f\n" /* replacement len */ 323 " .byte 4f - 3f\n" /* replacement len */
324 " .byte 0xff + (4f-3f) - (2b-1b)\n" /* padding */ 324 ".previous\n"
325 ".section .discard,\"aw\",@progbits\n"
326 " .byte 0xff + (4f-3f) - (2b-1b)\n" /* size check */
325 ".previous\n" 327 ".previous\n"
326 ".section .altinstr_replacement,\"ax\"\n" 328 ".section .altinstr_replacement,\"ax\"\n"
327 "3: movb $1,%0\n" 329 "3: movb $1,%0\n"
@@ -337,7 +339,7 @@ static __always_inline __pure bool __static_cpu_has(u8 bit)
337( \ 339( \
338 __builtin_constant_p(boot_cpu_has(bit)) ? \ 340 __builtin_constant_p(boot_cpu_has(bit)) ? \
339 boot_cpu_has(bit) : \ 341 boot_cpu_has(bit) : \
340 (__builtin_constant_p(bit) && !((bit) & ~0xff)) ? \ 342 __builtin_constant_p(bit) ? \
341 __static_cpu_has(bit) : \ 343 __static_cpu_has(bit) : \
342 boot_cpu_has(bit) \ 344 boot_cpu_has(bit) \
343) 345)