about summary refs log tree commit diff stats
path: root/arch/x86/include/asm/cpufeature.h
diff options
context:
space:
mode:
authorH. Peter Anvin <hpa@linux.intel.com>2010-06-09 20:10:43 -0400
committerH. Peter Anvin <hpa@zytor.com>2010-07-07 13:36:28 -0400
commit83a7a2ad2a9173dcabc05df0f01d1d85b7ba1c2c (patch)
tree06dfa61fa406d35694d9dfc1c9c4153ead231b78 /arch/x86/include/asm/cpufeature.h
parent140236b4b1c749c9b795ea3d11558a0eb5a3a080 (diff)
x86, alternatives: Use 16-bit numbers for cpufeature index
We already have cpufeature indices above 255, so use a 16-bit number for the alternatives index. This consumes a padding field and so doesn't add any size, but it means that abusing the padding field to create assembly errors on overflow no longer works. We can retain the test simply by redirecting it to the .discard section, however. [ v3: updated to include open-coded locations ] Signed-off-by: H. Peter Anvin <hpa@linux.intel.com> LKML-Reference: <tip-f88731e3068f9d1392ba71cc9f50f035d26a0d4f@git.kernel.org> Signed-off-by: H. Peter Anvin <hpa@zytor.com>
Diffstat (limited to 'arch/x86/include/asm/cpufeature.h')
-rw-r--r--arch/x86/include/asm/cpufeature.h14
1 file changed, 8 insertions, 6 deletions
diff --git a/arch/x86/include/asm/cpufeature.h b/arch/x86/include/asm/cpufeature.h
index 468145914389..e8b88967de35 100644
--- a/arch/x86/include/asm/cpufeature.h
+++ b/arch/x86/include/asm/cpufeature.h
@@ -291,7 +291,7 @@ extern const char * const x86_power_flags[32];
291 * patch the target code for additional performance. 291 * patch the target code for additional performance.
292 * 292 *
293 */ 293 */
294static __always_inline __pure bool __static_cpu_has(u8 bit) 294static __always_inline __pure bool __static_cpu_has(u16 bit)
295{ 295{
296#if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5) 296#if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
297 asm goto("1: jmp %l[t_no]\n" 297 asm goto("1: jmp %l[t_no]\n"
@@ -300,11 +300,11 @@ static __always_inline __pure bool __static_cpu_has(u8 bit)
300 _ASM_ALIGN "\n" 300 _ASM_ALIGN "\n"
301 _ASM_PTR "1b\n" 301 _ASM_PTR "1b\n"
302 _ASM_PTR "0\n" /* no replacement */ 302 _ASM_PTR "0\n" /* no replacement */
303 " .byte %P0\n" /* feature bit */ 303 " .word %P0\n" /* feature bit */
304 " .byte 2b - 1b\n" /* source len */ 304 " .byte 2b - 1b\n" /* source len */
305 " .byte 0\n" /* replacement len */ 305 " .byte 0\n" /* replacement len */
306 " .byte 0xff + 0 - (2b-1b)\n" /* padding */
307 ".previous\n" 306 ".previous\n"
307 /* skipping size check since replacement size = 0 */
308 : : "i" (bit) : : t_no); 308 : : "i" (bit) : : t_no);
309 return true; 309 return true;
310 t_no: 310 t_no:
@@ -318,10 +318,12 @@ static __always_inline __pure bool __static_cpu_has(u8 bit)
318 _ASM_ALIGN "\n" 318 _ASM_ALIGN "\n"
319 _ASM_PTR "1b\n" 319 _ASM_PTR "1b\n"
320 _ASM_PTR "3f\n" 320 _ASM_PTR "3f\n"
321 " .byte %P1\n" /* feature bit */ 321 " .word %P1\n" /* feature bit */
322 " .byte 2b - 1b\n" /* source len */ 322 " .byte 2b - 1b\n" /* source len */
323 " .byte 4f - 3f\n" /* replacement len */ 323 " .byte 4f - 3f\n" /* replacement len */
324 " .byte 0xff + (4f-3f) - (2b-1b)\n" /* padding */ 324 ".previous\n"
325 ".section .discard,\"aw\",@progbits\n"
326 " .byte 0xff + (4f-3f) - (2b-1b)\n" /* size check */
325 ".previous\n" 327 ".previous\n"
326 ".section .altinstr_replacement,\"ax\"\n" 328 ".section .altinstr_replacement,\"ax\"\n"
327 "3: movb $1,%0\n" 329 "3: movb $1,%0\n"
@@ -337,7 +339,7 @@ static __always_inline __pure bool __static_cpu_has(u8 bit)
337( \ 339( \
338 __builtin_constant_p(boot_cpu_has(bit)) ? \ 340 __builtin_constant_p(boot_cpu_has(bit)) ? \
339 boot_cpu_has(bit) : \ 341 boot_cpu_has(bit) : \
340 (__builtin_constant_p(bit) && !((bit) & ~0xff)) ? \ 342 __builtin_constant_p(bit) ? \
341 __static_cpu_has(bit) : \ 343 __static_cpu_has(bit) : \
342 boot_cpu_has(bit) \ 344 boot_cpu_has(bit) \
343) 345)