aboutsummaryrefslogtreecommitdiffstats
path: root/arch/x86/include/asm/cpufeature.h
diff options
context:
space:
mode:
author: H. Peter Anvin <hpa@zytor.com> 2010-05-11 20:47:07 -0400
committer: H. Peter Anvin <hpa@zytor.com> 2010-05-11 20:47:07 -0400
commita3c8acd04376d604370dcb6cd2143c9c14078a50 (patch)
treec6d467d1007ab51fd17b88a37db79d583bac7033 /arch/x86/include/asm/cpufeature.h
parentdce8bf4e115aa44d590802ce3554e926840c9042 (diff)
x86: Add new static_cpu_has() function using alternatives
For CPU-feature-specific code that touches performance-critical paths, introduce a static patching version of [boot_]cpu_has(). This is run at alternatives time and is therefore not appropriate for most initialization code, but on the other hand initialization code is generally not performance critical. On gcc 4.5+ this uses the new "asm goto" feature. Signed-off-by: H. Peter Anvin <hpa@zytor.com> Cc: Avi Kivity <avi@redhat.com> Cc: Suresh Siddha <suresh.b.siddha@intel.com> LKML-Reference: <1273135546-29690-2-git-send-email-avi@redhat.com>
Diffstat (limited to 'arch/x86/include/asm/cpufeature.h')
-rw-r--r--  arch/x86/include/asm/cpufeature.h  57
1 file changed, 57 insertions(+), 0 deletions(-)
diff --git a/arch/x86/include/asm/cpufeature.h b/arch/x86/include/asm/cpufeature.h
index 0cd82d068613..9b11a5cc6662 100644
--- a/arch/x86/include/asm/cpufeature.h
+++ b/arch/x86/include/asm/cpufeature.h
@@ -175,6 +175,7 @@
 
 #if defined(__KERNEL__) && !defined(__ASSEMBLY__)
 
+#include <asm/asm.h>
 #include <linux/bitops.h>
 
 extern const char * const x86_cap_flags[NCAPINTS*32];
@@ -283,6 +284,62 @@ extern const char * const x86_power_flags[32];
 
 #endif /* CONFIG_X86_64 */
 
+/*
+ * Static testing of CPU features.  Used the same as boot_cpu_has().
+ * These are only valid after alternatives have run, but will statically
+ * patch the target code for additional performance.
+ *
+ */
+static __always_inline __pure bool __static_cpu_has(u8 bit)
+{
+#if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
+	asm goto("1: jmp %l[t_no]\n"
+		 "2:\n"
+		 ".section .altinstructions,\"a\"\n"
+		 _ASM_ALIGN "\n"
+		 _ASM_PTR "1b\n"
+		 _ASM_PTR "0\n"			/* no replacement */
+		 " .byte %P0\n"			/* feature bit */
+		 " .byte 2b - 1b\n"		/* source len */
+		 " .byte 0\n"			/* replacement len */
+		 " .byte 0xff + 0 - (2b-1b)\n"	/* padding */
+		 ".previous\n"
+		 : : "i" (bit) : : t_no);
+	return true;
+ t_no:
+	return false;
+#else
+	u8 flag;
+	/* Open-coded due to __stringify() in ALTERNATIVE() */
+	asm volatile("1: movb $0,%0\n"
+		     "2:\n"
+		     ".section .altinstructions,\"a\"\n"
+		     _ASM_ALIGN "\n"
+		     _ASM_PTR "1b\n"
+		     _ASM_PTR "3f\n"
+		     " .byte %P1\n"		/* feature bit */
+		     " .byte 2b - 1b\n"		/* source len */
+		     " .byte 4f - 3f\n"		/* replacement len */
+		     " .byte 0xff + (4f-3f) - (2b-1b)\n" /* padding */
+		     ".previous\n"
+		     ".section .altinstr_replacement,\"ax\"\n"
+		     "3: movb $1,%0\n"
+		     "4:\n"
+		     ".previous\n"
+		     : "=qm" (flag) : "i" (bit));
+	return flag;
+#endif
+}
+
+#define static_cpu_has(bit)					\
+(								\
+	__builtin_constant_p(boot_cpu_has(bit)) ?		\
+		boot_cpu_has(bit) :				\
+	(__builtin_constant_p(bit) && !((bit) & ~0xff)) ?	\
+		__static_cpu_has(bit) :				\
+		boot_cpu_has(bit)				\
+)
+
 #endif /* defined(__KERNEL__) && !defined(__ASSEMBLY__) */
 
 #endif /* _ASM_X86_CPUFEATURE_H */