author		Glauber de Oliveira Costa <gcosta@redhat.com>	2008-01-30 07:31:40 -0500
committer	Ingo Molnar <mingo@elte.hu>	2008-01-30 07:31:40 -0500
commit		ae2e15eb3b6c2a011bee615470bf52d2beb99a4b (patch)
tree		b2e323eef35a6cab5f16284378c0c96d4bc159b7 /include/asm-x86/processor.h
parent		1a53905adddf6cc6d795bd7e988c60a19773f72e (diff)
x86: unify prefetch operations
This patch moves the prefetch[w]? functions to processor.h.

Signed-off-by: Glauber de Oliveira Costa <gcosta@redhat.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
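The helpers being unified emit non-binding cache hints: prefetch() pulls a line in for reading (prefetchnta on SSE-capable CPUs), while prefetchw() pulls it in with write intent (3dnow!). As a rough userspace analogue of the same read-prefetch idea, here is a minimal sketch using GCC's __builtin_prefetch builtin; it is illustration only, not part of this patch, and the list-walk code is hypothetical.

#include <stdio.h>
#include <stdlib.h>

struct node {
	struct node *next;
	long payload;
};

/* Walk a list, hinting the CPU to pull the next node into cache
 * while the current one is being processed. __builtin_prefetch's
 * second argument selects read (0) vs. write (1) intent, roughly
 * mirroring the kernel's prefetch()/prefetchw() split. */
static long sum_list(struct node *head)
{
	long sum = 0;
	for (struct node *n = head; n; n = n->next) {
		if (n->next)
			__builtin_prefetch(n->next, 0, 3);
		sum += n->payload;
	}
	return sum;
}

int main(void)
{
	struct node *head = NULL;

	/* Build a small list: 0..9 pushed onto the front. */
	for (long i = 0; i < 10; i++) {
		struct node *n = malloc(sizeof(*n));
		n->payload = i;
		n->next = head;
		head = n;
	}
	printf("sum = %ld\n", sum_list(head));
	return 0;
}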
Diffstat (limited to 'include/asm-x86/processor.h')
-rw-r--r--	include/asm-x86/processor.h	30
1 file changed, 30 insertions, 0 deletions
diff --git a/include/asm-x86/processor.h b/include/asm-x86/processor.h
index c6b749a018a7..bfac9739f57e 100644
--- a/include/asm-x86/processor.h
+++ b/include/asm-x86/processor.h
@@ -596,6 +596,36 @@ extern char ignore_fpu_irq;
 #define ARCH_HAS_PREFETCHW
 #define ARCH_HAS_SPINLOCK_PREFETCH
 
+#ifdef CONFIG_X86_32
+#define BASE_PREFETCH		ASM_NOP4
+#define ARCH_HAS_PREFETCH
+#else
+#define BASE_PREFETCH		"prefetcht0 (%1)"
+#endif
+
+/* Prefetch instructions for Pentium III and AMD Athlon */
+/* It's not worth to care about 3dnow! prefetches for the K6
+   because they are microcoded there and very slow.
+   However we don't do prefetches for pre XP Athlons currently
+   That should be fixed. */
+static inline void prefetch(const void *x)
+{
+	alternative_input(BASE_PREFETCH,
+			  "prefetchnta (%1)",
+			  X86_FEATURE_XMM,
+			  "r" (x));
+}
+
+/* 3dnow! prefetch to get an exclusive cache line. Useful for
+   spinlocks to avoid one state transition in the cache coherency protocol. */
+static inline void prefetchw(const void *x)
+{
+	alternative_input(BASE_PREFETCH,
+			  "prefetchw (%1)",
+			  X86_FEATURE_3DNOW,
+			  "r" (x));
+}
+
 #define spin_lock_prefetch(x)	prefetchw(x)
 /* This decides where the kernel will search for a free chunk of vm
  * space during mmap's.
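The trailing context shows spin_lock_prefetch(x) mapping to prefetchw(x): fetching the lock's cache line with write intent ahead of the locked operation avoids a shared-to-exclusive coherency transition, as the 3dnow! comment in the hunk notes. A minimal userspace sketch of that pattern, again using __builtin_prefetch together with a C11 test-and-set lock; this is a hypothetical illustration, not kernel code and not part of the commit.

#include <stdatomic.h>
#include <stdio.h>

/* A toy test-and-set lock guarding a counter. Prefetching the lock
 * word with write intent before spinning mirrors what the kernel's
 * spin_lock_prefetch()/prefetchw() aim at: the cache line arrives
 * already in a modifiable state, so the later atomic exchange does
 * not need an extra coherency transition. */
struct locked_counter {
	atomic_flag lock;
	long value;
};

static void counter_inc(struct locked_counter *c)
{
	__builtin_prefetch(c, 1, 3);	/* 1 = write intent */
	while (atomic_flag_test_and_set_explicit(&c->lock, memory_order_acquire))
		;			/* spin */
	c->value++;
	atomic_flag_clear_explicit(&c->lock, memory_order_release);
}

int main(void)
{
	struct locked_counter c = { .lock = ATOMIC_FLAG_INIT, .value = 0 };

	for (int i = 0; i < 1000; i++)
		counter_inc(&c);
	printf("value = %ld\n", c.value);
	return 0;
}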