Diffstat (limited to 'include'):

 include/asm-x86/processor.h    | 30 ++++++++++++++++++++++++++++++
 include/asm-x86/processor_32.h | 25 -------------------------
 include/asm-x86/processor_64.h |  8 --------
 3 files changed, 30 insertions(+), 33 deletions(-)
diff --git a/include/asm-x86/processor.h b/include/asm-x86/processor.h
index c6b749a018a7..bfac9739f57e 100644
--- a/include/asm-x86/processor.h
+++ b/include/asm-x86/processor.h
@@ -596,6 +596,36 @@ extern char ignore_fpu_irq;
 #define ARCH_HAS_PREFETCHW
 #define ARCH_HAS_SPINLOCK_PREFETCH
 
+#ifdef CONFIG_X86_32
+#define BASE_PREFETCH	ASM_NOP4
+#define ARCH_HAS_PREFETCH
+#else
+#define BASE_PREFETCH	"prefetcht0 (%1)"
+#endif
+
+/* Prefetch instructions for Pentium III and AMD Athlon */
+/* It's not worth to care about 3dnow! prefetches for the K6
+   because they are microcoded there and very slow.
+   However we don't do prefetches for pre XP Athlons currently
+   That should be fixed. */
+static inline void prefetch(const void *x)
+{
+	alternative_input(BASE_PREFETCH,
+			  "prefetchnta (%1)",
+			  X86_FEATURE_XMM,
+			  "r" (x));
+}
+
+/* 3dnow! prefetch to get an exclusive cache line. Useful for
+   spinlocks to avoid one state transition in the cache coherency protocol. */
+static inline void prefetchw(const void *x)
+{
+	alternative_input(BASE_PREFETCH,
+			  "prefetchw (%1)",
+			  X86_FEATURE_3DNOW,
+			  "r" (x));
+}
+
 #define spin_lock_prefetch(x)	prefetchw(x)
 /* This decides where the kernel will search for a free chunk of vm
  * space during mmap's.
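The two helpers added above are the whole user-visible API: prefetch() hints a read, prefetchw() hints a write, and alternative_input() patches the baseline instruction at boot according to the CPU's feature bits. Their typical payoff is in pointer-chasing code, where the next node's cache line can be requested while the current one is still being processed. A minimal userspace sketch of that pattern, using GCC's __builtin_prefetch in place of the kernel helpers (the node type and walk loop are invented for illustration):

	struct node {
		struct node	*next;
		long		payload;
	};

	/* Walk a singly linked list, requesting the next node's cache
	 * line before touching the current payload.  The (addr, 0, 0)
	 * arguments ask for a read with minimal cache pollution, which
	 * is roughly what prefetchnta provides. */
	static long walk(const struct node *head)
	{
		const struct node *n;
		long sum = 0;

		for (n = head; n; n = n->next) {
			if (n->next)
				__builtin_prefetch(n->next, 0, 0);
			sum += n->payload;
		}
		return sum;
	}

The hint only helps when there is enough independent work between the prefetch and the actual access to hide the memory latency; issuing it one iteration ahead, as here, is the usual compromise.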
diff --git a/include/asm-x86/processor_32.h b/include/asm-x86/processor_32.h
index 84a4c5e47d57..61a9cae2364b 100644
--- a/include/asm-x86/processor_32.h
+++ b/include/asm-x86/processor_32.h
@@ -228,29 +228,4 @@ extern unsigned long thread_saved_pc(struct task_struct *tsk);
 
 #define ASM_NOP_MAX 8
 
-/* Prefetch instructions for Pentium III and AMD Athlon */
-/* It's not worth to care about 3dnow! prefetches for the K6
-   because they are microcoded there and very slow.
-   However we don't do prefetches for pre XP Athlons currently
-   That should be fixed. */
-static inline void prefetch(const void *x)
-{
-	alternative_input(ASM_NOP4,
-			  "prefetchnta (%1)",
-			  X86_FEATURE_XMM,
-			  "r" (x));
-}
-
-#define ARCH_HAS_PREFETCH
-
-/* 3dnow! prefetch to get an exclusive cache line. Useful for
-   spinlocks to avoid one state transition in the cache coherency protocol. */
-static inline void prefetchw(const void *x)
-{
-	alternative_input(ASM_NOP4,
-			  "prefetchw (%1)",
-			  X86_FEATURE_3DNOW,
-			  "r" (x));
-}
-
 #endif /* __ASM_I386_PROCESSOR_H */
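The deleted 32-bit code shows what alternative_input() buys: the hot path ran a four-byte NOP (ASM_NOP4) unless the alternatives framework rewrote it to the real prefetch at boot, so no feature flag is ever tested at run time. The unified header keeps that NOP baseline on 32-bit and uses prefetcht0 on 64-bit, where SSE is architectural. A rough userspace model of the same decision, made with a run-time CPUID branch instead of instruction patching (function and variable names are invented; the kernel's X86_FEATURE_XMM corresponds to the SSE bit, CPUID.1:EDX bit 25):

	#include <cpuid.h>

	/* Runtime-dispatch model of the boot-time alternatives choice:
	 * use prefetchnta when the CPU advertises SSE, otherwise do
	 * nothing (the ASM_NOP4 baseline).  The kernel avoids this
	 * branch entirely by patching the instruction in place once. */
	static void prefetch_model(const void *x)
	{
		static int has_sse = -1;

		if (has_sse < 0) {
			unsigned int a, b, c, d;

			has_sse = __get_cpuid(1, &a, &b, &c, &d) &&
				  (d & (1u << 25));	/* CPUID.1:EDX.SSE */
		}
		if (has_sse)
			asm volatile("prefetchnta (%0)" : : "r" (x));
	}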
diff --git a/include/asm-x86/processor_64.h b/include/asm-x86/processor_64.h
index 45e382989b33..08b965124b97 100644
--- a/include/asm-x86/processor_64.h
+++ b/include/asm-x86/processor_64.h
@@ -124,12 +124,4 @@ DECLARE_PER_CPU(struct orig_ist, orig_ist);
 
 #define ASM_NOP_MAX 8
 
-static inline void prefetchw(void *x)
-{
-	alternative_input("prefetcht0 (%1)",
-			  "prefetchw (%1)",
-			  X86_FEATURE_3DNOW,
-			  "r" (x));
-}
-
 #endif /* __ASM_X86_64_PROCESSOR_H */
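Finally, the prefetchw() comment in the patch is terse, so the pattern behind spin_lock_prefetch() deserves spelling out: the atomic that acquires a lock must hold the lock's cache line exclusively, and fetching it with plain read intent would bring it in Shared only to upgrade it again on the write. Prefetching with write intent skips that extra coherency transition. A hypothetical caller (the struct and its fields are invented for illustration; the spinlock calls are the stock kernel API):

	#include <linux/spinlock.h>

	struct stats {			/* hypothetical, for illustration */
		spinlock_t	lock;
		long		total;
	};

	static void stats_add(struct stats *s, long delta)
	{
		spin_lock_prefetch(&s->lock);	/* expands to prefetchw(&s->lock) */
		delta *= 2;			/* independent work overlaps the fetch */
		spin_lock(&s->lock);
		s->total += delta;
		spin_unlock(&s->lock);
	}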