diff options
author | Andi Kleen <ak@suse.de> | 2007-10-19 14:35:04 -0400 |
---|---|---|
committer | Thomas Gleixner <tglx@linutronix.de> | 2007-10-19 14:35:04 -0400 |
commit | ab483570a13be2a34c0502b166df8f8b26802103 (patch) | |
tree | 6f9997b9313227b25faf28d67fe6e06d001022cc /include | |
parent | 124d395fd05efb65d00ca23c7bcc86c272bd8813 (diff) |
x86 & generic: change to __builtin_prefetch()
gcc 3.2+ supports __builtin_prefetch, so it's possible to use it on all
architectures. Change the generic fallback in linux/prefetch.h to use it
instead of noping it out. gcc should do the right thing when the
architecture doesn't support prefetching.
Undefine the x86-64 inline assembler version and use the fallback.
Signed-off-by: Andi Kleen <ak@suse.de>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Diffstat (limited to 'include')
-rw-r--r-- | include/asm-x86/processor_64.h | 6 | ||||
-rw-r--r-- | include/linux/prefetch.h | 9 |
2 files changed, 2 insertions, 13 deletions
diff --git a/include/asm-x86/processor_64.h b/include/asm-x86/processor_64.h index f422becbddd9..398c39160fce 100644 --- a/include/asm-x86/processor_64.h +++ b/include/asm-x86/processor_64.h | |||
@@ -390,12 +390,6 @@ static inline void sync_core(void) | |||
390 | asm volatile("cpuid" : "=a" (tmp) : "0" (1) : "ebx","ecx","edx","memory"); | 390 | asm volatile("cpuid" : "=a" (tmp) : "0" (1) : "ebx","ecx","edx","memory"); |
391 | } | 391 | } |
392 | 392 | ||
393 | #define ARCH_HAS_PREFETCH | ||
394 | static inline void prefetch(void *x) | ||
395 | { | ||
396 | asm volatile("prefetcht0 (%0)" :: "r" (x)); | ||
397 | } | ||
398 | |||
399 | #define ARCH_HAS_PREFETCHW 1 | 393 | #define ARCH_HAS_PREFETCHW 1 |
400 | static inline void prefetchw(void *x) | 394 | static inline void prefetchw(void *x) |
401 | { | 395 | { |
diff --git a/include/linux/prefetch.h b/include/linux/prefetch.h index 1adfe668d031..af7c36a5a521 100644 --- a/include/linux/prefetch.h +++ b/include/linux/prefetch.h | |||
@@ -34,17 +34,12 @@ | |||
34 | 34 | ||
35 | */ | 35 | */ |
36 | 36 | ||
37 | /* | ||
38 | * These cannot be do{}while(0) macros. See the mental gymnastics in | ||
39 | * the loop macro. | ||
40 | */ | ||
41 | |||
42 | #ifndef ARCH_HAS_PREFETCH | 37 | #ifndef ARCH_HAS_PREFETCH |
43 | static inline void prefetch(const void *x) {;} | 38 | #define prefetch(x) __builtin_prefetch(x) |
44 | #endif | 39 | #endif |
45 | 40 | ||
46 | #ifndef ARCH_HAS_PREFETCHW | 41 | #ifndef ARCH_HAS_PREFETCHW |
47 | static inline void prefetchw(const void *x) {;} | 42 | #define prefetchw(x) __builtin_prefetch(x,1) |
48 | #endif | 43 | #endif |
49 | 44 | ||
50 | #ifndef ARCH_HAS_SPINLOCK_PREFETCH | 45 | #ifndef ARCH_HAS_SPINLOCK_PREFETCH |