author | Andi Kleen <ak@suse.de> | 2006-03-25 10:31:31 -0500
---|---|---
committer | Linus Torvalds <torvalds@g5.osdl.org> | 2006-03-25 12:14:38 -0500
commit | ba22f13563de5773701fc318ccaaa37b1fb6d294 (patch) |
tree | e29ddeaca4c9f7b1b5ef08804980600d0e089f5b /include/asm-x86_64/io.h |
parent | 9d95dd849ccc43c4b21504e1829b5bed68cdb1bc (diff) |
[PATCH] x86_64: Remove CONFIG_UNORDERED_IO
It was a failed experiment: all benchmarks done with it, on both AMD
and Intel, showed it to be a loss. That was probably because the CPUs'
store buffers for write-combining traffic weren't large enough.
Signed-off-by: Andi Kleen <ak@suse.de>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
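
For context on the diff below, here is a minimal, illustrative sketch (plain C, not the kernel header verbatim) of the two write flavours the patch chooses between: the ordinary volatile store that the default __writel keeps using, and the non-temporal movnti store that the removed CONFIG_UNORDERED_IO path used. On x86-64, plain stores to uncached MMIO are strongly ordered; movnti stores are weakly ordered and drain through the CPU's write-combining buffers.

```c
#include <stdint.h>

/* Default path (kept by this patch): an ordinary store through a
 * volatile pointer.  On x86-64, stores to uncached MMIO are already
 * strongly ordered, so no extra fencing is needed. */
static inline void mmio_write32_ordered(uint32_t val, volatile void *addr)
{
	*(volatile uint32_t *)addr = val;
}

/* Removed path (CONFIG_UNORDERED_IO): a non-temporal store.  movnti
 * bypasses the cache and is weakly ordered; the data sits in the
 * write-combining buffers until they drain, so callers that need
 * ordering would have to issue sfence themselves. */
static inline void mmio_write32_nt(uint32_t val, volatile void *addr)
{
	asm volatile("movnti %1, %0"
		     : "=m" (*(volatile uint32_t *)addr)
		     : "r" (val)
		     : "memory");
}
```

Per the commit message above, the write-combining variant never paid off in benchmarks, likely because the store buffers were too small for that traffic, so only the ordered store remains.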
Diffstat (limited to 'include/asm-x86_64/io.h')
-rw-r--r-- | include/asm-x86_64/io.h | 18 |
1 file changed, 0 insertions, 18 deletions
```diff
diff --git a/include/asm-x86_64/io.h b/include/asm-x86_64/io.h
index ac12bda3bb1f..cafdfb37f0d8 100644
--- a/include/asm-x86_64/io.h
+++ b/include/asm-x86_64/io.h
@@ -200,23 +200,6 @@ static inline __u64 __readq(const volatile void __iomem *addr)
 
 #define mmiowb()
 
-#ifdef CONFIG_UNORDERED_IO
-static inline void __writel(__u32 val, volatile void __iomem *addr)
-{
-	volatile __u32 __iomem *target = addr;
-	asm volatile("movnti %1,%0"
-		     : "=m" (*target)
-		     : "r" (val) : "memory");
-}
-
-static inline void __writeq(__u64 val, volatile void __iomem *addr)
-{
-	volatile __u64 __iomem *target = addr;
-	asm volatile("movnti %1,%0"
-		     : "=m" (*target)
-		     : "r" (val) : "memory");
-}
-#else
 static inline void __writel(__u32 b, volatile void __iomem *addr)
 {
 	*(__force volatile __u32 *)addr = b;
@@ -225,7 +208,6 @@ static inline void __writeq(__u64 b, volatile void __iomem *addr)
 {
 	*(__force volatile __u64 *)addr = b;
 }
-#endif
 static inline void __writeb(__u8 b, volatile void __iomem *addr)
 {
 	*(__force volatile __u8 *)addr = b;
```
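
A purely hypothetical usage note (the device, register offsets, and helper below are invented for illustration): with the ordered __writel kept by this patch, back-to-back MMIO stores reach the device in program order, so a "program an address, then start" sequence needs no explicit barrier; under the removed non-temporal scheme, an sfence (or a read-back) would have been required between the two writes.

```c
#include <stdint.h>

/* Hypothetical register layout, purely for illustration. */
#define HYP_REG_DMA_ADDR	0x10
#define HYP_REG_CONTROL		0x14
#define HYP_CTRL_START		0x1

static void hyp_start_dma(volatile uint8_t *mmio, uint32_t dma_addr)
{
	/* Ordered stores: the address register is written before the
	 * start bit is set, with no extra fence needed. */
	*(volatile uint32_t *)(mmio + HYP_REG_DMA_ADDR) = dma_addr;
	*(volatile uint32_t *)(mmio + HYP_REG_CONTROL)  = HYP_CTRL_START;

	/* Under the removed CONFIG_UNORDERED_IO scheme, the first store
	 * could still be sitting in a write-combining buffer when the
	 * second arrived, so something like
	 *	asm volatile("sfence" ::: "memory");
	 * would have been needed between them. */
}
```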