author		Linus Torvalds <torvalds@ppc970.osdl.org>	2005-06-21 21:19:10 -0400
committer	Linus Torvalds <torvalds@ppc970.osdl.org>	2005-06-21 21:19:10 -0400
commit		9723d95d1076e9ef394ff26162fb0b47531089b0 (patch)
tree		242504391bf7f373cd5198d32df29868ea2996e2 /include
parent		4a4f8fdba6f5a34ca90f426021e17491a30202da (diff)
parent		7049e6800f40046c384c522a990669024d5f5836 (diff)
Merge rsync://rsync.kernel.org/pub/scm/linux/kernel/git/davem/sparc-2.6
Diffstat (limited to 'include')
-rw-r--r--	include/asm-sparc64/processor.h	34
1 files changed, 34 insertions(+), 0 deletions(-)
diff --git a/include/asm-sparc64/processor.h b/include/asm-sparc64/processor.h
index bc1445b904ef..d0bee2413560 100644
--- a/include/asm-sparc64/processor.h
+++ b/include/asm-sparc64/processor.h
@@ -192,6 +192,40 @@ extern unsigned long get_wchan(struct task_struct *task);
 
 #define cpu_relax()	barrier()
 
+/* Prefetch support.  This is tuned for UltraSPARC-III and later.
+ * UltraSPARC-I will treat these as nops, and UltraSPARC-II has
+ * a shallower prefetch queue than later chips.
+ */
+#define ARCH_HAS_PREFETCH
+#define ARCH_HAS_PREFETCHW
+#define ARCH_HAS_SPINLOCK_PREFETCH
+
+static inline void prefetch(const void *x)
+{
+	/* We do not use the read prefetch mnemonic because that
+	 * prefetches into the prefetch-cache which only is accessible
+	 * by floating point operations in UltraSPARC-III and later.
+	 * By contrast, "#one_write" prefetches into the L2 cache
+	 * in shared state.
+	 */
+	__asm__ __volatile__("prefetch	[%0], #one_write"
+			     : /* no outputs */
+			     : "r" (x));
+}
+
+static inline void prefetchw(const void *x)
+{
+	/* The most optimal prefetch to use for writes is
+	 * "#n_writes".  This brings the cacheline into the
+	 * L2 cache in "owned" state.
+	 */
+	__asm__ __volatile__("prefetch	[%0], #n_writes"
+			     : /* no outputs */
+			     : "r" (x));
+}
+
+#define spin_lock_prefetch(x)	prefetchw(x)
+
 #endif /* !(__ASSEMBLY__) */
 
 #endif /* !(__ASM_SPARC64_PROCESSOR_H) */
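
For reference, a minimal sketch (not part of this commit) of how generic kernel code consumes these hooks through <linux/prefetch.h>, which falls back to empty inlines when ARCH_HAS_PREFETCH is not defined. The struct item type and sum_items() function below are hypothetical names used only for illustration.

#include <linux/prefetch.h>	/* prefetch()/prefetchw(); no-ops without ARCH_HAS_PREFETCH */

/* Hypothetical list node, for illustration only. */
struct item {
	struct item *next;
	int payload;
};

/* Walk a singly linked list, hinting the next node into cache while the
 * current one is being processed.  With this patch, sparc64 emits
 * "prefetch [%reg], #one_write" for each hint; on UltraSPARC-I the
 * instruction is effectively a nop.
 */
static int sum_items(const struct item *head)
{
	const struct item *p;
	int sum = 0;

	for (p = head; p; p = p->next) {
		if (p->next)
			prefetch(p->next);
		sum += p->payload;
	}
	return sum;
}

Mapping spin_lock_prefetch() to prefetchw() follows from the comments above: taking a lock always writes the lock word, so pulling the line into the L2 cache in "owned" state avoids a later upgrade from shared.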