author		Paul Mundt <lethal@linux-sh.org>	2006-09-27 01:57:44 -0400
committer	Paul Mundt <lethal@linux-sh.org>	2006-09-27 01:57:44 -0400
commit		298476220d1f793ca0ac6c9e5dc817e1ad3e9851 (patch)
tree		59cff744ad1837844cb7a5a43a0623d39058fb44 /include/asm-sh/system.h
parent		749cf486920bf53f16e6a6889d9635a91ffb6c82 (diff)
sh: Add control register barriers.
Currently when making changes to control registers, we typically need
some time for changes to take effect (8 nops, generally). However, for
sh4a we simply need to do an icbi.

This is a simple patch for implementing a general-purpose ctrl_barrier()
which functions as a control register write barrier. There's some
additional documentation in the patch itself, but it's pretty
self-explanatory.

There were also some places where we were not doing the barrier, which
didn't seem to have any adverse effects on legacy parts, but certainly
did on sh4a. It's safer to have the barrier in place for legacy parts
as well in these cases, though this does make flush_tlb_all() more
expensive (by an order of 8 nops). We can ifdef around the
flush_tlb_all() case for now if it's clear that all legacy parts won't
have a problem with this.

Signed-off-by: Paul Mundt <lethal@linux-sh.org>
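For context, a minimal sketch of the pattern the message describes for
flush_tlb_all(): write a control register, then let ctrl_barrier() make
the write take effect. MMUCR, MMUCR_TI, ctrl_inl() and ctrl_outl() are
assumed to come from the era's asm-sh headers; treat this as an
illustration of the idiom, not the patched kernel code itself.

#include <asm/io.h>		/* ctrl_inl()/ctrl_outl(), assumed location */
#include <asm/mmu_context.h>	/* MMUCR, MMUCR_TI, assumed location */
#include <asm/system.h>		/* ctrl_barrier() added by this patch */

/* Illustrative flush_tlb_all()-style sequence: the MMUCR write only
 * takes effect once ctrl_barrier() has run (8 nops on legacy parts,
 * a single icbi on sh4a). */
static inline void demo_flush_tlb_all(void)
{
	unsigned long status = ctrl_inl(MMUCR);

	status |= MMUCR_TI;		/* request a full TLB invalidate */
	ctrl_outl(status, MMUCR);	/* control register write... */
	ctrl_barrier();			/* ...guaranteed effective here */
}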
Diffstat (limited to 'include/asm-sh/system.h')
-rw-r--r--	include/asm-sh/system.h	43
1 file changed, 34 insertions(+), 9 deletions(-)
diff --git a/include/asm-sh/system.h b/include/asm-sh/system.h
index eb4902ed920a..1630a5411e5f 100644
--- a/include/asm-sh/system.h
+++ b/include/asm-sh/system.h
@@ -67,8 +67,17 @@ static inline void sched_cacheflush(void)
 {
 }
 
-#define nop() __asm__ __volatile__ ("nop")
-
+#ifdef CONFIG_CPU_SH4A
+#define __icbi()			\
+{					\
+	unsigned long __addr;		\
+	__addr = 0xa8000000;		\
+	__asm__ __volatile__(		\
+		"icbi %0\n\t"		\
+		: /* no output */	\
+		: "m" (__m(__addr)));	\
+}
+#endif
 
 #define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
 
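A side note on why a fixed address works here: on sh4a, icbi invalidates
an instruction cache block and serializes the pipeline as a side effect,
so any valid P2 (uncached) address will do; 0xa8000000 is simply a
known-good one. A minimal standalone sketch of the same idea, using the
register-operand form of the instruction rather than the header's
"m"-constraint form:

/* Sketch: serialize the pipeline with icbi on a fixed P2 address.
 * The cache block contents are irrelevant; the serializing side
 * effect is the point.  Equivalent in spirit to __icbi() above; a
 * "memory" clobber is added here for safety in standalone use. */
static inline void demo_icbi_barrier(void)
{
	unsigned long addr = 0xa8000000;

	__asm__ __volatile__("icbi @%0" : : "r" (addr) : "memory");
}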
@@ -84,15 +93,31 @@ static __inline__ unsigned long tas(volatile int *m)
 
 extern void __xchg_called_with_bad_pointer(void);
 
+/*
+ * A brief note on ctrl_barrier(), the control register write barrier.
+ *
+ * Legacy SH cores typically require a sequence of 8 nops after
+ * modification of a control register in order for the changes to take
+ * effect. On newer cores (like the sh4a and sh5) this is accomplished
+ * with icbi.
+ *
+ * Also note that on sh4a in the icbi case we can forego a synco for the
+ * write barrier, as it's not necessary for control registers.
+ *
+ * Historically we have only done this type of barrier for the MMUCR, but
+ * it's also necessary for the CCR, so we make it generic here instead.
+ */
 #ifdef CONFIG_CPU_SH4A
 #define mb()	__asm__ __volatile__ ("synco": : :"memory")
 #define rmb()	mb()
 #define wmb()	__asm__ __volatile__ ("synco": : :"memory")
+#define ctrl_barrier()	__icbi()
 #define read_barrier_depends()	do { } while(0)
 #else
 #define mb()	__asm__ __volatile__ ("": : :"memory")
 #define rmb()	mb()
 #define wmb()	__asm__ __volatile__ ("": : :"memory")
+#define ctrl_barrier()	__asm__ __volatile__ ("nop;nop;nop;nop;nop;nop;nop;nop")
 #define read_barrier_depends()	do { } while(0)
 #endif
 
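The comment's CCR remark is worth making concrete: cache configuration
writes now get the same treatment as MMUCR. A hedged sketch, assuming
the era's CCR definition from asm/cpu/cache.h and the ctrl_outl()
helper (the value written is a placeholder, not a meaningful cache
configuration):

#include <asm/io.h>		/* ctrl_outl(), assumed location */
#include <asm/cpu/cache.h>	/* CCR, assumed location */
#include <asm/system.h>		/* ctrl_barrier() */

/* Illustrative CCR update: same write-then-barrier idiom as MMUCR.
 * ctrl_barrier() expands to icbi on sh4a and 8 nops elsewhere. */
static inline void demo_write_ccr(unsigned long ccr_value)
{
	ctrl_outl(ccr_value, CCR);	/* cache control register write */
	ctrl_barrier();			/* make the new setting effective */
}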
@@ -218,8 +243,8 @@ do { \
 #define back_to_P1()				\
 do {						\
 	unsigned long __dummy;			\
+	ctrl_barrier();				\
 	__asm__ __volatile__(			\
-		"nop;nop;nop;nop;nop;nop;nop\n\t" \
 		"mov.l	1f, %0\n\t"		\
 		"jmp	@%0\n\t"		\
 		" nop\n\t"			\