Diffstat (limited to 'include/asm-sh/system.h')
-rw-r--r--	include/asm-sh/system.h | 43 ++++++++++++++++++++++++++++++++++---------
1 file changed, 34 insertions(+), 9 deletions(-)
diff --git a/include/asm-sh/system.h b/include/asm-sh/system.h
index eb4902ed920a..1630a5411e5f 100644
--- a/include/asm-sh/system.h
+++ b/include/asm-sh/system.h
@@ -67,8 +67,17 @@ static inline void sched_cacheflush(void)
 {
 }
 
-#define nop()	__asm__ __volatile__ ("nop")
-
+#ifdef CONFIG_CPU_SH4A
+#define __icbi()			\
+{					\
+	unsigned long __addr;		\
+	__addr = 0xa8000000;		\
+	__asm__ __volatile__(		\
+		"icbi %0\n\t"		\
+		: /* no output */	\
+		: "m" (__m(__addr)));	\
+}
+#endif
 
 #define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
 
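For reference, icbi on SH-4A invalidates the instruction-cache block containing the operand address, and 0xa8000000 sits in the uncached P2 segment, presumably so the invalidate never takes a TLB miss. Below is a minimal stand-alone sketch of the same operation, assuming a register-operand ("r" constraint) form rather than the "m"/__m() operand the macro above uses; it is illustrative only, not the in-tree code.

	/*
	 * Illustrative sketch only: invalidate the I-cache block holding a
	 * fixed P2 (uncached) address on SH-4A.
	 */
	static inline void icbi_p2_sketch(void)
	{
		unsigned long addr = 0xa8000000;	/* P2 segment address */

		__asm__ __volatile__(
			"icbi	@%0"
			: /* no outputs */
			: "r" (addr)
			: "memory");
	}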
@@ -84,15 +93,31 @@ static __inline__ unsigned long tas(volatile int *m)
 
 extern void __xchg_called_with_bad_pointer(void);
 
+/*
+ * A brief note on ctrl_barrier(), the control register write barrier.
+ *
+ * Legacy SH cores typically require a sequence of 8 nops after
+ * modification of a control register in order for the changes to take
+ * effect. On newer cores (like the sh4a and sh5) this is accomplished
+ * with icbi.
+ *
+ * Also note that on sh4a in the icbi case we can forego a synco for the
+ * write barrier, as it's not necessary for control registers.
+ *
+ * Historically we have only done this type of barrier for the MMUCR, but
+ * it's also necessary for the CCR, so we make it generic here instead.
+ */
 #ifdef CONFIG_CPU_SH4A
 #define mb()	__asm__ __volatile__ ("synco": : :"memory")
 #define rmb()	mb()
 #define wmb()	__asm__ __volatile__ ("synco": : :"memory")
+#define ctrl_barrier()	__icbi()
 #define read_barrier_depends()	do { } while(0)
 #else
 #define mb()	__asm__ __volatile__ ("": : :"memory")
 #define rmb()	mb()
 #define wmb()	__asm__ __volatile__ ("": : :"memory")
+#define ctrl_barrier()	__asm__ __volatile__ ("nop;nop;nop;nop;nop;nop;nop;nop")
 #define read_barrier_depends()	do { } while(0)
 #endif
 
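The usage pattern the new comment describes is sketched below, assuming the ctrl_outl() accessor from asm/io.h of that era; the register address is passed in as a parameter because the concrete register and value are illustrative only.

	/*
	 * Hypothetical example: write a control register, then issue
	 * ctrl_barrier() so the change takes effect before dependent code
	 * runs. On SH-4A this expands to __icbi(); on older cores, to 8 nops.
	 */
	static inline void ctrl_reg_write_example(unsigned long reg_addr,
						  unsigned long value)
	{
		ctrl_outl(value, reg_addr);	/* memory-mapped control register write */
		ctrl_barrier();			/* serialize before relying on the new setting */
	}

This is the same pairing the back_to_P1() hunk below switches to, replacing the hand-rolled run of nops with ctrl_barrier().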
@@ -218,8 +243,8 @@ do { \
 #define back_to_P1()				\
 do {						\
 	unsigned long __dummy;			\
+	ctrl_barrier();				\
 	__asm__ __volatile__(			\
-		"nop;nop;nop;nop;nop;nop;nop\n\t"	\
 		"mov.l 1f, %0\n\t"		\
 		"jmp @%0\n\t"			\
 		" nop\n\t"			\