path: root/include/asm-arm/system.h
author	Catalin Marinas <catalin.marinas@arm.com>	2007-02-05 08:48:02 -0500
committer	Russell King <rmk+kernel@arm.linux.org.uk>	2007-02-08 09:49:31 -0500
commit	56660faf9e8088542e85207df45fb9c5f4dd3909 (patch)
tree	6bfebe63a5510af52ec39ea55810239080a00fb0 /include/asm-arm/system.h
parent	e6a5d66f58431c66c79e236f722a5ad7dd959ef3 (diff)
[ARM] 4133/1: Add ISB after changes to CP15 registers
According to the ARM ARM, changes to the CP15 registers are only guaranteed to be visible after an Instruction Synchronization Barrier (ISB). This patch adds the ISB at the end of the set_cr and set_copro_access functions, and also moves them further down in the file, below the isb macro definition.

Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
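To make the ordering hazard concrete, here is a minimal sketch (not part of the patch) of the pattern the added ISB enforces: after an MCR to the CP15 control register, instructions already in the pipeline may still execute under the old register state until an ISB is issued. The enable_alignment_check() helper is hypothetical; CR_A (alignment-fault enable, bit 1), get_cr(), and the ISB encoding follow this header.

/*
 * Illustrative only: enable alignment faulting via the CP15 control
 * register, then ISB so following instructions see the new setting.
 */
#define CR_A	(1 << 1)	/* alignment abort enable */

static inline unsigned int get_cr(void)
{
	unsigned int val;
	asm volatile("mrc p15, 0, %0, c1, c0, 0	@ get CR"
	  : "=r" (val) : : "cc");
	return val;
}

static inline void enable_alignment_check(void)
{
	asm volatile("mcr p15, 0, %0, c1, c0, 0	@ set CR"
	  : : "r" (get_cr() | CR_A) : "cc");
	/* Without this ISB, an unaligned access issued immediately
	 * after the MCR could still escape the new check. */
	asm volatile("mcr p15, 0, %0, c7, c5, 4" : : "r" (0) : "memory");
}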
Diffstat (limited to 'include/asm-arm/system.h')
-rw-r--r--	include/asm-arm/system.h	70
1 file changed, 36 insertions(+), 34 deletions(-)
diff --git a/include/asm-arm/system.h b/include/asm-arm/system.h
index e5aa372beb94..f4386906b200 100644
--- a/include/asm-arm/system.h
+++ b/include/asm-arm/system.h
@@ -140,6 +140,40 @@ static inline int cpu_is_xsc3(void)
 #define cpu_is_xscale()	1
 #endif
 
+#define UDBG_UNDEFINED	(1 << 0)
+#define UDBG_SYSCALL	(1 << 1)
+#define UDBG_BADABORT	(1 << 2)
+#define UDBG_SEGV	(1 << 3)
+#define UDBG_BUS	(1 << 4)
+
+extern unsigned int user_debug;
+
+#if __LINUX_ARM_ARCH__ >= 4
+#define vectors_high()	(cr_alignment & CR_V)
+#else
+#define vectors_high()	(0)
+#endif
+
+#if __LINUX_ARM_ARCH__ >= 6
+#define isb() __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \
+				    : : "r" (0) : "memory")
+#define dsb() __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
+				    : : "r" (0) : "memory")
+#define dmb() __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 5" \
+				    : : "r" (0) : "memory")
+#else
+#define isb() __asm__ __volatile__ ("" : : : "memory")
+#define dsb() __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
+				    : : "r" (0) : "memory")
+#define dmb() __asm__ __volatile__ ("" : : : "memory")
+#endif
+#define mb() dmb()
+#define rmb() mb()
+#define wmb() mb()
+#define read_barrier_depends() do { } while(0)
+#define set_mb(var, value)	do { var = value; mb(); } while (0)
+#define nop() __asm__ __volatile__("mov\tr0,r0\t@ nop\n\t");
+
 extern unsigned long cr_no_alignment;	/* defined in entry-armv.S */
 extern unsigned long cr_alignment;	/* defined in entry-armv.S */
 
@@ -154,6 +188,7 @@ static inline void set_cr(unsigned int val)
 {
 	asm volatile("mcr p15, 0, %0, c1, c0, 0	@ set CR"
 	  : : "r" (val) : "cc");
+	isb();
 }
 
 #ifndef CONFIG_SMP
@@ -176,42 +211,9 @@ static inline void set_copro_access(unsigned int val)
 {
 	asm volatile("mcr p15, 0, %0, c1, c0, 2 @ set copro access"
 	  : : "r" (val) : "cc");
+	isb();
 }
 
-#define UDBG_UNDEFINED	(1 << 0)
-#define UDBG_SYSCALL	(1 << 1)
-#define UDBG_BADABORT	(1 << 2)
-#define UDBG_SEGV	(1 << 3)
-#define UDBG_BUS	(1 << 4)
-
-extern unsigned int user_debug;
-
-#if __LINUX_ARM_ARCH__ >= 4
-#define vectors_high()	(cr_alignment & CR_V)
-#else
-#define vectors_high()	(0)
-#endif
-
-#if __LINUX_ARM_ARCH__ >= 6
-#define isb() __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \
-				    : : "r" (0) : "memory")
-#define dsb() __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
-				    : : "r" (0) : "memory")
-#define dmb() __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 5" \
-				    : : "r" (0) : "memory")
-#else
-#define isb() __asm__ __volatile__ ("" : : : "memory")
-#define dsb() __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
-				    : : "r" (0) : "memory")
-#define dmb() __asm__ __volatile__ ("" : : : "memory")
-#endif
-#define mb() dmb()
-#define rmb() mb()
-#define wmb() mb()
-#define read_barrier_depends() do { } while(0)
-#define set_mb(var, value)	do { var = value; mb(); } while (0)
-#define nop() __asm__ __volatile__("mov\tr0,r0\t@ nop\n\t");
-
 /*
  * switch_mm() may do a full cache flush over the context switch,
  * so enable interrupts over the context switch to avoid high
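For context on the barrier macros this patch moves (they are unchanged by the move): wmb() maps to dmb(), which on ARMv6 is the CP15 Data Memory Barrier shown in the first hunk. A hedged sketch of typical use, with a hypothetical ring-buffer producer; the struct and function names are illustrative only:

/*
 * Hypothetical producer publishing data to a concurrent consumer:
 * wmb() orders the payload store before the index store, so the
 * consumer never observes the new index with a stale payload.
 */
struct ring {
	unsigned int slot[16];
	volatile unsigned int head;	/* consumer polls this index */
};

static void ring_publish(struct ring *r, unsigned int data)
{
	r->slot[r->head % 16] = data;	/* 1. write the payload */
	wmb();				/* 2. order it before ... */
	r->head++;			/* 3. ... the publish */
}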