author     Catalin Marinas <catalin.marinas@arm.com>     2007-02-28 06:30:38 -0500
committer  Russell King <rmk+kernel@arm.linux.org.uk>    2007-03-02 06:59:00 -0500
commit     9623b3732d11b0a18d9af3419f680d27ea24b014
tree       9f00b744060872ca3ad1518002f67376d3050d45  /include/asm-arm/system.h
parent     5bfe8cb62127b5eb799f5724764abe914e49860e
[ARM] 4241/1: Define mb() as compiler barrier on a uniprocessor system
Currently, mb() is defined as a DMB operation on ARMv6, even for
UP systems. This patch defines mb() as a compiler barrier only. For
the SMP case, the smp_* variants should be used anyway, and the patch
defines them as DMB.
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
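
For readers less familiar with the barrier split, the following standalone sketch (not part of the patch) shows how the new macros behave from a caller's point of view: the smp_* variants expand to the ARMv6 DMB only when CONFIG_SMP is set and collapse to a plain compiler barrier on a uniprocessor build, which is why the commit message recommends them for inter-CPU ordering. The macro bodies mirror the first hunk below; the producer/consumer, shared_data and flag_ready names are illustrative assumptions, not kernel code.

/*
 * Standalone sketch of the barrier scheme after this patch.  The macro
 * bodies mirror include/asm-arm/system.h; the producer/consumer pair is
 * hypothetical.  Define CONFIG_SMP only when targeting ARMv6
 * (e.g. -march=armv6), since dmb() uses the CP15 data memory barrier.
 */
#define barrier()	__asm__ __volatile__ ("" : : : "memory")

#ifdef CONFIG_SMP
/* ARMv6 CP15 Data Memory Barrier, as defined just above the first hunk */
#define dmb()		__asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 5" \
					      : : "r" (0) : "memory")
#define smp_wmb()	dmb()
#define smp_rmb()	dmb()
#else
#define smp_wmb()	barrier()
#define smp_rmb()	barrier()
#endif

static int shared_data;		/* hypothetical payload written by CPU A */
static int flag_ready;		/* hypothetical flag read by CPU B */

/* CPU A: publish the data, then set the flag. */
void producer(void)
{
	shared_data = 42;
	smp_wmb();		/* DMB on SMP, compiler-only barrier on UP */
	flag_ready = 1;
}

/* CPU B: observe the flag, then read the data. */
int consumer(void)
{
	if (flag_ready) {
		smp_rmb();	/* pairs with the smp_wmb() in producer() */
		return shared_data;
	}
	return -1;
}

On a uniprocessor kernel the compiler barrier is enough for code like this, because a single CPU always observes its own program order; the DMB is only needed once another ARMv6 CPU can observe the stores.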
Diffstat (limited to 'include/asm-arm/system.h')
-rw-r--r--   include/asm-arm/system.h   40
1 file changed, 19 insertions, 21 deletions
diff --git a/include/asm-arm/system.h b/include/asm-arm/system.h
index f4386906b200..f06d8a43fdee 100644
--- a/include/asm-arm/system.h
+++ b/include/asm-arm/system.h
@@ -167,11 +167,25 @@ extern unsigned int user_debug;
 		    : : "r" (0) : "memory")
 #define dmb() __asm__ __volatile__ ("" : : : "memory")
 #endif
-#define mb() dmb()
-#define rmb() mb()
-#define wmb() mb()
-#define read_barrier_depends() do { } while(0)
-#define set_mb(var, value) do { var = value; mb(); } while (0)
+
+#define mb() barrier()
+#define rmb() barrier()
+#define wmb() barrier()
+#define read_barrier_depends() do { } while(0)
+
+#ifdef CONFIG_SMP
+#define smp_mb() dmb()
+#define smp_rmb() dmb()
+#define smp_wmb() dmb()
+#define smp_read_barrier_depends() read_barrier_depends()
+#else
+#define smp_mb() barrier()
+#define smp_rmb() barrier()
+#define smp_wmb() barrier()
+#define smp_read_barrier_depends() read_barrier_depends()
+#endif /* CONFIG_SMP */
+
+#define set_mb(var, value) do { var = value; smp_mb(); } while (0)
 #define nop() __asm__ __volatile__("mov\tr0,r0\t@ nop\n\t");
 
 extern unsigned long cr_no_alignment;	/* defined in entry-armv.S */
@@ -243,22 +257,6 @@ static inline void sched_cacheflush(void)
 {
 }
 
-#ifdef CONFIG_SMP
-
-#define smp_mb() mb()
-#define smp_rmb() rmb()
-#define smp_wmb() wmb()
-#define smp_read_barrier_depends() read_barrier_depends()
-
-#else
-
-#define smp_mb() barrier()
-#define smp_rmb() barrier()
-#define smp_wmb() barrier()
-#define smp_read_barrier_depends() do { } while(0)
-
-#endif /* CONFIG_SMP */
-
 #if defined(CONFIG_CPU_SA1100) || defined(CONFIG_CPU_SA110)
 /*
  * On the StrongARM, "swp" is terminally broken since it bypasses the