author    Russell King <rmk@dyn-67.arm.linux.org.uk>    2005-06-28 14:22:25 -0400
committer Russell King <rmk+kernel@arm.linux.org.uk>    2005-06-28 14:22:25 -0400
commit    053a7b5b7617a72d7c61b6f84196d1c0f79b9849 (patch)
tree      53859118b6a815278987a5c679575f1b4b37a1c7
parent    4b0ef3b1127776d4a2787d7530ac0c4da82c2331 (diff)
[PATCH] ARM SMP: __xchg support
This enables the existing __xchg implementation to be used on SMP.

Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
-rw-r--r--  include/asm-arm/system.h | 12 +++++++++---
1 file changed, 9 insertions(+), 3 deletions(-)
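For reference, the existing __xchg that this patch enables on SMP is built around the ARM "swp" instruction, which atomically swaps a register with a memory word. A minimal sketch of the 4-byte path, assuming GCC inline assembly (the function name sketch_xchg_swp is illustrative, not part of this patch):

static inline unsigned long sketch_xchg_swp(unsigned long x, volatile void *ptr)
{
	unsigned long ret;

	/* swp atomically loads the old value at [ptr] into ret and
	 * stores x back to [ptr] in a single locked bus operation. */
	__asm__ __volatile__("swp %0, %1, [%2]"
		: "=&r" (ret)
		: "r" (x), "r" (ptr)
		: "memory");

	return ret;
}

The real __xchg also handles the 1-byte case (via swpb) and the swp_is_buggy fallback that the last hunk below guards against on SMP.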
diff --git a/include/asm-arm/system.h b/include/asm-arm/system.h
index 3d0d2860b6db..cdf49f442fd2 100644
--- a/include/asm-arm/system.h
+++ b/include/asm-arm/system.h
@@ -290,7 +290,6 @@ do { \
 })
 
 #ifdef CONFIG_SMP
-#error SMP not supported
 
 #define smp_mb() mb()
 #define smp_rmb() rmb()
@@ -304,6 +303,8 @@ do { \
 #define smp_wmb() barrier()
 #define smp_read_barrier_depends() do { } while(0)
 
+#endif /* CONFIG_SMP */
+
 #if defined(CONFIG_CPU_SA1100) || defined(CONFIG_CPU_SA110)
 /*
  * On the StrongARM, "swp" is terminally broken since it bypasses the
@@ -316,9 +317,16 @@ do { \
  *
  * We choose (1) since its the "easiest" to achieve here and is not
  * dependent on the processor type.
+ *
+ * NOTE that this solution won't work on an SMP system, so explcitly
+ * forbid it here.
  */
+#ifdef CONFIG_SMP
+#error SMP is not supported on SA1100/SA110
+#else
 #define swp_is_buggy
 #endif
+#endif
 
 static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
 {
@@ -361,8 +369,6 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
 	return ret;
 }
 
-#endif /* CONFIG_SMP */
-
 #endif /* __ASSEMBLY__ */
 
 #define arch_align_stack(x) (x)
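The comment in the last hunk chooses option (1) for the buggy-swp StrongARM parts: perform the exchange with interrupts disabled. Roughly, the swp_is_buggy branch of __xchg behaves like the sketch below (a simplified 4-byte-only rendering, assuming the kernel's local_irq_save()/local_irq_restore(); the name sketch_buggy_xchg is hypothetical):

static inline unsigned long sketch_buggy_xchg(unsigned long x,
					      volatile unsigned long *ptr)
{
	unsigned long ret, flags;

	local_irq_save(flags);	/* nothing on this CPU can preempt us now */
	ret = *ptr;		/* read the old value */
	*ptr = x;		/* write the new value */
	local_irq_restore(flags);

	return ret;
}

Disabling interrupts only serializes against the local CPU, which is exactly why the patch adds the #ifdef CONFIG_SMP / #error guard: another processor could still interleave an access between the load and the store.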