Diffstat (limited to 'include/asm-arm/system.h')

 include/asm-arm/system.h | 12 +++++++++---
 1 file changed, 9 insertions(+), 3 deletions(-)
diff --git a/include/asm-arm/system.h b/include/asm-arm/system.h
index 3d0d2860b6db..cdf49f442fd2 100644
--- a/include/asm-arm/system.h
+++ b/include/asm-arm/system.h
@@ -290,7 +290,6 @@ do { \
 })
 
 #ifdef CONFIG_SMP
-#error SMP not supported
 
 #define smp_mb()		mb()
 #define smp_rmb()		rmb()
@@ -304,6 +303,8 @@ do { \
 #define smp_wmb()		barrier()
 #define smp_read_barrier_depends()	do { } while(0)
 
+#endif /* CONFIG_SMP */
+
 #if defined(CONFIG_CPU_SA1100) || defined(CONFIG_CPU_SA110)
 /*
  * On the StrongARM, "swp" is terminally broken since it bypasses the
@@ -316,9 +317,16 @@ do { \
  *
  * We choose (1) since its the "easiest" to achieve here and is not
  * dependent on the processor type.
+ *
+ * NOTE that this solution won't work on an SMP system, so explcitly
+ * forbid it here.
  */
+#ifdef CONFIG_SMP
+#error SMP is not supported on SA1100/SA110
+#else
 #define swp_is_buggy
 #endif
+#endif
 
 static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
 {
@@ -361,8 +369,6 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size
 	return ret;
 }
 
-#endif /* CONFIG_SMP */
-
 #endif /* __ASSEMBLY__ */
 
 #define arch_align_stack(x) (x)
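
The NOTE added above refers to option (1) of the existing comment: when swp_is_buggy is defined, the exchange is emulated with a plain load/store pair while local interrupts are disabled. The sketch below is a simplified illustration of that fallback, not the exact __xchg() switch from this header; the function name xchg_u32_irqsafe is made up for the example, and it assumes the kernel's local_irq_save()/local_irq_restore() helpers as declared via <asm/system.h> in this era.

/*
 * Simplified sketch of the interrupt-disabling exchange used when
 * swp_is_buggy is defined.  Disabling interrupts only makes the
 * load/store pair atomic with respect to this CPU; it does nothing
 * to stop another CPU from writing *ptr in between.
 */
static inline unsigned long xchg_u32_irqsafe(volatile unsigned long *ptr,
					     unsigned long new)
{
	unsigned long flags, old;

	local_irq_save(flags);	/* no interrupt/preemption on this CPU...   */
	old = *ptr;		/* ...but another CPU could still modify    */
	*ptr = new;		/* *ptr between these two accesses           */
	local_irq_restore(flags);

	return old;
}

That per-CPU-only atomicity is exactly why this solution cannot work on SMP, and why the patch replaces the blanket "#error SMP not supported" with an #error that fires only when CONFIG_SMP is combined with the SA1100/SA110 swp workaround.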
