path: root/include/asm-arm/system.h
author		Russell King <rmk@dyn-67.arm.linux.org.uk>	2005-07-26 14:39:31 -0400
committer	Russell King <rmk+kernel@arm.linux.org.uk>	2005-07-26 14:39:31 -0400
commit		9560782f9a68a5de3e72bc3ba71317f373844549 (patch)
tree		fe0329a6ad4cf40247517b88176fea264e841d4e	/include/asm-arm/system.h
parent		6b6a93c6876ea1c530d5d3f68e3678093a27fab0 (diff)
[PATCH] ARM SMP: Use exclusive load/store for __xchg
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
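For context (not part of the commit message): __xchg() is what the generic xchg() macro in this header expands to, so any exchange-based caller picks up the ldrex/strex path on ARMv6 and later. Below is a minimal, illustrative sketch of such a caller, assuming kernel headers of this era; the names my_lock, my_lock_acquire and my_lock_release are hypothetical and not part of this patch.

/*
 * Illustrative only -- not part of this patch.  A minimal test-and-set
 * lock built on xchg(), the kind of caller that now spins on
 * ldrex/strex instead of relying on swp or disabled interrupts.
 */
#include <asm/system.h>		/* xchg() -> __xchg() */
#include <asm/processor.h>	/* cpu_relax() */

static volatile unsigned long my_lock;	/* 0 = free, 1 = held */

static inline void my_lock_acquire(void)
{
	/* Atomically swap in 1; spin while the old value was already 1. */
	while (xchg(&my_lock, 1) != 0)
		cpu_relax();
}

static inline void my_lock_release(void)
{
	/* Atomically publish 0 so another CPU's xchg() can succeed. */
	xchg(&my_lock, 0);
}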
Diffstat (limited to 'include/asm-arm/system.h')
-rw-r--r--	include/asm-arm/system.h	87
1 file changed, 58 insertions(+), 29 deletions(-)
diff --git a/include/asm-arm/system.h b/include/asm-arm/system.h
index 2f44b2044214..3f9c3626a73c 100644
--- a/include/asm-arm/system.h
+++ b/include/asm-arm/system.h
@@ -323,12 +323,8 @@ do { \
  * NOTE that this solution won't work on an SMP system, so explcitly
  * forbid it here.
  */
-#ifdef CONFIG_SMP
-#error SMP is not supported on SA1100/SA110
-#else
 #define swp_is_buggy
 #endif
-#endif
 
 static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
 {
@@ -337,35 +333,68 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
 #ifdef swp_is_buggy
 	unsigned long flags;
 #endif
+#if __LINUX_ARM_ARCH__ >= 6
+	unsigned int tmp;
+#endif
 
 	switch (size) {
-#ifdef swp_is_buggy
+#if __LINUX_ARM_ARCH__ >= 6
 	case 1:
-		local_irq_save(flags);
-		ret = *(volatile unsigned char *)ptr;
-		*(volatile unsigned char *)ptr = x;
-		local_irq_restore(flags);
-		break;
-
-	case 4:
-		local_irq_save(flags);
-		ret = *(volatile unsigned long *)ptr;
-		*(volatile unsigned long *)ptr = x;
-		local_irq_restore(flags);
-		break;
+		asm volatile("@ __xchg1\n"
+		"1:	ldrexb	%0, [%3]\n"
+		"	strexb	%1, %2, [%3]\n"
+		"	teq	%1, #0\n"
+		"	bne	1b"
+			: "=&r" (ret), "=&r" (tmp)
+			: "r" (x), "r" (ptr)
+			: "memory", "cc");
+		break;
+	case 4:
+		asm volatile("@ __xchg4\n"
+		"1:	ldrex	%0, [%3]\n"
+		"	strex	%1, %2, [%3]\n"
+		"	teq	%1, #0\n"
+		"	bne	1b"
+			: "=&r" (ret), "=&r" (tmp)
+			: "r" (x), "r" (ptr)
+			: "memory", "cc");
+		break;
+#elif defined(swp_is_buggy)
+#ifdef CONFIG_SMP
+#error SMP is not supported on this platform
+#endif
+	case 1:
+		local_irq_save(flags);
+		ret = *(volatile unsigned char *)ptr;
+		*(volatile unsigned char *)ptr = x;
+		local_irq_restore(flags);
+		break;
+
+	case 4:
+		local_irq_save(flags);
+		ret = *(volatile unsigned long *)ptr;
+		*(volatile unsigned long *)ptr = x;
+		local_irq_restore(flags);
+		break;
 #else
-	case 1:	__asm__ __volatile__ ("swpb %0, %1, [%2]"
-					: "=&r" (ret)
-					: "r" (x), "r" (ptr)
-					: "memory", "cc");
-		break;
-	case 4:	__asm__ __volatile__ ("swp %0, %1, [%2]"
-					: "=&r" (ret)
-					: "r" (x), "r" (ptr)
-					: "memory", "cc");
-		break;
+	case 1:
+		asm volatile("@ __xchg1\n"
+		"	swpb	%0, %1, [%2]"
+			: "=&r" (ret)
+			: "r" (x), "r" (ptr)
+			: "memory", "cc");
+		break;
+	case 4:
+		asm volatile("@ __xchg4\n"
+		"	swp	%0, %1, [%2]"
+			: "=&r" (ret)
+			: "r" (x), "r" (ptr)
+			: "memory", "cc");
+		break;
 #endif
-	default: __bad_xchg(ptr, size), ret = 0;
+	default:
+		__bad_xchg(ptr, size), ret = 0;
+		break;
 	}
 
 	return ret;
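As a side note (not part of the patch), the retry loop added above can also be observed from userspace: on ARMv6 and later, GCC's __atomic_exchange_n() builtin emits an equivalent ldrexb/strexb/teq/bne sequence. The following standalone sketch is only an analogy for the pattern, not how the kernel implements __xchg():

/*
 * Standalone illustration, not kernel code: __atomic_exchange_n()
 * compiles to a "1: ldrexb / strexb / teq / bne 1b" loop on ARMv6+,
 * the same pattern __xchg() now hand-codes in inline assembly.
 */
#include <stdio.h>

static volatile unsigned char byte_var = 0xaa;

int main(void)
{
	/* Atomically store 0x55 and return the previous contents. */
	unsigned char old = __atomic_exchange_n(&byte_var, 0x55, __ATOMIC_RELAXED);

	printf("old=%#x, new=%#x\n", (unsigned int)old, (unsigned int)byte_var);
	return 0;
}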