aboutsummaryrefslogtreecommitdiffstats
path: root/include/asm-arm/system.h
diff options
context:
space:
mode:
Diffstat (limited to 'include/asm-arm/system.h')
-rw-r--r--include/asm-arm/system.h92
1 file changed, 63 insertions, 29 deletions
diff --git a/include/asm-arm/system.h b/include/asm-arm/system.h
index 2f44b2044214..8efa4ebdcacb 100644
--- a/include/asm-arm/system.h
+++ b/include/asm-arm/system.h
@@ -139,7 +139,12 @@ extern unsigned int user_debug;
139#define vectors_high() (0) 139#define vectors_high() (0)
140#endif 140#endif
141 141
142#if __LINUX_ARM_ARCH__ >= 6
143#define mb() __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 5" \
144 : : "r" (0) : "memory")
145#else
142#define mb() __asm__ __volatile__ ("" : : : "memory") 146#define mb() __asm__ __volatile__ ("" : : : "memory")
147#endif
143#define rmb() mb() 148#define rmb() mb()
144#define wmb() mb() 149#define wmb() mb()
145#define read_barrier_depends() do { } while(0) 150#define read_barrier_depends() do { } while(0)
@@ -323,12 +328,8 @@ do { \
323 * NOTE that this solution won't work on an SMP system, so explicitly 328 * NOTE that this solution won't work on an SMP system, so explicitly
324 * forbid it here. 329 * forbid it here.
325 */ 330 */
326#ifdef CONFIG_SMP
327#error SMP is not supported on SA1100/SA110
328#else
329#define swp_is_buggy 331#define swp_is_buggy
330#endif 332#endif
331#endif
332 333
333static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size) 334static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
334{ 335{
@@ -337,35 +338,68 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size
337#ifdef swp_is_buggy 338#ifdef swp_is_buggy
338 unsigned long flags; 339 unsigned long flags;
339#endif 340#endif
341#if __LINUX_ARM_ARCH__ >= 6
342 unsigned int tmp;
343#endif
340 344
341 switch (size) { 345 switch (size) {
342#ifdef swp_is_buggy 346#if __LINUX_ARM_ARCH__ >= 6
343 case 1: 347 case 1:
344 local_irq_save(flags); 348 asm volatile("@ __xchg1\n"
345 ret = *(volatile unsigned char *)ptr; 349 "1: ldrexb %0, [%3]\n"
346 *(volatile unsigned char *)ptr = x; 350 " strexb %1, %2, [%3]\n"
347 local_irq_restore(flags); 351 " teq %1, #0\n"
348 break; 352 " bne 1b"
349 353 : "=&r" (ret), "=&r" (tmp)
350 case 4: 354 : "r" (x), "r" (ptr)
351 local_irq_save(flags); 355 : "memory", "cc");
352 ret = *(volatile unsigned long *)ptr; 356 break;
353 *(volatile unsigned long *)ptr = x; 357 case 4:
354 local_irq_restore(flags); 358 asm volatile("@ __xchg4\n"
355 break; 359 "1: ldrex %0, [%3]\n"
360 " strex %1, %2, [%3]\n"
361 " teq %1, #0\n"
362 " bne 1b"
363 : "=&r" (ret), "=&r" (tmp)
364 : "r" (x), "r" (ptr)
365 : "memory", "cc");
366 break;
367#elif defined(swp_is_buggy)
368#ifdef CONFIG_SMP
369#error SMP is not supported on this platform
370#endif
371 case 1:
372 local_irq_save(flags);
373 ret = *(volatile unsigned char *)ptr;
374 *(volatile unsigned char *)ptr = x;
375 local_irq_restore(flags);
376 break;
377
378 case 4:
379 local_irq_save(flags);
380 ret = *(volatile unsigned long *)ptr;
381 *(volatile unsigned long *)ptr = x;
382 local_irq_restore(flags);
383 break;
356#else 384#else
357 case 1: __asm__ __volatile__ ("swpb %0, %1, [%2]" 385 case 1:
358 : "=&r" (ret) 386 asm volatile("@ __xchg1\n"
359 : "r" (x), "r" (ptr) 387 " swpb %0, %1, [%2]"
360 : "memory", "cc"); 388 : "=&r" (ret)
361 break; 389 : "r" (x), "r" (ptr)
362 case 4: __asm__ __volatile__ ("swp %0, %1, [%2]" 390 : "memory", "cc");
363 : "=&r" (ret) 391 break;
364 : "r" (x), "r" (ptr) 392 case 4:
365 : "memory", "cc"); 393 asm volatile("@ __xchg4\n"
366 break; 394 " swp %0, %1, [%2]"
395 : "=&r" (ret)
396 : "r" (x), "r" (ptr)
397 : "memory", "cc");
398 break;
367#endif 399#endif
368 default: __bad_xchg(ptr, size), ret = 0; 400 default:
401 __bad_xchg(ptr, size), ret = 0;
402 break;
369 } 403 }
370 404
371 return ret; 405 return ret;