From 365bf8ac6f5b3d3187cb39444fa87a5b38683ff4 Mon Sep 17 00:00:00 2001
From: Nicolas Pitre
Date: Wed, 8 Feb 2006 21:19:38 +0000
Subject: [ARM] 3311/1: clean up include/asm-arm/mutex.h

Patch from Nicolas Pitre

Since:

	if (unlikely(__res || __ex_flag))

produces worse code on ARM than:

	if (unlikely(__res | __ex_flag))

I therefore made it more explicit:

	__res |= __ex_flag;
	if (unlikely(__res != 0))

so it is not seen as a typo again.

Also made everything static inline rather than macros for better
readability (both produce the same code after all).

And finally added the missing \t to the multi-line assembly code.

Signed-off-by: Nicolas Pitre
Acked-by: Ingo Molnar
Signed-off-by: Andrew Morton
Signed-off-by: Russell King
---
 include/asm-arm/mutex.h | 131 ++++++++++++++++++++++++------------------------
 1 file changed, 65 insertions(+), 66 deletions(-)

diff --git a/include/asm-arm/mutex.h b/include/asm-arm/mutex.h
index 6caa59f1f595..cb29d84e690d 100644
--- a/include/asm-arm/mutex.h
+++ b/include/asm-arm/mutex.h
@@ -23,72 +23,71 @@
  * simply bail out immediately through the slow path where the lock will be
  * reattempted until it succeeds.
  */
-#define __mutex_fastpath_lock(count, fail_fn)				\
-do {									\
-	int __ex_flag, __res;						\
-									\
-	typecheck(atomic_t *, count);					\
-	typecheck_fn(fastcall void (*)(atomic_t *), fail_fn);		\
-									\
-	__asm__ (							\
-		"ldrex	%0, [%2]	\n"				\
-		"sub	%0, %0, #1	\n"				\
-		"strex	%1, %0, [%2]	\n"				\
-									\
-		: "=&r" (__res), "=&r" (__ex_flag)			\
-		: "r" (&(count)->counter)				\
-		: "cc","memory" );					\
-									\
-	if (unlikely(__res || __ex_flag))				\
-		fail_fn(count);						\
-} while (0)
-
-#define __mutex_fastpath_lock_retval(count, fail_fn)			\
-({									\
-	int __ex_flag, __res;						\
-									\
-	typecheck(atomic_t *, count);					\
-	typecheck_fn(fastcall int (*)(atomic_t *), fail_fn);		\
-									\
-	__asm__ (							\
-		"ldrex	%0, [%2]	\n"				\
-		"sub	%0, %0, #1	\n"				\
-		"strex	%1, %0, [%2]	\n"				\
-									\
-		: "=&r" (__res), "=&r" (__ex_flag)			\
-		: "r" (&(count)->counter)				\
-		: "cc","memory" );					\
-									\
-	__res |= __ex_flag;						\
-	if (unlikely(__res != 0))					\
-		__res = fail_fn(count);					\
-	__res;								\
-})
+static inline void
+__mutex_fastpath_lock(atomic_t *count, fastcall void (*fail_fn)(atomic_t *))
+{
+	int __ex_flag, __res;
+
+	__asm__ (
+
+		"ldrex	%0, [%2]	\n\t"
+		"sub	%0, %0, #1	\n\t"
+		"strex	%1, %0, [%2]	"
+
+		: "=&r" (__res), "=&r" (__ex_flag)
+		: "r" (&(count)->counter)
+		: "cc","memory" );
+
+	__res |= __ex_flag;
+	if (unlikely(__res != 0))
+		fail_fn(count);
+}
+
+static inline int
+__mutex_fastpath_lock_retval(atomic_t *count, fastcall int (*fail_fn)(atomic_t *))
+{
+	int __ex_flag, __res;
+
+	__asm__ (
+
+		"ldrex	%0, [%2]	\n\t"
+		"sub	%0, %0, #1	\n\t"
+		"strex	%1, %0, [%2]	"
+
+		: "=&r" (__res), "=&r" (__ex_flag)
+		: "r" (&(count)->counter)
+		: "cc","memory" );
+
+	__res |= __ex_flag;
+	if (unlikely(__res != 0))
+		__res = fail_fn(count);
+	return __res;
+}
 
 /*
  * Same trick is used for the unlock fast path. However the original value,
  * rather than the result, is used to test for success in order to have
  * better generated assembly.
  */
-#define __mutex_fastpath_unlock(count, fail_fn)			\
-do {									\
-	int __ex_flag, __res, __orig;					\
-									\
-	typecheck(atomic_t *, count);					\
-	typecheck_fn(fastcall void (*)(atomic_t *), fail_fn);		\
-									\
-	__asm__ (							\
-		"ldrex	%0, [%3]	\n"				\
-		"add	%1, %0, #1	\n"				\
-		"strex	%2, %1, [%3]	\n"				\
-									\
-		: "=&r" (__orig), "=&r" (__res), "=&r" (__ex_flag)	\
-		: "r" (&(count)->counter)				\
-		: "cc","memory" );					\
-									\
-	if (unlikely(__orig || __ex_flag))				\
-		fail_fn(count);						\
-} while (0)
+static inline void
+__mutex_fastpath_unlock(atomic_t *count, fastcall void (*fail_fn)(atomic_t *))
+{
+	int __ex_flag, __res, __orig;
+
+	__asm__ (
+
+		"ldrex	%0, [%3]	\n\t"
+		"add	%1, %0, #1	\n\t"
+		"strex	%2, %1, [%3]	"
+
+		: "=&r" (__orig), "=&r" (__res), "=&r" (__ex_flag)
+		: "r" (&(count)->counter)
+		: "cc","memory" );
+
+	__orig |= __ex_flag;
+	if (unlikely(__orig != 0))
+		fail_fn(count);
+}
 
 /*
  * If the unlock was done on a contended lock, or if the unlock simply fails
@@ -110,12 +109,12 @@ __mutex_fastpath_trylock(atomic_t *count, int (*fail_fn)(atomic_t *))
 
 	__asm__ (
 
-		"1: ldrex	%0, [%3]	\n"
-		"subs	%1, %0, #1	\n"
-		"strexeq	%2, %1, [%3]	\n"
-		"movlt	%0, #0	\n"
-		"cmpeq	%2, #0	\n"
-		"bgt	1b	\n"
+		"1: ldrex	%0, [%3]	\n\t"
+		"subs	%1, %0, #1	\n\t"
+		"strexeq	%2, %1, [%3]	\n\t"
+		"movlt	%0, #0	\n\t"
+		"cmpeq	%2, #0	\n\t"
+		"bgt	1b	"
 
 		: "=&r" (__orig), "=&r" (__res), "=&r" (__ex_flag)
 		: "r" (&count->counter)
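
For readers less familiar with the pattern above, here is a minimal
user-space sketch of the same fastpath contract. This is not kernel
code and all names in it are illustrative: C11 atomic_fetch_sub()
stands in for the ldrex/sub/strex sequence, whose possible
exclusive-store failure the kernel folds into the single
"__res |= __ex_flag; if (unlikely(__res != 0))" test discussed above.

#include <stdatomic.h>
#include <stdio.h>

static atomic_int count = 1;	/* 1 = unlocked, <= 0 = locked/contended */

static void slow_path(atomic_int *c)
{
	/* Contended case: a real mutex would block or retry here. */
	printf("slow path taken (count is now %d)\n", atomic_load(c));
}

static void fastpath_lock(atomic_int *c)
{
	/*
	 * atomic_fetch_sub() returns the previous value.  A previous
	 * value other than 1 means the decremented count is nonzero,
	 * i.e. the same condition as "__res != 0" after the OR above.
	 */
	if (atomic_fetch_sub(c, 1) != 1)
		slow_path(c);
}

int main(void)
{
	fastpath_lock(&count);	/* uncontended: fast path only */
	fastpath_lock(&count);	/* already held: falls into slow_path() */
	return 0;
}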