author    Anton Blanchard <anton@samba.org>    2005-09-05 23:05:58 -0400
committer Paul Mackerras <paulus@samba.org>    2005-09-06 02:07:53 -0400
commit    b2c0ab17ba751abe13a28508b1ac7e9ca074cd87
tree      08abcde7f48b3393afcae069e3668923ee3e4492
parent    4721e2214b5fd6eca48caea76afb1bad3148930f
[PATCH] ppc64: speedup cmpxchg
cmpxchg has the following code:

	__typeof__(*(ptr)) _o_ = (o);
	__typeof__(*(ptr)) _n_ = (n);

Unfortunately it makes gcc 4.0 store and load the variables to the
stack. E.g. in atomic_dec_and_test we get:

	stw	r10,112(r1)
	stw	r9,116(r1)
	lwz	r9,112(r1)
	lwz	r0,116(r1)

x86 just casts the values, so do that instead. Also change __xchg*
and __cmpxchg* to take unsigned values, removing a few sign
extensions.

Signed-off-by: Anton Blanchard <anton@samba.org>
Signed-off-by: Paul Mackerras <paulus@samba.org>
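To make the contrast concrete, here is a minimal, self-contained sketch
of the two macro styles, buildable with gcc in ordinary userspace. The
demo_* names and the non-atomic __demo_cmpxchg helper are hypothetical
stand-ins for the kernel's __cmpxchg; only the macro shapes mirror the
before and after of this patch.

	/*
	 * Minimal sketch, not kernel code.  Statement expressions and
	 * __typeof__ are GNU extensions; build with gcc.
	 */
	#include <stdio.h>

	/* Hypothetical, deliberately non-atomic stand-in for the real
	 * __cmpxchg, just to make the sketch self-contained. */
	static unsigned long __demo_cmpxchg(volatile void *ptr,
					    unsigned long old,
					    unsigned long new,
					    unsigned int size)
	{
		volatile unsigned int *p = ptr;
		unsigned int prev = *p;

		if (size == 4 && prev == (unsigned int)old)
			*p = (unsigned int)new;
		return prev;
	}

	/* Old style: typed temporaries.  gcc 4.0 spilled _o_ and _n_
	 * to the stack, producing the stw/lwz pairs quoted above. */
	#define demo_cmpxchg_slow(ptr,o,n)				\
	  ({								\
	     __typeof__(*(ptr)) _o_ = (o);				\
	     __typeof__(*(ptr)) _n_ = (n);				\
	     (__typeof__(*(ptr))) __demo_cmpxchg((ptr),			\
				(unsigned long)_o_,			\
				(unsigned long)_n_,			\
				sizeof(*(ptr)));			\
	  })

	/* New style, as in this patch: plain casts, no temporaries,
	 * so there is nothing for the compiler to spill. */
	#define demo_cmpxchg_fast(ptr,o,n)				\
		((__typeof__(*(ptr)))__demo_cmpxchg((ptr),		\
				(unsigned long)(o),			\
				(unsigned long)(n),			\
				sizeof(*(ptr))))

	int main(void)
	{
		unsigned int v = 1;

		demo_cmpxchg_slow(&v, 1u, 2u);	/* v: 1 -> 2 */
		demo_cmpxchg_fast(&v, 2u, 3u);	/* v: 2 -> 3 */
		printf("v = %u\n", v);		/* prints v = 3 */
		return 0;
	}

Both macros expand to the same call; the only difference is whether the
arguments pass through named temporaries on their way to the cast.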
 include/asm-ppc64/system.h | 19 ++++++++-----------
 1 file changed, 8 insertions(+), 11 deletions(-)
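The signed-to-unsigned parameter change is subtler. A hedged sketch of
why it helps, assuming the ppc64 ELF ABI (this is an illustration, not
code from the patch): lwz zero-extends its 32-bit result into a 64-bit
register, so widening an unsigned int to unsigned long is free, while
widening a signed int needs a sign-extending load or an extra extsw.
The function names below are invented.

	/* Hypothetical illustration only. */
	unsigned long load_signed(volatile int *p)
	{
		return *p;	/* sign-extending: lwa, or lwz + extsw */
	}

	unsigned long load_unsigned(volatile unsigned int *p)
	{
		return *p;	/* lwz alone: already zero-extended */
	}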
diff --git a/include/asm-ppc64/system.h b/include/asm-ppc64/system.h
index b9e1835351e9..c0396428cc3c 100644
--- a/include/asm-ppc64/system.h
+++ b/include/asm-ppc64/system.h
@@ -158,7 +158,7 @@ static inline int __is_processor(unsigned long pv)
  * is more like most of the other architectures.
  */
 static __inline__ unsigned long
-__xchg_u32(volatile int *m, unsigned long val)
+__xchg_u32(volatile unsigned int *m, unsigned long val)
 {
 	unsigned long dummy;
 
@@ -200,7 +200,7 @@ __xchg_u64(volatile long *m, unsigned long val)
 extern void __xchg_called_with_bad_pointer(void);
 
 static __inline__ unsigned long
-__xchg(volatile void *ptr, unsigned long x, int size)
+__xchg(volatile void *ptr, unsigned long x, unsigned int size)
 {
 	switch (size) {
 	case 4:
@@ -223,7 +223,7 @@ __xchg(volatile void *ptr, unsigned long x, int size)
 #define __HAVE_ARCH_CMPXCHG	1
 
 static __inline__ unsigned long
-__cmpxchg_u32(volatile int *p, int old, int new)
+__cmpxchg_u32(volatile unsigned int *p, unsigned long old, unsigned long new)
 {
 	unsigned int prev;
 
@@ -271,7 +271,8 @@ __cmpxchg_u64(volatile long *p, unsigned long old, unsigned long new)
 extern void __cmpxchg_called_with_bad_pointer(void);
 
 static __inline__ unsigned long
-__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
+__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new,
+	  unsigned int size)
 {
 	switch (size) {
 	case 4:
@@ -283,13 +284,9 @@ __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
 	return old;
 }
 
-#define cmpxchg(ptr,o,n)						 \
-  ({									 \
-     __typeof__(*(ptr)) _o_ = (o);					 \
-     __typeof__(*(ptr)) _n_ = (n);					 \
-     (__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_,		 \
-				    (unsigned long)_n_, sizeof(*(ptr))); \
-  })
+#define cmpxchg(ptr,o,n)\
+	((__typeof__(*(ptr)))__cmpxchg((ptr),(unsigned long)(o),\
+	 (unsigned long)(n),sizeof(*(ptr))))
 
 /*
  * We handle most unaligned accesses in hardware. On the other hand