Diffstat (limited to 'include/asm-parisc/atomic.h')
-rw-r--r--  include/asm-parisc/atomic.h | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/include/asm-parisc/atomic.h b/include/asm-parisc/atomic.h
index 48bf9b8ab8ff..7d57d34fcca8 100644
--- a/include/asm-parisc/atomic.h
+++ b/include/asm-parisc/atomic.h
@@ -58,7 +58,7 @@ extern void __xchg_called_with_bad_pointer(void);
 /* __xchg32/64 defined in arch/parisc/lib/bitops.c */
 extern unsigned long __xchg8(char, char *);
 extern unsigned long __xchg32(int, int *);
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 extern unsigned long __xchg64(unsigned long, unsigned long *);
 #endif
 
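Why the rename matters: __LP64__ is predefined by the compiler whenever it targets an LP64 data model, whereas CONFIG_64BIT is generated from the kernel's Kconfig selection, so it is the symbol that actually tracks how the kernel was configured. A small userspace sketch (illustrative only, not kernel code) shows where __LP64__ comes from; build it with gcc -m64 and then gcc -m32 to watch the branch flip:

#include <stdio.h>

int main(void)
{
	/* __LP64__ is set by the compiler, not by any config file */
#ifdef __LP64__
	printf("LP64 ABI: sizeof(long) = %zu\n", sizeof(long));
#else
	printf("32-bit ABI: sizeof(long) = %zu\n", sizeof(long));
#endif
	return 0;
}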
@@ -67,7 +67,7 @@ static __inline__ unsigned long
 __xchg(unsigned long x, __volatile__ void * ptr, int size)
 {
 	switch(size) {
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	case 8: return __xchg64(x,(unsigned long *) ptr);
 #endif
 	case 4: return __xchg32((int) x, (int *) ptr);
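The switch above is resolved at compile time: callers reach __xchg() through a wrapper macro that passes sizeof(*ptr) as the size argument, and the __xchg_called_with_bad_pointer() declaration visible in the first hunk's context line is deliberately never defined, so an unsupported size survives to link time and fails there. A hedged sketch of the same dispatch pattern in plain C (the my_* names are hypothetical stand-ins, and these helpers are not actually atomic):

/* Hypothetical, non-atomic stand-ins for __xchg32/__xchg64. */
static unsigned long my_xchg32(int x, int *ptr)
{
	int old = *ptr;	/* the real helper does this atomically */
	*ptr = x;
	return (unsigned long)old;
}

static unsigned long my_xchg64(unsigned long x, unsigned long *ptr)
{
	unsigned long old = *ptr;
	*ptr = x;
	return old;
}

/* Same shape as __xchg(): size is sizeof(*ptr), a compile-time
 * constant, so the switch folds down to a single direct call. */
static inline unsigned long my_xchg(unsigned long x, void *ptr, int size)
{
	switch (size) {
	case 8: return my_xchg64(x, (unsigned long *)ptr);
	case 4: return my_xchg32((int)x, (int *)ptr);
	}
	return x;	/* real code calls the undefined extern here */
}

#define MY_XCHG(ptr, x) \
	((__typeof__(*(ptr))) my_xchg((unsigned long)(x), (ptr), sizeof(*(ptr))))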
@@ -81,7 +81,7 @@ __xchg(unsigned long x, __volatile__ void * ptr, int size)
 /*
 ** REVISIT - Abandoned use of LDCW in xchg() for now:
 ** o need to test sizeof(*ptr) to avoid clearing adjacent bytes
-** o and while we are at it, could __LP64__ code use LDCD too?
+** o and while we are at it, could CONFIG_64BIT code use LDCD too?
 **
 **	if (__builtin_constant_p(x) && (x == NULL))
 **	if (((unsigned long)p & 0xf) == 0)
@@ -105,7 +105,7 @@ static __inline__ unsigned long
 __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new_, int size)
 {
 	switch(size) {
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	case 8: return __cmpxchg_u64((unsigned long *)ptr, old, new_);
 #endif
 	case 4: return __cmpxchg_u32((unsigned int *)ptr, (unsigned int) old, (unsigned int) new_);
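__cmpxchg() uses the identical width dispatch for compare-and-swap. For context, here is the canonical retry loop that callers build on top of such a primitive; this sketch uses GCC's __sync_val_compare_and_swap() builtin as a portable stand-in for __cmpxchg_u32(), and atomic_add_cas is a hypothetical name:

/* Lock-free add: recompute and retry until no other CPU raced us. */
static int atomic_add_cas(int *p, int delta)
{
	int old, new_;

	do {
		old  = *p;
		new_ = old + delta;
		/* stand-in for __cmpxchg_u32(p, old, new_) */
	} while (__sync_val_compare_and_swap(p, old, new_) != old);

	return new_;
}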
@@ -218,7 +218,7 @@ static __inline__ int atomic_read(const atomic_t *v)
 #define smp_mb__before_atomic_inc()	smp_mb()
 #define smp_mb__after_atomic_inc()	smp_mb()
 
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 
 typedef struct { volatile s64 counter; } atomic64_t;
 
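The atomic64_t block exists only under CONFIG_64BIT: a 32-bit PA-RISC kernel has no single instruction that moves 8 bytes, so it cannot even read the counter tear-free. A simplified sketch of the accessors around the volatile counter (the my_* names are hypothetical; the real parisc implementation additionally serializes the arithmetic ops with a hashed array of spinlocks, since ldcw/ldcd are the architecture's only atomic memory primitives):

#include <stdint.h>

typedef struct { volatile int64_t counter; } my_atomic64_t;

/* On a 64-bit machine an aligned 8-byte load or store completes in
 * one access, so read and set need no lock; add/sub still do. */
static inline int64_t my_atomic64_read(const my_atomic64_t *v)
{
	return v->counter;
}

static inline void my_atomic64_set(my_atomic64_t *v, int64_t i)
{
	v->counter = i;
}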
@@ -270,7 +270,7 @@ atomic64_read(const atomic64_t *v)
 #define atomic64_dec_and_test(v)	(atomic64_dec_return(v) == 0)
 #define atomic64_sub_and_test(i,v)	(atomic64_sub_return((i),(v)) == 0)
 
-#endif /* __LP64__ */
+#endif /* CONFIG_64BIT */
 
 #include <asm-generic/atomic.h>
 
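The trailing #include pulls in the generic atomic_long_t layer, which maps long-sized atomics onto atomic64_t on 64-bit builds and onto atomic_t on 32-bit ones; that downstream dependency is one reason the guard around the atomic64_t block has to be the configuration symbol rather than a compiler macro. Roughly (paraphrased from the asm-generic/atomic.h of this era, not quoted verbatim):

#if BITS_PER_LONG == 64
typedef atomic64_t atomic_long_t;
#define atomic_long_read(l)	atomic64_read((atomic64_t *)(l))
#else
typedef atomic_t atomic_long_t;
#define atomic_long_read(l)	atomic_read((atomic_t *)(l))
#endif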