author     Helge Deller <deller@gmx.de>                       2007-01-28 09:09:20 -0500
committer  Kyle McMartin <kyle@athena.road.mcmartin.ca>       2007-02-17 01:17:13 -0500
commit     513e7ecd695a4c0f95b9aa86c03ec9b7d2d09e03 (patch)
tree       244b66b448f3de6bf772828c13955b786a960924 /include/asm-parisc/atomic.h
parent     a8f44e3889b686813926b288bd4e51a0cf17d2c7 (diff)
[PARISC] convert to use CONFIG_64BIT instead of __LP64__
Signed-off-by: Helge Deller <deller@gmx.de>
Signed-off-by: Kyle McMartin <kyle@parisc-linux.org>
Diffstat (limited to 'include/asm-parisc/atomic.h')
-rw-r--r--  include/asm-parisc/atomic.h | 12
1 file changed, 6 insertions(+), 6 deletions(-)
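
For context: CONFIG_64BIT is set by Kconfig for 64-bit kernel configurations and is therefore defined consistently for every kernel source file in the build, whereas __LP64__ is a macro predefined by the compiler for the LP64 ABI. A minimal caller sketch, assuming the xchg() wrapper already provided by this header, illustrates how the guarded 8-byte path is only compiled in on CONFIG_64BIT kernels; demo_swap() is a hypothetical example, not part of the patch:

/* Hypothetical sketch in kernel-style C; not from the patch itself. */
#include <asm/atomic.h>	/* provides xchg() and the __xchg() dispatcher changed below */

static unsigned long demo_swap(unsigned long *slot, unsigned long val)
{
	/*
	 * On a CONFIG_64BIT kernel sizeof(*slot) is 8, so xchg() dispatches
	 * to __xchg64(); on a 32-bit kernel it is 4 and __xchg32() is used.
	 */
	return xchg(slot, val);
}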
diff --git a/include/asm-parisc/atomic.h b/include/asm-parisc/atomic.h
index 48bf9b8ab8ff..7d57d34fcca8 100644
--- a/include/asm-parisc/atomic.h
+++ b/include/asm-parisc/atomic.h
@@ -58,7 +58,7 @@ extern void __xchg_called_with_bad_pointer(void);
 /* __xchg32/64 defined in arch/parisc/lib/bitops.c */
 extern unsigned long __xchg8(char, char *);
 extern unsigned long __xchg32(int, int *);
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 extern unsigned long __xchg64(unsigned long, unsigned long *);
 #endif
 
@@ -67,7 +67,7 @@ static __inline__ unsigned long
 __xchg(unsigned long x, __volatile__ void * ptr, int size)
 {
 	switch(size) {
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	case 8: return __xchg64(x,(unsigned long *) ptr);
 #endif
 	case 4: return __xchg32((int) x, (int *) ptr);
@@ -81,7 +81,7 @@ __xchg(unsigned long x, __volatile__ void * ptr, int size)
 /*
 ** REVISIT - Abandoned use of LDCW in xchg() for now:
 ** o need to test sizeof(*ptr) to avoid clearing adjacent bytes
-** o and while we are at it, could __LP64__ code use LDCD too?
+** o and while we are at it, could CONFIG_64BIT code use LDCD too?
 **
 ** if (__builtin_constant_p(x) && (x == NULL))
 ** if (((unsigned long)p & 0xf) == 0)
@@ -105,7 +105,7 @@ static __inline__ unsigned long
 __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new_, int size)
 {
 	switch(size) {
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 	case 8: return __cmpxchg_u64((unsigned long *)ptr, old, new_);
 #endif
 	case 4: return __cmpxchg_u32((unsigned int *)ptr, (unsigned int) old, (unsigned int) new_);
@@ -218,7 +218,7 @@ static __inline__ int atomic_read(const atomic_t *v)
 #define smp_mb__before_atomic_inc()	smp_mb()
 #define smp_mb__after_atomic_inc()	smp_mb()
 
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
 
 typedef struct { volatile s64 counter; } atomic64_t;
 
@@ -270,7 +270,7 @@ atomic64_read(const atomic64_t *v)
 #define atomic64_dec_and_test(v)	(atomic64_dec_return(v) == 0)
 #define atomic64_sub_and_test(i,v)	(atomic64_sub_return((i),(v)) == 0)
 
-#endif /* __LP64__ */
+#endif /* CONFIG_64BIT */
 
 #include <asm-generic/atomic.h>
 