author     Segher Boessenkool <segher@kernel.crashing.org>   2007-08-10 20:15:30 -0400
committer  Paul Mackerras <paulus@samba.org>                 2007-08-16 21:01:58 -0400
commit     9f0cbea0d8cc47801b853d3c61d0e17475b0cc89
tree       b1cd6fe848a7544a834d0073297193069c24c7dd
parent     c6d4267eced79775399f256fbb4adb671e9b597e
[POWERPC] Implement atomic{, 64}_{read, write}() without volatile
Instead, use asm() like all other atomic operations already do.
Also use inline functions instead of macros; this actually
improves code generation (some code becomes a little smaller,
probably because of improved alias information -- just a few
hundred bytes total on a default kernel build, nothing shocking).
Signed-off-by: Segher Boessenkool <segher@kernel.crashing.org>
Signed-off-by: Paul Mackerras <paulus@samba.org>
-rw-r--r--   include/asm-powerpc/atomic.h   34
1 files changed, 28 insertions(+), 6 deletions(-)
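
A note on the asm templates in the diff below: %U1 and %X1 (and %U0/%X0 on the store side) are GCC's PowerPC operand modifiers, which append the "u" (update) or "x" (indexed) suffix when the compiler selects such an addressing mode for the memory operand, so a single template can expand to lwz, lwzu, lwzx or lwzux as needed. A minimal standalone sketch of the same technique, with illustrative names that are not part of the patch:

/*
 * Illustrative sketch only.  A plain (non-volatile) field is read with a
 * single load issued via inline asm; the "m" operand ties the access to
 * exactly this object, which is the alias-information benefit the commit
 * message refers to.  Requires a PowerPC-targeting GCC.
 */
struct counter { int val; };

static inline int counter_read(const struct counter *c)
{
	int t;

	/* %U1/%X1 let GCC pick lwz, lwzu, lwzx or lwzux for this operand */
	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(c->val));

	return t;
}

static inline void counter_set(struct counter *c, int i)
{
	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(c->val) : "r"(i));
}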
diff --git a/include/asm-powerpc/atomic.h b/include/asm-powerpc/atomic.h
index c44810b9d322..f3fc733758f5 100644
--- a/include/asm-powerpc/atomic.h
+++ b/include/asm-powerpc/atomic.h
@@ -5,7 +5,7 @@
  * PowerPC atomic operations
  */
 
-typedef struct { volatile int counter; } atomic_t;
+typedef struct { int counter; } atomic_t;
 
 #ifdef __KERNEL__
 #include <linux/compiler.h>
@@ -15,8 +15,19 @@ typedef struct { volatile int counter; } atomic_t;
 
 #define ATOMIC_INIT(i)		{ (i) }
 
-#define atomic_read(v)		((v)->counter)
-#define atomic_set(v,i)		(((v)->counter) = (i))
+static __inline__ int atomic_read(const atomic_t *v)
+{
+	int t;
+
+	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));
+
+	return t;
+}
+
+static __inline__ void atomic_set(atomic_t *v, int i)
+{
+	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
+}
 
 static __inline__ void atomic_add(int a, atomic_t *v)
 {
@@ -240,12 +251,23 @@ static __inline__ int atomic_dec_if_positive(atomic_t *v)
 
 #ifdef __powerpc64__
 
-typedef struct { volatile long counter; } atomic64_t;
+typedef struct { long counter; } atomic64_t;
 
 #define ATOMIC64_INIT(i)	{ (i) }
 
-#define atomic64_read(v)	((v)->counter)
-#define atomic64_set(v,i)	(((v)->counter) = (i))
+static __inline__ long atomic64_read(const atomic64_t *v)
+{
+	long t;
+
+	__asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));
+
+	return t;
+}
+
+static __inline__ void atomic64_set(atomic64_t *v, long i)
+{
+	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
+}
 
 static __inline__ void atomic64_add(long a, atomic64_t *v)
 {
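
For completeness, a hypothetical caller (not part of this commit) showing that the change is transparent to users of the API; the new inline functions are invoked exactly as the old macros were:

#include <asm/atomic.h>

static atomic_t pending = ATOMIC_INIT(0);

static void example(void)	/* hypothetical caller, for illustration */
{
	atomic_set(&pending, 5);

	if (atomic_read(&pending) > 0)
		atomic_dec(&pending);	/* other atomic ops are unchanged by this patch */
}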