author:    Paul Mackerras <paulus@samba.org>    2008-11-05 13:39:27 -0500
committer: Paul Mackerras <paulus@samba.org>    2008-11-19 00:04:28 -0500
commit:    efc3624c9419cad3cca93dfabb7b12664773d2b1
tree:      706b7ec9f2b240c23869887200db8c7088a76842 /arch/powerpc/include/asm/atomic.h
parent:    d5e54913433fff89609adfc4b96fefcf807a9030
powerpc: Tell gcc when we clobber the carry in inline asm
We have several instances of inline assembly code that use the addic
or addic. instructions, but don't include XER in the list of clobbers.
The addic and addic. instructions affect the carry bit, which is in
the XER register.
This adds "xer" to the list of clobbers for those inline asm
statements that use addic or addic. and didn't already have it.
Signed-off-by: Paul Mackerras <paulus@samba.org>
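
As a minimal standalone sketch of the pattern being fixed (not the kernel's
exact source: the helper name and the plain int counter are illustrative),
an atomic increment built on addic must name "xer" in its clobber list,
because addic writes the carry (CA) field of XER:

static inline void atomic_inc_sketch(int *counter)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2\n"	/* load word and reserve */
"	addic	%0,%0,1\n"	/* increment; writes the CA bit in XER */
"	stwcx.	%0,0,%2\n"	/* store conditionally */
"	bne-	1b"		/* reservation lost: retry */
	: "=&r" (t), "+m" (*counter)
	: "r" (counter)
	: "cc", "xer");		/* the "xer" clobber is what this patch adds */
}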
Diffstat (limited to 'arch/powerpc/include/asm/atomic.h')
 arch/powerpc/include/asm/atomic.h | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)
diff --git a/arch/powerpc/include/asm/atomic.h b/arch/powerpc/include/asm/atomic.h
index f3fc733758f5..499be5bdd6fa 100644
--- a/arch/powerpc/include/asm/atomic.h
+++ b/arch/powerpc/include/asm/atomic.h
@@ -111,7 +111,7 @@ static __inline__ void atomic_inc(atomic_t *v)
 	bne-	1b"
 	: "=&r" (t), "+m" (v->counter)
 	: "r" (&v->counter)
-	: "cc");
+	: "cc", "xer");
 }
 
 static __inline__ int atomic_inc_return(atomic_t *v)
@@ -128,7 +128,7 @@ static __inline__ int atomic_inc_return(atomic_t *v)
 	ISYNC_ON_SMP
 	: "=&r" (t)
 	: "r" (&v->counter)
-	: "cc", "memory");
+	: "cc", "xer", "memory");
 
 	return t;
 }
@@ -155,7 +155,7 @@ static __inline__ void atomic_dec(atomic_t *v)
 	bne-	1b"
 	: "=&r" (t), "+m" (v->counter)
 	: "r" (&v->counter)
-	: "cc");
+	: "cc", "xer");
 }
 
 static __inline__ int atomic_dec_return(atomic_t *v)
@@ -172,7 +172,7 @@ static __inline__ int atomic_dec_return(atomic_t *v)
 	ISYNC_ON_SMP
 	: "=&r" (t)
 	: "r" (&v->counter)
-	: "cc", "memory");
+	: "cc", "xer", "memory");
 
 	return t;
 }
@@ -346,7 +346,7 @@ static __inline__ void atomic64_inc(atomic64_t *v)
 	bne-	1b"
 	: "=&r" (t), "+m" (v->counter)
 	: "r" (&v->counter)
-	: "cc");
+	: "cc", "xer");
 }
 
 static __inline__ long atomic64_inc_return(atomic64_t *v)
@@ -362,7 +362,7 @@ static __inline__ long atomic64_inc_return(atomic64_t *v)
 	ISYNC_ON_SMP
 	: "=&r" (t)
 	: "r" (&v->counter)
-	: "cc", "memory");
+	: "cc", "xer", "memory");
 
 	return t;
 }
@@ -388,7 +388,7 @@ static __inline__ void atomic64_dec(atomic64_t *v)
 	bne-	1b"
 	: "=&r" (t), "+m" (v->counter)
 	: "r" (&v->counter)
-	: "cc");
+	: "cc", "xer");
 }
 
 static __inline__ long atomic64_dec_return(atomic64_t *v)
@@ -404,7 +404,7 @@ static __inline__ long atomic64_dec_return(atomic64_t *v)
 	ISYNC_ON_SMP
 	: "=&r" (t)
 	: "r" (&v->counter)
-	: "cc", "memory");
+	: "cc", "xer", "memory");
 
 	return t;
 }
@@ -431,7 +431,7 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
 	"\n\
 2:"	: "=&r" (t)
 	: "r" (&v->counter)
-	: "cc", "memory");
+	: "cc", "xer", "memory");
 
 	return t;
 }
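
Why the missing clobber matters (a hypothetical illustration, not part of
the patch): on 32-bit powerpc, gcc itself uses the carry bit for multi-word
arithmetic, compiling a 64-bit add into an addc/adde pair that communicates
through XER[CA]. If an asm statement that executes addic sits nearby without
"xer" in its clobbers, gcc may assume CA is preserved across it and compute
a corrupted upper word:

/* Hypothetical example (function and variable names are illustrative).
 * With -O2 on 32-bit powerpc, a + b compiles to addc (sets CA) followed
 * by adde (reads CA).  The asm below clobbers CA via addic; declaring
 * "xer" keeps gcc from keeping the carry live across the asm. */
unsigned long long add64_with_asm(unsigned long long a, unsigned long long b)
{
	unsigned long long sum = a + b;		/* addc lo + adde hi */
	int t = 0;

	__asm__ __volatile__("addic %0,%0,1" : "+r" (t) : : "xer");
	return sum + t;
}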