author		Paul Mackerras <paulus@samba.org>	2008-11-05 13:39:27 -0500
committer	Paul Mackerras <paulus@samba.org>	2008-11-19 00:04:28 -0500
commit		efc3624c9419cad3cca93dfabb7b12664773d2b1
tree		706b7ec9f2b240c23869887200db8c7088a76842 /arch/powerpc/include
parent		d5e54913433fff89609adfc4b96fefcf807a9030
powerpc: Tell gcc when we clobber the carry in inline asm
We have several instances of inline assembly code that use the addic
or addic. instructions, but don't include XER in the list of clobbers.
The addic and addic. instructions affect the carry bit, which is in
the XER register.

This adds "xer" to the list of clobbers for those inline asm statements
that use addic or addic. and didn't already have it.

Signed-off-by: Paul Mackerras <paulus@samba.org>
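To illustrate the pattern this patch fixes, below is a minimal sketch of a
lwarx/stwcx. increment loop in the style of the atomic_inc() touched here
(the function name and counter parameter are hypothetical, for illustration
only). Because addic writes the carry (CA) bit, which lives in XER, the asm
must list "xer" as clobbered; otherwise gcc is free to assume a value it
keeps in the carry bit survives across the asm statement.

static inline void example_inc(int *counter)	/* hypothetical helper */
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2\n"	/* load word and set reservation */
"	addic	%0,%0,1\n"	/* add 1; writes the CA bit in XER */
"	stwcx.	%0,0,%2\n"	/* store conditional; sets cr0 */
"	bne-	1b"		/* reservation lost: retry */
	: "=&r" (t), "+m" (*counter)
	: "r" (counter)
	: "cc", "xer");		/* "xer" tells gcc the carry is clobbered */
}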
Diffstat (limited to 'arch/powerpc/include')
-rw-r--r--	arch/powerpc/include/asm/atomic.h	18
-rw-r--r--	arch/powerpc/include/asm/local.h	4
-rw-r--r--	arch/powerpc/include/asm/spinlock.h	2
3 files changed, 12 insertions(+), 12 deletions(-)
diff --git a/arch/powerpc/include/asm/atomic.h b/arch/powerpc/include/asm/atomic.h
index f3fc733758f5..499be5bdd6fa 100644
--- a/arch/powerpc/include/asm/atomic.h
+++ b/arch/powerpc/include/asm/atomic.h
@@ -111,7 +111,7 @@ static __inline__ void atomic_inc(atomic_t *v)
 	bne-	1b"
 	: "=&r" (t), "+m" (v->counter)
 	: "r" (&v->counter)
-	: "cc");
+	: "cc", "xer");
 }
 
 static __inline__ int atomic_inc_return(atomic_t *v)
@@ -128,7 +128,7 @@ static __inline__ int atomic_inc_return(atomic_t *v)
 	ISYNC_ON_SMP
 	: "=&r" (t)
 	: "r" (&v->counter)
-	: "cc", "memory");
+	: "cc", "xer", "memory");
 
 	return t;
 }
@@ -155,7 +155,7 @@ static __inline__ void atomic_dec(atomic_t *v)
 	bne-	1b"
 	: "=&r" (t), "+m" (v->counter)
 	: "r" (&v->counter)
-	: "cc");
+	: "cc", "xer");
 }
 
 static __inline__ int atomic_dec_return(atomic_t *v)
@@ -172,7 +172,7 @@ static __inline__ int atomic_dec_return(atomic_t *v)
 	ISYNC_ON_SMP
 	: "=&r" (t)
 	: "r" (&v->counter)
-	: "cc", "memory");
+	: "cc", "xer", "memory");
 
 	return t;
 }
@@ -346,7 +346,7 @@ static __inline__ void atomic64_inc(atomic64_t *v)
 	bne-	1b"
 	: "=&r" (t), "+m" (v->counter)
 	: "r" (&v->counter)
-	: "cc");
+	: "cc", "xer");
 }
 
 static __inline__ long atomic64_inc_return(atomic64_t *v)
@@ -362,7 +362,7 @@ static __inline__ long atomic64_inc_return(atomic64_t *v)
 	ISYNC_ON_SMP
 	: "=&r" (t)
 	: "r" (&v->counter)
-	: "cc", "memory");
+	: "cc", "xer", "memory");
 
 	return t;
 }
@@ -388,7 +388,7 @@ static __inline__ void atomic64_dec(atomic64_t *v)
 	bne-	1b"
 	: "=&r" (t), "+m" (v->counter)
 	: "r" (&v->counter)
-	: "cc");
+	: "cc", "xer");
 }
 
 static __inline__ long atomic64_dec_return(atomic64_t *v)
@@ -404,7 +404,7 @@ static __inline__ long atomic64_dec_return(atomic64_t *v)
 	ISYNC_ON_SMP
 	: "=&r" (t)
 	: "r" (&v->counter)
-	: "cc", "memory");
+	: "cc", "xer", "memory");
 
 	return t;
 }
@@ -431,7 +431,7 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
431 "\n\ 431 "\n\
4322:" : "=&r" (t) 4322:" : "=&r" (t)
433 : "r" (&v->counter) 433 : "r" (&v->counter)
434 : "cc", "memory"); 434 : "cc", "xer", "memory");
435 435
436 return t; 436 return t;
437} 437}
diff --git a/arch/powerpc/include/asm/local.h b/arch/powerpc/include/asm/local.h
index 612d83276653..84b457a3c1bc 100644
--- a/arch/powerpc/include/asm/local.h
+++ b/arch/powerpc/include/asm/local.h
@@ -67,7 +67,7 @@ static __inline__ long local_inc_return(local_t *l)
 	bne-	1b"
 	: "=&r" (t)
 	: "r" (&(l->a.counter))
-	: "cc", "memory");
+	: "cc", "xer", "memory");
 
 	return t;
 }
@@ -94,7 +94,7 @@ static __inline__ long local_dec_return(local_t *l)
 	bne-	1b"
 	: "=&r" (t)
 	: "r" (&(l->a.counter))
-	: "cc", "memory");
+	: "cc", "xer", "memory");
 
 	return t;
 }
diff --git a/arch/powerpc/include/asm/spinlock.h b/arch/powerpc/include/asm/spinlock.h
index f56a843f4705..36864364e601 100644
--- a/arch/powerpc/include/asm/spinlock.h
+++ b/arch/powerpc/include/asm/spinlock.h
@@ -277,7 +277,7 @@ static inline void __raw_read_unlock(raw_rwlock_t *rw)
 	bne-	1b"
 	: "=&r"(tmp)
 	: "r"(&rw->lock)
-	: "cr0", "memory");
+	: "cr0", "xer", "memory");
 }
 
 static inline void __raw_write_unlock(raw_rwlock_t *rw)