author    Linus Torvalds <torvalds@macmini.osdl.org>  2006-07-08 18:24:18 -0400
committer Linus Torvalds <torvalds@macmini.osdl.org>  2006-07-08 18:24:18 -0400
commit    b862f3b099f3ea672c7438c0b282ce8201d39dfc
tree      62f8cc2dc2b1c9abb6364b16f3b218a04d121f3e  /include/asm-i386/spinlock.h
parent    e2a3d40258fe20d205f8ed592e1e2c0d5529c2e1
i386: improve and correct inline asm memory constraints
Use "+m" rather than a combination of "=m" and "m" for improved clarity
and consistency.
This also fixes some inlines that incorrectly didn't tell the compiler
that they read the old value at all, potentially causing the compiler to
generate bogus code. It appears that all of those potential bugs were
hidden by the use of extra "volatile" specifiers on the data structures
in question, though.
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
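
As an illustration (not part of the commit), the two constraint styles
can be contrasted on a toy increment. This is a minimal sketch assuming
GNU C on x86; the function names are invented for the example:

/* Toy contrast of the two inline-asm constraint styles (GNU C, x86). */
#include <stdio.h>

/* Old idiom: a write-only "=m" output plus a separate "m" input naming
 * the same location, which is how the asm tells the compiler that the
 * old value is read as well as written. */
static inline void inc_old_style(int *v)
{
	asm volatile("incl %0" : "=m" (*v) : "m" (*v));
}

/* New idiom from the patch: a single "+m" operand declares the
 * location as read-write in one constraint. */
static inline void inc_new_style(int *v)
{
	asm volatile("incl %0" : "+m" (*v));
}

int main(void)
{
	int a = 41, b = 41;
	inc_old_style(&a);
	inc_new_style(&b);
	printf("%d %d\n", a, b);	/* prints: 42 42 */
	return 0;
}

Dropping the "m" input from the old form would be exactly the bug the
commit message describes: the compiler would be free to treat the
location as write-only and discard the previously stored value.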
Diffstat (limited to 'include/asm-i386/spinlock.h')
 include/asm-i386/spinlock.h | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)
diff --git a/include/asm-i386/spinlock.h b/include/asm-i386/spinlock.h
index 87c40f830653..d816c62a7a1d 100644
--- a/include/asm-i386/spinlock.h
+++ b/include/asm-i386/spinlock.h
@@ -65,7 +65,7 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)
 	alternative_smp(
 		__raw_spin_lock_string,
 		__raw_spin_lock_string_up,
-		"=m" (lock->slock) : : "memory");
+		"+m" (lock->slock) : : "memory");
 }
 
 /*
@@ -79,7 +79,7 @@ static inline void __raw_spin_lock_flags(raw_spinlock_t *lock, unsigned long fla
 	alternative_smp(
 		__raw_spin_lock_string_flags,
 		__raw_spin_lock_string_up,
-		"=m" (lock->slock) : "r" (flags) : "memory");
+		"+m" (lock->slock) : "r" (flags) : "memory");
 }
 #endif
 
@@ -88,7 +88,7 @@ static inline int __raw_spin_trylock(raw_spinlock_t *lock)
 	char oldval;
 	__asm__ __volatile__(
 		"xchgb %b0,%1"
-		:"=q" (oldval), "=m" (lock->slock)
+		:"=q" (oldval), "+m" (lock->slock)
 		:"0" (0) : "memory");
 	return oldval > 0;
 }
@@ -104,7 +104,7 @@ static inline int __raw_spin_trylock(raw_spinlock_t *lock)
 
 #define __raw_spin_unlock_string \
 	"movb $1,%0" \
-		:"=m" (lock->slock) : : "memory"
+		:"+m" (lock->slock) : : "memory"
 
 
 static inline void __raw_spin_unlock(raw_spinlock_t *lock)
@@ -118,7 +118,7 @@ static inline void __raw_spin_unlock(raw_spinlock_t *lock)
 
 #define __raw_spin_unlock_string \
 	"xchgb %b0, %1" \
-		:"=q" (oldval), "=m" (lock->slock) \
+		:"=q" (oldval), "+m" (lock->slock) \
 		:"0" (oldval) : "memory"
 
 static inline void __raw_spin_unlock(raw_spinlock_t *lock)
@@ -199,13 +199,13 @@ static inline int __raw_write_trylock(raw_rwlock_t *lock)
 
 static inline void __raw_read_unlock(raw_rwlock_t *rw)
 {
-	asm volatile(LOCK_PREFIX "incl %0" :"=m" (rw->lock) : : "memory");
+	asm volatile(LOCK_PREFIX "incl %0" :"+m" (rw->lock) : : "memory");
 }
 
 static inline void __raw_write_unlock(raw_rwlock_t *rw)
 {
 	asm volatile(LOCK_PREFIX "addl $" RW_LOCK_BIAS_STR ", %0"
-		: "=m" (rw->lock) : : "memory");
+		: "+m" (rw->lock) : : "memory");
 }
 
 #endif /* __ASM_SPINLOCK_H */
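
For experimentation outside the kernel, the following is a speculative,
self-contained reduction of the patched trylock/unlock pair. The toy_*
names and the toy_spinlock_t type are invented for this sketch, which
assumes GNU C on i386 or x86-64; it is not the kernel's code:

#include <stdio.h>

/* Invented stand-in for raw_spinlock_t; 1 = unlocked, 0 = locked,
 * mirroring the i386 convention. */
typedef struct { volatile unsigned int slock; } toy_spinlock_t;
#define TOY_SPINLOCK_UNLOCKED { 1 }

static inline int toy_spin_trylock(toy_spinlock_t *lock)
{
	char oldval;
	/* Atomically swap 0 into the low byte of slock.  "+m" tells the
	 * compiler the location is read (the old value comes back in
	 * %b0) as well as written. */
	__asm__ __volatile__(
		"xchgb %b0,%1"
		: "=q" (oldval), "+m" (lock->slock)
		: "0" (0) : "memory");
	return oldval > 0;	/* nonzero old value: lock was free */
}

static inline void toy_spin_unlock(toy_spinlock_t *lock)
{
	/* A plain byte store releases the lock.  "+m" rather than "=m"
	 * because the asm writes only the low byte, so the compiler
	 * must not assume the whole word is overwritten. */
	__asm__ __volatile__(
		"movb $1,%0"
		: "+m" (lock->slock) : : "memory");
}

int main(void)
{
	toy_spinlock_t lock = TOY_SPINLOCK_UNLOCKED;
	printf("%d\n", toy_spin_trylock(&lock));	/* 1: acquired */
	printf("%d\n", toy_spin_trylock(&lock));	/* 0: already held */
	toy_spin_unlock(&lock);
	printf("%d\n", toy_spin_trylock(&lock));	/* 1: acquired again */
	return 0;
}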