aboutsummaryrefslogtreecommitdiffstats
path: root/include/asm-x86/spinlock.h
diff options
context:
space:
mode:
authorIngo Molnar <mingo@elte.hu>2008-02-29 04:29:38 -0500
committerIngo Molnar <mingo@elte.hu>2008-04-17 11:41:29 -0400
commit7fda20f146d5d217684ffbc37c6b6c5f82c2dffd (patch)
treeec72cbdbdc05c95b7be5922694fa069af12ff22b /include/asm-x86/spinlock.h
parentd93c870bad38e8daaaf9f7e900a13431f24becbb (diff)
x86: spinlock ops are always-inlined
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Diffstat (limited to 'include/asm-x86/spinlock.h')
-rw-r--r--	include/asm-x86/spinlock.h | 12
1 file changed, 6 insertions, 6 deletions
diff --git a/include/asm-x86/spinlock.h b/include/asm-x86/spinlock.h
index 47dfe2607bb1..bc6376f1bc5a 100644
--- a/include/asm-x86/spinlock.h
+++ b/include/asm-x86/spinlock.h
@@ -78,7 +78,7 @@ static inline int __raw_spin_is_contended(raw_spinlock_t *lock)
78 return (((tmp >> 8) & 0xff) - (tmp & 0xff)) > 1; 78 return (((tmp >> 8) & 0xff) - (tmp & 0xff)) > 1;
79} 79}
80 80
81static inline void __raw_spin_lock(raw_spinlock_t *lock) 81static __always_inline void __raw_spin_lock(raw_spinlock_t *lock)
82{ 82{
83 short inc = 0x0100; 83 short inc = 0x0100;
84 84
@@ -99,7 +99,7 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)
99 99
100#define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock) 100#define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)
101 101
102static inline int __raw_spin_trylock(raw_spinlock_t *lock) 102static __always_inline int __raw_spin_trylock(raw_spinlock_t *lock)
103{ 103{
104 int tmp; 104 int tmp;
105 short new; 105 short new;
@@ -120,7 +120,7 @@ static inline int __raw_spin_trylock(raw_spinlock_t *lock)
120 return tmp; 120 return tmp;
121} 121}
122 122
123static inline void __raw_spin_unlock(raw_spinlock_t *lock) 123static __always_inline void __raw_spin_unlock(raw_spinlock_t *lock)
124{ 124{
125 asm volatile(UNLOCK_LOCK_PREFIX "incb %0" 125 asm volatile(UNLOCK_LOCK_PREFIX "incb %0"
126 : "+m" (lock->slock) 126 : "+m" (lock->slock)
@@ -142,7 +142,7 @@ static inline int __raw_spin_is_contended(raw_spinlock_t *lock)
142 return (((tmp >> 16) & 0xffff) - (tmp & 0xffff)) > 1; 142 return (((tmp >> 16) & 0xffff) - (tmp & 0xffff)) > 1;
143} 143}
144 144
145static inline void __raw_spin_lock(raw_spinlock_t *lock) 145static __always_inline void __raw_spin_lock(raw_spinlock_t *lock)
146{ 146{
147 int inc = 0x00010000; 147 int inc = 0x00010000;
148 int tmp; 148 int tmp;
@@ -165,7 +165,7 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)
165 165
166#define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock) 166#define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)
167 167
168static inline int __raw_spin_trylock(raw_spinlock_t *lock) 168static __always_inline int __raw_spin_trylock(raw_spinlock_t *lock)
169{ 169{
170 int tmp; 170 int tmp;
171 int new; 171 int new;
@@ -187,7 +187,7 @@ static inline int __raw_spin_trylock(raw_spinlock_t *lock)
187 return tmp; 187 return tmp;
188} 188}
189 189
190static inline void __raw_spin_unlock(raw_spinlock_t *lock) 190static __always_inline void __raw_spin_unlock(raw_spinlock_t *lock)
191{ 191{
192 asm volatile(UNLOCK_LOCK_PREFIX "incw %0" 192 asm volatile(UNLOCK_LOCK_PREFIX "incw %0"
193 : "+m" (lock->slock) 193 : "+m" (lock->slock)