Diffstat (limited to 'include/asm-i386/spinlock.h')
 include/asm-i386/spinlock.h | 151 ++++++++++++++++++++++++-----------------
 1 file changed, 80 insertions(+), 71 deletions(-)
diff --git a/include/asm-i386/spinlock.h b/include/asm-i386/spinlock.h
index 04ba30234c48..b0b3043f05e1 100644
--- a/include/asm-i386/spinlock.h
+++ b/include/asm-i386/spinlock.h
@@ -4,8 +4,12 @@
 #include <asm/atomic.h>
 #include <asm/rwlock.h>
 #include <asm/page.h>
+#include <asm/processor.h>
 #include <linux/compiler.h>
 
+#define CLI_STRING	"cli"
+#define STI_STRING	"sti"
+
 /*
  * Your basic SMP spinlocks, allowing only a single CPU anywhere
  *
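The two string macros above lean on C's adjacent-string-literal concatenation: CLI_STRING and STI_STRING paste straight into the asm templates in the next hunk. A minimal sketch of the idiom (PAUSE_STRING is a hypothetical name, not from this patch):

	#define PAUSE_STRING "rep;nop"

	static inline void sketch_relax(void)
	{
		/* PAUSE_STRING "\n\t" concatenates into one template string */
		asm volatile(PAUSE_STRING "\n\t" ::: "memory");
	}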
@@ -17,66 +21,66 @@
  * (the type definitions are in asm/spinlock_types.h)
  */
 
-#define __raw_spin_is_locked(x) \
-		(*(volatile signed char *)(&(x)->slock) <= 0)
-
-#define __raw_spin_lock_string \
-	"\n1:\t" \
-	"lock ; decb %0\n\t" \
-	"jns 3f\n" \
-	"2:\t" \
-	"rep;nop\n\t" \
-	"cmpb $0,%0\n\t" \
-	"jle 2b\n\t" \
-	"jmp 1b\n" \
-	"3:\n\t"
-
-#define __raw_spin_lock_string_flags \
-	"\n1:\t" \
-	"lock ; decb %0\n\t" \
-	"jns 5f\n" \
-	"2:\t" \
-	"testl $0x200, %1\n\t" \
-	"jz 4f\n\t" \
-	"sti\n" \
-	"3:\t" \
-	"rep;nop\n\t" \
-	"cmpb $0, %0\n\t" \
-	"jle 3b\n\t" \
-	"cli\n\t" \
-	"jmp 1b\n" \
-	"4:\t" \
-	"rep;nop\n\t" \
-	"cmpb $0, %0\n\t" \
-	"jg 1b\n\t" \
-	"jmp 4b\n" \
-	"5:\n\t"
-
-#define __raw_spin_lock_string_up \
-	"\n\tdecb %0"
+static inline int __raw_spin_is_locked(raw_spinlock_t *x)
+{
+	return *(volatile signed char *)(&(x)->slock) <= 0;
+}
 
 static inline void __raw_spin_lock(raw_spinlock_t *lock)
 {
-	alternative_smp(
-		__raw_spin_lock_string,
-		__raw_spin_lock_string_up,
-		"=m" (lock->slock) : : "memory");
+	asm volatile("\n1:\t"
+		     LOCK_PREFIX " ; decb %0\n\t"
+		     "jns 3f\n"
+		     "2:\t"
+		     "rep;nop\n\t"
+		     "cmpb $0,%0\n\t"
+		     "jle 2b\n\t"
+		     "jmp 1b\n"
+		     "3:\n\t"
+		     : "+m" (lock->slock) : : "memory");
 }
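The open-coded template is the classic byte-lock protocol: slock is 1 when free, `lock ; decb` drops it to 0 on a successful acquire (sign flag clear, so `jns 3f` exits), and below 0 under contention, after which the CPU spins read-only with `rep;nop` until the byte turns positive and the decrement is retried. A C-level sketch of the same logic using GCC builtins (the sketch_* names are illustrative, not kernel code):

	typedef struct { volatile signed char slock; } sketch_spinlock_t; /* 1 = free */

	static inline void sketch_spin_lock(sketch_spinlock_t *s)
	{
		/* old value > 0 means we took the lock (the "jns 3f" case) */
		while (__sync_fetch_and_sub(&s->slock, 1) <= 0)
			while (s->slock <= 0)	/* the "rep;nop" wait loop */
				__builtin_ia32_pause();
	}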
 
+/*
+ * It is easier for the lock validator if interrupts are not re-enabled
+ * in the middle of a lock-acquire. This is a performance feature anyway
+ * so we turn it off:
+ *
+ * NOTE: there's an irqs-on section here, which normally would have to be
+ * irq-traced, but on CONFIG_TRACE_IRQFLAGS we never use this variant.
+ */
+#ifndef CONFIG_PROVE_LOCKING
 static inline void __raw_spin_lock_flags(raw_spinlock_t *lock, unsigned long flags)
 {
-	alternative_smp(
-		__raw_spin_lock_string_flags,
-		__raw_spin_lock_string_up,
-		"=m" (lock->slock) : "r" (flags) : "memory");
+	asm volatile(
+		"\n1:\t"
+		LOCK_PREFIX " ; decb %0\n\t"
+		"jns 5f\n"
+		"2:\t"
+		"testl $0x200, %1\n\t"
+		"jz 4f\n\t"
+		STI_STRING "\n"
+		"3:\t"
+		"rep;nop\n\t"
+		"cmpb $0, %0\n\t"
+		"jle 3b\n\t"
+		CLI_STRING "\n\t"
+		"jmp 1b\n"
+		"4:\t"
+		"rep;nop\n\t"
+		"cmpb $0, %0\n\t"
+		"jg 1b\n\t"
+		"jmp 4b\n"
+		"5:\n\t"
+		: "+m" (lock->slock) : "r" (flags) : "memory");
 }
+#endif
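`testl $0x200, %1` checks bit 9 of the caller's saved EFLAGS, the IF interrupt-enable flag: interrupts are re-enabled via STI_STRING only while waiting, and only for callers that had them on, with CLI_STRING restoring the disabled state before each retry. Roughly, continuing the sketch above (the sti/cli wrappers are illustrative):

	static inline void sketch_sti(void) { asm volatile("sti" ::: "memory"); }
	static inline void sketch_cli(void) { asm volatile("cli" ::: "memory"); }

	static inline void sketch_spin_lock_flags(sketch_spinlock_t *s,
						  unsigned long flags)
	{
		while (__sync_fetch_and_sub(&s->slock, 1) <= 0) {
			if (flags & 0x200)	/* EFLAGS.IF set on entry */
				sketch_sti();
			while (s->slock <= 0)
				__builtin_ia32_pause();
			if (flags & 0x200)
				sketch_cli();
		}
	}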
 
 static inline int __raw_spin_trylock(raw_spinlock_t *lock)
 {
 	char oldval;
-	__asm__ __volatile__(
+	asm volatile(
 		"xchgb %b0,%1"
-		:"=q" (oldval), "=m" (lock->slock)
+		:"=q" (oldval), "+m" (lock->slock)
 		:"0" (0) : "memory");
 	return oldval > 0;
 }
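The trylock swaps a 0 ("locked") byte into slock with a single atomic `xchgb`; the old contents say whether the lock was free. `__sync_lock_test_and_set`, which GCC compiles to `xchg` on x86, models it in the same sketch:

	static inline int sketch_spin_trylock(sketch_spinlock_t *s)
	{
		/* atomically store 0, fetch the old byte; > 0 means it was free */
		return __sync_lock_test_and_set(&s->slock, 0) > 0;
	}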
@@ -90,38 +94,29 @@ static inline int __raw_spin_trylock(raw_spinlock_t *lock)
 
 #if !defined(CONFIG_X86_OOSTORE) && !defined(CONFIG_X86_PPRO_FENCE)
 
-#define __raw_spin_unlock_string \
-	"movb $1,%0" \
-		:"=m" (lock->slock) : : "memory"
-
-
 static inline void __raw_spin_unlock(raw_spinlock_t *lock)
 {
-	__asm__ __volatile__(
-		__raw_spin_unlock_string
-	);
+	asm volatile("movb $1,%0" : "+m" (lock->slock) :: "memory");
 }
 
 #else
 
-#define __raw_spin_unlock_string \
-	"xchgb %b0, %1" \
-		:"=q" (oldval), "=m" (lock->slock) \
-		:"0" (oldval) : "memory"
-
 static inline void __raw_spin_unlock(raw_spinlock_t *lock)
 {
 	char oldval = 1;
 
-	__asm__ __volatile__(
-		__raw_spin_unlock_string
-	);
+	asm volatile("xchgb %b0, %1"
+		     : "=q" (oldval), "+m" (lock->slock)
+		     : "0" (oldval) : "memory");
 }
 
 #endif
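On ordinary x86 a store is not reordered ahead of earlier loads and stores, so releasing the lock is a plain `movb $1`, with the "memory" clobber stopping the compiler from sinking critical-section accesses past it; only the CONFIG_X86_OOSTORE / PPRO_FENCE oddballs need the serializing `xchgb`. With the modern GCC atomics API the same release store would look like this (a sketch, not the kernel's code):

	static inline void sketch_spin_unlock(sketch_spinlock_t *s)
	{
		/* release store: earlier accesses may not move past it */
		__atomic_store_n(&s->slock, 1, __ATOMIC_RELEASE);
	}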
 
-#define __raw_spin_unlock_wait(lock) \
-	do { while (__raw_spin_is_locked(lock)) cpu_relax(); } while (0)
+static inline void __raw_spin_unlock_wait(raw_spinlock_t *lock)
+{
+	while (__raw_spin_is_locked(lock))
+		cpu_relax();
+}
 
 /*
  * Read-write spinlocks, allowing multiple readers
@@ -148,22 +143,36 @@ static inline void __raw_spin_unlock(raw_spinlock_t *lock)
  * read_can_lock - would read_trylock() succeed?
  * @lock: the rwlock in question.
  */
-#define __raw_read_can_lock(x)		((int)(x)->lock > 0)
+static inline int __raw_read_can_lock(raw_rwlock_t *x)
+{
+	return (int)(x)->lock > 0;
+}
 
 /**
  * write_can_lock - would write_trylock() succeed?
  * @lock: the rwlock in question.
  */
-#define __raw_write_can_lock(x)		((x)->lock == RW_LOCK_BIAS)
+static inline int __raw_write_can_lock(raw_rwlock_t *x)
+{
+	return (x)->lock == RW_LOCK_BIAS;
+}
 
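Both predicates read off the biased-counter scheme from asm/rwlock.h: lock starts at RW_LOCK_BIAS (0x01000000 on i386), each reader subtracts 1 and a writer subtracts the whole bias, so any positive value means another reader still fits and the full bias means the lock is completely idle. The accounting in miniature (constant copied from asm-i386/rwlock.h):

	#define SKETCH_RW_LOCK_BIAS 0x01000000	/* mirrors RW_LOCK_BIAS */

	/* n readers, no writer:  counter == BIAS - n (still positive) */
	/* writer holds the lock: counter == 0                         */
	static inline int sketch_read_can_lock(int c)  { return c > 0; }
	static inline int sketch_write_can_lock(int c) { return c == SKETCH_RW_LOCK_BIAS; }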
 static inline void __raw_read_lock(raw_rwlock_t *rw)
 {
-	__build_read_lock(rw, "__read_lock_failed");
+	asm volatile(LOCK_PREFIX " subl $1,(%0)\n\t"
+		     "jns 1f\n"
+		     "call __read_lock_failed\n\t"
+		     "1:\n"
+		     ::"a" (rw) : "memory");
 }
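The reader fast path is one locked decrement; a negative result means a writer is involved, and the out-of-line __read_lock_failed helper (which takes the lock pointer in %eax, hence the `"a" (rw)` constraint) backs the decrement out and waits. Approximately, in the same toy model:

	typedef struct { volatile int lock; } sketch_rwlock_t;	/* starts at BIAS */

	static inline void sketch_read_lock(sketch_rwlock_t *rw)
	{
		while (__sync_sub_and_fetch(&rw->lock, 1) < 0) {
			/* slow path, as in __read_lock_failed: undo, then wait */
			__sync_add_and_fetch(&rw->lock, 1);
			while (rw->lock <= 0)
				__builtin_ia32_pause();
		}
	}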
 
 static inline void __raw_write_lock(raw_rwlock_t *rw)
 {
-	__build_write_lock(rw, "__write_lock_failed");
+	asm volatile(LOCK_PREFIX " subl $" RW_LOCK_BIAS_STR ",(%0)\n\t"
+		     "jz 1f\n"
+		     "call __write_lock_failed\n\t"
+		     "1:\n"
+		     ::"a" (rw) : "memory");
 }
 
 static inline int __raw_read_trylock(raw_rwlock_t *lock)
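A writer subtracts the whole bias, which lands exactly on zero (`jz 1f`) only when no reader or writer was present; otherwise __write_lock_failed restores the bias and spins until the counter is back at RW_LOCK_BIAS. In the same sketch:

	static inline void sketch_write_lock(sketch_rwlock_t *rw)
	{
		while (__sync_sub_and_fetch(&rw->lock, SKETCH_RW_LOCK_BIAS) != 0) {
			/* slow path, as in __write_lock_failed: restore, then wait */
			__sync_add_and_fetch(&rw->lock, SKETCH_RW_LOCK_BIAS);
			while (rw->lock != SKETCH_RW_LOCK_BIAS)
				__builtin_ia32_pause();
		}
	}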
@@ -187,13 +196,13 @@ static inline int __raw_write_trylock(raw_rwlock_t *lock)
 
 static inline void __raw_read_unlock(raw_rwlock_t *rw)
 {
-	asm volatile(LOCK_PREFIX "incl %0" :"=m" (rw->lock) : : "memory");
+	asm volatile(LOCK_PREFIX "incl %0" :"+m" (rw->lock) : : "memory");
 }
 
 static inline void __raw_write_unlock(raw_rwlock_t *rw)
 {
 	asm volatile(LOCK_PREFIX "addl $" RW_LOCK_BIAS_STR ", %0"
-				 : "=m" (rw->lock) : : "memory");
+				 : "+m" (rw->lock) : : "memory");
 }
 
 #endif /* __ASM_SPINLOCK_H */
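The recurring `"=m"` → `"+m"` constraint change is the substantive fix in this patch: these statements read and modify their operand, and `"=m"` tells GCC the previous contents are dead, which can license it to drop an earlier store to the lock word as dead code. The hazard in miniature (illustrative, not kernel code):

	static inline void bad_inc(int *p)
	{
		/* WRONG for read-modify-write: "=m" claims *p is write-only,
		 * so GCC may delete a preceding "*p = 0;" as a dead store */
		asm volatile("lock; incl %0" : "=m" (*p));
	}

	static inline void good_inc(int *p)
	{
		/* "+m" declares *p read-write; prior stores to it stay put */
		asm volatile("lock; incl %0" : "+m" (*p));
	}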