Diffstat (limited to 'include/asm-arm/spinlock.h'):

 include/asm-arm/spinlock.h | 32 +++++++++++++++++++++++++++-----
 1 file changed, 27 insertions(+), 5 deletions(-)
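This patch makes the ARM spinlock and rwlock fast paths use the ARMv6K WFE (wait for event) and SEV (send event) instructions when CONFIG_CPU_32v6K is set: a contended locker sleeps in a low-power wait instead of busy-spinning on the lock word, and the unlock paths issue a data synchronization barrier followed by SEV to wake any sleepers. It also fixes several rwlock prototypes that still took rwlock_t rather than raw_rwlock_t.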
diff --git a/include/asm-arm/spinlock.h b/include/asm-arm/spinlock.h
index cb4906b45555..43ad4e55878c 100644
--- a/include/asm-arm/spinlock.h
+++ b/include/asm-arm/spinlock.h
@@ -30,6 +30,9 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)
 	__asm__ __volatile__(
 "1:	ldrex	%0, [%1]\n"
 "	teq	%0, #0\n"
+#ifdef CONFIG_CPU_32v6K
+"	wfene\n"
+#endif
 "	strexeq	%0, %2, [%1]\n"
 "	teqeq	%0, #0\n"
 "	bne	1b"
@@ -65,7 +68,11 @@ static inline void __raw_spin_unlock(raw_spinlock_t *lock)
 	smp_mb();
 
 	__asm__ __volatile__(
-"	str	%1, [%0]"
+"	str	%1, [%0]\n"
+#ifdef CONFIG_CPU_32v6K
+"	mcr	p15, 0, %1, c7, c10, 4\n"	/* DSB */
+"	sev"
+#endif
 	:
 	: "r" (&lock->lock), "r" (0)
 	: "cc");
@@ -80,13 +87,16 @@ static inline void __raw_spin_unlock(raw_spinlock_t *lock)
  */
 #define rwlock_is_locked(x)	(*((volatile unsigned int *)(x)) != 0)
 
-static inline void __raw_write_lock(rwlock_t *rw)
+static inline void __raw_write_lock(raw_rwlock_t *rw)
 {
 	unsigned long tmp;
 
 	__asm__ __volatile__(
 "1:	ldrex	%0, [%1]\n"
 "	teq	%0, #0\n"
+#ifdef CONFIG_CPU_32v6K
+"	wfene\n"
+#endif
 "	strexeq	%0, %2, [%1]\n"
 "	teq	%0, #0\n"
 "	bne	1b"
@@ -97,7 +107,7 @@ static inline void __raw_write_lock(rwlock_t *rw)
 	smp_mb();
 }
 
-static inline int __raw_write_trylock(rwlock_t *rw)
+static inline int __raw_write_trylock(raw_rwlock_t *rw)
 {
 	unsigned long tmp;
 
@@ -122,7 +132,11 @@ static inline void __raw_write_unlock(raw_rwlock_t *rw)
 	smp_mb();
 
 	__asm__ __volatile__(
-	"str	%1, [%0]"
+	"str	%1, [%0]\n"
+#ifdef CONFIG_CPU_32v6K
+"	mcr	p15, 0, %1, c7, c10, 4\n"	/* DSB */
+"	sev\n"
+#endif
 	:
 	: "r" (&rw->lock), "r" (0)
 	: "cc");
@@ -148,6 +162,9 @@ static inline void __raw_read_lock(raw_rwlock_t *rw)
148"1: ldrex %0, [%2]\n" 162"1: ldrex %0, [%2]\n"
149" adds %0, %0, #1\n" 163" adds %0, %0, #1\n"
150" strexpl %1, %0, [%2]\n" 164" strexpl %1, %0, [%2]\n"
165#ifdef CONFIG_CPU_32v6K
166" wfemi\n"
167#endif
151" rsbpls %0, %1, #0\n" 168" rsbpls %0, %1, #0\n"
152" bmi 1b" 169" bmi 1b"
153 : "=&r" (tmp), "=&r" (tmp2) 170 : "=&r" (tmp), "=&r" (tmp2)
@@ -157,7 +174,7 @@ static inline void __raw_read_lock(raw_rwlock_t *rw)
 	smp_mb();
 }
 
-static inline void __raw_read_unlock(rwlock_t *rw)
+static inline void __raw_read_unlock(raw_rwlock_t *rw)
 {
 	unsigned long tmp, tmp2;
 
@@ -169,6 +186,11 @@ static inline void __raw_read_unlock(rwlock_t *rw)
169" strex %1, %0, [%2]\n" 186" strex %1, %0, [%2]\n"
170" teq %1, #0\n" 187" teq %1, #0\n"
171" bne 1b" 188" bne 1b"
189#ifdef CONFIG_CPU_32v6K
190"\n cmp %0, #0\n"
191" mcreq p15, 0, %0, c7, c10, 4\n"
192" seveq"
193#endif
172 : "=&r" (tmp), "=&r" (tmp2) 194 : "=&r" (tmp), "=&r" (tmp2)
173 : "r" (&rw->lock) 195 : "r" (&rw->lock)
174 : "cc"); 196 : "cc");