Diffstat (limited to 'include/asm-arm/spinlock.h')
-rw-r--r--  include/asm-arm/spinlock.h | 26 ++++++++++++++++++++++++--
1 file changed, 24 insertions(+), 2 deletions(-)
diff --git a/include/asm-arm/spinlock.h b/include/asm-arm/spinlock.h
index 6ed4f916b166..43ad4e55878c 100644
--- a/include/asm-arm/spinlock.h
+++ b/include/asm-arm/spinlock.h
@@ -30,6 +30,9 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)
 	__asm__ __volatile__(
 "1:	ldrex	%0, [%1]\n"
 "	teq	%0, #0\n"
+#ifdef CONFIG_CPU_32v6K
+"	wfene\n"
+#endif
 "	strexeq	%0, %2, [%1]\n"
 "	teqeq	%0, #0\n"
 "	bne	1b"
@@ -65,7 +68,11 @@ static inline void __raw_spin_unlock(raw_spinlock_t *lock)
 	smp_mb();
 
 	__asm__ __volatile__(
-"	str	%1, [%0]"
+"	str	%1, [%0]\n"
+#ifdef CONFIG_CPU_32v6K
+"	mcr	p15, 0, %1, c7, c10, 4\n" /* DSB */
+"	sev"
+#endif
 	:
 	: "r" (&lock->lock), "r" (0)
 	: "cc");
@@ -87,6 +94,9 @@ static inline void __raw_write_lock(raw_rwlock_t *rw)
 	__asm__ __volatile__(
 "1:	ldrex	%0, [%1]\n"
 "	teq	%0, #0\n"
+#ifdef CONFIG_CPU_32v6K
+"	wfene\n"
+#endif
 "	strexeq	%0, %2, [%1]\n"
 "	teq	%0, #0\n"
 "	bne	1b"
@@ -122,7 +132,11 @@ static inline void __raw_write_unlock(raw_rwlock_t *rw)
 	smp_mb();
 
 	__asm__ __volatile__(
-	"str	%1, [%0]"
+	"str	%1, [%0]\n"
+#ifdef CONFIG_CPU_32v6K
+"	mcr	p15, 0, %1, c7, c10, 4\n" /* DSB */
+"	sev\n"
+#endif
 	:
 	: "r" (&rw->lock), "r" (0)
 	: "cc");
@@ -148,6 +162,9 @@ static inline void __raw_read_lock(raw_rwlock_t *rw)
 "1:	ldrex	%0, [%2]\n"
 "	adds	%0, %0, #1\n"
 "	strexpl	%1, %0, [%2]\n"
+#ifdef CONFIG_CPU_32v6K
+"	wfemi\n"
+#endif
 "	rsbpls	%0, %1, #0\n"
 "	bmi	1b"
 	: "=&r" (tmp), "=&r" (tmp2)
@@ -169,6 +186,11 @@ static inline void __raw_read_unlock(raw_rwlock_t *rw)
 "	strex	%1, %0, [%2]\n"
 "	teq	%1, #0\n"
 "	bne	1b"
+#ifdef CONFIG_CPU_32v6K
+"\n	cmp	%0, #0\n"
+"	mcreq	p15, 0, %0, c7, c10, 4\n"
+"	seveq"
+#endif
 	: "=&r" (tmp), "=&r" (tmp2)
 	: "r" (&rw->lock)
 	: "cc");
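
For context, a minimal sketch of the spinlock fast path as it reads with this patch applied. The asm bodies come from the hunks above; the operand lists on the lock side are not shown in the diff context, so they are reconstructed from the surrounding file and should be treated as illustrative rather than authoritative. On CONFIG_CPU_32v6K, a contended CPU parks in WFE (wait for event) instead of spinning hot, and the unlock path drains the write buffer with a DSB before issuing SEV so that woken waiters observe the freed lock. The read/write lock hunks above follow the same pattern.

/* Sketch only: the post-patch __raw_spin_lock/__raw_spin_unlock pair,
 * assuming raw_spinlock_t and smp_mb() from the surrounding header. */
static inline void __raw_spin_lock(raw_spinlock_t *lock)
{
	unsigned long tmp;

	__asm__ __volatile__(
"1:	ldrex	%0, [%1]\n"		/* load-exclusive the lock word */
"	teq	%0, #0\n"		/* zero means unlocked */
#ifdef CONFIG_CPU_32v6K
"	wfene\n"			/* still held: sleep until someone signals SEV */
#endif
"	strexeq	%0, %2, [%1]\n"		/* free: try to claim it exclusively */
"	teqeq	%0, #0\n"		/* strex writes 0 on success */
"	bne	1b"			/* contended or lost exclusivity: retry */
	: "=&r" (tmp)
	: "r" (&lock->lock), "r" (1)
	: "cc");

	smp_mb();
}

static inline void __raw_spin_unlock(raw_spinlock_t *lock)
{
	smp_mb();

	__asm__ __volatile__(
"	str	%1, [%0]\n"		/* release: store 0 to the lock word */
#ifdef CONFIG_CPU_32v6K
"	mcr	p15, 0, %1, c7, c10, 4\n"	/* DSB: publish the store first */
"	sev"				/* wake CPUs parked in WFE */
#endif
	:
	: "r" (&lock->lock), "r" (0)
	: "cc");
}

The DSB before SEV is the important ordering detail: the event must not race ahead of the store that releases the lock, otherwise a woken CPU could re-read a stale held value and go straight back to sleep.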