Diffstat (limited to 'arch/arc/include/asm/bitops.h')
-rw-r--r--	arch/arc/include/asm/bitops.h	19
1 files changed, 19 insertions, 0 deletions
diff --git a/arch/arc/include/asm/bitops.h b/arch/arc/include/asm/bitops.h
index 4051e9525939..624a9d048ca9 100644
--- a/arch/arc/include/asm/bitops.h
+++ b/arch/arc/include/asm/bitops.h
@@ -117,6 +117,12 @@ static inline int test_and_set_bit(unsigned long nr, volatile unsigned long *m)
 	if (__builtin_constant_p(nr))
 		nr &= 0x1f;
 
+	/*
+	 * Explicit full memory barrier needed before/after as
+	 * LLOCK/SCOND themselves don't provide any such semantics
+	 */
+	smp_mb();
+
 	__asm__ __volatile__(
 	"1:	llock   %0, [%2]	\n"
 	"	bset    %1, %0, %3	\n"
@@ -126,6 +132,8 @@ static inline int test_and_set_bit(unsigned long nr, volatile unsigned long *m)
 	: "r"(m), "ir"(nr)
 	: "cc");
 
+	smp_mb();
+
 	return (old & (1 << nr)) != 0;
 }
 
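For context, this is roughly how the LLSC test_and_set_bit() reads once the barriers are in place. The scond/bnz lines, the variable declarations, and the output constraints are not visible in the hunks above, so they are assumed here from the surrounding code; treat this as a sketch rather than the verbatim file contents.

static inline int test_and_set_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned long old, temp;		/* assumed declarations */

	if (__builtin_constant_p(nr))
		nr &= 0x1f;

	/*
	 * Explicit full memory barrier needed before/after as
	 * LLOCK/SCOND themselves don't provide any such semantics
	 */
	smp_mb();

	__asm__ __volatile__(
	"1:	llock   %0, [%2]	\n"	/* load-locked old value */
	"	bset    %1, %0, %3	\n"	/* set bit 'nr' in a temp */
	"	scond   %1, [%2]	\n"	/* assumed: store-conditional */
	"	bnz     1b		\n"	/* assumed: retry if it failed */
	: "=&r"(old), "=&r"(temp)		/* assumed constraints */
	: "r"(m), "ir"(nr)
	: "cc");

	smp_mb();

	return (old & (1 << nr)) != 0;
}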
@@ -139,6 +147,8 @@ test_and_clear_bit(unsigned long nr, volatile unsigned long *m)
 	if (__builtin_constant_p(nr))
 		nr &= 0x1f;
 
+	smp_mb();
+
 	__asm__ __volatile__(
 	"1:	llock   %0, [%2]	\n"
 	"	bclr    %1, %0, %3	\n"
@@ -148,6 +158,8 @@ test_and_clear_bit(unsigned long nr, volatile unsigned long *m)
 	: "r"(m), "ir"(nr)
 	: "cc");
 
+	smp_mb();
+
 	return (old & (1 << nr)) != 0;
 }
 
@@ -161,6 +173,8 @@ test_and_change_bit(unsigned long nr, volatile unsigned long *m)
 	if (__builtin_constant_p(nr))
 		nr &= 0x1f;
 
+	smp_mb();
+
 	__asm__ __volatile__(
 	"1:	llock   %0, [%2]	\n"
 	"	bxor    %1, %0, %3	\n"
@@ -170,6 +184,8 @@ test_and_change_bit(unsigned long nr, volatile unsigned long *m)
 	: "r"(m), "ir"(nr)
 	: "cc");
 
+	smp_mb();
+
 	return (old & (1 << nr)) != 0;
 }
 
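test_and_clear_bit() and test_and_change_bit() get the same treatment, differing only in the bclr/bxor instruction. As a hedged illustration of why the trailing smp_mb() matters (not part of the patch; the names below are hypothetical): when a bit is used as a trylock, that barrier keeps the critical-section accesses from being reordered before the bit is observed set.

#define MY_LOCK_BIT	0	/* hypothetical bit number */

static int my_trylock(unsigned long *word)
{
	/*
	 * Old bit was 0 => we set it => lock taken.  The smp_mb()
	 * after LLOCK/SCOND is what stops later loads/stores from
	 * leaking above this point.
	 */
	return !test_and_set_bit(MY_LOCK_BIT, word);
}

static void my_unlock(unsigned long *word)
{
	/* clear_bit_unlock() provides the matching release ordering */
	clear_bit_unlock(MY_LOCK_BIT, word);
}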
@@ -249,6 +265,9 @@ static inline int test_and_set_bit(unsigned long nr, volatile unsigned long *m)
 	if (__builtin_constant_p(nr))
 		nr &= 0x1f;
 
+	/*
+	 * spin lock/unlock provide the needed smp_mb() before/after
+	 */
 	bitops_lock(flags);
 
 	old = *m;
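In the non-LLSC (lock-backed) variant only the comment is added, since the lock/unlock pair already implies the barriers. A sketch of how that function continues, assuming the remainder matches the unmodified code (the store, bitops_unlock(), and the declarations are not visible in the hunk above):

static inline int test_and_set_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned long old, flags;		/* assumed declarations */

	if (__builtin_constant_p(nr))
		nr &= 0x1f;

	/*
	 * spin lock/unlock provide the needed smp_mb() before/after
	 */
	bitops_lock(flags);

	old = *m;
	*m = old | (1 << nr);			/* assumed: not shown in the hunk */

	bitops_unlock(flags);			/* assumed: not shown in the hunk */

	return (old & (1 << nr)) != 0;
}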