author    Nick Piggin <npiggin@suse.de>  2007-10-18 06:06:50 -0400
committer Linus Torvalds <torvalds@woody.linux-foundation.org>  2007-10-18 17:37:29 -0400
commit    7c29ca5b8d13287ed67d2863f4c5f7bfc1a15279
tree      e6745adc937d6f0afe56c394ce5830128ad3c88c
parent    26333576fd0d0b52f6e4025c5aded97e188bdd44
alpha: fix bitops
Documentation/atomic_ops.txt defines that these primitives must contain a
memory barrier both before and after their memory operation. This is
consistent with the atomic ops implementation on alpha.

Signed-off-by: Nick Piggin <npiggin@suse.de>
Cc: Richard Henderson <rth@twiddle.net>
Cc: Ivan Kokshaysky <ink@jurassic.park.msu.ru>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
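As a minimal sketch of what these semantics buy a caller, consider using
test_and_set_bit() as a tiny lock. The function and the lock_word /
shared_count variables below are hypothetical, made up for illustration;
only test_and_set_bit(), clear_bit(), and the barrier rule from
atomic_ops.txt come from the source.

#include <linux/bitops.h>	/* test_and_set_bit(), clear_bit() */
#include <asm/processor.h>	/* cpu_relax() */

static unsigned long lock_word;		/* hypothetical lock bit */
static unsigned long shared_count;	/* hypothetical shared state */

static void example_critical_section(void)
{
	/*
	 * Per atomic_ops.txt: memory operations before the bit op must be
	 * globally visible before it, and the bit op must be visible before
	 * anything after it. Without the mb added before ldl_l by this
	 * patch (and the pre-existing one after), the increment below could
	 * be reordered across the "lock" on alpha.
	 */
	while (test_and_set_bit(0, &lock_word))	/* nonzero = already held */
		cpu_relax();

	shared_count++;		/* critical section */

	smp_mb__before_clear_bit();	/* clear_bit() itself is unordered */
	clear_bit(0, &lock_word);	/* release */
}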
Diffstat (limited to 'include/asm-alpha/bitops.h')
 include/asm-alpha/bitops.h | 9 +++++++++
 1 file changed, 9 insertions(+), 0 deletions(-)
diff --git a/include/asm-alpha/bitops.h b/include/asm-alpha/bitops.h
index ca667d121898..f1bbe6cf0e84 100644
--- a/include/asm-alpha/bitops.h
+++ b/include/asm-alpha/bitops.h
@@ -117,6 +117,9 @@ test_and_set_bit(unsigned long nr, volatile void *addr)
 	int *m = ((int *) addr) + (nr >> 5);
 
 	__asm__ __volatile__(
+#ifdef CONFIG_SMP
+	"	mb\n"
+#endif
 	"1:	ldl_l %0,%4\n"
 	"	and %0,%3,%2\n"
 	"	bne %2,2f\n"
@@ -158,6 +161,9 @@ test_and_clear_bit(unsigned long nr, volatile void * addr)
 	int *m = ((int *) addr) + (nr >> 5);
 
 	__asm__ __volatile__(
+#ifdef CONFIG_SMP
+	"	mb\n"
+#endif
 	"1:	ldl_l %0,%4\n"
 	"	and %0,%3,%2\n"
 	"	beq %2,2f\n"
@@ -199,6 +205,9 @@ test_and_change_bit(unsigned long nr, volatile void * addr)
 	int *m = ((int *) addr) + (nr >> 5);
 
 	__asm__ __volatile__(
+#ifdef CONFIG_SMP
+	"	mb\n"
+#endif
 	"1:	ldl_l %0,%4\n"
 	"	and %0,%3,%2\n"
 	"	xor %0,%3,%0\n"