Diffstat (limited to 'include/asm-mips/bitops.h')
-rw-r--r--	include/asm-mips/bitops.h	92
1 file changed, 29 insertions(+), 63 deletions(-)
diff --git a/include/asm-mips/bitops.h b/include/asm-mips/bitops.h
index 1bb89c5a10ee..06445de1324b 100644
--- a/include/asm-mips/bitops.h
+++ b/include/asm-mips/bitops.h
@@ -3,38 +3,34 @@
  * License. See the file "COPYING" in the main directory of this archive
  * for more details.
  *
- * Copyright (c) 1994 - 1997, 1999, 2000 Ralf Baechle (ralf@gnu.org)
+ * Copyright (c) 1994 - 1997, 1999, 2000, 06 Ralf Baechle (ralf@linux-mips.org)
  * Copyright (c) 1999, 2000 Silicon Graphics, Inc.
  */
 #ifndef _ASM_BITOPS_H
 #define _ASM_BITOPS_H
 
 #include <linux/compiler.h>
+#include <linux/irqflags.h>
 #include <linux/types.h>
+#include <asm/barrier.h>
 #include <asm/bug.h>
 #include <asm/byteorder.h>		/* sigh ... */
 #include <asm/cpu-features.h>
+#include <asm/sgidefs.h>
+#include <asm/war.h>
 
 #if (_MIPS_SZLONG == 32)
 #define SZLONG_LOG 5
 #define SZLONG_MASK 31UL
 #define __LL		"ll	"
 #define __SC		"sc	"
-#define cpu_to_lelongp(x) cpu_to_le32p((__u32 *) (x))
 #elif (_MIPS_SZLONG == 64)
 #define SZLONG_LOG 6
 #define SZLONG_MASK 63UL
 #define __LL		"lld	"
 #define __SC		"scd	"
-#define cpu_to_lelongp(x) cpu_to_le64p((__u64 *) (x))
 #endif
 
-#ifdef __KERNEL__
-
-#include <linux/irqflags.h>
-#include <asm/sgidefs.h>
-#include <asm/war.h>
-
 /*
  * clear_bit() doesn't provide any barrier for the compiler.
  */
@@ -42,20 +38,6 @@
 #define smp_mb__after_clear_bit()	smp_mb()
 
 /*
- * Only disable interrupt for kernel mode stuff to keep usermode stuff
- * that dares to use kernel include files alive.
- */
-
-#define __bi_flags			unsigned long flags
-#define __bi_local_irq_save(x)		local_irq_save(x)
-#define __bi_local_irq_restore(x)	local_irq_restore(x)
-#else
-#define __bi_flags
-#define __bi_local_irq_save(x)
-#define __bi_local_irq_restore(x)
-#endif /* __KERNEL__ */
-
-/*
  * set_bit - Atomically set a bit in memory
  * @nr: the bit to set
  * @addr: the address to start counting from
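
With the __bi_* wrappers gone, the hunks below spell the interrupt-disable
fallback out directly. Roughly what the !cpu_has_llsc path of set_bit()
reduces to after this patch -- a userspace-compilable sketch, with
local_irq_save()/local_irq_restore() stubbed out (the kernel gets the real
ones from <linux/irqflags.h>):

/* Sketch only: 32-bit longs assumed; irq helpers stubbed for userspace. */
#define SZLONG_LOG	5
#define SZLONG_MASK	31UL
#define local_irq_save(f)	((f) = 0)	/* stub */
#define local_irq_restore(f)	((void) (f))	/* stub */

static inline void set_bit_fallback(unsigned long nr,
	volatile unsigned long *addr)
{
	volatile unsigned long *a = addr + (nr >> SZLONG_LOG);
	unsigned long mask = 1UL << (nr & SZLONG_MASK);
	unsigned long flags;		/* was: __bi_flags; */

	local_irq_save(flags);		/* was: __bi_local_irq_save(flags); */
	*a |= mask;
	local_irq_restore(flags);	/* was: __bi_local_irq_restore(flags); */
}
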
@@ -93,13 +75,13 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 	} else {
 		volatile unsigned long *a = addr;
 		unsigned long mask;
-		__bi_flags;
+		unsigned long flags;
 
 		a += nr >> SZLONG_LOG;
 		mask = 1UL << (nr & SZLONG_MASK);
-		__bi_local_irq_save(flags);
+		local_irq_save(flags);
 		*a |= mask;
-		__bi_local_irq_restore(flags);
+		local_irq_restore(flags);
 	}
 }
 
@@ -141,13 +123,13 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 	} else {
 		volatile unsigned long *a = addr;
 		unsigned long mask;
-		__bi_flags;
+		unsigned long flags;
 
 		a += nr >> SZLONG_LOG;
 		mask = 1UL << (nr & SZLONG_MASK);
-		__bi_local_irq_save(flags);
+		local_irq_save(flags);
 		*a &= ~mask;
-		__bi_local_irq_restore(flags);
+		local_irq_restore(flags);
 	}
 }
 
@@ -191,13 +173,13 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 	} else {
 		volatile unsigned long *a = addr;
 		unsigned long mask;
-		__bi_flags;
+		unsigned long flags;
 
 		a += nr >> SZLONG_LOG;
 		mask = 1UL << (nr & SZLONG_MASK);
-		__bi_local_irq_save(flags);
+		local_irq_save(flags);
 		*a ^= mask;
-		__bi_local_irq_restore(flags);
+		local_irq_restore(flags);
 	}
 }
 
@@ -223,9 +205,6 @@ static inline int test_and_set_bit(unsigned long nr,
223 " " __SC "%2, %1 \n" 205 " " __SC "%2, %1 \n"
224 " beqzl %2, 1b \n" 206 " beqzl %2, 1b \n"
225 " and %2, %0, %3 \n" 207 " and %2, %0, %3 \n"
226#ifdef CONFIG_SMP
227 " sync \n"
228#endif
229 " .set mips0 \n" 208 " .set mips0 \n"
230 : "=&r" (temp), "=m" (*m), "=&r" (res) 209 : "=&r" (temp), "=m" (*m), "=&r" (res)
231 : "r" (1UL << (nr & SZLONG_MASK)), "m" (*m) 210 : "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
@@ -245,9 +224,6 @@ static inline int test_and_set_bit(unsigned long nr,
245 " " __SC "%2, %1 \n" 224 " " __SC "%2, %1 \n"
246 " beqz %2, 1b \n" 225 " beqz %2, 1b \n"
247 " and %2, %0, %3 \n" 226 " and %2, %0, %3 \n"
248#ifdef CONFIG_SMP
249 " sync \n"
250#endif
251 " .set pop \n" 227 " .set pop \n"
252 : "=&r" (temp), "=m" (*m), "=&r" (res) 228 : "=&r" (temp), "=m" (*m), "=&r" (res)
253 : "r" (1UL << (nr & SZLONG_MASK)), "m" (*m) 229 : "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
@@ -258,17 +234,19 @@ static inline int test_and_set_bit(unsigned long nr,
 		volatile unsigned long *a = addr;
 		unsigned long mask;
 		int retval;
-		__bi_flags;
+		unsigned long flags;
 
 		a += nr >> SZLONG_LOG;
 		mask = 1UL << (nr & SZLONG_MASK);
-		__bi_local_irq_save(flags);
+		local_irq_save(flags);
 		retval = (mask & *a) != 0;
 		*a |= mask;
-		__bi_local_irq_restore(flags);
+		local_irq_restore(flags);
 
 		return retval;
 	}
+
+	smp_mb();
 }
 
 /*
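
This hunk carries the substance of the patch: the conditional "sync"
dropped from the asm above is replaced by one unconditional smp_mb() at the
end of the function, so test_and_set_bit() now implies a full memory
barrier on the LL/SC paths *and* on the interrupt-disable fallback
(smp_mb() expands to a MIPS "sync" on CONFIG_SMP and to a plain compiler
barrier on UP). The same transformation is applied to test_and_clear_bit()
and test_and_change_bit() below. A minimal userspace sketch of the
resulting "update, then fence" structure, using GCC __atomic builtins in
place of the LL/SC assembly:

#define BITS_PER_LONG	(8 * sizeof(unsigned long))

/* Sketch: atomically set the bit, then issue one unconditional fence,
 * mirroring the new placement of smp_mb() after all three code paths. */
static inline int test_and_set_bit_sketch(unsigned long nr,
	volatile unsigned long *addr)
{
	unsigned long mask = 1UL << (nr % BITS_PER_LONG);
	unsigned long old;

	old = __atomic_fetch_or((unsigned long *) (addr + nr / BITS_PER_LONG),
				mask, __ATOMIC_RELAXED);
	__atomic_thread_fence(__ATOMIC_SEQ_CST);	/* the smp_mb() */

	return (old & mask) != 0;
}
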
@@ -294,9 +272,6 @@ static inline int test_and_clear_bit(unsigned long nr,
294 " " __SC "%2, %1 \n" 272 " " __SC "%2, %1 \n"
295 " beqzl %2, 1b \n" 273 " beqzl %2, 1b \n"
296 " and %2, %0, %3 \n" 274 " and %2, %0, %3 \n"
297#ifdef CONFIG_SMP
298 " sync \n"
299#endif
300 " .set mips0 \n" 275 " .set mips0 \n"
301 : "=&r" (temp), "=m" (*m), "=&r" (res) 276 : "=&r" (temp), "=m" (*m), "=&r" (res)
302 : "r" (1UL << (nr & SZLONG_MASK)), "m" (*m) 277 : "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
@@ -317,9 +292,6 @@ static inline int test_and_clear_bit(unsigned long nr,
317 " " __SC "%2, %1 \n" 292 " " __SC "%2, %1 \n"
318 " beqz %2, 1b \n" 293 " beqz %2, 1b \n"
319 " and %2, %0, %3 \n" 294 " and %2, %0, %3 \n"
320#ifdef CONFIG_SMP
321 " sync \n"
322#endif
323 " .set pop \n" 295 " .set pop \n"
324 : "=&r" (temp), "=m" (*m), "=&r" (res) 296 : "=&r" (temp), "=m" (*m), "=&r" (res)
325 : "r" (1UL << (nr & SZLONG_MASK)), "m" (*m) 297 : "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
@@ -330,17 +302,19 @@ static inline int test_and_clear_bit(unsigned long nr,
 		volatile unsigned long *a = addr;
 		unsigned long mask;
 		int retval;
-		__bi_flags;
+		unsigned long flags;
 
 		a += nr >> SZLONG_LOG;
 		mask = 1UL << (nr & SZLONG_MASK);
-		__bi_local_irq_save(flags);
+		local_irq_save(flags);
 		retval = (mask & *a) != 0;
 		*a &= ~mask;
-		__bi_local_irq_restore(flags);
+		local_irq_restore(flags);
 
 		return retval;
 	}
+
+	smp_mb();
 }
 
 /*
@@ -365,9 +339,6 @@ static inline int test_and_change_bit(unsigned long nr,
365 " " __SC "%2, %1 \n" 339 " " __SC "%2, %1 \n"
366 " beqzl %2, 1b \n" 340 " beqzl %2, 1b \n"
367 " and %2, %0, %3 \n" 341 " and %2, %0, %3 \n"
368#ifdef CONFIG_SMP
369 " sync \n"
370#endif
371 " .set mips0 \n" 342 " .set mips0 \n"
372 : "=&r" (temp), "=m" (*m), "=&r" (res) 343 : "=&r" (temp), "=m" (*m), "=&r" (res)
373 : "r" (1UL << (nr & SZLONG_MASK)), "m" (*m) 344 : "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
@@ -387,9 +358,6 @@ static inline int test_and_change_bit(unsigned long nr,
387 " " __SC "\t%2, %1 \n" 358 " " __SC "\t%2, %1 \n"
388 " beqz %2, 1b \n" 359 " beqz %2, 1b \n"
389 " and %2, %0, %3 \n" 360 " and %2, %0, %3 \n"
390#ifdef CONFIG_SMP
391 " sync \n"
392#endif
393 " .set pop \n" 361 " .set pop \n"
394 : "=&r" (temp), "=m" (*m), "=&r" (res) 362 : "=&r" (temp), "=m" (*m), "=&r" (res)
395 : "r" (1UL << (nr & SZLONG_MASK)), "m" (*m) 363 : "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
@@ -399,22 +367,20 @@ static inline int test_and_change_bit(unsigned long nr,
 	} else {
 		volatile unsigned long *a = addr;
 		unsigned long mask, retval;
-		__bi_flags;
+		unsigned long flags;
 
 		a += nr >> SZLONG_LOG;
 		mask = 1UL << (nr & SZLONG_MASK);
-		__bi_local_irq_save(flags);
+		local_irq_save(flags);
 		retval = (mask & *a) != 0;
 		*a ^= mask;
-		__bi_local_irq_restore(flags);
+		local_irq_restore(flags);
 
 		return retval;
 	}
-}
 
-#undef __bi_flags
-#undef __bi_local_irq_save
-#undef __bi_local_irq_restore
+	smp_mb();
+}
 
 #include <asm-generic/bitops/non-atomic.h>
 
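For illustration, the kind of caller this ordering guarantee protects:
test_and_set_bit() used as a crude lock bit, a common kernel idiom. This is
a hypothetical sketch, not code from this file; `state` and the function
names are invented:

/* Hypothetical caller relying on the implied full barrier: bit 0 of
 * `state` serves as a lock bit. */
static unsigned long state;

static void enter_critical(void)
{
	while (test_and_set_bit(0, &state))
		;	/* spin until we observed the bit clear */
	/* the smp_mb() inside test_and_set_bit() keeps the critical
	 * section from being reordered before the acquisition */
}

static void leave_critical(void)
{
	smp_mb__before_clear_bit();	/* order the section before release */
	clear_bit(0, &state);
}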