path: root/include/asm-sh/bitops.h
author	Stuart Menefy <stuart.menefy@st.com>	2007-11-30 02:12:36 -0500
committer	Paul Mundt <lethal@linux-sh.org>	2008-01-27 23:18:58 -0500
commit	1efe4ce3ca126da77e450d5a83f7201949d76f62 (patch)
tree	fbae9902aa4103a9e86d06f841d580f24682e7b3 /include/asm-sh/bitops.h
parent	53ff09422e5e7a6d6198b767c8f494e43ec8e3ae (diff)
sh: GUSA atomic rollback support.
This implements kernel-level atomic rollback built on top of gUSA, as an
alternative non-IRQ based atomicity method. This is generally a faster method
for platforms that are lacking the LL/SC pairs that SH-4A and later use, and
is only supportable on legacy cores.

Signed-off-by: Stuart Menefy <stuart.menefy@st.com>
Signed-off-by: Paul Mundt <lethal@linux-sh.org>
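For context, gUSA marks a short instruction sequence as a critical region
through a register convention: r0 holds the address of the end of the
sequence and r15 (the stack pointer) is loaded with minus the size of the
region, so an exception taken while r15 is negative lets the kernel restart
the sequence from the beginning instead of resuming in the middle. The new
asm/bitops-grb.h builds the atomic bitops on that rollback scheme, while the
#else branch keeps an IRQ-masking variant along the lines of the code removed
from this header in the diff below. The listing here is a minimal
illustrative sketch of such a rollback-based set_bit, assuming the usual gUSA
register convention; it is not the verbatim contents of asm/bitops-grb.h.

/*
 * Illustrative sketch only -- the general shape of a gUSA rollback
 * critical section, not the verbatim kernel code.  An exception taken
 * while r15 is negative causes the kernel to rewind to the start of
 * the region, so the load/modify/store below executes atomically with
 * respect to interrupts on a UP core.
 */
static inline void set_bit(int nr, volatile void *addr)
{
	volatile unsigned int *a = (volatile unsigned int *)addr;
	int mask;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"   .align 2           \n\t"
		"   mova   1f,  r0     \n\t"	/* r0 = end of the sequence */
		"   mov    r15, r1     \n\t"	/* save the real stack pointer */
		"   mov    #-6, r15    \n\t"	/* enter: r15 = -(region size in bytes) */
		"   mov.l  @%1, %0     \n\t"	/* load the word */
		"   or     %2,  %0     \n\t"	/* set the requested bit */
		"   mov.l  %0,  @%1    \n\t"	/* store it back */
		"1: mov    r1,  r15    \n\t"	/* leave: restore stack pointer */
		: "=&r" (tmp), "+r" (a)
		: "r" (mask)
		: "memory", "r0", "r1");
}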
Diffstat (limited to 'include/asm-sh/bitops.h')
-rw-r--r--	include/asm-sh/bitops.h	91
1 files changed, 6 insertions, 85 deletions
diff --git a/include/asm-sh/bitops.h b/include/asm-sh/bitops.h
index a7bd81a7f064..b6ba5a60dec2 100644
--- a/include/asm-sh/bitops.h
+++ b/include/asm-sh/bitops.h
@@ -11,97 +11,18 @@
 /* For __swab32 */
 #include <asm/byteorder.h>
 
-static inline void set_bit(int nr, volatile void * addr)
-{
-	int mask;
-	volatile unsigned int *a = addr;
-	unsigned long flags;
-
-	a += nr >> 5;
-	mask = 1 << (nr & 0x1f);
-	local_irq_save(flags);
-	*a |= mask;
-	local_irq_restore(flags);
-}
+#ifdef CONFIG_GUSA_RB
+#include <asm/bitops-grb.h>
+#else
+#include <asm/bitops-irq.h>
+#endif
+
 
 /*
  * clear_bit() doesn't provide any barrier for the compiler.
  */
 #define smp_mb__before_clear_bit()	barrier()
 #define smp_mb__after_clear_bit()	barrier()
-static inline void clear_bit(int nr, volatile void * addr)
-{
-	int mask;
-	volatile unsigned int *a = addr;
-	unsigned long flags;
-
-	a += nr >> 5;
-	mask = 1 << (nr & 0x1f);
-	local_irq_save(flags);
-	*a &= ~mask;
-	local_irq_restore(flags);
-}
-
-static inline void change_bit(int nr, volatile void * addr)
-{
-	int mask;
-	volatile unsigned int *a = addr;
-	unsigned long flags;
-
-	a += nr >> 5;
-	mask = 1 << (nr & 0x1f);
-	local_irq_save(flags);
-	*a ^= mask;
-	local_irq_restore(flags);
-}
-
-static inline int test_and_set_bit(int nr, volatile void * addr)
-{
-	int mask, retval;
-	volatile unsigned int *a = addr;
-	unsigned long flags;
-
-	a += nr >> 5;
-	mask = 1 << (nr & 0x1f);
-	local_irq_save(flags);
-	retval = (mask & *a) != 0;
-	*a |= mask;
-	local_irq_restore(flags);
-
-	return retval;
-}
-
-static inline int test_and_clear_bit(int nr, volatile void * addr)
-{
-	int mask, retval;
-	volatile unsigned int *a = addr;
-	unsigned long flags;
-
-	a += nr >> 5;
-	mask = 1 << (nr & 0x1f);
-	local_irq_save(flags);
-	retval = (mask & *a) != 0;
-	*a &= ~mask;
-	local_irq_restore(flags);
-
-	return retval;
-}
-
-static inline int test_and_change_bit(int nr, volatile void * addr)
-{
-	int mask, retval;
-	volatile unsigned int *a = addr;
-	unsigned long flags;
-
-	a += nr >> 5;
-	mask = 1 << (nr & 0x1f);
-	local_irq_save(flags);
-	retval = (mask & *a) != 0;
-	*a ^= mask;
-	local_irq_restore(flags);
-
-	return retval;
-}
 
 #include <asm-generic/bitops/non-atomic.h>
 
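Callers are unaffected by the switch: the atomic bitops keep the same
signatures and are still reached through <linux/bitops.h>; only the
implementation selected by CONFIG_GUSA_RB differs. A small hypothetical
usage sketch (the flag word and function names below are made up for
illustration):

#include <linux/bitops.h>

static unsigned long example_flags;		/* hypothetical flag word */

static void example_user(void)
{
	set_bit(0, &example_flags);		/* atomically set bit 0 */

	if (test_and_clear_bit(0, &example_flags))	/* atomically test and clear bit 0 */
		change_bit(1, &example_flags);	/* atomically toggle bit 1 */
}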