Diffstat (limited to 'arch/powerpc/include/asm/bitops.h')
 arch/powerpc/include/asm/bitops.h | 196 ++++++++++----------------------
 1 file changed, 62 insertions(+), 134 deletions(-)
diff --git a/arch/powerpc/include/asm/bitops.h b/arch/powerpc/include/asm/bitops.h
index 897eade3afb..56f2f2ea563 100644
--- a/arch/powerpc/include/asm/bitops.h
+++ b/arch/powerpc/include/asm/bitops.h
@@ -56,174 +56,102 @@
 #define BITOP_WORD(nr)          ((nr) / BITS_PER_LONG)
 #define BITOP_LE_SWIZZLE        ((BITS_PER_LONG-1) & ~0x7)
 
+/* Macro for generating the ***_bits() functions */
+#define DEFINE_BITOP(fn, op, prefix, postfix)   \
+static __inline__ void fn(unsigned long mask,   \
+                volatile unsigned long *_p)     \
+{                                               \
+        unsigned long old;                      \
+        unsigned long *p = (unsigned long *)_p; \
+        __asm__ __volatile__ (                  \
+        prefix                                  \
+"1:"    PPC_LLARX "%0,0,%3\n"                   \
+        stringify_in_c(op) "%0,%0,%2\n"         \
+        PPC405_ERR77(0,%3)                      \
+        PPC_STLCX "%0,0,%3\n"                   \
+        "bne- 1b\n"                             \
+        postfix                                 \
+        : "=&r" (old), "+m" (*p)                \
+        : "r" (mask), "r" (p)                   \
+        : "cc", "memory");                      \
+}
+
+DEFINE_BITOP(set_bits, or, "", "")
+DEFINE_BITOP(clear_bits, andc, "", "")
+DEFINE_BITOP(clear_bits_unlock, andc, LWSYNC_ON_SMP, "")
+DEFINE_BITOP(change_bits, xor, "", "")
+
 static __inline__ void set_bit(int nr, volatile unsigned long *addr)
 {
-        unsigned long old;
-        unsigned long mask = BITOP_MASK(nr);
-        unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
-
-        __asm__ __volatile__(
-"1:"    PPC_LLARX "%0,0,%3      # set_bit\n"
-        "or %0,%0,%2\n"
-        PPC405_ERR77(0,%3)
-        PPC_STLCX "%0,0,%3\n"
-        "bne- 1b"
-        : "=&r" (old), "+m" (*p)
-        : "r" (mask), "r" (p)
-        : "cc" );
+        set_bits(BITOP_MASK(nr), addr + BITOP_WORD(nr));
 }
 
 static __inline__ void clear_bit(int nr, volatile unsigned long *addr)
 {
-        unsigned long old;
-        unsigned long mask = BITOP_MASK(nr);
-        unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
-
-        __asm__ __volatile__(
-"1:"    PPC_LLARX "%0,0,%3      # clear_bit\n"
-        "andc %0,%0,%2\n"
-        PPC405_ERR77(0,%3)
-        PPC_STLCX "%0,0,%3\n"
-        "bne- 1b"
-        : "=&r" (old), "+m" (*p)
-        : "r" (mask), "r" (p)
-        : "cc" );
+        clear_bits(BITOP_MASK(nr), addr + BITOP_WORD(nr));
 }
 
 static __inline__ void clear_bit_unlock(int nr, volatile unsigned long *addr)
 {
-        unsigned long old;
-        unsigned long mask = BITOP_MASK(nr);
-        unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
-
-        __asm__ __volatile__(
-        LWSYNC_ON_SMP
-"1:"    PPC_LLARX "%0,0,%3      # clear_bit_unlock\n"
-        "andc %0,%0,%2\n"
-        PPC405_ERR77(0,%3)
-        PPC_STLCX "%0,0,%3\n"
-        "bne- 1b"
-        : "=&r" (old), "+m" (*p)
-        : "r" (mask), "r" (p)
-        : "cc", "memory");
+        clear_bits_unlock(BITOP_MASK(nr), addr + BITOP_WORD(nr));
 }
 
 static __inline__ void change_bit(int nr, volatile unsigned long *addr)
 {
-        unsigned long old;
-        unsigned long mask = BITOP_MASK(nr);
-        unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
-
-        __asm__ __volatile__(
-"1:"    PPC_LLARX "%0,0,%3      # change_bit\n"
-        "xor %0,%0,%2\n"
-        PPC405_ERR77(0,%3)
-        PPC_STLCX "%0,0,%3\n"
-        "bne- 1b"
-        : "=&r" (old), "+m" (*p)
-        : "r" (mask), "r" (p)
-        : "cc" );
+        change_bits(BITOP_MASK(nr), addr + BITOP_WORD(nr));
+}
+
+/* Like DEFINE_BITOP(), with changes to the arguments to 'op' and the output
+ * operands. */
+#define DEFINE_TESTOP(fn, op, prefix, postfix)  \
+static __inline__ unsigned long fn(             \
+                unsigned long mask,             \
+                volatile unsigned long *_p)     \
+{                                               \
+        unsigned long old, t;                   \
+        unsigned long *p = (unsigned long *)_p; \
+        __asm__ __volatile__ (                  \
+        prefix                                  \
+"1:"    PPC_LLARX "%0,0,%3\n"                   \
+        stringify_in_c(op) "%1,%0,%2\n"         \
+        PPC405_ERR77(0,%3)                      \
+        PPC_STLCX "%1,0,%3\n"                   \
+        "bne- 1b\n"                             \
+        postfix                                 \
+        : "=&r" (old), "=&r" (t)                \
+        : "r" (mask), "r" (p)                   \
+        : "cc", "memory");                      \
+        return (old & mask);                    \
 }
 
+DEFINE_TESTOP(test_and_set_bits, or, LWSYNC_ON_SMP, ISYNC_ON_SMP)
+DEFINE_TESTOP(test_and_set_bits_lock, or, "", ISYNC_ON_SMP)
+DEFINE_TESTOP(test_and_clear_bits, andc, LWSYNC_ON_SMP, ISYNC_ON_SMP)
+DEFINE_TESTOP(test_and_change_bits, xor, LWSYNC_ON_SMP, ISYNC_ON_SMP)
+
 static __inline__ int test_and_set_bit(unsigned long nr,
                                        volatile unsigned long *addr)
 {
-        unsigned long old, t;
-        unsigned long mask = BITOP_MASK(nr);
-        unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
-
-        __asm__ __volatile__(
-        LWSYNC_ON_SMP
-"1:"    PPC_LLARX "%0,0,%3              # test_and_set_bit\n"
-        "or %1,%0,%2 \n"
-        PPC405_ERR77(0,%3)
-        PPC_STLCX "%1,0,%3 \n"
-        "bne- 1b"
-        ISYNC_ON_SMP
-        : "=&r" (old), "=&r" (t)
-        : "r" (mask), "r" (p)
-        : "cc", "memory");
-
-        return (old & mask) != 0;
+        return test_and_set_bits(BITOP_MASK(nr), addr + BITOP_WORD(nr)) != 0;
 }
 
 static __inline__ int test_and_set_bit_lock(unsigned long nr,
                                        volatile unsigned long *addr)
 {
-        unsigned long old, t;
-        unsigned long mask = BITOP_MASK(nr);
-        unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
-
-        __asm__ __volatile__(
-"1:"    PPC_LLARX "%0,0,%3              # test_and_set_bit_lock\n"
-        "or %1,%0,%2 \n"
-        PPC405_ERR77(0,%3)
-        PPC_STLCX "%1,0,%3 \n"
-        "bne- 1b"
-        ISYNC_ON_SMP
-        : "=&r" (old), "=&r" (t)
-        : "r" (mask), "r" (p)
-        : "cc", "memory");
-
-        return (old & mask) != 0;
+        return test_and_set_bits_lock(BITOP_MASK(nr),
+                                addr + BITOP_WORD(nr)) != 0;
 }
 
 static __inline__ int test_and_clear_bit(unsigned long nr,
                                          volatile unsigned long *addr)
 {
-        unsigned long old, t;
-        unsigned long mask = BITOP_MASK(nr);
-        unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
-
-        __asm__ __volatile__(
-        LWSYNC_ON_SMP
-"1:"    PPC_LLARX "%0,0,%3              # test_and_clear_bit\n"
-        "andc %1,%0,%2 \n"
-        PPC405_ERR77(0,%3)
-        PPC_STLCX "%1,0,%3 \n"
-        "bne- 1b"
-        ISYNC_ON_SMP
-        : "=&r" (old), "=&r" (t)
-        : "r" (mask), "r" (p)
-        : "cc", "memory");
-
-        return (old & mask) != 0;
+        return test_and_clear_bits(BITOP_MASK(nr), addr + BITOP_WORD(nr)) != 0;
 }
 
 static __inline__ int test_and_change_bit(unsigned long nr,
                                           volatile unsigned long *addr)
 {
-        unsigned long old, t;
-        unsigned long mask = BITOP_MASK(nr);
-        unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
-
-        __asm__ __volatile__(
-        LWSYNC_ON_SMP
-"1:"    PPC_LLARX "%0,0,%3              # test_and_change_bit\n"
-        "xor %1,%0,%2 \n"
-        PPC405_ERR77(0,%3)
-        PPC_STLCX "%1,0,%3 \n"
-        "bne- 1b"
-        ISYNC_ON_SMP
-        : "=&r" (old), "=&r" (t)
-        : "r" (mask), "r" (p)
-        : "cc", "memory");
-
-        return (old & mask) != 0;
-}
-
-static __inline__ void set_bits(unsigned long mask, unsigned long *addr)
-{
-        unsigned long old;
-
-        __asm__ __volatile__(
-"1:"    PPC_LLARX "%0,0,%3      # set_bits\n"
-        "or %0,%0,%2\n"
-        PPC_STLCX "%0,0,%3\n"
-        "bne- 1b"
-        : "=&r" (old), "+m" (*addr)
-        : "r" (mask), "r" (addr)
-        : "cc");
+        return test_and_change_bits(BITOP_MASK(nr), addr + BITOP_WORD(nr)) != 0;
 }
 
 #include <asm-generic/bitops/non-atomic.h>
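
Note (not part of the patch): as a rough sketch of what the new macro generates,
DEFINE_BITOP(set_bits, or, "", "") should expand to approximately the following
on a 64-bit build, assuming PPC_LLARX/PPC_STLCX resolve to ldarx/stdcx. and the
PPC405_ERR77 erratum workaround is compiled out (so it expands to nothing):

static __inline__ void set_bits(unsigned long mask, volatile unsigned long *_p)
{
        unsigned long old;
        unsigned long *p = (unsigned long *)_p;

        __asm__ __volatile__ (
"1:     ldarx   %0,0,%3\n"      /* load the word and take a reservation */
        "or     %0,%0,%2\n"     /* the 'op' argument: set the masked bits */
        "stdcx. %0,0,%3\n"      /* store back iff the reservation still holds */
        "bne-   1b\n"           /* reservation lost: retry */
        : "=&r" (old), "+m" (*p)
        : "r" (mask), "r" (p)
        : "cc", "memory");
}

The empty "prefix"/"postfix" arguments mean no barriers are emitted here; the
unlock and test_and_* variants pass LWSYNC_ON_SMP/ISYNC_ON_SMP in those slots
instead of open-coding them as before.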
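
Similarly, DEFINE_TESTOP(test_and_set_bits, or, LWSYNC_ON_SMP, ISYNC_ON_SMP)
should come out roughly as below on an SMP 64-bit build (LWSYNC_ON_SMP ->
lwsync, ISYNC_ON_SMP -> isync); the difference from DEFINE_BITOP is that the
result of 'op' goes into a scratch register t so the pre-update value in old
can be returned:

static __inline__ unsigned long test_and_set_bits(unsigned long mask,
                                                  volatile unsigned long *_p)
{
        unsigned long old, t;
        unsigned long *p = (unsigned long *)_p;

        __asm__ __volatile__ (
        "lwsync\n"              /* 'prefix': barrier before the update */
"1:     ldarx   %0,0,%3\n"      /* pre-update value into %0 (old) */
        "or     %1,%0,%2\n"     /* updated value into %1 (t) */
        "stdcx. %1,0,%3\n"
        "bne-   1b\n"
        "isync\n"               /* 'postfix': barrier once the store succeeds */
        : "=&r" (old), "=&r" (t)
        : "r" (mask), "r" (p)
        : "cc", "memory");

        return (old & mask);    /* non-zero iff a masked bit was already set */
}

test_and_set_bit() then reduces to
test_and_set_bits(BITOP_MASK(nr), addr + BITOP_WORD(nr)) != 0, exactly as in
the hunk above.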