aboutsummaryrefslogtreecommitdiffstats
path: root/include
diff options
context:
space:
mode:
Diffstat (limited to 'include')
-rw-r--r--include/asm-powerpc/bitops.h46
1 files changed, 45 insertions, 1 deletions
diff --git a/include/asm-powerpc/bitops.h b/include/asm-powerpc/bitops.h
index 1d4c16613d2f..e85c3e078ba2 100644
--- a/include/asm-powerpc/bitops.h
+++ b/include/asm-powerpc/bitops.h
@@ -86,6 +86,24 @@ static __inline__ void clear_bit(int nr, volatile unsigned long *addr)
 	: "cc" );
 }
 
+static __inline__ void clear_bit_unlock(int nr, volatile unsigned long *addr)
+{
+	unsigned long old;
+	unsigned long mask = BITOP_MASK(nr);
+	unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
+
+	__asm__ __volatile__(
+	LWSYNC_ON_SMP
+"1:"	PPC_LLARX "%0,0,%3	# clear_bit_unlock\n"
+	"andc	%0,%0,%2\n"
+	PPC405_ERR77(0,%3)
+	PPC_STLCX "%0,0,%3\n"
+	"bne-	1b"
+	: "=&r" (old), "+m" (*p)
+	: "r" (mask), "r" (p)
+	: "cc", "memory");
+}
+
 static __inline__ void change_bit(int nr, volatile unsigned long *addr)
 {
 	unsigned long old;
@@ -125,6 +143,27 @@ static __inline__ int test_and_set_bit(unsigned long nr,
 	return (old & mask) != 0;
 }
 
+static __inline__ int test_and_set_bit_lock(unsigned long nr,
+				       volatile unsigned long *addr)
+{
+	unsigned long old, t;
+	unsigned long mask = BITOP_MASK(nr);
+	unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
+
+	__asm__ __volatile__(
+"1:"	PPC_LLARX "%0,0,%3		# test_and_set_bit_lock\n"
+	"or	%1,%0,%2 \n"
+	PPC405_ERR77(0,%3)
+	PPC_STLCX "%1,0,%3 \n"
+	"bne-	1b"
+	ISYNC_ON_SMP
+	: "=&r" (old), "=&r" (t)
+	: "r" (mask), "r" (p)
+	: "cc", "memory");
+
+	return (old & mask) != 0;
+}
+
 static __inline__ int test_and_clear_bit(unsigned long nr,
 					 volatile unsigned long *addr)
 {
@@ -185,6 +224,12 @@ static __inline__ void set_bits(unsigned long mask, unsigned long *addr)
 
 #include <asm-generic/bitops/non-atomic.h>
 
+static __inline__ void __clear_bit_unlock(int nr, volatile unsigned long *addr)
+{
+	__asm__ __volatile__(LWSYNC_ON_SMP "" ::: "memory");
+	__clear_bit(nr, addr);
+}
+
 /*
  * Return the zero-based bit position (LE, not IBM bit numbering) of
  * the most significant 1-bit in a double word.
@@ -266,7 +311,6 @@ static __inline__ int fls(unsigned int x)
 #include <asm-generic/bitops/fls64.h>
 
 #include <asm-generic/bitops/hweight.h>
-#include <asm-generic/bitops/lock.h>
 
 #define find_first_zero_bit(addr, size) find_next_zero_bit((addr), (size), 0)
 unsigned long find_next_zero_bit(const unsigned long *addr,