about | summary | refs | log | tree | commit | diff | stats
path: root/arch/mips
diff options
context:
space:
mode:
Diffstat (limited to 'arch/mips')
-rw-r--r--  arch/mips/include/asm/bitops.h | 14
1 file changed, 7 insertions(+), 7 deletions(-)
diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h
index 82ad35ce2b45..455664cf5352 100644
--- a/arch/mips/include/asm/bitops.h
+++ b/arch/mips/include/asm/bitops.h
@@ -57,7 +57,7 @@
57static inline void set_bit(unsigned long nr, volatile unsigned long *addr) 57static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
58{ 58{
59 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); 59 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
60 unsigned short bit = nr & SZLONG_MASK; 60 int bit = nr & SZLONG_MASK;
61 unsigned long temp; 61 unsigned long temp;
62 62
63 if (kernel_uses_llsc && R10000_LLSC_WAR) { 63 if (kernel_uses_llsc && R10000_LLSC_WAR) {
@@ -118,7 +118,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
118static inline void clear_bit(unsigned long nr, volatile unsigned long *addr) 118static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
119{ 119{
120 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); 120 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
121 unsigned short bit = nr & SZLONG_MASK; 121 int bit = nr & SZLONG_MASK;
122 unsigned long temp; 122 unsigned long temp;
123 123
124 if (kernel_uses_llsc && R10000_LLSC_WAR) { 124 if (kernel_uses_llsc && R10000_LLSC_WAR) {
@@ -191,7 +191,7 @@ static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *ad
191 */ 191 */
192static inline void change_bit(unsigned long nr, volatile unsigned long *addr) 192static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
193{ 193{
194 unsigned short bit = nr & SZLONG_MASK; 194 int bit = nr & SZLONG_MASK;
195 195
196 if (kernel_uses_llsc && R10000_LLSC_WAR) { 196 if (kernel_uses_llsc && R10000_LLSC_WAR) {
197 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); 197 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
@@ -244,7 +244,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
244static inline int test_and_set_bit(unsigned long nr, 244static inline int test_and_set_bit(unsigned long nr,
245 volatile unsigned long *addr) 245 volatile unsigned long *addr)
246{ 246{
247 unsigned short bit = nr & SZLONG_MASK; 247 int bit = nr & SZLONG_MASK;
248 unsigned long res; 248 unsigned long res;
249 249
250 smp_mb__before_llsc(); 250 smp_mb__before_llsc();
@@ -310,7 +310,7 @@ static inline int test_and_set_bit(unsigned long nr,
310static inline int test_and_set_bit_lock(unsigned long nr, 310static inline int test_and_set_bit_lock(unsigned long nr,
311 volatile unsigned long *addr) 311 volatile unsigned long *addr)
312{ 312{
313 unsigned short bit = nr & SZLONG_MASK; 313 int bit = nr & SZLONG_MASK;
314 unsigned long res; 314 unsigned long res;
315 315
316 if (kernel_uses_llsc && R10000_LLSC_WAR) { 316 if (kernel_uses_llsc && R10000_LLSC_WAR) {
@@ -373,7 +373,7 @@ static inline int test_and_set_bit_lock(unsigned long nr,
373static inline int test_and_clear_bit(unsigned long nr, 373static inline int test_and_clear_bit(unsigned long nr,
374 volatile unsigned long *addr) 374 volatile unsigned long *addr)
375{ 375{
376 unsigned short bit = nr & SZLONG_MASK; 376 int bit = nr & SZLONG_MASK;
377 unsigned long res; 377 unsigned long res;
378 378
379 smp_mb__before_llsc(); 379 smp_mb__before_llsc();
@@ -457,7 +457,7 @@ static inline int test_and_clear_bit(unsigned long nr,
457static inline int test_and_change_bit(unsigned long nr, 457static inline int test_and_change_bit(unsigned long nr,
458 volatile unsigned long *addr) 458 volatile unsigned long *addr)
459{ 459{
460 unsigned short bit = nr & SZLONG_MASK; 460 int bit = nr & SZLONG_MASK;
461 unsigned long res; 461 unsigned long res;
462 462
463 smp_mb__before_llsc(); 463 smp_mb__before_llsc();