author     Matthew Wilcox <matthew@wil.cx>             2006-12-14 11:00:25 -0500
committer  Kyle McMartin <kyle@athena.road.mcmartin.ca>  2007-02-17 00:43:03 -0500
commit     af5917f0cd60715ed09874bb793d4f62ba692f47 (patch)
tree       f808220b3254089658d1c35c76096aadf989f38b /include
parent     d6ce8626dbc7d277d29b62e31c24ce777c60546b (diff)
[PARISC] Only write to memory in test_and_set_bit/test_and_clear_bit if we're
going to change the bit.

Signed-off-by: Matthew Wilcox <matthew@wil.cx>
Signed-off-by: Kyle McMartin <kyle@parisc-linux.org>
Diffstat (limited to 'include')
-rw-r--r--	include/asm-parisc/bitops.h | 22
1 file changed, 14 insertions(+), 8 deletions(-)
diff --git a/include/asm-parisc/bitops.h b/include/asm-parisc/bitops.h
index 900561922c4c..9577342f21aa 100644
--- a/include/asm-parisc/bitops.h
+++ b/include/asm-parisc/bitops.h
@@ -60,31 +60,37 @@ static __inline__ void change_bit(int nr, volatile unsigned long * addr)
 static __inline__ int test_and_set_bit(int nr, volatile unsigned long * addr)
 {
 	unsigned long mask = 1UL << CHOP_SHIFTCOUNT(nr);
-	unsigned long oldbit;
+	unsigned long old;
 	unsigned long flags;
+	int set;
 
 	addr += (nr >> SHIFT_PER_LONG);
 	_atomic_spin_lock_irqsave(addr, flags);
-	oldbit = *addr;
-	*addr = oldbit | mask;
+	old = *addr;
+	set = (old & mask) ? 1 : 0;
+	if (!set)
+		*addr = old | mask;
 	_atomic_spin_unlock_irqrestore(addr, flags);
 
-	return (oldbit & mask) ? 1 : 0;
+	return set;
 }
 
 static __inline__ int test_and_clear_bit(int nr, volatile unsigned long * addr)
 {
 	unsigned long mask = 1UL << CHOP_SHIFTCOUNT(nr);
-	unsigned long oldbit;
+	unsigned long old;
 	unsigned long flags;
+	int set;
 
 	addr += (nr >> SHIFT_PER_LONG);
 	_atomic_spin_lock_irqsave(addr, flags);
-	oldbit = *addr;
-	*addr = oldbit & ~mask;
+	old = *addr;
+	set = (old & mask) ? 1 : 0;
+	if (set)
+		*addr = old & ~mask;
 	_atomic_spin_unlock_irqrestore(addr, flags);
 
-	return (oldbit & mask) ? 1 : 0;
+	return set;
 }
 
 static __inline__ int test_and_change_bit(int nr, volatile unsigned long * addr)
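
For readers who skim past the diff, here is the pattern the patch introduces as a minimal standalone C sketch. It is an assumption-laden illustration, not the kernel code: a pthread mutex stands in for the kernel's _atomic_spin_lock_irqsave()/_atomic_spin_unlock_irqrestore() pair, and the names demo_test_and_set_bit and demo_lock are invented for this example. The word is read under the lock, the previous bit value is computed, and the store back to memory happens only when the bit actually needs to change.

#include <pthread.h>

#define DEMO_BITS_PER_LONG (8 * sizeof(unsigned long))

/* Illustrative only: a single global lock in place of the kernel's
 * per-address spinlock hashing. */
static pthread_mutex_t demo_lock = PTHREAD_MUTEX_INITIALIZER;

/* Returns the previous value of bit nr, writing to *addr only when
 * the bit is not already set. */
static int demo_test_and_set_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = 1UL << (nr % DEMO_BITS_PER_LONG);
	unsigned long old;
	int set;

	addr += nr / DEMO_BITS_PER_LONG;
	pthread_mutex_lock(&demo_lock);
	old = *addr;
	set = (old & mask) ? 1 : 0;
	if (!set)
		*addr = old | mask;
	pthread_mutex_unlock(&demo_lock);

	return set;
}

The upshot, as in the patch, is that test_and_set_bit on an already-set bit (and, symmetrically, test_and_clear_bit on an already-clear bit) becomes a pure read under the lock, with no store back to memory.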