author		David S. Miller <davem@sunset.davemloft.net>	2006-12-17 19:18:47 -0500
committer	David S. Miller <davem@sunset.davemloft.net>	2006-12-17 19:18:47 -0500
commit		8a8b836b91aa170a383f2f360b73d3d23160d9d7 (patch)
tree		875a635f634a869b801c4efa8f145c5b7b7db8e4 /arch/sparc/lib
parent		216da721b881838d639a3987bf8a825e6b4aacdd (diff)
[SPARC]: Make bitops use same spinlocks as atomics.
Recent workqueue changes basically make this a formal requirement.

Also, move atomic32.o from lib-y to obj-y since it exports symbols
to modules.

Signed-off-by: David S. Miller <davem@davemloft.net>
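The helpers this patch adds return the old value of the masked bits, so the word-oriented sparc32 bitops can be layered on top of them. Below is a minimal sketch, assuming the usual include/asm-sparc/bitops.h wrapper shape (the header is not part of this diff; everything except ___set_bit() is illustrative), of how a test_and_set_bit() built on the new helper could look:

	/* Hypothetical wrapper, not taken from this patch: select the 32-bit
	 * word holding bit 'nr', build a one-bit mask, and let ___set_bit()
	 * perform the locked read-modify-write.
	 */
	extern unsigned long ___set_bit(unsigned long *addr, unsigned long mask);

	static inline int test_and_set_bit(unsigned long nr, volatile unsigned long *addr)
	{
		unsigned long *ADDR, mask;

		ADDR = ((unsigned long *) addr) + (nr >> 5);	/* word containing the bit */
		mask = 1UL << (nr & 31);			/* bit within that word */

		return ___set_bit(ADDR, mask) != 0;		/* nonzero if bit was already set */
	}

Because the helpers take a precomputed mask rather than a bit number, the same three functions can back both the test_and_* variants and the plain set/clear/change ones.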
Diffstat (limited to 'arch/sparc/lib')
-rw-r--r--	arch/sparc/lib/Makefile		4
-rw-r--r--	arch/sparc/lib/atomic32.c	39
-rw-r--r--	arch/sparc/lib/bitops.S		109
3 files changed, 41 insertions, 111 deletions
diff --git a/arch/sparc/lib/Makefile b/arch/sparc/lib/Makefile
index 5db7e1d85385..9ddc5b9ce3bd 100644
--- a/arch/sparc/lib/Makefile
+++ b/arch/sparc/lib/Makefile
@@ -7,7 +7,7 @@ EXTRA_AFLAGS := -ansi -DST_DIV0=0x02
 lib-y := mul.o rem.o sdiv.o udiv.o umul.o urem.o ashrdi3.o memcpy.o memset.o \
 	 strlen.o checksum.o blockops.o memscan.o memcmp.o strncmp.o \
 	 strncpy_from_user.o divdi3.o udivdi3.o strlen_user.o \
-	 copy_user.o locks.o atomic.o atomic32.o bitops.o \
+	 copy_user.o locks.o atomic.o \
 	 lshrdi3.o ashldi3.o rwsem.o muldi3.o bitext.o
 
-obj-y += iomap.o
+obj-y += iomap.o atomic32.o
diff --git a/arch/sparc/lib/atomic32.c b/arch/sparc/lib/atomic32.c
index de84f8534bac..53ddcd9d1e60 100644
--- a/arch/sparc/lib/atomic32.c
+++ b/arch/sparc/lib/atomic32.c
@@ -76,3 +76,42 @@ void atomic_set(atomic_t *v, int i)
 	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
 }
 EXPORT_SYMBOL(atomic_set);
+
+unsigned long ___set_bit(unsigned long *addr, unsigned long mask)
+{
+	unsigned long old, flags;
+
+	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
+	old = *addr;
+	*addr = old | mask;
+	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);
+
+	return old & mask;
+}
+EXPORT_SYMBOL(___set_bit);
+
+unsigned long ___clear_bit(unsigned long *addr, unsigned long mask)
+{
+	unsigned long old, flags;
+
+	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
+	old = *addr;
+	*addr = old & ~mask;
+	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);
+
+	return old & mask;
+}
+EXPORT_SYMBOL(___clear_bit);
+
+unsigned long ___change_bit(unsigned long *addr, unsigned long mask)
+{
+	unsigned long old, flags;
+
+	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
+	old = *addr;
+	*addr = old ^ mask;
+	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);
+
+	return old & mask;
+}
+EXPORT_SYMBOL(___change_bit);
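The ATOMIC_HASH() locks taken above are the same ones the atomic_t routines in this file already use; their definition sits near the top of atomic32.c and is outside this hunk. Roughly, and only as a sketch of the existing scheme rather than part of the patch, it looks like this:

	#include <linux/spinlock.h>

	/* Sketch of the hashed-lock scheme (an assumption about the existing
	 * top of atomic32.c, not something this patch changes): on SMP the
	 * address of the word being updated selects one of a few spinlocks,
	 * on UP a single dummy lock suffices since the helpers only need to
	 * keep interrupts off.
	 */
	#ifdef CONFIG_SMP
	#define ATOMIC_HASH_SIZE	4
	#define ATOMIC_HASH(a)	\
		(&__atomic_hash[(((unsigned long)a) >> 8) & (ATOMIC_HASH_SIZE - 1)])

	spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] = {
		[0 ... (ATOMIC_HASH_SIZE - 1)] = SPIN_LOCK_UNLOCKED
	};
	#else
	static DEFINE_SPINLOCK(dummy);
	#define ATOMIC_HASH(a)	(&dummy)
	#endif

Sharing these locks is the point of the change: a bitop and an atomic operation on the same word now serialize against each other, instead of relying on the separate bitops_spinlock used by the deleted bitops.S below.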
diff --git a/arch/sparc/lib/bitops.S b/arch/sparc/lib/bitops.S
deleted file mode 100644
index cb7fb66a40c8..000000000000
--- a/arch/sparc/lib/bitops.S
+++ /dev/null
@@ -1,109 +0,0 @@
-/* bitops.S: Low level assembler bit operations.
- *
- * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu)
- */
-
-#include <asm/ptrace.h>
-#include <asm/psr.h>
-
-	.text
-	.align	4
-
-	.globl	__bitops_begin
-__bitops_begin:
-
-	/* Take bits in %g2 and set them in word at %g1,
-	 * return whether bits were set in original value
-	 * in %g2.  %g4 holds value to restore into %o7
-	 * in delay slot of jmpl return, %g3 + %g5 + %g7 can be
-	 * used as temporaries and thus is considered clobbered
-	 * by all callers.
-	 */
-	.globl	___set_bit
-___set_bit:
-	rd	%psr, %g3
-	nop; nop; nop;
-	or	%g3, PSR_PIL, %g5
-	wr	%g5, 0x0, %psr
-	nop; nop; nop
-#ifdef CONFIG_SMP
-	set	bitops_spinlock, %g5
-2:	ldstub	[%g5], %g7		! Spin on the byte lock for SMP.
-	orcc	%g7, 0x0, %g0		! Did we get it?
-	bne	2b			! Nope...
-#endif
-	 ld	[%g1], %g7
-	or	%g7, %g2, %g5
-	and	%g7, %g2, %g2
-#ifdef CONFIG_SMP
-	st	%g5, [%g1]
-	set	bitops_spinlock, %g5
-	stb	%g0, [%g5]
-#else
-	st	%g5, [%g1]
-#endif
-	wr	%g3, 0x0, %psr
-	nop; nop; nop
-	jmpl	%o7, %g0
-	 mov	%g4, %o7
-
-	/* Same as above, but clears the bits from %g2 instead. */
-	.globl	___clear_bit
-___clear_bit:
-	rd	%psr, %g3
-	nop; nop; nop
-	or	%g3, PSR_PIL, %g5
-	wr	%g5, 0x0, %psr
-	nop; nop; nop
-#ifdef CONFIG_SMP
-	set	bitops_spinlock, %g5
-2:	ldstub	[%g5], %g7		! Spin on the byte lock for SMP.
-	orcc	%g7, 0x0, %g0		! Did we get it?
-	bne	2b			! Nope...
-#endif
-	 ld	[%g1], %g7
-	andn	%g7, %g2, %g5
-	and	%g7, %g2, %g2
-#ifdef CONFIG_SMP
-	st	%g5, [%g1]
-	set	bitops_spinlock, %g5
-	stb	%g0, [%g5]
-#else
-	st	%g5, [%g1]
-#endif
-	wr	%g3, 0x0, %psr
-	nop; nop; nop
-	jmpl	%o7, %g0
-	 mov	%g4, %o7
-
-	/* Same thing again, but this time toggles the bits from %g2. */
-	.globl	___change_bit
-___change_bit:
-	rd	%psr, %g3
-	nop; nop; nop
-	or	%g3, PSR_PIL, %g5
-	wr	%g5, 0x0, %psr
-	nop; nop; nop
-#ifdef CONFIG_SMP
-	set	bitops_spinlock, %g5
-2:	ldstub	[%g5], %g7		! Spin on the byte lock for SMP.
-	orcc	%g7, 0x0, %g0		! Did we get it?
-	bne	2b			! Nope...
-#endif
-	 ld	[%g1], %g7
-	xor	%g7, %g2, %g5
-	and	%g7, %g2, %g2
-#ifdef CONFIG_SMP
-	st	%g5, [%g1]
-	set	bitops_spinlock, %g5
-	stb	%g0, [%g5]
-#else
-	st	%g5, [%g1]
-#endif
-	wr	%g3, 0x0, %psr
-	nop; nop; nop
-	jmpl	%o7, %g0
-	 mov	%g4, %o7
-
-	.globl	__bitops_end
-__bitops_end: