author	David S. Miller <davem@sunset.davemloft.net>	2006-12-17 19:18:47 -0500
committer	David S. Miller <davem@sunset.davemloft.net>	2006-12-17 19:18:47 -0500
commit	8a8b836b91aa170a383f2f360b73d3d23160d9d7 (patch)
tree	875a635f634a869b801c4efa8f145c5b7b7db8e4 /arch/sparc
parent	216da721b881838d639a3987bf8a825e6b4aacdd (diff)
[SPARC]: Make bitops use same spinlocks as atomics.
Recent workqueue changes basically make this a formal requirement.

Also, move atomic32.o from lib-y to obj-y since it exports symbols
to modules.

Signed-off-by: David S. Miller <davem@davemloft.net>
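Context for the diff below: the new C bit helpers reuse the ATOMIC_HASH() bucket locks that arch/sparc/lib/atomic32.c already uses for its atomic_t emulation. As a rough sketch only (reconstructed from the sparc32 atomic32.c of this era, not part of this patch, and possibly differing in exact constants), that hashed-spinlock scheme looks like this:

#include <linux/spinlock.h>

#ifdef CONFIG_SMP
/* A small array of spinlocks; an address hashes to one bucket, so
 * unrelated words rarely contend on the same lock. */
#define ATOMIC_HASH_SIZE	4
#define ATOMIC_HASH(a)	\
	(&__atomic_hash[(((unsigned long)a) >> 8) & (ATOMIC_HASH_SIZE - 1)])

spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] = {
	[0 ... (ATOMIC_HASH_SIZE - 1)] = SPIN_LOCK_UNLOCKED
};
#else
/* On UP the lock exists only to mask interrupts, so a single dummy
 * lock suffices. */
static DEFINE_SPINLOCK(dummy);
#define ATOMIC_HASH_SIZE	1
#define ATOMIC_HASH(a)		(&dummy)
#endif

Because ___set_bit()/___clear_bit()/___change_bit() now take the same bucket lock as the atomic_*() helpers, a bit operation and an atomic operation on the same word serialize against each other, which is the property the recent workqueue changes depend on.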
Diffstat (limited to 'arch/sparc')
-rw-r--r--	arch/sparc/kernel/sparc_ksyms.c	  8
-rw-r--r--	arch/sparc/kernel/time.c	  5
-rw-r--r--	arch/sparc/lib/Makefile	  4
-rw-r--r--	arch/sparc/lib/atomic32.c	 39
-rw-r--r--	arch/sparc/lib/bitops.S	109
5 files changed, 42 insertions, 123 deletions
diff --git a/arch/sparc/kernel/sparc_ksyms.c b/arch/sparc/kernel/sparc_ksyms.c
index 33dadd9f2871..d8e008a04e2b 100644
--- a/arch/sparc/kernel/sparc_ksyms.c
+++ b/arch/sparc/kernel/sparc_ksyms.c
@@ -83,9 +83,6 @@ extern int __divdi3(int, int);
 /* Private functions with odd calling conventions. */
 extern void ___atomic24_add(void);
 extern void ___atomic24_sub(void);
-extern void ___set_bit(void);
-extern void ___clear_bit(void);
-extern void ___change_bit(void);
 extern void ___rw_read_enter(void);
 extern void ___rw_read_try(void);
 extern void ___rw_read_exit(void);
@@ -125,11 +122,6 @@ EXPORT_SYMBOL(pfn_base);
 EXPORT_SYMBOL(___atomic24_add);
 EXPORT_SYMBOL(___atomic24_sub);
 
-/* Bit operations. */
-EXPORT_SYMBOL(___set_bit);
-EXPORT_SYMBOL(___clear_bit);
-EXPORT_SYMBOL(___change_bit);
-
 /* Per-CPU information table */
 EXPORT_PER_CPU_SYMBOL(__cpu_data);
 
diff --git a/arch/sparc/kernel/time.c b/arch/sparc/kernel/time.c
index 6c7aa51b590f..2fcce000d877 100644
--- a/arch/sparc/kernel/time.c
+++ b/arch/sparc/kernel/time.c
@@ -78,7 +78,6 @@ unsigned long profile_pc(struct pt_regs *regs)
 	extern char __copy_user_begin[], __copy_user_end[];
 	extern char __atomic_begin[], __atomic_end[];
 	extern char __bzero_begin[], __bzero_end[];
-	extern char __bitops_begin[], __bitops_end[];
 
 	unsigned long pc = regs->pc;
 
@@ -88,9 +87,7 @@ unsigned long profile_pc(struct pt_regs *regs)
 	    (pc >= (unsigned long) __atomic_begin &&
 	     pc < (unsigned long) __atomic_end) ||
 	    (pc >= (unsigned long) __bzero_begin &&
-	     pc < (unsigned long) __bzero_end) ||
-	    (pc >= (unsigned long) __bitops_begin &&
-	     pc < (unsigned long) __bitops_end))
+	     pc < (unsigned long) __bzero_end))
 		pc = regs->u_regs[UREG_RETPC];
 	return pc;
 }
diff --git a/arch/sparc/lib/Makefile b/arch/sparc/lib/Makefile
index 5db7e1d85385..9ddc5b9ce3bd 100644
--- a/arch/sparc/lib/Makefile
+++ b/arch/sparc/lib/Makefile
@@ -7,7 +7,7 @@ EXTRA_AFLAGS := -ansi -DST_DIV0=0x02
 lib-y := mul.o rem.o sdiv.o udiv.o umul.o urem.o ashrdi3.o memcpy.o memset.o \
 	 strlen.o checksum.o blockops.o memscan.o memcmp.o strncmp.o \
 	 strncpy_from_user.o divdi3.o udivdi3.o strlen_user.o \
-	 copy_user.o locks.o atomic.o atomic32.o bitops.o \
+	 copy_user.o locks.o atomic.o \
 	 lshrdi3.o ashldi3.o rwsem.o muldi3.o bitext.o
 
-obj-y += iomap.o
+obj-y += iomap.o atomic32.o
diff --git a/arch/sparc/lib/atomic32.c b/arch/sparc/lib/atomic32.c
index de84f8534bac..53ddcd9d1e60 100644
--- a/arch/sparc/lib/atomic32.c
+++ b/arch/sparc/lib/atomic32.c
@@ -76,3 +76,42 @@ void atomic_set(atomic_t *v, int i)
 	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
 }
 EXPORT_SYMBOL(atomic_set);
+
+unsigned long ___set_bit(unsigned long *addr, unsigned long mask)
+{
+	unsigned long old, flags;
+
+	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
+	old = *addr;
+	*addr = old | mask;
+	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);
+
+	return old & mask;
+}
+EXPORT_SYMBOL(___set_bit);
+
+unsigned long ___clear_bit(unsigned long *addr, unsigned long mask)
+{
+	unsigned long old, flags;
+
+	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
+	old = *addr;
+	*addr = old & ~mask;
+	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);
+
+	return old & mask;
+}
+EXPORT_SYMBOL(___clear_bit);
+
+unsigned long ___change_bit(unsigned long *addr, unsigned long mask)
+{
+	unsigned long old, flags;
+
+	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
+	old = *addr;
+	*addr = old ^ mask;
+	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);
+
+	return old & mask;
+}
+EXPORT_SYMBOL(___change_bit);
diff --git a/arch/sparc/lib/bitops.S b/arch/sparc/lib/bitops.S
deleted file mode 100644
index cb7fb66a40c8..000000000000
--- a/arch/sparc/lib/bitops.S
+++ /dev/null
@@ -1,109 +0,0 @@
-/* bitops.S: Low level assembler bit operations.
- *
- * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu)
- */
-
-#include <asm/ptrace.h>
-#include <asm/psr.h>
-
-	.text
-	.align	4
-
-	.globl	__bitops_begin
-__bitops_begin:
-
-	/* Take bits in %g2 and set them in word at %g1,
-	 * return whether bits were set in original value
-	 * in %g2.  %g4 holds value to restore into %o7
-	 * in delay slot of jmpl return, %g3 + %g5 + %g7 can be
-	 * used as temporaries and thus is considered clobbered
-	 * by all callers.
-	 */
-	.globl	___set_bit
-___set_bit:
-	rd	%psr, %g3
-	nop; nop; nop;
-	or	%g3, PSR_PIL, %g5
-	wr	%g5, 0x0, %psr
-	nop; nop; nop
-#ifdef CONFIG_SMP
-	set	bitops_spinlock, %g5
-2:	ldstub	[%g5], %g7	! Spin on the byte lock for SMP.
-	orcc	%g7, 0x0, %g0	! Did we get it?
-	bne	2b		! Nope...
-#endif
-	ld	[%g1], %g7
-	or	%g7, %g2, %g5
-	and	%g7, %g2, %g2
-#ifdef CONFIG_SMP
-	st	%g5, [%g1]
-	set	bitops_spinlock, %g5
-	stb	%g0, [%g5]
-#else
-	st	%g5, [%g1]
-#endif
-	wr	%g3, 0x0, %psr
-	nop; nop; nop
-	jmpl	%o7, %g0
-	 mov	%g4, %o7
-
-	/* Same as above, but clears the bits from %g2 instead. */
-	.globl	___clear_bit
-___clear_bit:
-	rd	%psr, %g3
-	nop; nop; nop
-	or	%g3, PSR_PIL, %g5
-	wr	%g5, 0x0, %psr
-	nop; nop; nop
-#ifdef CONFIG_SMP
-	set	bitops_spinlock, %g5
-2:	ldstub	[%g5], %g7	! Spin on the byte lock for SMP.
-	orcc	%g7, 0x0, %g0	! Did we get it?
-	bne	2b		! Nope...
-#endif
-	ld	[%g1], %g7
-	andn	%g7, %g2, %g5
-	and	%g7, %g2, %g2
-#ifdef CONFIG_SMP
-	st	%g5, [%g1]
-	set	bitops_spinlock, %g5
-	stb	%g0, [%g5]
-#else
-	st	%g5, [%g1]
-#endif
-	wr	%g3, 0x0, %psr
-	nop; nop; nop
-	jmpl	%o7, %g0
-	 mov	%g4, %o7
-
-	/* Same thing again, but this time toggles the bits from %g2. */
-	.globl	___change_bit
-___change_bit:
-	rd	%psr, %g3
-	nop; nop; nop
-	or	%g3, PSR_PIL, %g5
-	wr	%g5, 0x0, %psr
-	nop; nop; nop
-#ifdef CONFIG_SMP
-	set	bitops_spinlock, %g5
-2:	ldstub	[%g5], %g7	! Spin on the byte lock for SMP.
-	orcc	%g7, 0x0, %g0	! Did we get it?
-	bne	2b		! Nope...
-#endif
-	ld	[%g1], %g7
-	xor	%g7, %g2, %g5
-	and	%g7, %g2, %g2
-#ifdef CONFIG_SMP
-	st	%g5, [%g1]
-	set	bitops_spinlock, %g5
-	stb	%g0, [%g5]
-#else
-	st	%g5, [%g1]
-#endif
-	wr	%g3, 0x0, %psr
-	nop; nop; nop
-	jmpl	%o7, %g0
-	 mov	%g4, %o7
-
-	.globl	__bitops_end
-__bitops_end: