author     David S. Miller <davem@sunset.davemloft.net>  2006-12-17 19:18:47 -0500
committer  David S. Miller <davem@sunset.davemloft.net>  2006-12-17 19:18:47 -0500
commit     8a8b836b91aa170a383f2f360b73d3d23160d9d7 (patch)
tree       875a635f634a869b801c4efa8f145c5b7b7db8e4 /include
parent     216da721b881838d639a3987bf8a825e6b4aacdd (diff)
[SPARC]: Make bitops use same spinlocks as atomics.
Recent workqueue changes basically make this a formal requirement.
Also, move atomic32.o from lib-y to obj-y since it exports symbols
to modules.
Signed-off-by: David S. Miller <davem@davemloft.net>
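
For context, the ___set_bit/___clear_bit/___change_bit helpers that the header now calls out to live in arch/sparc/lib/atomic32.c, alongside the spinlocks used by the atomic_t operations they now share. The sketch below only illustrates the general shape such a helper takes; the ATOMIC_HASH() lock lookup is an assumption modeled on the sparc32 atomic code, not quoted from the tree:

/*
 * Hypothetical sketch, not the actual arch/sparc/lib/atomic32.c code.
 * ATOMIC_HASH() is assumed to map an address onto one of a small
 * array of spinlocks shared with the atomic_t helpers.
 */
#include <linux/spinlock.h>
#include <linux/module.h>

unsigned long ___set_bit(unsigned long *addr, unsigned long mask)
{
        unsigned long old, flags;

        spin_lock_irqsave(ATOMIC_HASH(addr), flags);
        old = *addr;
        *addr = old | mask;
        spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

        return old & mask;      /* non-zero iff the bit was already set */
}
EXPORT_SYMBOL(___set_bit);      /* exported to modules, hence the move to obj-y */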
Diffstat (limited to 'include')
-rw-r--r--  include/asm-sparc/bitops.h | 100
1 file changed, 16 insertions(+), 84 deletions(-)
diff --git a/include/asm-sparc/bitops.h b/include/asm-sparc/bitops.h
index 04aa3318f76a..329e696e7751 100644
--- a/include/asm-sparc/bitops.h
+++ b/include/asm-sparc/bitops.h
@@ -14,6 +14,10 @@
 
 #ifdef __KERNEL__
 
+extern unsigned long ___set_bit(unsigned long *addr, unsigned long mask);
+extern unsigned long ___clear_bit(unsigned long *addr, unsigned long mask);
+extern unsigned long ___change_bit(unsigned long *addr, unsigned long mask);
+
 /*
  * Set bit 'nr' in 32-bit quantity at address 'addr' where bit '0'
  * is in the highest of the four bytes and bit '31' is the high bit
@@ -22,134 +26,62 @@
  */
 static inline int test_and_set_bit(unsigned long nr, volatile unsigned long *addr)
 {
-        register unsigned long mask asm("g2");
-        register unsigned long *ADDR asm("g1");
-        register int tmp1 asm("g3");
-        register int tmp2 asm("g4");
-        register int tmp3 asm("g5");
-        register int tmp4 asm("g7");
+        unsigned long *ADDR, mask;
 
         ADDR = ((unsigned long *) addr) + (nr >> 5);
         mask = 1 << (nr & 31);
 
-        __asm__ __volatile__(
-        "mov %%o7, %%g4\n\t"
-        "call ___set_bit\n\t"
-        " add %%o7, 8, %%o7\n"
-        : "=&r" (mask), "=r" (tmp1), "=r" (tmp2), "=r" (tmp3), "=r" (tmp4)
-        : "0" (mask), "r" (ADDR)
-        : "memory", "cc");
-
-        return mask != 0;
+        return ___set_bit(ADDR, mask) != 0;
 }
 
 static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 {
-        register unsigned long mask asm("g2");
-        register unsigned long *ADDR asm("g1");
-        register int tmp1 asm("g3");
-        register int tmp2 asm("g4");
-        register int tmp3 asm("g5");
-        register int tmp4 asm("g7");
+        unsigned long *ADDR, mask;
 
         ADDR = ((unsigned long *) addr) + (nr >> 5);
         mask = 1 << (nr & 31);
 
-        __asm__ __volatile__(
-        "mov %%o7, %%g4\n\t"
-        "call ___set_bit\n\t"
-        " add %%o7, 8, %%o7\n"
-        : "=&r" (mask), "=r" (tmp1), "=r" (tmp2), "=r" (tmp3), "=r" (tmp4)
-        : "0" (mask), "r" (ADDR)
-        : "memory", "cc");
+        (void) ___set_bit(ADDR, mask);
 }
 
 static inline int test_and_clear_bit(unsigned long nr, volatile unsigned long *addr)
 {
-        register unsigned long mask asm("g2");
-        register unsigned long *ADDR asm("g1");
-        register int tmp1 asm("g3");
-        register int tmp2 asm("g4");
-        register int tmp3 asm("g5");
-        register int tmp4 asm("g7");
+        unsigned long *ADDR, mask;
 
         ADDR = ((unsigned long *) addr) + (nr >> 5);
         mask = 1 << (nr & 31);
 
-        __asm__ __volatile__(
-        "mov %%o7, %%g4\n\t"
-        "call ___clear_bit\n\t"
-        " add %%o7, 8, %%o7\n"
-        : "=&r" (mask), "=r" (tmp1), "=r" (tmp2), "=r" (tmp3), "=r" (tmp4)
-        : "0" (mask), "r" (ADDR)
-        : "memory", "cc");
-
-        return mask != 0;
+        return ___clear_bit(ADDR, mask) != 0;
 }
 
 static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 {
-        register unsigned long mask asm("g2");
-        register unsigned long *ADDR asm("g1");
-        register int tmp1 asm("g3");
-        register int tmp2 asm("g4");
-        register int tmp3 asm("g5");
-        register int tmp4 asm("g7");
+        unsigned long *ADDR, mask;
 
         ADDR = ((unsigned long *) addr) + (nr >> 5);
         mask = 1 << (nr & 31);
 
-        __asm__ __volatile__(
-        "mov %%o7, %%g4\n\t"
-        "call ___clear_bit\n\t"
-        " add %%o7, 8, %%o7\n"
-        : "=&r" (mask), "=r" (tmp1), "=r" (tmp2), "=r" (tmp3), "=r" (tmp4)
-        : "0" (mask), "r" (ADDR)
-        : "memory", "cc");
+        (void) ___clear_bit(ADDR, mask);
 }
 
 static inline int test_and_change_bit(unsigned long nr, volatile unsigned long *addr)
 {
-        register unsigned long mask asm("g2");
-        register unsigned long *ADDR asm("g1");
-        register int tmp1 asm("g3");
-        register int tmp2 asm("g4");
-        register int tmp3 asm("g5");
-        register int tmp4 asm("g7");
+        unsigned long *ADDR, mask;
 
         ADDR = ((unsigned long *) addr) + (nr >> 5);
         mask = 1 << (nr & 31);
 
-        __asm__ __volatile__(
-        "mov %%o7, %%g4\n\t"
-        "call ___change_bit\n\t"
-        " add %%o7, 8, %%o7\n"
-        : "=&r" (mask), "=r" (tmp1), "=r" (tmp2), "=r" (tmp3), "=r" (tmp4)
-        : "0" (mask), "r" (ADDR)
-        : "memory", "cc");
-
-        return mask != 0;
+        return ___change_bit(ADDR, mask) != 0;
 }
 
 static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 {
-        register unsigned long mask asm("g2");
-        register unsigned long *ADDR asm("g1");
-        register int tmp1 asm("g3");
-        register int tmp2 asm("g4");
-        register int tmp3 asm("g5");
-        register int tmp4 asm("g7");
+        unsigned long *ADDR, mask;
 
         ADDR = ((unsigned long *) addr) + (nr >> 5);
         mask = 1 << (nr & 31);
 
-        __asm__ __volatile__(
-        "mov %%o7, %%g4\n\t"
-        "call ___change_bit\n\t"
-        " add %%o7, 8, %%o7\n"
-        : "=&r" (mask), "=r" (tmp1), "=r" (tmp2), "=r" (tmp3), "=r" (tmp4)
-        : "0" (mask), "r" (ADDR)
-        : "memory", "cc");
+        (void) ___change_bit(ADDR, mask);
 }
 
 #include <asm-generic/bitops/non-atomic.h>
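
Reading the new wrappers next to the old inline asm, the only arithmetic left in the header is the word/mask split: nr >> 5 picks which 32-bit word of the bitmap holds bit nr, and 1 << (nr & 31) builds the mask for that bit inside the word. A small stand-alone illustration of that split (plain user-space C, purely for exposition, not kernel code):

#include <assert.h>

int main(void)
{
        unsigned long bitmap[4] = { 0 };
        unsigned long nr = 70;                          /* arbitrary bit number */
        unsigned long *word = bitmap + (nr >> 5);       /* word index: 70 / 32 = 2 */
        unsigned long mask = 1UL << (nr & 31);          /* bit within word: 70 % 32 = 6 */

        *word |= mask;                                  /* the store set_bit() ends up doing */
        assert(bitmap[2] == (1UL << 6));
        return 0;
}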