author     David S. Miller <davem@davemloft.net>    2008-11-15 16:33:25 -0500
committer  David S. Miller <davem@davemloft.net>    2008-12-04 12:16:47 -0500
commit     293666b7a17cb7a389fc274980439212386a19c4
tree       075cc7661d2113cf04da7130b3383979d8024206
parent     64f2dde3f743c8a1ad8c0a1aa74166c1034afd92
sparc64: Stop using memory barriers for atomics and locks.

The kernel always executes in the TSO memory model now, so none of
this stuff is necessary any more.

With helpful feedback from Nick Piggin.

Signed-off-by: David S. Miller <davem@davemloft.net>
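
The pattern this patch touches everywhere is the same compare-and-swap
retry loop. As a rough C sketch of what atomic_add_ret computes
(hypothetical code, not part of the patch; the helper name
atomic_add_ret_sketch and the GCC __atomic builtins are illustrative
assumptions), the loop itself issues no explicit barriers at all:

	#include <stdint.h>

	/* Illustrative analogue of atomic_add_ret's cas loop; the
	 * relaxed ordering mirrors the patch's point that no explicit
	 * barrier is issued around the compare-and-swap itself.  */
	static int atomic_add_ret_sketch(int incr, uint32_t *p)
	{
		uint32_t old = __atomic_load_n(p, __ATOMIC_RELAXED);	/* lduw */
		uint32_t new;

		do {
			new = old + incr;				/* add  */
			/* cas: store new iff *p == old; on failure the
			 * builtin reloads old from *p and we retry.  */
		} while (!__atomic_compare_exchange_n(p, &old, new, 0,
						      __ATOMIC_RELAXED,
						      __ATOMIC_RELAXED));

		return (int)new;					/* sra  */
	}

The deleted ATOMIC_PRE_BARRIER/ATOMIC_POST_BARRIER macros (and the
BITOP_ and rwsem membar equivalents below) wrapped this loop in
explicit membar instructions. Under TSO the cas, being an atomic
load-and-store, already orders itself against the surrounding
accesses, which is why only the bare loop needs to remain.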
Diffstat (limited to 'arch/sparc64/lib')
-rw-r--r--	arch/sparc64/lib/atomic.S	26
-rw-r--r--	arch/sparc64/lib/bitops.S	24
-rw-r--r--	arch/sparc64/lib/rwsem.S	 7
3 files changed, 0 insertions, 57 deletions
diff --git a/arch/sparc64/lib/atomic.S b/arch/sparc64/lib/atomic.S
index 70ac4186f62..0268210ca16 100644
--- a/arch/sparc64/lib/atomic.S
+++ b/arch/sparc64/lib/atomic.S
@@ -43,29 +43,10 @@ atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
 2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic_sub, .-atomic_sub
 
-	/* On SMP we need to use memory barriers to ensure
-	 * correct memory operation ordering, nop these out
-	 * for uniprocessor.
-	 */
-#ifdef CONFIG_SMP
-
-#define ATOMIC_PRE_BARRIER	membar #StoreLoad | #LoadLoad;
-#define ATOMIC_POST_BARRIER	\
-	ba,pt %xcc, 80b;	\
-	membar #StoreLoad | #StoreStore
-
-80:	retl
-	 nop
-#else
-#define ATOMIC_PRE_BARRIER
-#define ATOMIC_POST_BARRIER
-#endif
-
 	.globl	atomic_add_ret
 	.type	atomic_add_ret,#function
 atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
 	BACKOFF_SETUP(%o2)
-	ATOMIC_PRE_BARRIER
 1:	lduw	[%o1], %g1
 	add	%g1, %o0, %g7
 	cas	[%o1], %g1, %g7
@@ -73,7 +54,6 @@ atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
 	bne,pn	%icc, 2f
 	 add	%g7, %o0, %g7
 	sra	%g7, 0, %o0
-	ATOMIC_POST_BARRIER
 	retl
 	 nop
 2:	BACKOFF_SPIN(%o2, %o3, 1b)
@@ -83,7 +63,6 @@ atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
 	.type	atomic_sub_ret,#function
 atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
 	BACKOFF_SETUP(%o2)
-	ATOMIC_PRE_BARRIER
 1:	lduw	[%o1], %g1
 	sub	%g1, %o0, %g7
 	cas	[%o1], %g1, %g7
@@ -91,7 +70,6 @@ atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
 	bne,pn	%icc, 2f
 	 sub	%g7, %o0, %g7
 	sra	%g7, 0, %o0
-	ATOMIC_POST_BARRIER
 	retl
 	 nop
 2:	BACKOFF_SPIN(%o2, %o3, 1b)
@@ -131,7 +109,6 @@ atomic64_sub: /* %o0 = decrement, %o1 = atomic_ptr */
 	.type	atomic64_add_ret,#function
 atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
 	BACKOFF_SETUP(%o2)
-	ATOMIC_PRE_BARRIER
 1:	ldx	[%o1], %g1
 	add	%g1, %o0, %g7
 	casx	[%o1], %g1, %g7
@@ -139,7 +116,6 @@ atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
 	bne,pn	%xcc, 2f
 	 add	%g7, %o0, %g7
 	mov	%g7, %o0
-	ATOMIC_POST_BARRIER
 	retl
 	 nop
 2:	BACKOFF_SPIN(%o2, %o3, 1b)
@@ -149,7 +125,6 @@ atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
 	.type	atomic64_sub_ret,#function
 atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
 	BACKOFF_SETUP(%o2)
-	ATOMIC_PRE_BARRIER
 1:	ldx	[%o1], %g1
 	sub	%g1, %o0, %g7
 	casx	[%o1], %g1, %g7
@@ -157,7 +132,6 @@ atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
 	bne,pn	%xcc, 2f
 	 sub	%g7, %o0, %g7
 	mov	%g7, %o0
-	ATOMIC_POST_BARRIER
 	retl
 	 nop
 2:	BACKOFF_SPIN(%o2, %o3, 1b)
diff --git a/arch/sparc64/lib/bitops.S b/arch/sparc64/lib/bitops.S
index 6b015a6eefb..2b7228cb8c2 100644
--- a/arch/sparc64/lib/bitops.S
+++ b/arch/sparc64/lib/bitops.S
@@ -8,29 +8,10 @@
 
 	.text
 
-	/* On SMP we need to use memory barriers to ensure
-	 * correct memory operation ordering, nop these out
-	 * for uniprocessor.
-	 */
-
-#ifdef CONFIG_SMP
-#define BITOP_PRE_BARRIER	membar #StoreLoad | #LoadLoad
-#define BITOP_POST_BARRIER	\
-	ba,pt %xcc, 80b;	\
-	membar #StoreLoad | #StoreStore
-
-80:	retl
-	 nop
-#else
-#define BITOP_PRE_BARRIER
-#define BITOP_POST_BARRIER
-#endif
-
 	.globl	test_and_set_bit
 	.type	test_and_set_bit,#function
 test_and_set_bit:	/* %o0=nr, %o1=addr */
 	BACKOFF_SETUP(%o3)
-	BITOP_PRE_BARRIER
 	srlx	%o0, 6, %g1
 	mov	1, %o2
 	sllx	%g1, 3, %g3
@@ -45,7 +26,6 @@ test_and_set_bit: /* %o0=nr, %o1=addr */
 	and	%g7, %o2, %g2
 	clr	%o0
 	movrne	%g2, 1, %o0
-	BITOP_POST_BARRIER
 	retl
 	 nop
 2:	BACKOFF_SPIN(%o3, %o4, 1b)
@@ -55,7 +35,6 @@ test_and_set_bit: /* %o0=nr, %o1=addr */
 	.type	test_and_clear_bit,#function
 test_and_clear_bit:	/* %o0=nr, %o1=addr */
 	BACKOFF_SETUP(%o3)
-	BITOP_PRE_BARRIER
 	srlx	%o0, 6, %g1
 	mov	1, %o2
 	sllx	%g1, 3, %g3
@@ -70,7 +49,6 @@ test_and_clear_bit: /* %o0=nr, %o1=addr */
 	and	%g7, %o2, %g2
 	clr	%o0
 	movrne	%g2, 1, %o0
-	BITOP_POST_BARRIER
 	retl
 	 nop
 2:	BACKOFF_SPIN(%o3, %o4, 1b)
@@ -80,7 +58,6 @@ test_and_clear_bit: /* %o0=nr, %o1=addr */
 	.type	test_and_change_bit,#function
 test_and_change_bit:	/* %o0=nr, %o1=addr */
 	BACKOFF_SETUP(%o3)
-	BITOP_PRE_BARRIER
 	srlx	%o0, 6, %g1
 	mov	1, %o2
 	sllx	%g1, 3, %g3
@@ -95,7 +72,6 @@ test_and_change_bit: /* %o0=nr, %o1=addr */
 	and	%g7, %o2, %g2
 	clr	%o0
 	movrne	%g2, 1, %o0
-	BITOP_POST_BARRIER
 	retl
 	 nop
 2:	BACKOFF_SPIN(%o3, %o4, 1b)
diff --git a/arch/sparc64/lib/rwsem.S b/arch/sparc64/lib/rwsem.S
index 1a4cc5654de..91a7d29a79d 100644
--- a/arch/sparc64/lib/rwsem.S
+++ b/arch/sparc64/lib/rwsem.S
@@ -17,7 +17,6 @@ __down_read:
 	bne,pn	%icc, 1b
 	 add	%g7, 1, %g7
 	cmp	%g7, 0
-	membar	#StoreLoad | #StoreStore
 	bl,pn	%icc, 3f
 	 nop
 2:
@@ -42,7 +41,6 @@ __down_read_trylock:
 	cmp	%g1, %g7
 	bne,pn	%icc, 1b
 	 mov	1, %o1
-	membar	#StoreLoad | #StoreStore
 2:	retl
 	 mov	%o1, %o0
 	.size	__down_read_trylock, .-__down_read_trylock
@@ -58,7 +56,6 @@ __down_write:
 	cmp	%g3, %g7
 	bne,pn	%icc, 1b
 	 cmp	%g7, 0
-	membar	#StoreLoad | #StoreStore
 	bne,pn	%icc, 3f
 	 nop
 2:	retl
@@ -85,7 +82,6 @@ __down_write_trylock:
 	cmp	%g3, %g7
 	bne,pn	%icc, 1b
 	 mov	1, %o1
-	membar	#StoreLoad | #StoreStore
 2:	retl
 	 mov	%o1, %o0
 	.size	__down_write_trylock, .-__down_write_trylock
@@ -99,7 +95,6 @@ __up_read:
 	cmp	%g1, %g7
 	bne,pn	%icc, 1b
 	 cmp	%g7, 0
-	membar	#StoreLoad | #StoreStore
 	bl,pn	%icc, 3f
 	 nop
 2:	retl
@@ -129,7 +124,6 @@ __up_write:
 	bne,pn	%icc, 1b
 	 sub	%g7, %g1, %g7
 	cmp	%g7, 0
-	membar	#StoreLoad | #StoreStore
 	bl,pn	%icc, 3f
 	 nop
 2:
@@ -155,7 +149,6 @@ __downgrade_write:
 	bne,pn	%icc, 1b
 	 sub	%g7, %g1, %g7
 	cmp	%g7, 0
-	membar	#StoreLoad | #StoreStore
 	bl,pn	%icc, 3f
 	 nop
 2: