author		David S. Miller <davem@sunset.davemloft.net>	2007-10-15 19:41:44 -0400
committer	David S. Miller <davem@sunset.davemloft.net>	2007-10-17 19:24:55 -0400
commit		24f287e412ae90de8d281543c8b1043b6ed6c019 (patch)
tree		eb69803d187d35fd9e90c1428952c0ed5a0970c1 /arch/sparc64
parent		d85714d81cc0408daddb68c10f7fd69eafe7c213 (diff)
[SPARC64]: Implement atomic backoff.

When the cpu count is high and contention hits an atomic object, the
processors can synchronize such that some cpus continually get knocked
out and cannot complete the atomic update.

So implement an exponential backoff when SMP.

Signed-off-by: David S. Miller <davem@davemloft.net>
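[Annotation: BACKOFF_SETUP and BACKOFF_SPIN come from the new <asm/backoff.h>
header, which this commit introduces but which is not shown in this diff. The
following is a rough sketch of what such macros might look like, inferred
purely from the call sites below; the 88: local label, the shift-based
doubling, and the BACKOFF_LIMIT cap are this note's assumptions, not the
verbatim header.]

/* Hypothetical sketch of <asm/backoff.h>, reconstructed from usage below. */
#define BACKOFF_LIMIT	(4 * 1024)

#ifdef CONFIG_SMP

#define BACKOFF_SETUP(reg)	\
	mov	1, reg		/* start with the shortest possible wait */

#define BACKOFF_SPIN(reg, tmp, label)	\
	mov	reg, tmp;		\
88:	brnz,pt	tmp, 88b;		\
	 sub	tmp, 1, tmp;		\
	cmp	reg, BACKOFF_LIMIT;	\
	bg,pn	%xcc, label;		\
	 nop;				\
	ba,pt	%xcc, label;		\
	 sllx	reg, 1, reg;	/* double the wait; the cmp above caps it */

#else /* UP: no cross-cpu contention, so retry immediately */

#define BACKOFF_SETUP(reg)
#define BACKOFF_SPIN(reg, tmp, label)	\
	ba,pt	%xcc, label;		\
	 nop;

#endif

[Each failed cas thus roughly doubles the wait before the next attempt, up
to a fixed cap, so contending cpus drift out of lock-step instead of
colliding on every retry.]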
Diffstat (limited to 'arch/sparc64')
-rw-r--r--	arch/sparc64/lib/atomic.S	38
-rw-r--r--	arch/sparc64/lib/bitops.S	30
2 files changed, 48 insertions(+), 20 deletions(-)
diff --git a/arch/sparc64/lib/atomic.S b/arch/sparc64/lib/atomic.S
index 9633750167d0..70ac4186f62b 100644
--- a/arch/sparc64/lib/atomic.S
+++ b/arch/sparc64/lib/atomic.S
@@ -1,10 +1,10 @@
-/* $Id: atomic.S,v 1.4 2001/11/18 00:12:56 davem Exp $
- * atomic.S: These things are too big to do inline.
+/* atomic.S: These things are too big to do inline.
  *
- * Copyright (C) 1999 David S. Miller (davem@redhat.com)
+ * Copyright (C) 1999, 2007 David S. Miller (davem@davemloft.net)
  */
 
 #include <asm/asi.h>
+#include <asm/backoff.h>
 
 	.text
 
@@ -16,27 +16,31 @@
 	.globl	atomic_add
 	.type	atomic_add,#function
 atomic_add: /* %o0 = increment, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 1:	lduw	[%o1], %g1
 	add	%g1, %o0, %g7
 	cas	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%icc, 1b
+	bne,pn	%icc, 2f
 	 nop
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic_add, .-atomic_add
 
 	.globl	atomic_sub
 	.type	atomic_sub,#function
 atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 1:	lduw	[%o1], %g1
 	sub	%g1, %o0, %g7
 	cas	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%icc, 1b
+	bne,pn	%icc, 2f
 	 nop
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic_sub, .-atomic_sub
 
 	/* On SMP we need to use memory barriers to ensure
@@ -60,89 +64,101 @@ atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
 	.globl	atomic_add_ret
 	.type	atomic_add_ret,#function
 atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 	ATOMIC_PRE_BARRIER
 1:	lduw	[%o1], %g1
 	add	%g1, %o0, %g7
 	cas	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%icc, 1b
+	bne,pn	%icc, 2f
 	 add	%g7, %o0, %g7
 	sra	%g7, 0, %o0
 	ATOMIC_POST_BARRIER
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic_add_ret, .-atomic_add_ret
 
 	.globl	atomic_sub_ret
 	.type	atomic_sub_ret,#function
 atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 	ATOMIC_PRE_BARRIER
 1:	lduw	[%o1], %g1
 	sub	%g1, %o0, %g7
 	cas	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%icc, 1b
+	bne,pn	%icc, 2f
 	 sub	%g7, %o0, %g7
 	sra	%g7, 0, %o0
 	ATOMIC_POST_BARRIER
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic_sub_ret, .-atomic_sub_ret
 
 	.globl	atomic64_add
 	.type	atomic64_add,#function
 atomic64_add: /* %o0 = increment, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 1:	ldx	[%o1], %g1
 	add	%g1, %o0, %g7
 	casx	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 nop
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic64_add, .-atomic64_add
 
 	.globl	atomic64_sub
 	.type	atomic64_sub,#function
 atomic64_sub: /* %o0 = decrement, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 1:	ldx	[%o1], %g1
 	sub	%g1, %o0, %g7
 	casx	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 nop
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic64_sub, .-atomic64_sub
 
 	.globl	atomic64_add_ret
 	.type	atomic64_add_ret,#function
 atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 	ATOMIC_PRE_BARRIER
 1:	ldx	[%o1], %g1
 	add	%g1, %o0, %g7
 	casx	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 add	%g7, %o0, %g7
 	mov	%g7, %o0
 	ATOMIC_POST_BARRIER
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic64_add_ret, .-atomic64_add_ret
 
 	.globl	atomic64_sub_ret
 	.type	atomic64_sub_ret,#function
 atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 	ATOMIC_PRE_BARRIER
 1:	ldx	[%o1], %g1
 	sub	%g1, %o0, %g7
 	casx	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 sub	%g7, %o0, %g7
 	mov	%g7, %o0
 	ATOMIC_POST_BARRIER
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic64_sub_ret, .-atomic64_sub_ret
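
[Annotation: every routine in atomic.S gets the same treatment, visible in
the hunks above: the cas failure branch now targets a local 2: label instead
of looping straight back to the load. For orientation, here is the
transformed atomic_add with editorial comments; the comments are this note's,
not the source's.]

atomic_add: /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)		! %o2 = backoff counter
1:	lduw	[%o1], %g1		! read current value
	add	%g1, %o0, %g7		! compute desired value
	cas	[%o1], %g1, %g7		! swap iff memory still holds %g1;
					! %g7 gets the prior contents
	cmp	%g1, %g7		! did another cpu win the race?
	bne,pn	%icc, 2f		! yes: back off instead of retrying hot
	 nop
	retl				! no: done
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)	! wait, grow %o2, retry at 1:

[Before this patch the bne looped directly back to 1b, so on large machines
the losers could retry in lock-step and starve each other indefinitely; the
spin at 2: breaks that synchrony.]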
diff --git a/arch/sparc64/lib/bitops.S b/arch/sparc64/lib/bitops.S
index 892431a82131..6b015a6eefb5 100644
--- a/arch/sparc64/lib/bitops.S
+++ b/arch/sparc64/lib/bitops.S
@@ -1,10 +1,10 @@
-/* $Id: bitops.S,v 1.3 2001/11/18 00:12:56 davem Exp $
- * bitops.S: Sparc64 atomic bit operations.
+/* bitops.S: Sparc64 atomic bit operations.
  *
- * Copyright (C) 2000 David S. Miller (davem@redhat.com)
+ * Copyright (C) 2000, 2007 David S. Miller (davem@davemloft.net)
  */
 
 #include <asm/asi.h>
+#include <asm/backoff.h>
 
 	.text
 
@@ -29,6 +29,7 @@
 	.globl	test_and_set_bit
 	.type	test_and_set_bit,#function
 test_and_set_bit:	/* %o0=nr, %o1=addr */
+	BACKOFF_SETUP(%o3)
 	BITOP_PRE_BARRIER
 	srlx	%o0, 6, %g1
 	mov	1, %o2
@@ -40,18 +41,20 @@ test_and_set_bit: /* %o0=nr, %o1=addr */
 	or	%g7, %o2, %g1
 	casx	[%o1], %g7, %g1
 	cmp	%g7, %g1
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 and	%g7, %o2, %g2
 	clr	%o0
 	movrne	%g2, 1, %o0
 	BITOP_POST_BARRIER
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o3, %o4, 1b)
 	.size	test_and_set_bit, .-test_and_set_bit
 
 	.globl	test_and_clear_bit
 	.type	test_and_clear_bit,#function
 test_and_clear_bit:	/* %o0=nr, %o1=addr */
+	BACKOFF_SETUP(%o3)
 	BITOP_PRE_BARRIER
 	srlx	%o0, 6, %g1
 	mov	1, %o2
@@ -63,18 +66,20 @@ test_and_clear_bit: /* %o0=nr, %o1=addr */
 	andn	%g7, %o2, %g1
 	casx	[%o1], %g7, %g1
 	cmp	%g7, %g1
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 and	%g7, %o2, %g2
 	clr	%o0
 	movrne	%g2, 1, %o0
 	BITOP_POST_BARRIER
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o3, %o4, 1b)
 	.size	test_and_clear_bit, .-test_and_clear_bit
 
 	.globl	test_and_change_bit
 	.type	test_and_change_bit,#function
 test_and_change_bit:	/* %o0=nr, %o1=addr */
+	BACKOFF_SETUP(%o3)
 	BITOP_PRE_BARRIER
 	srlx	%o0, 6, %g1
 	mov	1, %o2
@@ -86,18 +91,20 @@ test_and_change_bit: /* %o0=nr, %o1=addr */
 	xor	%g7, %o2, %g1
 	casx	[%o1], %g7, %g1
 	cmp	%g7, %g1
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 and	%g7, %o2, %g2
 	clr	%o0
 	movrne	%g2, 1, %o0
 	BITOP_POST_BARRIER
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o3, %o4, 1b)
 	.size	test_and_change_bit, .-test_and_change_bit
 
 	.globl	set_bit
 	.type	set_bit,#function
 set_bit:	/* %o0=nr, %o1=addr */
+	BACKOFF_SETUP(%o3)
 	srlx	%o0, 6, %g1
 	mov	1, %o2
 	sllx	%g1, 3, %g3
@@ -108,15 +115,17 @@ set_bit: /* %o0=nr, %o1=addr */
 	or	%g7, %o2, %g1
 	casx	[%o1], %g7, %g1
 	cmp	%g7, %g1
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 nop
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o3, %o4, 1b)
 	.size	set_bit, .-set_bit
 
 	.globl	clear_bit
 	.type	clear_bit,#function
 clear_bit:	/* %o0=nr, %o1=addr */
+	BACKOFF_SETUP(%o3)
 	srlx	%o0, 6, %g1
 	mov	1, %o2
 	sllx	%g1, 3, %g3
@@ -127,15 +136,17 @@ clear_bit: /* %o0=nr, %o1=addr */
 	andn	%g7, %o2, %g1
 	casx	[%o1], %g7, %g1
 	cmp	%g7, %g1
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 nop
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o3, %o4, 1b)
 	.size	clear_bit, .-clear_bit
 
 	.globl	change_bit
 	.type	change_bit,#function
 change_bit:	/* %o0=nr, %o1=addr */
+	BACKOFF_SETUP(%o3)
 	srlx	%o0, 6, %g1
 	mov	1, %o2
 	sllx	%g1, 3, %g3
@@ -146,8 +157,9 @@ change_bit: /* %o0=nr, %o1=addr */
 	xor	%g7, %o2, %g1
 	casx	[%o1], %g7, %g1
 	cmp	%g7, %g1
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 nop
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o3, %o4, 1b)
 	.size	change_bit, .-change_bit
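
[Annotation: the bitops take the same shape, with one wrinkle visible in the
hunks above: because %o2 already carries the bit mask, the backoff state
lives in %o3/%o4. Here is test_and_set_bit assembled from the fragments
shown; the comments, the elision marker, and the 1: load line are this
note's reconstruction, not verbatim source.]

test_and_set_bit:	/* %o0=nr, %o1=addr; returns old bit value */
	BACKOFF_SETUP(%o3)		! counter in %o3: %o2 holds the mask
	BITOP_PRE_BARRIER
	srlx	%o0, 6, %g1		! word index = nr / 64
	mov	1, %o2
	...				! (context the diff elides: form the
					!  mask in %o2 and the word address)
1:	ldx	[%o1], %g7		! assumed retry label; the diff does
					! not show this line
	or	%g7, %o2, %g1		! new = old | mask
	casx	[%o1], %g7, %g1		! %g1 gets the prior contents
	cmp	%g7, %g1
	bne,pn	%xcc, 2f		! lost the race: back off
	 and	%g7, %o2, %g2		! delay slot: old & mask
	clr	%o0
	movrne	%g2, 1, %o0		! return 1 iff bit was already set
	BITOP_POST_BARRIER
	retl
	 nop
2:	BACKOFF_SPIN(%o3, %o4, 1b)

[The delay-slot and executes whether or not the branch is taken, so %g2
holds old & mask once the casx finally succeeds, and movrne (move if
register non-zero) converts it to the 0/1 return value.]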