author	David S. Miller <davem@sunset.davemloft.net>	2007-10-15 19:41:44 -0400
committer	David S. Miller <davem@sunset.davemloft.net>	2007-10-17 19:24:55 -0400
commit	24f287e412ae90de8d281543c8b1043b6ed6c019
tree	eb69803d187d35fd9e90c1428952c0ed5a0970c1 /arch/sparc64/lib/atomic.S
parent	d85714d81cc0408daddb68c10f7fd69eafe7c213
[SPARC64]: Implement atomic backoff.
When the CPU count is high and contention hits an atomic object, the
processors can synchronize with one another such that some CPUs are
continually knocked out and never complete the atomic update.
So implement an exponential backoff for the SMP case.
Signed-off-by: David S. Miller <davem@davemloft.net>
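
For illustration, a minimal C sketch of the retry-with-backoff scheme the
patch implements in assembly: on a failed compare-and-swap, spin for a
bounded, doubling number of iterations before retrying, so contending CPUs
fall out of lockstep. The function name atomic_add_backoff and the
BACKOFF_LIMIT value are illustrative, not the kernel's API, and GCC's
__sync_val_compare_and_swap builtin stands in for the SPARC cas instruction.

	#define BACKOFF_LIMIT	(4 * 1024)	/* cap on the spin count (illustrative) */

	static void atomic_add_backoff(int inc, volatile int *p)
	{
		unsigned long backoff = 1;

		for (;;) {
			int old = *p;

			/* cas: store old+inc only if *p still holds old; the
			 * builtin returns the value actually found, so
			 * equality means the update succeeded. */
			if (__sync_val_compare_and_swap(p, old, old + inc) == old)
				return;

			/* Lost the race: idle for `backoff` iterations, then
			 * double the delay (bounded) so contending CPUs
			 * desynchronize instead of colliding every retry. */
			for (unsigned long n = backoff; n != 0; n--)
				__asm__ __volatile__("" ::: "memory");
			if (backoff < BACKOFF_LIMIT)
				backoff <<= 1;
		}
	}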
Diffstat (limited to 'arch/sparc64/lib/atomic.S')
-rw-r--r--	arch/sparc64/lib/atomic.S	38	+++++++++++++++++++++++++++-----------
1 file changed, 27 insertions(+), 11 deletions(-)
diff --git a/arch/sparc64/lib/atomic.S b/arch/sparc64/lib/atomic.S
index 9633750167d..70ac4186f62 100644
--- a/arch/sparc64/lib/atomic.S
+++ b/arch/sparc64/lib/atomic.S
@@ -1,10 +1,10 @@
-/* $Id: atomic.S,v 1.4 2001/11/18 00:12:56 davem Exp $
- * atomic.S: These things are too big to do inline.
+/* atomic.S: These things are too big to do inline.
  *
- * Copyright (C) 1999 David S. Miller (davem@redhat.com)
+ * Copyright (C) 1999, 2007 David S. Miller (davem@davemloft.net)
  */
 
 #include <asm/asi.h>
+#include <asm/backoff.h>
 
 	.text
 
@@ -16,27 +16,31 @@
 	.globl	atomic_add
 	.type	atomic_add,#function
 atomic_add: /* %o0 = increment, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 1:	lduw	[%o1], %g1
 	add	%g1, %o0, %g7
 	cas	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%icc, 1b
+	bne,pn	%icc, 2f
 	 nop
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic_add, .-atomic_add
 
 	.globl	atomic_sub
 	.type	atomic_sub,#function
 atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 1:	lduw	[%o1], %g1
 	sub	%g1, %o0, %g7
 	cas	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%icc, 1b
+	bne,pn	%icc, 2f
 	 nop
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic_sub, .-atomic_sub
 
 /* On SMP we need to use memory barriers to ensure
@@ -60,89 +64,101 @@ atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
 	.globl	atomic_add_ret
 	.type	atomic_add_ret,#function
 atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 	ATOMIC_PRE_BARRIER
 1:	lduw	[%o1], %g1
 	add	%g1, %o0, %g7
 	cas	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%icc, 1b
+	bne,pn	%icc, 2f
 	 add	%g7, %o0, %g7
 	sra	%g7, 0, %o0
 	ATOMIC_POST_BARRIER
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic_add_ret, .-atomic_add_ret
 
 	.globl	atomic_sub_ret
 	.type	atomic_sub_ret,#function
 atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 	ATOMIC_PRE_BARRIER
 1:	lduw	[%o1], %g1
 	sub	%g1, %o0, %g7
 	cas	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%icc, 1b
+	bne,pn	%icc, 2f
 	 sub	%g7, %o0, %g7
 	sra	%g7, 0, %o0
 	ATOMIC_POST_BARRIER
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic_sub_ret, .-atomic_sub_ret
 
 	.globl	atomic64_add
 	.type	atomic64_add,#function
 atomic64_add: /* %o0 = increment, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 1:	ldx	[%o1], %g1
 	add	%g1, %o0, %g7
 	casx	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 nop
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic64_add, .-atomic64_add
 
 	.globl	atomic64_sub
 	.type	atomic64_sub,#function
 atomic64_sub: /* %o0 = decrement, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 1:	ldx	[%o1], %g1
 	sub	%g1, %o0, %g7
 	casx	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 nop
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic64_sub, .-atomic64_sub
 
 	.globl	atomic64_add_ret
 	.type	atomic64_add_ret,#function
 atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 	ATOMIC_PRE_BARRIER
 1:	ldx	[%o1], %g1
 	add	%g1, %o0, %g7
 	casx	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 add	%g7, %o0, %g7
 	mov	%g7, %o0
 	ATOMIC_POST_BARRIER
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic64_add_ret, .-atomic64_add_ret
 
 	.globl	atomic64_sub_ret
 	.type	atomic64_sub_ret,#function
 atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 	ATOMIC_PRE_BARRIER
 1:	ldx	[%o1], %g1
 	sub	%g1, %o0, %g7
 	casx	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 sub	%g7, %o0, %g7
 	mov	%g7, %o0
 	ATOMIC_POST_BARRIER
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic64_sub_ret, .-atomic64_sub_ret
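
This view is limited to atomic.S, so the new <asm/backoff.h> header itself
is not shown. As a hedged sketch of how the two macros could plausibly
expand: BACKOFF_SETUP seeds a counter register with 1, and BACKOFF_SPIN
busy-waits that many iterations, doubles the counter up to a cap, and
branches back to the CAS loop. Register roles follow the call sites above
(%o2 as the counter, %o3 as scratch); the 4*1024 cap, the local label 88,
and the exact instruction sequence are assumptions for illustration, not
the header's verbatim contents.

	/* Sketch of <asm/backoff.h> (illustrative, not verbatim). */
	#ifdef CONFIG_SMP

	#define BACKOFF_LIMIT	(4 * 1024)	/* assumed cap on the spin count */

	/* Seed the per-call backoff counter with 1. */
	#define BACKOFF_SETUP(reg)		\
		mov	1, reg

	/* Spin `reg` iterations, double `reg` while below the cap, then
	 * branch back to the CAS loop at `label`. */
	#define BACKOFF_SPIN(reg, tmp, label)	\
		mov	reg, tmp;		\
	88:	brnz,pt	tmp, 88b;		\
		 sub	tmp, 1, tmp;		\
		cmp	reg, BACKOFF_LIMIT;	\
		bge,pt	%xcc, label;		\
		 nop;				\
		ba,pt	%xcc, label;		\
		 sllx	reg, 1, reg;

	#else

	/* UP build: no cross-CPU contention, so setup is empty and the
	 * "spin" degenerates to an unconditional retry of the CAS loop. */
	#define BACKOFF_SETUP(reg)
	#define BACKOFF_SPIN(reg, tmp, label)	\
		ba,pt	%xcc, label;		\
		 nop;

	#endif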