author	David S. Miller <davem@sunset.davemloft.net>	2007-10-15 19:41:44 -0400
committer	David S. Miller <davem@sunset.davemloft.net>	2007-10-17 19:24:55 -0400
commit	24f287e412ae90de8d281543c8b1043b6ed6c019 (patch)
tree	eb69803d187d35fd9e90c1428952c0ed5a0970c1 /arch/sparc64/lib/atomic.S
parent	d85714d81cc0408daddb68c10f7fd69eafe7c213 (diff)
[SPARC64]: Implement atomic backoff.
When the cpu count is high and contention hits an atomic object, the
processors can synchronize such that some cpus continually get knocked
out and cannot complete the atomic update.

So implement an exponential backoff when SMP.

Signed-off-by: David S. Miller <davem@davemloft.net>
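For readers who do not read SPARC assembly, the patch implements the classic compare-and-swap retry loop with exponential backoff. The C sketch below is purely illustrative and is not part of this patch: the function name atomic_add_backoff, the BACKOFF_LIMIT value of 4096, and the use of C11 <stdatomic.h> are assumptions chosen for readability; the kernel code does the equivalent with the cas/casx instructions and the BACKOFF_SETUP/BACKOFF_SPIN macros from asm/backoff.h.

/* Illustrative C sketch (assumed names, not kernel code): a CAS retry
 * loop that backs off exponentially when the CAS loses a race. */
#include <stdatomic.h>

#define BACKOFF_LIMIT 4096	/* assumed cap on the spin count */

static void atomic_add_backoff(atomic_int *v, int inc)
{
	unsigned int backoff = 1;

	for (;;) {
		int old = atomic_load_explicit(v, memory_order_relaxed);

		/* Equivalent of cas/casx: store old+inc only if *v still
		 * holds the value we just read. */
		if (atomic_compare_exchange_weak(v, &old, old + inc))
			return;

		/* Lost the race: spin for a while before retrying, and
		 * double the delay (up to a limit) so contending CPUs fall
		 * out of lock-step instead of colliding forever. */
		for (volatile unsigned int i = 0; i < backoff; i++)
			;	/* real code would use a cpu_relax()-style pause */

		if (backoff < BACKOFF_LIMIT)
			backoff <<= 1;
	}
}

In the diff itself, BACKOFF_SETUP(%o2) seeds the per-call backoff counter, a failed cas/casx now branches forward to the new 2: label instead of retrying immediately, and BACKOFF_SPIN(%o2, %o3, 1b) delays before jumping back to the 1: retry label (presumably doubling the counter each time, given the "exponential" in the changelog).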
Diffstat (limited to 'arch/sparc64/lib/atomic.S')
-rw-r--r--	arch/sparc64/lib/atomic.S	38
1 file changed, 27 insertions(+), 11 deletions(-)
diff --git a/arch/sparc64/lib/atomic.S b/arch/sparc64/lib/atomic.S
index 9633750167d..70ac4186f62 100644
--- a/arch/sparc64/lib/atomic.S
+++ b/arch/sparc64/lib/atomic.S
@@ -1,10 +1,10 @@
-/* $Id: atomic.S,v 1.4 2001/11/18 00:12:56 davem Exp $
- * atomic.S: These things are too big to do inline.
+/* atomic.S: These things are too big to do inline.
  *
- * Copyright (C) 1999 David S. Miller (davem@redhat.com)
+ * Copyright (C) 1999, 2007 David S. Miller (davem@davemloft.net)
  */
 
 #include <asm/asi.h>
+#include <asm/backoff.h>
 
 	.text
 
@@ -16,27 +16,31 @@
 	.globl	atomic_add
 	.type	atomic_add,#function
 atomic_add: /* %o0 = increment, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 1:	lduw	[%o1], %g1
 	add	%g1, %o0, %g7
 	cas	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%icc, 1b
+	bne,pn	%icc, 2f
 	 nop
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic_add, .-atomic_add
 
 	.globl	atomic_sub
 	.type	atomic_sub,#function
 atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 1:	lduw	[%o1], %g1
 	sub	%g1, %o0, %g7
 	cas	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%icc, 1b
+	bne,pn	%icc, 2f
 	 nop
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic_sub, .-atomic_sub
 
 	/* On SMP we need to use memory barriers to ensure
@@ -60,89 +64,101 @@ atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
 	.globl	atomic_add_ret
 	.type	atomic_add_ret,#function
 atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 	ATOMIC_PRE_BARRIER
 1:	lduw	[%o1], %g1
 	add	%g1, %o0, %g7
 	cas	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%icc, 1b
+	bne,pn	%icc, 2f
 	 add	%g7, %o0, %g7
 	sra	%g7, 0, %o0
 	ATOMIC_POST_BARRIER
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic_add_ret, .-atomic_add_ret
 
 	.globl	atomic_sub_ret
 	.type	atomic_sub_ret,#function
 atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 	ATOMIC_PRE_BARRIER
 1:	lduw	[%o1], %g1
 	sub	%g1, %o0, %g7
 	cas	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%icc, 1b
+	bne,pn	%icc, 2f
 	 sub	%g7, %o0, %g7
 	sra	%g7, 0, %o0
 	ATOMIC_POST_BARRIER
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic_sub_ret, .-atomic_sub_ret
 
 	.globl	atomic64_add
 	.type	atomic64_add,#function
 atomic64_add: /* %o0 = increment, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 1:	ldx	[%o1], %g1
 	add	%g1, %o0, %g7
 	casx	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 nop
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic64_add, .-atomic64_add
 
 	.globl	atomic64_sub
 	.type	atomic64_sub,#function
 atomic64_sub: /* %o0 = decrement, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 1:	ldx	[%o1], %g1
 	sub	%g1, %o0, %g7
 	casx	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 nop
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic64_sub, .-atomic64_sub
 
 	.globl	atomic64_add_ret
 	.type	atomic64_add_ret,#function
 atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 	ATOMIC_PRE_BARRIER
 1:	ldx	[%o1], %g1
 	add	%g1, %o0, %g7
 	casx	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 add	%g7, %o0, %g7
 	mov	%g7, %o0
 	ATOMIC_POST_BARRIER
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic64_add_ret, .-atomic64_add_ret
 
 	.globl	atomic64_sub_ret
 	.type	atomic64_sub_ret,#function
 atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 	ATOMIC_PRE_BARRIER
 1:	ldx	[%o1], %g1
 	sub	%g1, %o0, %g7
 	casx	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 sub	%g7, %o0, %g7
 	mov	%g7, %o0
 	ATOMIC_POST_BARRIER
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic64_sub_ret, .-atomic64_sub_ret