author     Peter Zijlstra <peterz@infradead.org>	2014-03-26 13:29:28 -0400
committer  Ingo Molnar <mingo@kernel.org>	2014-08-14 06:48:13 -0400
commit     4f3316c2b5fe2062c26c9b66915b5a5c80c60a5c
tree       1ed6ef20fc6b7aa7c6b33a4135dc664a0c2901fe /arch/sparc/lib
parent     c6470150dff9aff682063890c9b8eac71b695def
locking,arch,sparc: Fold atomic_ops
Many of the atomic op implementations are the same except for one
instruction; fold the lot into a few CPP macros and reduce LoC.
This also prepares for easy addition of new ops.
Signed-off-by: Peter Zijlstra <peterz@infradead.org>
Acked-by: David S. Miller <davem@davemloft.net>
Cc: Bjorn Helgaas <bhelgaas@google.com>
Cc: Kirill Tkhai <tkhai@yandex.ru>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Paul E. McKenney <paulmck@linux.vnet.ibm.com>
Cc: Sam Ravnborg <sam@ravnborg.org>
Cc: sparclinux@vger.kernel.org
Link: http://lkml.kernel.org/r/20140508135852.825281379@infradead.org
Signed-off-by: Ingo Molnar <mingo@kernel.org>
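
For a concrete picture of the folding pattern, here is a minimal userspace sketch (illustrative only, not the kernel code: the kernel's sparc32 variant takes a hashed spinlock via ATOMIC_HASH(v), while this sketch serializes everything through a single pthread mutex):

#include <pthread.h>
#include <stdio.h>

/* Userspace stand-in for the kernel's atomic_t. */
typedef struct { int counter; } atomic_t;

static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;

/*
 * One macro body, instantiated once per operator, replaces
 * several near-identical hand-written functions.
 */
#define ATOMIC_OP(op, c_op)					\
int atomic_##op##_return(int i, atomic_t *v)			\
{								\
	int ret;						\
	pthread_mutex_lock(&lock);				\
	ret = (v->counter c_op i);				\
	pthread_mutex_unlock(&lock);				\
	return ret;						\
}

ATOMIC_OP(add, +=)	/* defines atomic_add_return() */
ATOMIC_OP(sub, -=)	/* defines atomic_sub_return() */

#undef ATOMIC_OP

int main(void)
{
	atomic_t v = { .counter = 40 };
	printf("%d\n", atomic_add_return(2, &v));	/* 42 */
	printf("%d\n", atomic_sub_return(2, &v));	/* 40 */
	return 0;
}

Adding a new op is then a one-line instantiation, which is the "easy addition of new ops" the changelog anticipates.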
Diffstat (limited to 'arch/sparc/lib')
-rw-r--r--	arch/sparc/lib/atomic32.c	 29
-rw-r--r--	arch/sparc/lib/atomic_64.S	163
-rw-r--r--	arch/sparc/lib/ksyms.c	 25
3 files changed, 101 insertions(+), 116 deletions(-)
diff --git a/arch/sparc/lib/atomic32.c b/arch/sparc/lib/atomic32.c
index 1d32b54089aa..a7c418ac26af 100644
--- a/arch/sparc/lib/atomic32.c
+++ b/arch/sparc/lib/atomic32.c
@@ -27,18 +27,23 @@ static DEFINE_SPINLOCK(dummy);
 
 #endif /* SMP */
 
-int __atomic_add_return(int i, atomic_t *v)
-{
-	int ret;
-	unsigned long flags;
-	spin_lock_irqsave(ATOMIC_HASH(v), flags);
-
-	ret = (v->counter += i);
-
-	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
-	return ret;
-}
-EXPORT_SYMBOL(__atomic_add_return);
+#define ATOMIC_OP(op, c_op)						\
+int atomic_##op##_return(int i, atomic_t *v)				\
+{									\
+	int ret;							\
+	unsigned long flags;						\
+	spin_lock_irqsave(ATOMIC_HASH(v), flags);			\
+									\
+	ret = (v->counter c_op i);					\
+									\
+	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);			\
+	return ret;							\
+}									\
+EXPORT_SYMBOL(atomic_##op##_return);
+
+ATOMIC_OP(add, +=)
+
+#undef ATOMIC_OP
 
 int atomic_cmpxchg(atomic_t *v, int old, int new)
 {
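
For reference, expanding the single ATOMIC_OP(add, +=) instantiation above reproduces the deleted function body, with __atomic_add_return renamed to atomic_add_return (re-wrapped here; the preprocessor emits the expansion on one line):

int atomic_add_return(int i, atomic_t *v)
{
	int ret;
	unsigned long flags;
	spin_lock_irqsave(ATOMIC_HASH(v), flags);

	ret = (v->counter += i);

	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
	return ret;
}
EXPORT_SYMBOL(atomic_add_return);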
diff --git a/arch/sparc/lib/atomic_64.S b/arch/sparc/lib/atomic_64.S
index 85c233d0a340..96d70b4dbe77 100644
--- a/arch/sparc/lib/atomic_64.S
+++ b/arch/sparc/lib/atomic_64.S
@@ -14,109 +14,80 @@
  * memory barriers, and a second which returns
  * a value and does the barriers.
  */
-ENTRY(atomic_add) /* %o0 = increment, %o1 = atomic_ptr */
-	BACKOFF_SETUP(%o2)
-1:	lduw	[%o1], %g1
-	add	%g1, %o0, %g7
-	cas	[%o1], %g1, %g7
-	cmp	%g1, %g7
-	bne,pn	%icc, BACKOFF_LABEL(2f, 1b)
-	 nop
-	retl
-	 nop
-2:	BACKOFF_SPIN(%o2, %o3, 1b)
-ENDPROC(atomic_add)
 
-ENTRY(atomic_sub) /* %o0 = decrement, %o1 = atomic_ptr */
-	BACKOFF_SETUP(%o2)
-1:	lduw	[%o1], %g1
-	sub	%g1, %o0, %g7
-	cas	[%o1], %g1, %g7
-	cmp	%g1, %g7
-	bne,pn	%icc, BACKOFF_LABEL(2f, 1b)
-	 nop
-	retl
-	 nop
-2:	BACKOFF_SPIN(%o2, %o3, 1b)
-ENDPROC(atomic_sub)
+#define ATOMIC_OP(op)							\
+ENTRY(atomic_##op) /* %o0 = increment, %o1 = atomic_ptr */		\
+	BACKOFF_SETUP(%o2);						\
+1:	lduw	[%o1], %g1;						\
+	op	%g1, %o0, %g7;						\
+	cas	[%o1], %g1, %g7;					\
+	cmp	%g1, %g7;						\
+	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
+	 nop;								\
+	retl;								\
+	 nop;								\
+2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
+ENDPROC(atomic_##op);							\
 
-ENTRY(atomic_add_ret) /* %o0 = increment, %o1 = atomic_ptr */
-	BACKOFF_SETUP(%o2)
-1:	lduw	[%o1], %g1
-	add	%g1, %o0, %g7
-	cas	[%o1], %g1, %g7
-	cmp	%g1, %g7
-	bne,pn	%icc, BACKOFF_LABEL(2f, 1b)
-	 add	%g1, %o0, %g1
-	retl
-	 sra	%g1, 0, %o0
-2:	BACKOFF_SPIN(%o2, %o3, 1b)
-ENDPROC(atomic_add_ret)
+#define ATOMIC_OP_RETURN(op)						\
+ENTRY(atomic_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */	\
+	BACKOFF_SETUP(%o2);						\
+1:	lduw	[%o1], %g1;						\
+	op	%g1, %o0, %g7;						\
+	cas	[%o1], %g1, %g7;					\
+	cmp	%g1, %g7;						\
+	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
+	 add	%g1, %o0, %g1;						\
+	retl;								\
+	 sra	%g1, 0, %o0;						\
+2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
+ENDPROC(atomic_##op##_return);
 
-ENTRY(atomic_sub_ret) /* %o0 = decrement, %o1 = atomic_ptr */
-	BACKOFF_SETUP(%o2)
-1:	lduw	[%o1], %g1
-	sub	%g1, %o0, %g7
-	cas	[%o1], %g1, %g7
-	cmp	%g1, %g7
-	bne,pn	%icc, BACKOFF_LABEL(2f, 1b)
-	 sub	%g1, %o0, %g1
-	retl
-	 sra	%g1, 0, %o0
-2:	BACKOFF_SPIN(%o2, %o3, 1b)
-ENDPROC(atomic_sub_ret)
+#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op)
 
-ENTRY(atomic64_add) /* %o0 = increment, %o1 = atomic_ptr */
-	BACKOFF_SETUP(%o2)
-1:	ldx	[%o1], %g1
-	add	%g1, %o0, %g7
-	casx	[%o1], %g1, %g7
-	cmp	%g1, %g7
-	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
-	 nop
-	retl
-	 nop
-2:	BACKOFF_SPIN(%o2, %o3, 1b)
-ENDPROC(atomic64_add)
+ATOMIC_OPS(add)
+ATOMIC_OPS(sub)
 
-ENTRY(atomic64_sub) /* %o0 = decrement, %o1 = atomic_ptr */
-	BACKOFF_SETUP(%o2)
-1:	ldx	[%o1], %g1
-	sub	%g1, %o0, %g7
-	casx	[%o1], %g1, %g7
-	cmp	%g1, %g7
-	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
-	 nop
-	retl
-	 nop
-2:	BACKOFF_SPIN(%o2, %o3, 1b)
-ENDPROC(atomic64_sub)
+#undef ATOMIC_OPS
+#undef ATOMIC_OP_RETURN
+#undef ATOMIC_OP
 
-ENTRY(atomic64_add_ret) /* %o0 = increment, %o1 = atomic_ptr */
-	BACKOFF_SETUP(%o2)
-1:	ldx	[%o1], %g1
-	add	%g1, %o0, %g7
-	casx	[%o1], %g1, %g7
-	cmp	%g1, %g7
-	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
-	 nop
-	retl
-	 add	%g1, %o0, %o0
-2:	BACKOFF_SPIN(%o2, %o3, 1b)
-ENDPROC(atomic64_add_ret)
+#define ATOMIC64_OP(op)							\
+ENTRY(atomic64_##op) /* %o0 = increment, %o1 = atomic_ptr */		\
+	BACKOFF_SETUP(%o2);						\
+1:	ldx	[%o1], %g1;						\
+	op	%g1, %o0, %g7;						\
+	casx	[%o1], %g1, %g7;					\
+	cmp	%g1, %g7;						\
+	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
+	 nop;								\
+	retl;								\
+	 nop;								\
+2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
+ENDPROC(atomic64_##op);							\
 
-ENTRY(atomic64_sub_ret) /* %o0 = decrement, %o1 = atomic_ptr */
-	BACKOFF_SETUP(%o2)
-1:	ldx	[%o1], %g1
-	sub	%g1, %o0, %g7
-	casx	[%o1], %g1, %g7
-	cmp	%g1, %g7
-	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
-	 nop
-	retl
-	 sub	%g1, %o0, %o0
-2:	BACKOFF_SPIN(%o2, %o3, 1b)
-ENDPROC(atomic64_sub_ret)
+#define ATOMIC64_OP_RETURN(op)						\
+ENTRY(atomic64_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */	\
+	BACKOFF_SETUP(%o2);						\
+1:	ldx	[%o1], %g1;						\
+	op	%g1, %o0, %g7;						\
+	casx	[%o1], %g1, %g7;					\
+	cmp	%g1, %g7;						\
+	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
+	 nop;								\
+	retl;								\
+	 add	%g1, %o0, %o0;						\
+2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
+ENDPROC(atomic64_##op##_return);
+
+#define ATOMIC64_OPS(op) ATOMIC64_OP(op) ATOMIC64_OP_RETURN(op)
+
+ATOMIC64_OPS(add)
+ATOMIC64_OPS(sub)
+
+#undef ATOMIC64_OPS
+#undef ATOMIC64_OP_RETURN
+#undef ATOMIC64_OP
 
 ENTRY(atomic64_dec_if_positive) /* %o0 = atomic_ptr */
 	BACKOFF_SETUP(%o2)
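
Each ATOMIC_OPS(op) line expands both macros, so the two instantiations above regenerate all four 32-bit entry points (and the ATOMIC64_OPS() pair likewise for the 64-bit ones), with the old _ret names becoming _return. For example, the ATOMIC_OP(sub) half of ATOMIC_OPS(sub) expands to the same instruction sequence the deleted atomic_sub carried, with sub substituted for the op placeholder (again shown re-wrapped; note the macro's generic comment now says "increment" for every op):

ENTRY(atomic_sub) /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2);
1:	lduw	[%o1], %g1;
	sub	%g1, %o0, %g7;
	cas	[%o1], %g1, %g7;
	cmp	%g1, %g7;
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);
	 nop;
	retl;
	 nop;
2:	BACKOFF_SPIN(%o2, %o3, 1b);
ENDPROC(atomic_sub);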
diff --git a/arch/sparc/lib/ksyms.c b/arch/sparc/lib/ksyms.c
index 323335b9cd2b..1d649a95660c 100644
--- a/arch/sparc/lib/ksyms.c
+++ b/arch/sparc/lib/ksyms.c
@@ -99,14 +99,23 @@ EXPORT_SYMBOL(___copy_in_user);
 EXPORT_SYMBOL(__clear_user);
 
 /* Atomic counter implementation. */
-EXPORT_SYMBOL(atomic_add);
-EXPORT_SYMBOL(atomic_add_ret);
-EXPORT_SYMBOL(atomic_sub);
-EXPORT_SYMBOL(atomic_sub_ret);
-EXPORT_SYMBOL(atomic64_add);
-EXPORT_SYMBOL(atomic64_add_ret);
-EXPORT_SYMBOL(atomic64_sub);
-EXPORT_SYMBOL(atomic64_sub_ret);
+#define ATOMIC_OP(op)						\
+EXPORT_SYMBOL(atomic_##op);					\
+EXPORT_SYMBOL(atomic64_##op);
+
+#define ATOMIC_OP_RETURN(op)					\
+EXPORT_SYMBOL(atomic_##op##_return);				\
+EXPORT_SYMBOL(atomic64_##op##_return);
+
+#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op)
+
+ATOMIC_OPS(add)
+ATOMIC_OPS(sub)
+
+#undef ATOMIC_OPS
+#undef ATOMIC_OP_RETURN
+#undef ATOMIC_OP
+
 EXPORT_SYMBOL(atomic64_dec_if_positive);
 
 /* Atomic bit operations. */
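
Expanded, the ATOMIC_OPS(add) and ATOMIC_OPS(sub) instantiations above export the same eight symbols the deleted lines did, with the _ret names now spelled _return:

EXPORT_SYMBOL(atomic_add);
EXPORT_SYMBOL(atomic64_add);
EXPORT_SYMBOL(atomic_add_return);
EXPORT_SYMBOL(atomic64_add_return);
EXPORT_SYMBOL(atomic_sub);
EXPORT_SYMBOL(atomic64_sub);
EXPORT_SYMBOL(atomic_sub_return);
EXPORT_SYMBOL(atomic64_sub_return);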