Diffstat (limited to 'arch/hexagon')
-rw-r--r--   arch/hexagon/include/asm/atomic.h   68
1 file changed, 37 insertions(+), 31 deletions(-)
diff --git a/arch/hexagon/include/asm/atomic.h b/arch/hexagon/include/asm/atomic.h
index de916b11bff5..93d07025f183 100644
--- a/arch/hexagon/include/asm/atomic.h
+++ b/arch/hexagon/include/asm/atomic.h
@@ -94,41 +94,47 @@ static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
         return __oldval;
 }
 
-static inline int atomic_add_return(int i, atomic_t *v)
-{
-        int output;
-
-        __asm__ __volatile__ (
-                "1: %0 = memw_locked(%1);\n"
-                " %0 = add(%0,%2);\n"
-                " memw_locked(%1,P3)=%0;\n"
-                " if !P3 jump 1b;\n"
-                : "=&r" (output)
-                : "r" (&v->counter), "r" (i)
-                : "memory", "p3"
-        );
-        return output;
-
+#define ATOMIC_OP(op) \
+static inline void atomic_##op(int i, atomic_t *v) \
+{ \
+        int output; \
+\
+        __asm__ __volatile__ ( \
+                "1: %0 = memw_locked(%1);\n" \
+                " %0 = "#op "(%0,%2);\n" \
+                " memw_locked(%1,P3)=%0;\n" \
+                " if !P3 jump 1b;\n" \
+                : "=&r" (output) \
+                : "r" (&v->counter), "r" (i) \
+                : "memory", "p3" \
+        ); \
+} \
+
+#define ATOMIC_OP_RETURN(op) \
+static inline int atomic_##op##_return(int i, atomic_t *v) \
+{ \
+        int output; \
+\
+        __asm__ __volatile__ ( \
+                "1: %0 = memw_locked(%1);\n" \
+                " %0 = "#op "(%0,%2);\n" \
+                " memw_locked(%1,P3)=%0;\n" \
+                " if !P3 jump 1b;\n" \
+                : "=&r" (output) \
+                : "r" (&v->counter), "r" (i) \
+                : "memory", "p3" \
+        ); \
+        return output; \
 }
 
-#define atomic_add(i, v) atomic_add_return(i, (v))
+#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op)
 
-static inline int atomic_sub_return(int i, atomic_t *v)
-{
-        int output;
-        __asm__ __volatile__ (
-                "1: %0 = memw_locked(%1);\n"
-                " %0 = sub(%0,%2);\n"
-                " memw_locked(%1,P3)=%0\n"
-                " if !P3 jump 1b;\n"
-                : "=&r" (output)
-                : "r" (&v->counter), "r" (i)
-                : "memory", "p3"
-        );
-        return output;
-}
+ATOMIC_OPS(add)
+ATOMIC_OPS(sub)
+
+#undef ATOMIC_OPS
+#undef ATOMIC_OP_RETURN
+#undef ATOMIC_OP
 
 /**
  * __atomic_add_unless - add unless the number is a given value
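
For reference, the sketch below writes out by hand what ATOMIC_OPS(add) expands to after preprocessing, following the ATOMIC_OP()/ATOMIC_OP_RETURN() macros in the hunk above. It is illustrative only and not part of the patch; it assumes the surrounding header's atomic_t definition and the Hexagon memw_locked (LL/SC) semantics shown in the diff.

/* Hand-expanded sketch of ATOMIC_OPS(add); illustrative, not part of the patch. */

/* From ATOMIC_OP(add): performs the update, discards the result. */
static inline void atomic_add(int i, atomic_t *v)
{
        int output;

        __asm__ __volatile__ (
                "1: %0 = memw_locked(%1);\n"   /* load-locked the counter      */
                " %0 = add(%0,%2);\n"          /* apply the operation          */
                " memw_locked(%1,P3)=%0;\n"    /* store-conditional, P3 = ok   */
                " if !P3 jump 1b;\n"           /* retry if the store failed    */
                : "=&r" (output)
                : "r" (&v->counter), "r" (i)
                : "memory", "p3"
        );
}

/* From ATOMIC_OP_RETURN(add): same LL/SC loop, but returns the new value. */
static inline int atomic_add_return(int i, atomic_t *v)
{
        int output;

        __asm__ __volatile__ (
                "1: %0 = memw_locked(%1);\n"
                " %0 = add(%0,%2);\n"
                " memw_locked(%1,P3)=%0;\n"
                " if !P3 jump 1b;\n"
                : "=&r" (output)
                : "r" (&v->counter), "r" (i)
                : "memory", "p3"
        );
        return output;
}

Note the design change the patch makes: previously atomic_add()/atomic_sub() were wrapper macros around the *_return() functions, whereas the generated ATOMIC_OP() variants are standalone void functions, and the duplicated inline-asm loop now lives in a single macro parameterised by the instruction name.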