about summary refs log tree commit diff stats
diff options
context:
space:
mode:
-rw-r--r--arch/mips/kernel/semaphore.c4
-rw-r--r--include/asm-mips/atomic.h20
-rw-r--r--include/asm-mips/bitops.h26
-rw-r--r--include/asm-mips/system.h10
4 files changed, 29 insertions(+), 31 deletions(-)
diff --git a/arch/mips/kernel/semaphore.c b/arch/mips/kernel/semaphore.c
index dbb145ee00a7..1265358cdca1 100644
--- a/arch/mips/kernel/semaphore.c
+++ b/arch/mips/kernel/semaphore.c
@@ -42,7 +42,7 @@ static inline int __sem_update_count(struct semaphore *sem, int incr)
42 42
43 if (cpu_has_llsc && R10000_LLSC_WAR) { 43 if (cpu_has_llsc && R10000_LLSC_WAR) {
44 __asm__ __volatile__( 44 __asm__ __volatile__(
45 " .set mips2 \n" 45 " .set mips3 \n"
46 "1: ll %0, %2 # __sem_update_count \n" 46 "1: ll %0, %2 # __sem_update_count \n"
47 " sra %1, %0, 31 \n" 47 " sra %1, %0, 31 \n"
48 " not %1 \n" 48 " not %1 \n"
@@ -55,7 +55,7 @@ static inline int __sem_update_count(struct semaphore *sem, int incr)
55 : "r" (incr), "m" (sem->count)); 55 : "r" (incr), "m" (sem->count));
56 } else if (cpu_has_llsc) { 56 } else if (cpu_has_llsc) {
57 __asm__ __volatile__( 57 __asm__ __volatile__(
58 " .set mips2 \n" 58 " .set mips3 \n"
59 "1: ll %0, %2 # __sem_update_count \n" 59 "1: ll %0, %2 # __sem_update_count \n"
60 " sra %1, %0, 31 \n" 60 " sra %1, %0, 31 \n"
61 " not %1 \n" 61 " not %1 \n"
diff --git a/include/asm-mips/atomic.h b/include/asm-mips/atomic.h
index 80ea3fbd3ece..6202eb8a14b7 100644
--- a/include/asm-mips/atomic.h
+++ b/include/asm-mips/atomic.h
@@ -62,7 +62,7 @@ static __inline__ void atomic_add(int i, atomic_t * v)
62 unsigned long temp; 62 unsigned long temp;
63 63
64 __asm__ __volatile__( 64 __asm__ __volatile__(
65 " .set mips2 \n" 65 " .set mips3 \n"
66 "1: ll %0, %1 # atomic_add \n" 66 "1: ll %0, %1 # atomic_add \n"
67 " addu %0, %2 \n" 67 " addu %0, %2 \n"
68 " sc %0, %1 \n" 68 " sc %0, %1 \n"
@@ -74,7 +74,7 @@ static __inline__ void atomic_add(int i, atomic_t * v)
74 unsigned long temp; 74 unsigned long temp;
75 75
76 __asm__ __volatile__( 76 __asm__ __volatile__(
77 " .set mips2 \n" 77 " .set mips3 \n"
78 "1: ll %0, %1 # atomic_add \n" 78 "1: ll %0, %1 # atomic_add \n"
79 " addu %0, %2 \n" 79 " addu %0, %2 \n"
80 " sc %0, %1 \n" 80 " sc %0, %1 \n"
@@ -104,7 +104,7 @@ static __inline__ void atomic_sub(int i, atomic_t * v)
104 unsigned long temp; 104 unsigned long temp;
105 105
106 __asm__ __volatile__( 106 __asm__ __volatile__(
107 " .set mips2 \n" 107 " .set mips3 \n"
108 "1: ll %0, %1 # atomic_sub \n" 108 "1: ll %0, %1 # atomic_sub \n"
109 " subu %0, %2 \n" 109 " subu %0, %2 \n"
110 " sc %0, %1 \n" 110 " sc %0, %1 \n"
@@ -116,7 +116,7 @@ static __inline__ void atomic_sub(int i, atomic_t * v)
116 unsigned long temp; 116 unsigned long temp;
117 117
118 __asm__ __volatile__( 118 __asm__ __volatile__(
119 " .set mips2 \n" 119 " .set mips3 \n"
120 "1: ll %0, %1 # atomic_sub \n" 120 "1: ll %0, %1 # atomic_sub \n"
121 " subu %0, %2 \n" 121 " subu %0, %2 \n"
122 " sc %0, %1 \n" 122 " sc %0, %1 \n"
@@ -144,7 +144,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
144 unsigned long temp; 144 unsigned long temp;
145 145
146 __asm__ __volatile__( 146 __asm__ __volatile__(
147 " .set mips2 \n" 147 " .set mips3 \n"
148 "1: ll %1, %2 # atomic_add_return \n" 148 "1: ll %1, %2 # atomic_add_return \n"
149 " addu %0, %1, %3 \n" 149 " addu %0, %1, %3 \n"
150 " sc %0, %2 \n" 150 " sc %0, %2 \n"
@@ -159,7 +159,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
159 unsigned long temp; 159 unsigned long temp;
160 160
161 __asm__ __volatile__( 161 __asm__ __volatile__(
162 " .set mips2 \n" 162 " .set mips3 \n"
163 "1: ll %1, %2 # atomic_add_return \n" 163 "1: ll %1, %2 # atomic_add_return \n"
164 " addu %0, %1, %3 \n" 164 " addu %0, %1, %3 \n"
165 " sc %0, %2 \n" 165 " sc %0, %2 \n"
@@ -191,7 +191,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
191 unsigned long temp; 191 unsigned long temp;
192 192
193 __asm__ __volatile__( 193 __asm__ __volatile__(
194 " .set mips2 \n" 194 " .set mips3 \n"
195 "1: ll %1, %2 # atomic_sub_return \n" 195 "1: ll %1, %2 # atomic_sub_return \n"
196 " subu %0, %1, %3 \n" 196 " subu %0, %1, %3 \n"
197 " sc %0, %2 \n" 197 " sc %0, %2 \n"
@@ -206,7 +206,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
206 unsigned long temp; 206 unsigned long temp;
207 207
208 __asm__ __volatile__( 208 __asm__ __volatile__(
209 " .set mips2 \n" 209 " .set mips3 \n"
210 "1: ll %1, %2 # atomic_sub_return \n" 210 "1: ll %1, %2 # atomic_sub_return \n"
211 " subu %0, %1, %3 \n" 211 " subu %0, %1, %3 \n"
212 " sc %0, %2 \n" 212 " sc %0, %2 \n"
@@ -245,7 +245,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
245 unsigned long temp; 245 unsigned long temp;
246 246
247 __asm__ __volatile__( 247 __asm__ __volatile__(
248 " .set mips2 \n" 248 " .set mips3 \n"
249 "1: ll %1, %2 # atomic_sub_if_positive\n" 249 "1: ll %1, %2 # atomic_sub_if_positive\n"
250 " subu %0, %1, %3 \n" 250 " subu %0, %1, %3 \n"
251 " bltz %0, 1f \n" 251 " bltz %0, 1f \n"
@@ -261,7 +261,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
261 unsigned long temp; 261 unsigned long temp;
262 262
263 __asm__ __volatile__( 263 __asm__ __volatile__(
264 " .set mips2 \n" 264 " .set mips3 \n"
265 "1: ll %1, %2 # atomic_sub_if_positive\n" 265 "1: ll %1, %2 # atomic_sub_if_positive\n"
266 " subu %0, %1, %3 \n" 266 " subu %0, %1, %3 \n"
267 " bltz %0, 1f \n" 267 " bltz %0, 1f \n"
diff --git a/include/asm-mips/bitops.h b/include/asm-mips/bitops.h
index 1dc35879b362..d9c02bd0b18e 100644
--- a/include/asm-mips/bitops.h
+++ b/include/asm-mips/bitops.h
@@ -20,14 +20,12 @@
20#define SZLONG_MASK 31UL 20#define SZLONG_MASK 31UL
21#define __LL "ll " 21#define __LL "ll "
22#define __SC "sc " 22#define __SC "sc "
23#define __SET_MIPS ".set mips2 "
24#define cpu_to_lelongp(x) cpu_to_le32p((__u32 *) (x)) 23#define cpu_to_lelongp(x) cpu_to_le32p((__u32 *) (x))
25#elif (_MIPS_SZLONG == 64) 24#elif (_MIPS_SZLONG == 64)
26#define SZLONG_LOG 6 25#define SZLONG_LOG 6
27#define SZLONG_MASK 63UL 26#define SZLONG_MASK 63UL
28#define __LL "lld " 27#define __LL "lld "
29#define __SC "scd " 28#define __SC "scd "
30#define __SET_MIPS ".set mips3 "
31#define cpu_to_lelongp(x) cpu_to_le64p((__u64 *) (x)) 29#define cpu_to_lelongp(x) cpu_to_le64p((__u64 *) (x))
32#endif 30#endif
33 31
@@ -74,7 +72,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
74 72
75 if (cpu_has_llsc && R10000_LLSC_WAR) { 73 if (cpu_has_llsc && R10000_LLSC_WAR) {
76 __asm__ __volatile__( 74 __asm__ __volatile__(
77 " " __SET_MIPS " \n" 75 " .set mips3 \n"
78 "1: " __LL "%0, %1 # set_bit \n" 76 "1: " __LL "%0, %1 # set_bit \n"
79 " or %0, %2 \n" 77 " or %0, %2 \n"
80 " " __SC "%0, %1 \n" 78 " " __SC "%0, %1 \n"
@@ -84,7 +82,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
84 : "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m)); 82 : "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m));
85 } else if (cpu_has_llsc) { 83 } else if (cpu_has_llsc) {
86 __asm__ __volatile__( 84 __asm__ __volatile__(
87 " " __SET_MIPS " \n" 85 " .set mips3 \n"
88 "1: " __LL "%0, %1 # set_bit \n" 86 "1: " __LL "%0, %1 # set_bit \n"
89 " or %0, %2 \n" 87 " or %0, %2 \n"
90 " " __SC "%0, %1 \n" 88 " " __SC "%0, %1 \n"
@@ -138,7 +136,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
138 136
139 if (cpu_has_llsc && R10000_LLSC_WAR) { 137 if (cpu_has_llsc && R10000_LLSC_WAR) {
140 __asm__ __volatile__( 138 __asm__ __volatile__(
141 " " __SET_MIPS " \n" 139 " .set mips3 \n"
142 "1: " __LL "%0, %1 # clear_bit \n" 140 "1: " __LL "%0, %1 # clear_bit \n"
143 " and %0, %2 \n" 141 " and %0, %2 \n"
144 " " __SC "%0, %1 \n" 142 " " __SC "%0, %1 \n"
@@ -148,7 +146,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
148 : "ir" (~(1UL << (nr & SZLONG_MASK))), "m" (*m)); 146 : "ir" (~(1UL << (nr & SZLONG_MASK))), "m" (*m));
149 } else if (cpu_has_llsc) { 147 } else if (cpu_has_llsc) {
150 __asm__ __volatile__( 148 __asm__ __volatile__(
151 " " __SET_MIPS " \n" 149 " .set mips3 \n"
152 "1: " __LL "%0, %1 # clear_bit \n" 150 "1: " __LL "%0, %1 # clear_bit \n"
153 " and %0, %2 \n" 151 " and %0, %2 \n"
154 " " __SC "%0, %1 \n" 152 " " __SC "%0, %1 \n"
@@ -201,7 +199,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
201 unsigned long temp; 199 unsigned long temp;
202 200
203 __asm__ __volatile__( 201 __asm__ __volatile__(
204 " " __SET_MIPS " \n" 202 " .set mips3 \n"
205 "1: " __LL "%0, %1 # change_bit \n" 203 "1: " __LL "%0, %1 # change_bit \n"
206 " xor %0, %2 \n" 204 " xor %0, %2 \n"
207 " " __SC "%0, %1 \n" 205 " " __SC "%0, %1 \n"
@@ -214,7 +212,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
214 unsigned long temp; 212 unsigned long temp;
215 213
216 __asm__ __volatile__( 214 __asm__ __volatile__(
217 " " __SET_MIPS " \n" 215 " .set mips3 \n"
218 "1: " __LL "%0, %1 # change_bit \n" 216 "1: " __LL "%0, %1 # change_bit \n"
219 " xor %0, %2 \n" 217 " xor %0, %2 \n"
220 " " __SC "%0, %1 \n" 218 " " __SC "%0, %1 \n"
@@ -267,7 +265,7 @@ static inline int test_and_set_bit(unsigned long nr,
267 unsigned long temp, res; 265 unsigned long temp, res;
268 266
269 __asm__ __volatile__( 267 __asm__ __volatile__(
270 " " __SET_MIPS " \n" 268 " .set mips3 \n"
271 "1: " __LL "%0, %1 # test_and_set_bit \n" 269 "1: " __LL "%0, %1 # test_and_set_bit \n"
272 " or %2, %0, %3 \n" 270 " or %2, %0, %3 \n"
273 " " __SC "%2, %1 \n" 271 " " __SC "%2, %1 \n"
@@ -289,7 +287,7 @@ static inline int test_and_set_bit(unsigned long nr,
289 __asm__ __volatile__( 287 __asm__ __volatile__(
290 " .set push \n" 288 " .set push \n"
291 " .set noreorder \n" 289 " .set noreorder \n"
292 " " __SET_MIPS " \n" 290 " .set mips3 \n"
293 "1: " __LL "%0, %1 # test_and_set_bit \n" 291 "1: " __LL "%0, %1 # test_and_set_bit \n"
294 " or %2, %0, %3 \n" 292 " or %2, %0, %3 \n"
295 " " __SC "%2, %1 \n" 293 " " __SC "%2, %1 \n"
@@ -361,7 +359,7 @@ static inline int test_and_clear_bit(unsigned long nr,
361 unsigned long temp, res; 359 unsigned long temp, res;
362 360
363 __asm__ __volatile__( 361 __asm__ __volatile__(
364 " " __SET_MIPS " \n" 362 " .set mips3 \n"
365 "1: " __LL "%0, %1 # test_and_clear_bit \n" 363 "1: " __LL "%0, %1 # test_and_clear_bit \n"
366 " or %2, %0, %3 \n" 364 " or %2, %0, %3 \n"
367 " xor %2, %3 \n" 365 " xor %2, %3 \n"
@@ -384,7 +382,7 @@ static inline int test_and_clear_bit(unsigned long nr,
384 __asm__ __volatile__( 382 __asm__ __volatile__(
385 " .set push \n" 383 " .set push \n"
386 " .set noreorder \n" 384 " .set noreorder \n"
387 " " __SET_MIPS " \n" 385 " .set mips3 \n"
388 "1: " __LL "%0, %1 # test_and_clear_bit \n" 386 "1: " __LL "%0, %1 # test_and_clear_bit \n"
389 " or %2, %0, %3 \n" 387 " or %2, %0, %3 \n"
390 " xor %2, %3 \n" 388 " xor %2, %3 \n"
@@ -457,7 +455,7 @@ static inline int test_and_change_bit(unsigned long nr,
457 unsigned long temp, res; 455 unsigned long temp, res;
458 456
459 __asm__ __volatile__( 457 __asm__ __volatile__(
460 " " __SET_MIPS " \n" 458 " .set mips3 \n"
461 "1: " __LL "%0, %1 # test_and_change_bit \n" 459 "1: " __LL "%0, %1 # test_and_change_bit \n"
462 " xor %2, %0, %3 \n" 460 " xor %2, %0, %3 \n"
463 " " __SC "%2, %1 \n" 461 " " __SC "%2, %1 \n"
@@ -479,7 +477,7 @@ static inline int test_and_change_bit(unsigned long nr,
479 __asm__ __volatile__( 477 __asm__ __volatile__(
480 " .set push \n" 478 " .set push \n"
481 " .set noreorder \n" 479 " .set noreorder \n"
482 " " __SET_MIPS " \n" 480 " .set mips3 \n"
483 "1: " __LL "%0, %1 # test_and_change_bit \n" 481 "1: " __LL "%0, %1 # test_and_change_bit \n"
484 " xor %2, %0, %3 \n" 482 " xor %2, %0, %3 \n"
485 " " __SC "\t%2, %1 \n" 483 " " __SC "\t%2, %1 \n"
diff --git a/include/asm-mips/system.h b/include/asm-mips/system.h
index b9c24bbad9e2..30c7ec1675ca 100644
--- a/include/asm-mips/system.h
+++ b/include/asm-mips/system.h
@@ -176,7 +176,7 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
176 unsigned long dummy; 176 unsigned long dummy;
177 177
178 __asm__ __volatile__( 178 __asm__ __volatile__(
179 " .set mips2 \n" 179 " .set mips3 \n"
180 "1: ll %0, %3 # xchg_u32 \n" 180 "1: ll %0, %3 # xchg_u32 \n"
181 " move %2, %z4 \n" 181 " move %2, %z4 \n"
182 " sc %2, %1 \n" 182 " sc %2, %1 \n"
@@ -193,7 +193,7 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
193 unsigned long dummy; 193 unsigned long dummy;
194 194
195 __asm__ __volatile__( 195 __asm__ __volatile__(
196 " .set mips2 \n" 196 " .set mips3 \n"
197 "1: ll %0, %3 # xchg_u32 \n" 197 "1: ll %0, %3 # xchg_u32 \n"
198 " move %2, %z4 \n" 198 " move %2, %z4 \n"
199 " sc %2, %1 \n" 199 " sc %2, %1 \n"
@@ -301,7 +301,7 @@ static inline unsigned long __cmpxchg_u32(volatile int * m, unsigned long old,
301 __asm__ __volatile__( 301 __asm__ __volatile__(
302 " .set push \n" 302 " .set push \n"
303 " .set noat \n" 303 " .set noat \n"
304 " .set mips2 \n" 304 " .set mips3 \n"
305 "1: ll %0, %2 # __cmpxchg_u32 \n" 305 "1: ll %0, %2 # __cmpxchg_u32 \n"
306 " bne %0, %z3, 2f \n" 306 " bne %0, %z3, 2f \n"
307 " move $1, %z4 \n" 307 " move $1, %z4 \n"
@@ -320,7 +320,7 @@ static inline unsigned long __cmpxchg_u32(volatile int * m, unsigned long old,
320 __asm__ __volatile__( 320 __asm__ __volatile__(
321 " .set push \n" 321 " .set push \n"
322 " .set noat \n" 322 " .set noat \n"
323 " .set mips2 \n" 323 " .set mips3 \n"
324 "1: ll %0, %2 # __cmpxchg_u32 \n" 324 "1: ll %0, %2 # __cmpxchg_u32 \n"
325 " bne %0, %z3, 2f \n" 325 " bne %0, %z3, 2f \n"
326 " move $1, %z4 \n" 326 " move $1, %z4 \n"
@@ -376,7 +376,7 @@ static inline unsigned long __cmpxchg_u64(volatile int * m, unsigned long old,
376 __asm__ __volatile__( 376 __asm__ __volatile__(
377 " .set push \n" 377 " .set push \n"
378 " .set noat \n" 378 " .set noat \n"
379 " .set mips2 \n" 379 " .set mips3 \n"
380 "1: lld %0, %2 # __cmpxchg_u64 \n" 380 "1: lld %0, %2 # __cmpxchg_u64 \n"
381 " bne %0, %z3, 2f \n" 381 " bne %0, %z3, 2f \n"
382 " move $1, %z4 \n" 382 " move $1, %z4 \n"