author	Maciej W. Rozycki <macro@linux-mips.org>	2005-06-14 13:35:03 -0400
committer	Ralf Baechle <ralf@linux-mips.org>	2005-10-29 14:31:22 -0400
commit	aac8aa7717a23a9bf8740dbfb59755b1d62f04bf (patch)
tree	cae373db64607dafc496827c0d2f3b67b91d880f /include/asm-mips/system.h
parent	fded2e508a1d3c26ab477ab3b98f13274d4359ba (diff)
Enable a suitable ISA for the assembler around ll/sc so that code
builds even for processors that don't support the instructions.
Plus minor formatting fixes.

Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
Diffstat (limited to 'include/asm-mips/system.h')
-rw-r--r--	include/asm-mips/system.h	24
1 file changed, 20 insertions(+), 4 deletions(-)
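The pattern the patch applies, in isolation: each ll/sc sequence gets an
assembler ISA override so the file still assembles when the compiler's
base architecture (say, -march=mips1) predates the instructions. Below
is a hedged, self-contained sketch of that pattern; it is illustrative
only, not the kernel's code, and the function name, constraints, and
the -march=mips1 scenario are assumptions:

/*
 * Sketch of the ll/sc bracketing (not kernel code).  ".set mips2"
 * temporarily raises the assembler's ISA so ll/sc are accepted;
 * ".set mips0" drops back to the ISA selected on the command line.
 * Whether the CPU may *execute* ll/sc remains a runtime question,
 * which the kernel answers separately via cpu_has_llsc.
 */
static inline unsigned int xchg_u32_sketch(volatile unsigned int *m,
					   unsigned int val)
{
	unsigned int retval, dummy;

	__asm__ __volatile__(
	"	.set	mips2				\n"
	"1:	ll	%0, %2		# load-linked	\n"
	"	move	%1, %3				\n"
	"	sc	%1, %2		# store-cond.	\n"
	"	beqz	%1, 1b		# retry on fail	\n"
	"	.set	mips0				\n"
	: "=&r" (retval), "=&r" (dummy), "+m" (*m)
	: "r" (val)
	: "memory");

	return retval;
}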
diff --git a/include/asm-mips/system.h b/include/asm-mips/system.h
index cd3a6bca7abd..ec29c9349e07 100644
--- a/include/asm-mips/system.h
+++ b/include/asm-mips/system.h
@@ -176,6 +176,7 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
 		unsigned long dummy;
 
 		__asm__ __volatile__(
+		"	.set	mips2					\n"
 		"1:	ll	%0, %3			# xchg_u32	\n"
 		"	move	%2, %z4				\n"
 		"	sc	%2, %1				\n"
@@ -184,6 +185,7 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
 #ifdef CONFIG_SMP
 		"	sync					\n"
 #endif
+		"	.set	mips0					\n"
 		: "=&r" (retval), "=m" (*m), "=&r" (dummy)
 		: "R" (*m), "Jr" (val)
 		: "memory");
@@ -191,6 +193,7 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
 		unsigned long dummy;
 
 		__asm__ __volatile__(
+		"	.set	mips2					\n"
 		"1:	ll	%0, %3			# xchg_u32	\n"
 		"	move	%2, %z4				\n"
 		"	sc	%2, %1				\n"
@@ -198,6 +201,7 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
 #ifdef CONFIG_SMP
 		"	sync					\n"
 #endif
+		"	.set	mips0					\n"
 		: "=&r" (retval), "=m" (*m), "=&r" (dummy)
 		: "R" (*m), "Jr" (val)
 		: "memory");
@@ -222,6 +226,7 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
 		unsigned long dummy;
 
 		__asm__ __volatile__(
+		"	.set	mips3					\n"
 		"1:	lld	%0, %3			# xchg_u64	\n"
 		"	move	%2, %z4				\n"
 		"	scd	%2, %1				\n"
@@ -230,6 +235,7 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
 #ifdef CONFIG_SMP
 		"	sync					\n"
 #endif
+		"	.set	mips0					\n"
 		: "=&r" (retval), "=m" (*m), "=&r" (dummy)
 		: "R" (*m), "Jr" (val)
 		: "memory");
@@ -237,6 +243,7 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
 		unsigned long dummy;
 
 		__asm__ __volatile__(
+		"	.set	mips3					\n"
 		"1:	lld	%0, %3			# xchg_u64	\n"
 		"	move	%2, %z4				\n"
 		"	scd	%2, %1				\n"
@@ -244,6 +251,7 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
 #ifdef CONFIG_SMP
 		"	sync					\n"
 #endif
+		"	.set	mips0					\n"
 		: "=&r" (retval), "=m" (*m), "=&r" (dummy)
 		: "R" (*m), "Jr" (val)
 		: "memory");
@@ -291,7 +299,9 @@ static inline unsigned long __cmpxchg_u32(volatile int * m, unsigned long old,
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		__asm__ __volatile__(
+		"	.set	push					\n"
 		"	.set	noat					\n"
+		"	.set	mips2					\n"
 		"1:	ll	%0, %2			# __cmpxchg_u32	\n"
 		"	bne	%0, %z3, 2f			\n"
 		"	move	$1, %z4				\n"
@@ -302,13 +312,15 @@ static inline unsigned long __cmpxchg_u32(volatile int * m, unsigned long old,
 		"	sync					\n"
 #endif
 		"2:						\n"
-		"	.set	at					\n"
+		"	.set	pop					\n"
 		: "=&r" (retval), "=m" (*m)
 		: "R" (*m), "Jr" (old), "Jr" (new)
 		: "memory");
 	} else if (cpu_has_llsc) {
 		__asm__ __volatile__(
+		"	.set	push					\n"
 		"	.set	noat					\n"
+		"	.set	mips2					\n"
 		"1:	ll	%0, %2			# __cmpxchg_u32	\n"
 		"	bne	%0, %z3, 2f			\n"
 		"	move	$1, %z4				\n"
@@ -318,7 +330,7 @@ static inline unsigned long __cmpxchg_u32(volatile int * m, unsigned long old,
 		"	sync					\n"
 #endif
 		"2:						\n"
-		"	.set	at					\n"
+		"	.set	pop					\n"
 		: "=&r" (retval), "=m" (*m)
 		: "R" (*m), "Jr" (old), "Jr" (new)
 		: "memory");
@@ -343,7 +355,9 @@ static inline unsigned long __cmpxchg_u64(volatile int * m, unsigned long old,
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		__asm__ __volatile__(
+		"	.set	push					\n"
 		"	.set	noat					\n"
+		"	.set	mips3					\n"
 		"1:	lld	%0, %2			# __cmpxchg_u64	\n"
 		"	bne	%0, %z3, 2f			\n"
 		"	move	$1, %z4				\n"
@@ -354,13 +368,15 @@ static inline unsigned long __cmpxchg_u64(volatile int * m, unsigned long old,
 		"	sync					\n"
 #endif
 		"2:						\n"
-		"	.set	at					\n"
+		"	.set	pop					\n"
 		: "=&r" (retval), "=m" (*m)
 		: "R" (*m), "Jr" (old), "Jr" (new)
 		: "memory");
 	} else if (cpu_has_llsc) {
 		__asm__ __volatile__(
+		"	.set	push					\n"
 		"	.set	noat					\n"
+		"	.set	mips3					\n"
 		"1:	lld	%0, %2			# __cmpxchg_u64	\n"
 		"	bne	%0, %z3, 2f			\n"
 		"	move	$1, %z4				\n"
@@ -370,7 +386,7 @@ static inline unsigned long __cmpxchg_u64(volatile int * m, unsigned long old,
 		"	sync					\n"
 #endif
 		"2:						\n"
-		"	.set	at					\n"
+		"	.set	pop					\n"
 		: "=&r" (retval), "=m" (*m)
 		: "R" (*m), "Jr" (old), "Jr" (new)
 		: "memory");
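A note on the two bracketing styles above: the xchg hunks restore the
ISA with a bare ".set mips0" (back to the command-line selection),
while the cmpxchg hunks use ".set push"/".set pop", since they also
flip ".set noat" and push/pop saves and restores the whole assembler
option state as a pair. Here is a hedged 64-bit analogue; again an
illustration, not the kernel's code, with assumed names and
constraints, and it presumes a 64-bit target because lld/scd only
exist from MIPS III on (hence ".set mips3"):

/*
 * Illustrative companion to the __cmpxchg_u64 hunks (not kernel code).
 * ".set noat" lets us use $1, the assembler temporary, by hand;
 * ".set pop" undoes both the noat and the mips3 overrides at once.
 */
static inline unsigned long cmpxchg_u64_sketch(volatile unsigned long *m,
					       unsigned long old,
					       unsigned long new)
{
	unsigned long retval;

	__asm__ __volatile__(
	"	.set	push				\n"
	"	.set	noat				\n"
	"	.set	mips3				\n"
	"1:	lld	%0, %1		# load-linked	\n"
	"	bne	%0, %2, 2f	# no match	\n"
	"	move	$1, %3				\n"
	"	scd	$1, %1		# store-cond.	\n"
	"	beqz	$1, 1b		# retry on fail	\n"
	"2:						\n"
	"	.set	pop				\n"
	: "=&r" (retval), "+m" (*m)
	: "r" (old), "r" (new)
	: "memory");

	return retval;
}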