diff options
author | Markos Chandras <markos.chandras@imgtec.com> | 2015-01-26 07:44:11 -0500 |
---|---|---|
committer | Markos Chandras <markos.chandras@imgtec.com> | 2015-02-17 10:37:21 -0500 |
commit | 94bfb75ace81f7b09860400ba02ed1607a2e0e27 (patch) | |
tree | 43c515ed18ed72bd10e6269be1545e41bd2a5335 /arch | |
parent | a7e07b1ae550303c6611f4d3b054a4f9c2bc8a9e (diff) |
MIPS: asm: Rename GCC_OFF12_ASM to GCC_OFF_SMALL_ASM
The GCC_OFF12_ASM macro is used for 12-bit immediate constraints,
but we will also use it for 9-bit constraints on MIPS R6, so we
rename it to something more appropriate.
Cc: Maciej W. Rozycki <macro@linux-mips.org>
Signed-off-by: Markos Chandras <markos.chandras@imgtec.com>
Diffstat (limited to 'arch')
-rw-r--r-- | arch/mips/include/asm/atomic.h | 30 | ||||
-rw-r--r-- | arch/mips/include/asm/bitops.h | 34 | ||||
-rw-r--r-- | arch/mips/include/asm/cmpxchg.h | 24 | ||||
-rw-r--r-- | arch/mips/include/asm/compiler.h | 4 | ||||
-rw-r--r-- | arch/mips/include/asm/edac.h | 4 | ||||
-rw-r--r-- | arch/mips/include/asm/futex.h | 16 | ||||
-rw-r--r-- | arch/mips/include/asm/mach-pmcs-msp71xx/msp_regops.h | 24 | ||||
-rw-r--r-- | arch/mips/include/asm/octeon/cvmx-cmd-queue.h | 2 | ||||
-rw-r--r-- | arch/mips/include/asm/spinlock.h | 48 |
9 files changed, 93 insertions, 93 deletions
diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h index 857da84cfc92..3a44c2f17e53 100644 --- a/arch/mips/include/asm/atomic.h +++ b/arch/mips/include/asm/atomic.h | |||
@@ -54,7 +54,7 @@ static __inline__ void atomic_##op(int i, atomic_t * v) \ | |||
54 | " sc %0, %1 \n" \ | 54 | " sc %0, %1 \n" \ |
55 | " beqzl %0, 1b \n" \ | 55 | " beqzl %0, 1b \n" \ |
56 | " .set mips0 \n" \ | 56 | " .set mips0 \n" \ |
57 | : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter) \ | 57 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \ |
58 | : "Ir" (i)); \ | 58 | : "Ir" (i)); \ |
59 | } else if (kernel_uses_llsc) { \ | 59 | } else if (kernel_uses_llsc) { \ |
60 | int temp; \ | 60 | int temp; \ |
@@ -66,7 +66,7 @@ static __inline__ void atomic_##op(int i, atomic_t * v) \ | |||
66 | " " #asm_op " %0, %2 \n" \ | 66 | " " #asm_op " %0, %2 \n" \ |
67 | " sc %0, %1 \n" \ | 67 | " sc %0, %1 \n" \ |
68 | " .set mips0 \n" \ | 68 | " .set mips0 \n" \ |
69 | : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter) \ | 69 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \ |
70 | : "Ir" (i)); \ | 70 | : "Ir" (i)); \ |
71 | } while (unlikely(!temp)); \ | 71 | } while (unlikely(!temp)); \ |
72 | } else { \ | 72 | } else { \ |
@@ -97,7 +97,7 @@ static __inline__ int atomic_##op##_return(int i, atomic_t * v) \ | |||
97 | " " #asm_op " %0, %1, %3 \n" \ | 97 | " " #asm_op " %0, %1, %3 \n" \ |
98 | " .set mips0 \n" \ | 98 | " .set mips0 \n" \ |
99 | : "=&r" (result), "=&r" (temp), \ | 99 | : "=&r" (result), "=&r" (temp), \ |
100 | "+" GCC_OFF12_ASM() (v->counter) \ | 100 | "+" GCC_OFF_SMALL_ASM() (v->counter) \ |
101 | : "Ir" (i)); \ | 101 | : "Ir" (i)); \ |
102 | } else if (kernel_uses_llsc) { \ | 102 | } else if (kernel_uses_llsc) { \ |
103 | int temp; \ | 103 | int temp; \ |
@@ -110,7 +110,7 @@ static __inline__ int atomic_##op##_return(int i, atomic_t * v) \ | |||
110 | " sc %0, %2 \n" \ | 110 | " sc %0, %2 \n" \ |
111 | " .set mips0 \n" \ | 111 | " .set mips0 \n" \ |
112 | : "=&r" (result), "=&r" (temp), \ | 112 | : "=&r" (result), "=&r" (temp), \ |
113 | "+" GCC_OFF12_ASM() (v->counter) \ | 113 | "+" GCC_OFF_SMALL_ASM() (v->counter) \ |
114 | : "Ir" (i)); \ | 114 | : "Ir" (i)); \ |
115 | } while (unlikely(!result)); \ | 115 | } while (unlikely(!result)); \ |
116 | \ | 116 | \ |
@@ -171,8 +171,8 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v) | |||
171 | "1: \n" | 171 | "1: \n" |
172 | " .set mips0 \n" | 172 | " .set mips0 \n" |
173 | : "=&r" (result), "=&r" (temp), | 173 | : "=&r" (result), "=&r" (temp), |
174 | "+" GCC_OFF12_ASM() (v->counter) | 174 | "+" GCC_OFF_SMALL_ASM() (v->counter) |
175 | : "Ir" (i), GCC_OFF12_ASM() (v->counter) | 175 | : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter) |
176 | : "memory"); | 176 | : "memory"); |
177 | } else if (kernel_uses_llsc) { | 177 | } else if (kernel_uses_llsc) { |
178 | int temp; | 178 | int temp; |
@@ -190,7 +190,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v) | |||
190 | "1: \n" | 190 | "1: \n" |
191 | " .set mips0 \n" | 191 | " .set mips0 \n" |
192 | : "=&r" (result), "=&r" (temp), | 192 | : "=&r" (result), "=&r" (temp), |
193 | "+" GCC_OFF12_ASM() (v->counter) | 193 | "+" GCC_OFF_SMALL_ASM() (v->counter) |
194 | : "Ir" (i)); | 194 | : "Ir" (i)); |
195 | } else { | 195 | } else { |
196 | unsigned long flags; | 196 | unsigned long flags; |
@@ -333,7 +333,7 @@ static __inline__ void atomic64_##op(long i, atomic64_t * v) \ | |||
333 | " scd %0, %1 \n" \ | 333 | " scd %0, %1 \n" \ |
334 | " beqzl %0, 1b \n" \ | 334 | " beqzl %0, 1b \n" \ |
335 | " .set mips0 \n" \ | 335 | " .set mips0 \n" \ |
336 | : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter) \ | 336 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \ |
337 | : "Ir" (i)); \ | 337 | : "Ir" (i)); \ |
338 | } else if (kernel_uses_llsc) { \ | 338 | } else if (kernel_uses_llsc) { \ |
339 | long temp; \ | 339 | long temp; \ |
@@ -345,7 +345,7 @@ static __inline__ void atomic64_##op(long i, atomic64_t * v) \ | |||
345 | " " #asm_op " %0, %2 \n" \ | 345 | " " #asm_op " %0, %2 \n" \ |
346 | " scd %0, %1 \n" \ | 346 | " scd %0, %1 \n" \ |
347 | " .set mips0 \n" \ | 347 | " .set mips0 \n" \ |
348 | : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter) \ | 348 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \ |
349 | : "Ir" (i)); \ | 349 | : "Ir" (i)); \ |
350 | } while (unlikely(!temp)); \ | 350 | } while (unlikely(!temp)); \ |
351 | } else { \ | 351 | } else { \ |
@@ -376,7 +376,7 @@ static __inline__ long atomic64_##op##_return(long i, atomic64_t * v) \ | |||
376 | " " #asm_op " %0, %1, %3 \n" \ | 376 | " " #asm_op " %0, %1, %3 \n" \ |
377 | " .set mips0 \n" \ | 377 | " .set mips0 \n" \ |
378 | : "=&r" (result), "=&r" (temp), \ | 378 | : "=&r" (result), "=&r" (temp), \ |
379 | "+" GCC_OFF12_ASM() (v->counter) \ | 379 | "+" GCC_OFF_SMALL_ASM() (v->counter) \ |
380 | : "Ir" (i)); \ | 380 | : "Ir" (i)); \ |
381 | } else if (kernel_uses_llsc) { \ | 381 | } else if (kernel_uses_llsc) { \ |
382 | long temp; \ | 382 | long temp; \ |
@@ -389,8 +389,8 @@ static __inline__ long atomic64_##op##_return(long i, atomic64_t * v) \ | |||
389 | " scd %0, %2 \n" \ | 389 | " scd %0, %2 \n" \ |
390 | " .set mips0 \n" \ | 390 | " .set mips0 \n" \ |
391 | : "=&r" (result), "=&r" (temp), \ | 391 | : "=&r" (result), "=&r" (temp), \ |
392 | "=" GCC_OFF12_ASM() (v->counter) \ | 392 | "=" GCC_OFF_SMALL_ASM() (v->counter) \ |
393 | : "Ir" (i), GCC_OFF12_ASM() (v->counter) \ | 393 | : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter) \ |
394 | : "memory"); \ | 394 | : "memory"); \ |
395 | } while (unlikely(!result)); \ | 395 | } while (unlikely(!result)); \ |
396 | \ | 396 | \ |
@@ -452,8 +452,8 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v) | |||
452 | "1: \n" | 452 | "1: \n" |
453 | " .set mips0 \n" | 453 | " .set mips0 \n" |
454 | : "=&r" (result), "=&r" (temp), | 454 | : "=&r" (result), "=&r" (temp), |
455 | "=" GCC_OFF12_ASM() (v->counter) | 455 | "=" GCC_OFF_SMALL_ASM() (v->counter) |
456 | : "Ir" (i), GCC_OFF12_ASM() (v->counter) | 456 | : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter) |
457 | : "memory"); | 457 | : "memory"); |
458 | } else if (kernel_uses_llsc) { | 458 | } else if (kernel_uses_llsc) { |
459 | long temp; | 459 | long temp; |
@@ -471,7 +471,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v) | |||
471 | "1: \n" | 471 | "1: \n" |
472 | " .set mips0 \n" | 472 | " .set mips0 \n" |
473 | : "=&r" (result), "=&r" (temp), | 473 | : "=&r" (result), "=&r" (temp), |
474 | "+" GCC_OFF12_ASM() (v->counter) | 474 | "+" GCC_OFF_SMALL_ASM() (v->counter) |
475 | : "Ir" (i)); | 475 | : "Ir" (i)); |
476 | } else { | 476 | } else { |
477 | unsigned long flags; | 477 | unsigned long flags; |
diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h index 6663bcca9d0c..6cc1f539c79a 100644 --- a/arch/mips/include/asm/bitops.h +++ b/arch/mips/include/asm/bitops.h | |||
@@ -79,8 +79,8 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr) | |||
79 | " " __SC "%0, %1 \n" | 79 | " " __SC "%0, %1 \n" |
80 | " beqzl %0, 1b \n" | 80 | " beqzl %0, 1b \n" |
81 | " .set mips0 \n" | 81 | " .set mips0 \n" |
82 | : "=&r" (temp), "=" GCC_OFF12_ASM() (*m) | 82 | : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*m) |
83 | : "ir" (1UL << bit), GCC_OFF12_ASM() (*m)); | 83 | : "ir" (1UL << bit), GCC_OFF_SMALL_ASM() (*m)); |
84 | #ifdef CONFIG_CPU_MIPSR2 | 84 | #ifdef CONFIG_CPU_MIPSR2 |
85 | } else if (kernel_uses_llsc && __builtin_constant_p(bit)) { | 85 | } else if (kernel_uses_llsc && __builtin_constant_p(bit)) { |
86 | do { | 86 | do { |
@@ -88,7 +88,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr) | |||
88 | " " __LL "%0, %1 # set_bit \n" | 88 | " " __LL "%0, %1 # set_bit \n" |
89 | " " __INS "%0, %3, %2, 1 \n" | 89 | " " __INS "%0, %3, %2, 1 \n" |
90 | " " __SC "%0, %1 \n" | 90 | " " __SC "%0, %1 \n" |
91 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m) | 91 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m) |
92 | : "ir" (bit), "r" (~0)); | 92 | : "ir" (bit), "r" (~0)); |
93 | } while (unlikely(!temp)); | 93 | } while (unlikely(!temp)); |
94 | #endif /* CONFIG_CPU_MIPSR2 */ | 94 | #endif /* CONFIG_CPU_MIPSR2 */ |
@@ -100,7 +100,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr) | |||
100 | " or %0, %2 \n" | 100 | " or %0, %2 \n" |
101 | " " __SC "%0, %1 \n" | 101 | " " __SC "%0, %1 \n" |
102 | " .set mips0 \n" | 102 | " .set mips0 \n" |
103 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m) | 103 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m) |
104 | : "ir" (1UL << bit)); | 104 | : "ir" (1UL << bit)); |
105 | } while (unlikely(!temp)); | 105 | } while (unlikely(!temp)); |
106 | } else | 106 | } else |
@@ -131,7 +131,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr) | |||
131 | " " __SC "%0, %1 \n" | 131 | " " __SC "%0, %1 \n" |
132 | " beqzl %0, 1b \n" | 132 | " beqzl %0, 1b \n" |
133 | " .set mips0 \n" | 133 | " .set mips0 \n" |
134 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m) | 134 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m) |
135 | : "ir" (~(1UL << bit))); | 135 | : "ir" (~(1UL << bit))); |
136 | #ifdef CONFIG_CPU_MIPSR2 | 136 | #ifdef CONFIG_CPU_MIPSR2 |
137 | } else if (kernel_uses_llsc && __builtin_constant_p(bit)) { | 137 | } else if (kernel_uses_llsc && __builtin_constant_p(bit)) { |
@@ -140,7 +140,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr) | |||
140 | " " __LL "%0, %1 # clear_bit \n" | 140 | " " __LL "%0, %1 # clear_bit \n" |
141 | " " __INS "%0, $0, %2, 1 \n" | 141 | " " __INS "%0, $0, %2, 1 \n" |
142 | " " __SC "%0, %1 \n" | 142 | " " __SC "%0, %1 \n" |
143 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m) | 143 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m) |
144 | : "ir" (bit)); | 144 | : "ir" (bit)); |
145 | } while (unlikely(!temp)); | 145 | } while (unlikely(!temp)); |
146 | #endif /* CONFIG_CPU_MIPSR2 */ | 146 | #endif /* CONFIG_CPU_MIPSR2 */ |
@@ -152,7 +152,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr) | |||
152 | " and %0, %2 \n" | 152 | " and %0, %2 \n" |
153 | " " __SC "%0, %1 \n" | 153 | " " __SC "%0, %1 \n" |
154 | " .set mips0 \n" | 154 | " .set mips0 \n" |
155 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m) | 155 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m) |
156 | : "ir" (~(1UL << bit))); | 156 | : "ir" (~(1UL << bit))); |
157 | } while (unlikely(!temp)); | 157 | } while (unlikely(!temp)); |
158 | } else | 158 | } else |
@@ -197,7 +197,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr) | |||
197 | " " __SC "%0, %1 \n" | 197 | " " __SC "%0, %1 \n" |
198 | " beqzl %0, 1b \n" | 198 | " beqzl %0, 1b \n" |
199 | " .set mips0 \n" | 199 | " .set mips0 \n" |
200 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m) | 200 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m) |
201 | : "ir" (1UL << bit)); | 201 | : "ir" (1UL << bit)); |
202 | } else if (kernel_uses_llsc) { | 202 | } else if (kernel_uses_llsc) { |
203 | unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); | 203 | unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); |
@@ -210,7 +210,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr) | |||
210 | " xor %0, %2 \n" | 210 | " xor %0, %2 \n" |
211 | " " __SC "%0, %1 \n" | 211 | " " __SC "%0, %1 \n" |
212 | " .set mips0 \n" | 212 | " .set mips0 \n" |
213 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m) | 213 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m) |
214 | : "ir" (1UL << bit)); | 214 | : "ir" (1UL << bit)); |
215 | } while (unlikely(!temp)); | 215 | } while (unlikely(!temp)); |
216 | } else | 216 | } else |
@@ -245,7 +245,7 @@ static inline int test_and_set_bit(unsigned long nr, | |||
245 | " beqzl %2, 1b \n" | 245 | " beqzl %2, 1b \n" |
246 | " and %2, %0, %3 \n" | 246 | " and %2, %0, %3 \n" |
247 | " .set mips0 \n" | 247 | " .set mips0 \n" |
248 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) | 248 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) |
249 | : "r" (1UL << bit) | 249 | : "r" (1UL << bit) |
250 | : "memory"); | 250 | : "memory"); |
251 | } else if (kernel_uses_llsc) { | 251 | } else if (kernel_uses_llsc) { |
@@ -259,7 +259,7 @@ static inline int test_and_set_bit(unsigned long nr, | |||
259 | " or %2, %0, %3 \n" | 259 | " or %2, %0, %3 \n" |
260 | " " __SC "%2, %1 \n" | 260 | " " __SC "%2, %1 \n" |
261 | " .set mips0 \n" | 261 | " .set mips0 \n" |
262 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) | 262 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) |
263 | : "r" (1UL << bit) | 263 | : "r" (1UL << bit) |
264 | : "memory"); | 264 | : "memory"); |
265 | } while (unlikely(!res)); | 265 | } while (unlikely(!res)); |
@@ -313,7 +313,7 @@ static inline int test_and_set_bit_lock(unsigned long nr, | |||
313 | " or %2, %0, %3 \n" | 313 | " or %2, %0, %3 \n" |
314 | " " __SC "%2, %1 \n" | 314 | " " __SC "%2, %1 \n" |
315 | " .set mips0 \n" | 315 | " .set mips0 \n" |
316 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) | 316 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) |
317 | : "r" (1UL << bit) | 317 | : "r" (1UL << bit) |
318 | : "memory"); | 318 | : "memory"); |
319 | } while (unlikely(!res)); | 319 | } while (unlikely(!res)); |
@@ -355,7 +355,7 @@ static inline int test_and_clear_bit(unsigned long nr, | |||
355 | " beqzl %2, 1b \n" | 355 | " beqzl %2, 1b \n" |
356 | " and %2, %0, %3 \n" | 356 | " and %2, %0, %3 \n" |
357 | " .set mips0 \n" | 357 | " .set mips0 \n" |
358 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) | 358 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) |
359 | : "r" (1UL << bit) | 359 | : "r" (1UL << bit) |
360 | : "memory"); | 360 | : "memory"); |
361 | #ifdef CONFIG_CPU_MIPSR2 | 361 | #ifdef CONFIG_CPU_MIPSR2 |
@@ -369,7 +369,7 @@ static inline int test_and_clear_bit(unsigned long nr, | |||
369 | " " __EXT "%2, %0, %3, 1 \n" | 369 | " " __EXT "%2, %0, %3, 1 \n" |
370 | " " __INS "%0, $0, %3, 1 \n" | 370 | " " __INS "%0, $0, %3, 1 \n" |
371 | " " __SC "%0, %1 \n" | 371 | " " __SC "%0, %1 \n" |
372 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) | 372 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) |
373 | : "ir" (bit) | 373 | : "ir" (bit) |
374 | : "memory"); | 374 | : "memory"); |
375 | } while (unlikely(!temp)); | 375 | } while (unlikely(!temp)); |
@@ -386,7 +386,7 @@ static inline int test_and_clear_bit(unsigned long nr, | |||
386 | " xor %2, %3 \n" | 386 | " xor %2, %3 \n" |
387 | " " __SC "%2, %1 \n" | 387 | " " __SC "%2, %1 \n" |
388 | " .set mips0 \n" | 388 | " .set mips0 \n" |
389 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) | 389 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) |
390 | : "r" (1UL << bit) | 390 | : "r" (1UL << bit) |
391 | : "memory"); | 391 | : "memory"); |
392 | } while (unlikely(!res)); | 392 | } while (unlikely(!res)); |
@@ -428,7 +428,7 @@ static inline int test_and_change_bit(unsigned long nr, | |||
428 | " beqzl %2, 1b \n" | 428 | " beqzl %2, 1b \n" |
429 | " and %2, %0, %3 \n" | 429 | " and %2, %0, %3 \n" |
430 | " .set mips0 \n" | 430 | " .set mips0 \n" |
431 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) | 431 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) |
432 | : "r" (1UL << bit) | 432 | : "r" (1UL << bit) |
433 | : "memory"); | 433 | : "memory"); |
434 | } else if (kernel_uses_llsc) { | 434 | } else if (kernel_uses_llsc) { |
@@ -442,7 +442,7 @@ static inline int test_and_change_bit(unsigned long nr, | |||
442 | " xor %2, %0, %3 \n" | 442 | " xor %2, %0, %3 \n" |
443 | " " __SC "\t%2, %1 \n" | 443 | " " __SC "\t%2, %1 \n" |
444 | " .set mips0 \n" | 444 | " .set mips0 \n" |
445 | : "=&r" (temp), "+" GCC_OFF12_ASM() (*m), "=&r" (res) | 445 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) |
446 | : "r" (1UL << bit) | 446 | : "r" (1UL << bit) |
447 | : "memory"); | 447 | : "memory"); |
448 | } while (unlikely(!res)); | 448 | } while (unlikely(!res)); |
diff --git a/arch/mips/include/asm/cmpxchg.h b/arch/mips/include/asm/cmpxchg.h index 28b1edf19501..68baa0cf521a 100644 --- a/arch/mips/include/asm/cmpxchg.h +++ b/arch/mips/include/asm/cmpxchg.h | |||
@@ -31,8 +31,8 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val) | |||
31 | " sc %2, %1 \n" | 31 | " sc %2, %1 \n" |
32 | " beqzl %2, 1b \n" | 32 | " beqzl %2, 1b \n" |
33 | " .set mips0 \n" | 33 | " .set mips0 \n" |
34 | : "=&r" (retval), "=" GCC_OFF12_ASM() (*m), "=&r" (dummy) | 34 | : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m), "=&r" (dummy) |
35 | : GCC_OFF12_ASM() (*m), "Jr" (val) | 35 | : GCC_OFF_SMALL_ASM() (*m), "Jr" (val) |
36 | : "memory"); | 36 | : "memory"); |
37 | } else if (kernel_uses_llsc) { | 37 | } else if (kernel_uses_llsc) { |
38 | unsigned long dummy; | 38 | unsigned long dummy; |
@@ -46,9 +46,9 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val) | |||
46 | " .set arch=r4000 \n" | 46 | " .set arch=r4000 \n" |
47 | " sc %2, %1 \n" | 47 | " sc %2, %1 \n" |
48 | " .set mips0 \n" | 48 | " .set mips0 \n" |
49 | : "=&r" (retval), "=" GCC_OFF12_ASM() (*m), | 49 | : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m), |
50 | "=&r" (dummy) | 50 | "=&r" (dummy) |
51 | : GCC_OFF12_ASM() (*m), "Jr" (val) | 51 | : GCC_OFF_SMALL_ASM() (*m), "Jr" (val) |
52 | : "memory"); | 52 | : "memory"); |
53 | } while (unlikely(!dummy)); | 53 | } while (unlikely(!dummy)); |
54 | } else { | 54 | } else { |
@@ -82,8 +82,8 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val) | |||
82 | " scd %2, %1 \n" | 82 | " scd %2, %1 \n" |
83 | " beqzl %2, 1b \n" | 83 | " beqzl %2, 1b \n" |
84 | " .set mips0 \n" | 84 | " .set mips0 \n" |
85 | : "=&r" (retval), "=" GCC_OFF12_ASM() (*m), "=&r" (dummy) | 85 | : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m), "=&r" (dummy) |
86 | : GCC_OFF12_ASM() (*m), "Jr" (val) | 86 | : GCC_OFF_SMALL_ASM() (*m), "Jr" (val) |
87 | : "memory"); | 87 | : "memory"); |
88 | } else if (kernel_uses_llsc) { | 88 | } else if (kernel_uses_llsc) { |
89 | unsigned long dummy; | 89 | unsigned long dummy; |
@@ -95,9 +95,9 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val) | |||
95 | " move %2, %z4 \n" | 95 | " move %2, %z4 \n" |
96 | " scd %2, %1 \n" | 96 | " scd %2, %1 \n" |
97 | " .set mips0 \n" | 97 | " .set mips0 \n" |
98 | : "=&r" (retval), "=" GCC_OFF12_ASM() (*m), | 98 | : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m), |
99 | "=&r" (dummy) | 99 | "=&r" (dummy) |
100 | : GCC_OFF12_ASM() (*m), "Jr" (val) | 100 | : GCC_OFF_SMALL_ASM() (*m), "Jr" (val) |
101 | : "memory"); | 101 | : "memory"); |
102 | } while (unlikely(!dummy)); | 102 | } while (unlikely(!dummy)); |
103 | } else { | 103 | } else { |
@@ -158,8 +158,8 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz | |||
158 | " beqzl $1, 1b \n" \ | 158 | " beqzl $1, 1b \n" \ |
159 | "2: \n" \ | 159 | "2: \n" \ |
160 | " .set pop \n" \ | 160 | " .set pop \n" \ |
161 | : "=&r" (__ret), "=" GCC_OFF12_ASM() (*m) \ | 161 | : "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m) \ |
162 | : GCC_OFF12_ASM() (*m), "Jr" (old), "Jr" (new) \ | 162 | : GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new) \ |
163 | : "memory"); \ | 163 | : "memory"); \ |
164 | } else if (kernel_uses_llsc) { \ | 164 | } else if (kernel_uses_llsc) { \ |
165 | __asm__ __volatile__( \ | 165 | __asm__ __volatile__( \ |
@@ -175,8 +175,8 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz | |||
175 | " beqz $1, 1b \n" \ | 175 | " beqz $1, 1b \n" \ |
176 | " .set pop \n" \ | 176 | " .set pop \n" \ |
177 | "2: \n" \ | 177 | "2: \n" \ |
178 | : "=&r" (__ret), "=" GCC_OFF12_ASM() (*m) \ | 178 | : "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m) \ |
179 | : GCC_OFF12_ASM() (*m), "Jr" (old), "Jr" (new) \ | 179 | : GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new) \ |
180 | : "memory"); \ | 180 | : "memory"); \ |
181 | } else { \ | 181 | } else { \ |
182 | unsigned long __flags; \ | 182 | unsigned long __flags; \ |
diff --git a/arch/mips/include/asm/compiler.h b/arch/mips/include/asm/compiler.h index 10b642f55eb7..34ad65a7801f 100644 --- a/arch/mips/include/asm/compiler.h +++ b/arch/mips/include/asm/compiler.h | |||
@@ -17,9 +17,9 @@ | |||
17 | #endif | 17 | #endif |
18 | 18 | ||
19 | #ifndef CONFIG_CPU_MICROMIPS | 19 | #ifndef CONFIG_CPU_MICROMIPS |
20 | #define GCC_OFF12_ASM() "R" | 20 | #define GCC_OFF_SMALL_ASM() "R" |
21 | #elif __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 9) | 21 | #elif __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 9) |
22 | #define GCC_OFF12_ASM() "ZC" | 22 | #define GCC_OFF_SMALL_ASM() "ZC" |
23 | #else | 23 | #else |
24 | #error "microMIPS compilation unsupported with GCC older than 4.9" | 24 | #error "microMIPS compilation unsupported with GCC older than 4.9" |
25 | #endif | 25 | #endif |
diff --git a/arch/mips/include/asm/edac.h b/arch/mips/include/asm/edac.h index ae6fedcb0060..94105d3f58f4 100644 --- a/arch/mips/include/asm/edac.h +++ b/arch/mips/include/asm/edac.h | |||
@@ -26,8 +26,8 @@ static inline void atomic_scrub(void *va, u32 size) | |||
26 | " sc %0, %1 \n" | 26 | " sc %0, %1 \n" |
27 | " beqz %0, 1b \n" | 27 | " beqz %0, 1b \n" |
28 | " .set mips0 \n" | 28 | " .set mips0 \n" |
29 | : "=&r" (temp), "=" GCC_OFF12_ASM() (*virt_addr) | 29 | : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*virt_addr) |
30 | : GCC_OFF12_ASM() (*virt_addr)); | 30 | : GCC_OFF_SMALL_ASM() (*virt_addr)); |
31 | 31 | ||
32 | virt_addr++; | 32 | virt_addr++; |
33 | } | 33 | } |
diff --git a/arch/mips/include/asm/futex.h b/arch/mips/include/asm/futex.h index ef9987a61d88..f666c0608c11 100644 --- a/arch/mips/include/asm/futex.h +++ b/arch/mips/include/asm/futex.h | |||
@@ -45,8 +45,8 @@ | |||
45 | " "__UA_ADDR "\t2b, 4b \n" \ | 45 | " "__UA_ADDR "\t2b, 4b \n" \ |
46 | " .previous \n" \ | 46 | " .previous \n" \ |
47 | : "=r" (ret), "=&r" (oldval), \ | 47 | : "=r" (ret), "=&r" (oldval), \ |
48 | "=" GCC_OFF12_ASM() (*uaddr) \ | 48 | "=" GCC_OFF_SMALL_ASM() (*uaddr) \ |
49 | : "0" (0), GCC_OFF12_ASM() (*uaddr), "Jr" (oparg), \ | 49 | : "0" (0), GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oparg), \ |
50 | "i" (-EFAULT) \ | 50 | "i" (-EFAULT) \ |
51 | : "memory"); \ | 51 | : "memory"); \ |
52 | } else if (cpu_has_llsc) { \ | 52 | } else if (cpu_has_llsc) { \ |
@@ -74,8 +74,8 @@ | |||
74 | " "__UA_ADDR "\t2b, 4b \n" \ | 74 | " "__UA_ADDR "\t2b, 4b \n" \ |
75 | " .previous \n" \ | 75 | " .previous \n" \ |
76 | : "=r" (ret), "=&r" (oldval), \ | 76 | : "=r" (ret), "=&r" (oldval), \ |
77 | "=" GCC_OFF12_ASM() (*uaddr) \ | 77 | "=" GCC_OFF_SMALL_ASM() (*uaddr) \ |
78 | : "0" (0), GCC_OFF12_ASM() (*uaddr), "Jr" (oparg), \ | 78 | : "0" (0), GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oparg), \ |
79 | "i" (-EFAULT) \ | 79 | "i" (-EFAULT) \ |
80 | : "memory"); \ | 80 | : "memory"); \ |
81 | } else \ | 81 | } else \ |
@@ -174,8 +174,8 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, | |||
174 | " "__UA_ADDR "\t1b, 4b \n" | 174 | " "__UA_ADDR "\t1b, 4b \n" |
175 | " "__UA_ADDR "\t2b, 4b \n" | 175 | " "__UA_ADDR "\t2b, 4b \n" |
176 | " .previous \n" | 176 | " .previous \n" |
177 | : "+r" (ret), "=&r" (val), "=" GCC_OFF12_ASM() (*uaddr) | 177 | : "+r" (ret), "=&r" (val), "=" GCC_OFF_SMALL_ASM() (*uaddr) |
178 | : GCC_OFF12_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval), | 178 | : GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval), |
179 | "i" (-EFAULT) | 179 | "i" (-EFAULT) |
180 | : "memory"); | 180 | : "memory"); |
181 | } else if (cpu_has_llsc) { | 181 | } else if (cpu_has_llsc) { |
@@ -203,8 +203,8 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, | |||
203 | " "__UA_ADDR "\t1b, 4b \n" | 203 | " "__UA_ADDR "\t1b, 4b \n" |
204 | " "__UA_ADDR "\t2b, 4b \n" | 204 | " "__UA_ADDR "\t2b, 4b \n" |
205 | " .previous \n" | 205 | " .previous \n" |
206 | : "+r" (ret), "=&r" (val), "=" GCC_OFF12_ASM() (*uaddr) | 206 | : "+r" (ret), "=&r" (val), "=" GCC_OFF_SMALL_ASM() (*uaddr) |
207 | : GCC_OFF12_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval), | 207 | : GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval), |
208 | "i" (-EFAULT) | 208 | "i" (-EFAULT) |
209 | : "memory"); | 209 | : "memory"); |
210 | } else | 210 | } else |
diff --git a/arch/mips/include/asm/mach-pmcs-msp71xx/msp_regops.h b/arch/mips/include/asm/mach-pmcs-msp71xx/msp_regops.h index 2e54b4bff5cf..90dbe43c8d27 100644 --- a/arch/mips/include/asm/mach-pmcs-msp71xx/msp_regops.h +++ b/arch/mips/include/asm/mach-pmcs-msp71xx/msp_regops.h | |||
@@ -85,8 +85,8 @@ static inline void set_value_reg32(volatile u32 *const addr, | |||
85 | " "__beqz"%0, 1b \n" | 85 | " "__beqz"%0, 1b \n" |
86 | " nop \n" | 86 | " nop \n" |
87 | " .set pop \n" | 87 | " .set pop \n" |
88 | : "=&r" (temp), "=" GCC_OFF12_ASM() (*addr) | 88 | : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*addr) |
89 | : "ir" (~mask), "ir" (value), GCC_OFF12_ASM() (*addr)); | 89 | : "ir" (~mask), "ir" (value), GCC_OFF_SMALL_ASM() (*addr)); |
90 | } | 90 | } |
91 | 91 | ||
92 | /* | 92 | /* |
@@ -106,8 +106,8 @@ static inline void set_reg32(volatile u32 *const addr, | |||
106 | " "__beqz"%0, 1b \n" | 106 | " "__beqz"%0, 1b \n" |
107 | " nop \n" | 107 | " nop \n" |
108 | " .set pop \n" | 108 | " .set pop \n" |
109 | : "=&r" (temp), "=" GCC_OFF12_ASM() (*addr) | 109 | : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*addr) |
110 | : "ir" (mask), GCC_OFF12_ASM() (*addr)); | 110 | : "ir" (mask), GCC_OFF_SMALL_ASM() (*addr)); |
111 | } | 111 | } |
112 | 112 | ||
113 | /* | 113 | /* |
@@ -127,8 +127,8 @@ static inline void clear_reg32(volatile u32 *const addr, | |||
127 | " "__beqz"%0, 1b \n" | 127 | " "__beqz"%0, 1b \n" |
128 | " nop \n" | 128 | " nop \n" |
129 | " .set pop \n" | 129 | " .set pop \n" |
130 | : "=&r" (temp), "=" GCC_OFF12_ASM() (*addr) | 130 | : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*addr) |
131 | : "ir" (~mask), GCC_OFF12_ASM() (*addr)); | 131 | : "ir" (~mask), GCC_OFF_SMALL_ASM() (*addr)); |
132 | } | 132 | } |
133 | 133 | ||
134 | /* | 134 | /* |
@@ -148,8 +148,8 @@ static inline void toggle_reg32(volatile u32 *const addr, | |||
148 | " "__beqz"%0, 1b \n" | 148 | " "__beqz"%0, 1b \n" |
149 | " nop \n" | 149 | " nop \n" |
150 | " .set pop \n" | 150 | " .set pop \n" |
151 | : "=&r" (temp), "=" GCC_OFF12_ASM() (*addr) | 151 | : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*addr) |
152 | : "ir" (mask), GCC_OFF12_ASM() (*addr)); | 152 | : "ir" (mask), GCC_OFF_SMALL_ASM() (*addr)); |
153 | } | 153 | } |
154 | 154 | ||
155 | /* | 155 | /* |
@@ -220,8 +220,8 @@ static inline u32 blocking_read_reg32(volatile u32 *const addr) | |||
220 | " .set arch=r4000 \n" \ | 220 | " .set arch=r4000 \n" \ |
221 | "1: ll %0, %1 #custom_read_reg32 \n" \ | 221 | "1: ll %0, %1 #custom_read_reg32 \n" \ |
222 | " .set pop \n" \ | 222 | " .set pop \n" \ |
223 | : "=r" (tmp), "=" GCC_OFF12_ASM() (*address) \ | 223 | : "=r" (tmp), "=" GCC_OFF_SMALL_ASM() (*address) \ |
224 | : GCC_OFF12_ASM() (*address)) | 224 | : GCC_OFF_SMALL_ASM() (*address)) |
225 | 225 | ||
226 | #define custom_write_reg32(address, tmp) \ | 226 | #define custom_write_reg32(address, tmp) \ |
227 | __asm__ __volatile__( \ | 227 | __asm__ __volatile__( \ |
@@ -231,7 +231,7 @@ static inline u32 blocking_read_reg32(volatile u32 *const addr) | |||
231 | " "__beqz"%0, 1b \n" \ | 231 | " "__beqz"%0, 1b \n" \ |
232 | " nop \n" \ | 232 | " nop \n" \ |
233 | " .set pop \n" \ | 233 | " .set pop \n" \ |
234 | : "=&r" (tmp), "=" GCC_OFF12_ASM() (*address) \ | 234 | : "=&r" (tmp), "=" GCC_OFF_SMALL_ASM() (*address) \ |
235 | : "0" (tmp), GCC_OFF12_ASM() (*address)) | 235 | : "0" (tmp), GCC_OFF_SMALL_ASM() (*address)) |
236 | 236 | ||
237 | #endif /* __ASM_REGOPS_H__ */ | 237 | #endif /* __ASM_REGOPS_H__ */ |
diff --git a/arch/mips/include/asm/octeon/cvmx-cmd-queue.h b/arch/mips/include/asm/octeon/cvmx-cmd-queue.h index 75739c83f07e..8d05d9069823 100644 --- a/arch/mips/include/asm/octeon/cvmx-cmd-queue.h +++ b/arch/mips/include/asm/octeon/cvmx-cmd-queue.h | |||
@@ -275,7 +275,7 @@ static inline void __cvmx_cmd_queue_lock(cvmx_cmd_queue_id_t queue_id, | |||
275 | " lbu %[ticket], %[now_serving]\n" | 275 | " lbu %[ticket], %[now_serving]\n" |
276 | "4:\n" | 276 | "4:\n" |
277 | ".set pop\n" : | 277 | ".set pop\n" : |
278 | [ticket_ptr] "=" GCC_OFF12_ASM()(__cvmx_cmd_queue_state_ptr->ticket[__cvmx_cmd_queue_get_index(queue_id)]), | 278 | [ticket_ptr] "=" GCC_OFF_SMALL_ASM()(__cvmx_cmd_queue_state_ptr->ticket[__cvmx_cmd_queue_get_index(queue_id)]), |
279 | [now_serving] "=m"(qptr->now_serving), [ticket] "=r"(tmp), | 279 | [now_serving] "=m"(qptr->now_serving), [ticket] "=r"(tmp), |
280 | [my_ticket] "=r"(my_ticket) | 280 | [my_ticket] "=r"(my_ticket) |
281 | ); | 281 | ); |
diff --git a/arch/mips/include/asm/spinlock.h b/arch/mips/include/asm/spinlock.h index c6d06d383ef9..b5238404c059 100644 --- a/arch/mips/include/asm/spinlock.h +++ b/arch/mips/include/asm/spinlock.h | |||
@@ -89,7 +89,7 @@ static inline void arch_spin_lock(arch_spinlock_t *lock) | |||
89 | " subu %[ticket], %[ticket], 1 \n" | 89 | " subu %[ticket], %[ticket], 1 \n" |
90 | " .previous \n" | 90 | " .previous \n" |
91 | " .set pop \n" | 91 | " .set pop \n" |
92 | : [ticket_ptr] "+" GCC_OFF12_ASM() (lock->lock), | 92 | : [ticket_ptr] "+" GCC_OFF_SMALL_ASM() (lock->lock), |
93 | [serving_now_ptr] "+m" (lock->h.serving_now), | 93 | [serving_now_ptr] "+m" (lock->h.serving_now), |
94 | [ticket] "=&r" (tmp), | 94 | [ticket] "=&r" (tmp), |
95 | [my_ticket] "=&r" (my_ticket) | 95 | [my_ticket] "=&r" (my_ticket) |
@@ -122,7 +122,7 @@ static inline void arch_spin_lock(arch_spinlock_t *lock) | |||
122 | " subu %[ticket], %[ticket], 1 \n" | 122 | " subu %[ticket], %[ticket], 1 \n" |
123 | " .previous \n" | 123 | " .previous \n" |
124 | " .set pop \n" | 124 | " .set pop \n" |
125 | : [ticket_ptr] "+" GCC_OFF12_ASM() (lock->lock), | 125 | : [ticket_ptr] "+" GCC_OFF_SMALL_ASM() (lock->lock), |
126 | [serving_now_ptr] "+m" (lock->h.serving_now), | 126 | [serving_now_ptr] "+m" (lock->h.serving_now), |
127 | [ticket] "=&r" (tmp), | 127 | [ticket] "=&r" (tmp), |
128 | [my_ticket] "=&r" (my_ticket) | 128 | [my_ticket] "=&r" (my_ticket) |
@@ -164,7 +164,7 @@ static inline unsigned int arch_spin_trylock(arch_spinlock_t *lock) | |||
164 | " li %[ticket], 0 \n" | 164 | " li %[ticket], 0 \n" |
165 | " .previous \n" | 165 | " .previous \n" |
166 | " .set pop \n" | 166 | " .set pop \n" |
167 | : [ticket_ptr] "+" GCC_OFF12_ASM() (lock->lock), | 167 | : [ticket_ptr] "+" GCC_OFF_SMALL_ASM() (lock->lock), |
168 | [ticket] "=&r" (tmp), | 168 | [ticket] "=&r" (tmp), |
169 | [my_ticket] "=&r" (tmp2), | 169 | [my_ticket] "=&r" (tmp2), |
170 | [now_serving] "=&r" (tmp3) | 170 | [now_serving] "=&r" (tmp3) |
@@ -188,7 +188,7 @@ static inline unsigned int arch_spin_trylock(arch_spinlock_t *lock) | |||
188 | " li %[ticket], 0 \n" | 188 | " li %[ticket], 0 \n" |
189 | " .previous \n" | 189 | " .previous \n" |
190 | " .set pop \n" | 190 | " .set pop \n" |
191 | : [ticket_ptr] "+" GCC_OFF12_ASM() (lock->lock), | 191 | : [ticket_ptr] "+" GCC_OFF_SMALL_ASM() (lock->lock), |
192 | [ticket] "=&r" (tmp), | 192 | [ticket] "=&r" (tmp), |
193 | [my_ticket] "=&r" (tmp2), | 193 | [my_ticket] "=&r" (tmp2), |
194 | [now_serving] "=&r" (tmp3) | 194 | [now_serving] "=&r" (tmp3) |
@@ -235,8 +235,8 @@ static inline void arch_read_lock(arch_rwlock_t *rw) | |||
235 | " beqzl %1, 1b \n" | 235 | " beqzl %1, 1b \n" |
236 | " nop \n" | 236 | " nop \n" |
237 | " .set reorder \n" | 237 | " .set reorder \n" |
238 | : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp) | 238 | : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp) |
239 | : GCC_OFF12_ASM() (rw->lock) | 239 | : GCC_OFF_SMALL_ASM() (rw->lock) |
240 | : "memory"); | 240 | : "memory"); |
241 | } else { | 241 | } else { |
242 | do { | 242 | do { |
@@ -245,8 +245,8 @@ static inline void arch_read_lock(arch_rwlock_t *rw) | |||
245 | " bltz %1, 1b \n" | 245 | " bltz %1, 1b \n" |
246 | " addu %1, 1 \n" | 246 | " addu %1, 1 \n" |
247 | "2: sc %1, %0 \n" | 247 | "2: sc %1, %0 \n" |
248 | : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp) | 248 | : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp) |
249 | : GCC_OFF12_ASM() (rw->lock) | 249 | : GCC_OFF_SMALL_ASM() (rw->lock) |
250 | : "memory"); | 250 | : "memory"); |
251 | } while (unlikely(!tmp)); | 251 | } while (unlikely(!tmp)); |
252 | } | 252 | } |
@@ -269,8 +269,8 @@ static inline void arch_read_unlock(arch_rwlock_t *rw) | |||
269 | " sub %1, 1 \n" | 269 | " sub %1, 1 \n" |
270 | " sc %1, %0 \n" | 270 | " sc %1, %0 \n" |
271 | " beqzl %1, 1b \n" | 271 | " beqzl %1, 1b \n" |
272 | : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp) | 272 | : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp) |
273 | : GCC_OFF12_ASM() (rw->lock) | 273 | : GCC_OFF_SMALL_ASM() (rw->lock) |
274 | : "memory"); | 274 | : "memory"); |
275 | } else { | 275 | } else { |
276 | do { | 276 | do { |
@@ -278,8 +278,8 @@ static inline void arch_read_unlock(arch_rwlock_t *rw) | |||
278 | "1: ll %1, %2 # arch_read_unlock \n" | 278 | "1: ll %1, %2 # arch_read_unlock \n" |
279 | " sub %1, 1 \n" | 279 | " sub %1, 1 \n" |
280 | " sc %1, %0 \n" | 280 | " sc %1, %0 \n" |
281 | : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp) | 281 | : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp) |
282 | : GCC_OFF12_ASM() (rw->lock) | 282 | : GCC_OFF_SMALL_ASM() (rw->lock) |
283 | : "memory"); | 283 | : "memory"); |
284 | } while (unlikely(!tmp)); | 284 | } while (unlikely(!tmp)); |
285 | } | 285 | } |
@@ -299,8 +299,8 @@ static inline void arch_write_lock(arch_rwlock_t *rw) | |||
299 | " beqzl %1, 1b \n" | 299 | " beqzl %1, 1b \n" |
300 | " nop \n" | 300 | " nop \n" |
301 | " .set reorder \n" | 301 | " .set reorder \n" |
302 | : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp) | 302 | : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp) |
303 | : GCC_OFF12_ASM() (rw->lock) | 303 | : GCC_OFF_SMALL_ASM() (rw->lock) |
304 | : "memory"); | 304 | : "memory"); |
305 | } else { | 305 | } else { |
306 | do { | 306 | do { |
@@ -309,8 +309,8 @@ static inline void arch_write_lock(arch_rwlock_t *rw) | |||
309 | " bnez %1, 1b \n" | 309 | " bnez %1, 1b \n" |
310 | " lui %1, 0x8000 \n" | 310 | " lui %1, 0x8000 \n" |
311 | "2: sc %1, %0 \n" | 311 | "2: sc %1, %0 \n" |
312 | : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp) | 312 | : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp) |
313 | : GCC_OFF12_ASM() (rw->lock) | 313 | : GCC_OFF_SMALL_ASM() (rw->lock) |
314 | : "memory"); | 314 | : "memory"); |
315 | } while (unlikely(!tmp)); | 315 | } while (unlikely(!tmp)); |
316 | } | 316 | } |
@@ -349,8 +349,8 @@ static inline int arch_read_trylock(arch_rwlock_t *rw) | |||
349 | __WEAK_LLSC_MB | 349 | __WEAK_LLSC_MB |
350 | " li %2, 1 \n" | 350 | " li %2, 1 \n" |
351 | "2: \n" | 351 | "2: \n" |
352 | : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret) | 352 | : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret) |
353 | : GCC_OFF12_ASM() (rw->lock) | 353 | : GCC_OFF_SMALL_ASM() (rw->lock) |
354 | : "memory"); | 354 | : "memory"); |
355 | } else { | 355 | } else { |
356 | __asm__ __volatile__( | 356 | __asm__ __volatile__( |
@@ -366,8 +366,8 @@ static inline int arch_read_trylock(arch_rwlock_t *rw) | |||
366 | __WEAK_LLSC_MB | 366 | __WEAK_LLSC_MB |
367 | " li %2, 1 \n" | 367 | " li %2, 1 \n" |
368 | "2: \n" | 368 | "2: \n" |
369 | : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret) | 369 | : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret) |
370 | : GCC_OFF12_ASM() (rw->lock) | 370 | : GCC_OFF_SMALL_ASM() (rw->lock) |
371 | : "memory"); | 371 | : "memory"); |
372 | } | 372 | } |
373 | 373 | ||
@@ -393,8 +393,8 @@ static inline int arch_write_trylock(arch_rwlock_t *rw) | |||
393 | " li %2, 1 \n" | 393 | " li %2, 1 \n" |
394 | " .set reorder \n" | 394 | " .set reorder \n" |
395 | "2: \n" | 395 | "2: \n" |
396 | : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret) | 396 | : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret) |
397 | : GCC_OFF12_ASM() (rw->lock) | 397 | : GCC_OFF_SMALL_ASM() (rw->lock) |
398 | : "memory"); | 398 | : "memory"); |
399 | } else { | 399 | } else { |
400 | do { | 400 | do { |
@@ -406,9 +406,9 @@ static inline int arch_write_trylock(arch_rwlock_t *rw) | |||
406 | " sc %1, %0 \n" | 406 | " sc %1, %0 \n" |
407 | " li %2, 1 \n" | 407 | " li %2, 1 \n" |
408 | "2: \n" | 408 | "2: \n" |
409 | : "=" GCC_OFF12_ASM() (rw->lock), "=&r" (tmp), | 409 | : "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp), |
410 | "=&r" (ret) | 410 | "=&r" (ret) |
411 | : GCC_OFF12_ASM() (rw->lock) | 411 | : GCC_OFF_SMALL_ASM() (rw->lock) |
412 | : "memory"); | 412 | : "memory"); |
413 | } while (unlikely(!tmp)); | 413 | } while (unlikely(!tmp)); |
414 | 414 | ||