author		David Daney <ddaney@avtrex.com>		2008-06-11 13:04:25 -0400
committer	Ralf Baechle <ralf@linux-mips.org>	2008-07-15 13:44:30 -0400
commit		94daeb90698c56a85ed219eeb18d4a8cddde7b03 (patch)
tree		94bbc78b07543fc332b7df03d923d202800597e8 /include/asm-mips
parent		043ebd6c9de7500a399017643bbc5cafd4e37060 (diff)
[MIPS] Fix asm constraints for 'ins' instructions.
The third operand to 'ins' must be a constant int, not a register.

[Ralf: The bug was actually intentional.  Some gcc versions used to throw
an error under certain circumstances for code like:

	static inline void f(unsigned nr, unsigned *p)
	{
		unsigned short bit = nr & 5;

		if (__builtin_constant_p(bit)) {
			__asm__ __volatile__(
			"	foo	%0, %1\n"
			: "=m" (*p)
			: "i" (bit));
		} else {
			/* Do something else. */
		}
	}

because gcc was not able to figure out that the "i" constraint could
possibly be satisfied at the early stage when the constraints are getting
verified.  The solution was to use "ri" instead of "i".  The "ri" would
keep gcc happy, but in the end, for code generation, the "i" alternative
would always be the one satisfied.  The problem AFAIR originally appeared
in the i386 io.h and also hit its MIPS equivalent.  From there the
workaround spread to many of the inline assembler functions.]

Signed-off-by: David Daney <ddaney@avtrex.com>
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
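For illustration only, here is a minimal standalone version of the kind of
MIPSR2 fast path this constraint applies to.  It is a sketch rather than the
kernel's actual code: the name set_bit_ins, the 32-bit word arithmetic and
the operand layout are assumptions, and it presumes a MIPS32R2 toolchain
(-march=mips32r2).  The point is that 'ins' encodes its bit-position operand
in the instruction word, so the corresponding asm operand needs the "i"
constraint and must be a compile-time constant:

	/*
	 * Sketch only, not the kernel's set_bit().  'nr' must fold to a
	 * compile-time constant (constant argument, inlining, optimization
	 * enabled), otherwise the "i" constraint cannot be satisfied.
	 */
	static inline void set_bit_ins(unsigned long nr, volatile unsigned long *addr)
	{
		unsigned long *m = ((unsigned long *)addr) + (nr >> 5);	/* 32-bit words */
		const unsigned int bit = nr & 31;
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%0, %1			\n"	/* temp = *m, load-linked        */
		"	ins	%0, %2, %3, 1		\n"	/* insert one bit of ~0 at 'bit' */
		"	sc	%0, %1			\n"	/* store-conditional back to *m  */
		"	beqz	%0, 1b			\n"	/* retry if the SC failed        */
		: "=&r" (temp), "+m" (*m)
		: "r" (~0UL), "i" (bit));
	}

Built without optimization, or reached with a non-constant bit number, the
"i" constraint cannot be satisfied and gcc rejects the asm; that is exactly
the situation the old "ri"/"ir" spelling was papering over.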
Diffstat (limited to 'include/asm-mips')
-rw-r--r--	include/asm-mips/bitops.h	6
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/include/asm-mips/bitops.h b/include/asm-mips/bitops.h
index 642724734eba..9a7274ba6a0b 100644
--- a/include/asm-mips/bitops.h
+++ b/include/asm-mips/bitops.h
@@ -82,7 +82,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
82 "2: b 1b \n" 82 "2: b 1b \n"
83 " .previous \n" 83 " .previous \n"
84 : "=&r" (temp), "=m" (*m) 84 : "=&r" (temp), "=m" (*m)
85 : "ir" (bit), "m" (*m), "r" (~0)); 85 : "i" (bit), "m" (*m), "r" (~0));
86#endif /* CONFIG_CPU_MIPSR2 */ 86#endif /* CONFIG_CPU_MIPSR2 */
87 } else if (cpu_has_llsc) { 87 } else if (cpu_has_llsc) {
88 __asm__ __volatile__( 88 __asm__ __volatile__(
@@ -147,7 +147,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
147 "2: b 1b \n" 147 "2: b 1b \n"
148 " .previous \n" 148 " .previous \n"
149 : "=&r" (temp), "=m" (*m) 149 : "=&r" (temp), "=m" (*m)
150 : "ir" (bit), "m" (*m)); 150 : "i" (bit), "m" (*m));
151#endif /* CONFIG_CPU_MIPSR2 */ 151#endif /* CONFIG_CPU_MIPSR2 */
152 } else if (cpu_has_llsc) { 152 } else if (cpu_has_llsc) {
153 __asm__ __volatile__( 153 __asm__ __volatile__(
@@ -428,7 +428,7 @@ static inline int test_and_clear_bit(unsigned long nr,
428 "2: b 1b \n" 428 "2: b 1b \n"
429 " .previous \n" 429 " .previous \n"
430 : "=&r" (temp), "=m" (*m), "=&r" (res) 430 : "=&r" (temp), "=m" (*m), "=&r" (res)
431 : "ri" (bit), "m" (*m) 431 : "i" (bit), "m" (*m)
432 : "memory"); 432 : "memory");
433#endif 433#endif
434 } else if (cpu_has_llsc) { 434 } else if (cpu_has_llsc) {