author		Ralf Baechle <ralf@linux-mips.org>	2013-02-08 12:13:30 -0500
committer	Ralf Baechle <ralf@linux-mips.org>	2013-04-11 09:39:51 -0400
commit		02b849f7613003fe5f9e58bf233d49b0ebd4a5e8 (patch)
tree		78db26af28f5da12eddd69ad3c54752e6379118e /arch/mips/lib
parent		0bfbf6a256348b1543e638c7d7b2f3004b289fdb (diff)
MIPS: Get rid of the use of .macro in C code.

It fails with LTO and probably has always been fragile.

Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
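Background for the change, not part of the commit message: the old code defined assembler macros in one toplevel asm() statement and invoked them by name from separate inline-asm statements in other functions. That only works while the assembler happens to see the definition before every use; under LTO the toplevel asm() can be reordered or land in a different partition, and the reference breaks. The fix inlines the assembly into each function and splices preprocessor macros into the asm string with __stringify(). Below is a minimal sketch of the two patterns, assuming a MIPS target; sketch_irq_mask, SKETCH_HAZARD, old_style, new_style and __sketch_str are illustrative names, not from the patch:

/* The kernel gets __stringify() from <linux/stringify.h>; a local
 * equivalent is reproduced here so the sketch is self-contained. */
#define __sketch_str_1(x...)	#x
#define __sketch_str(x...)	__sketch_str_1(x)

/* Hypothetical stand-in for a hazard barrier such as the kernel's
 * __irq_disable_hazard; "nop" keeps the sketch assemblable anywhere. */
#define SKETCH_HAZARD		nop

/* Old, fragile pattern: the macro body lives in one toplevel asm()... */
__asm__("	.macro	sketch_irq_mask			\n"
	"	.set	push				\n"
	"	.set	noat				\n"
	"	mfc0	$1, $12				\n"
	"	ori	$1, 0x1f			\n"
	"	xori	$1, 0x1f			\n"
	"	mtc0	$1, $12				\n"
	"	.set	pop				\n"
	"	.endm					\n");

static void old_style(void)
{
	/* ...and is invoked by name here.  Nothing ties the two asm
	 * statements together, so with LTO the assembler may see this
	 * use before (or without) the definition above. */
	__asm__ __volatile__("sketch_irq_mask" : : : "memory");
}

static void new_style(void)
{
	/* Fixed pattern: one self-contained asm statement; preprocessor
	 * macros are pasted into the string via stringification, which
	 * is what the patch does with __stringify(__irq_disable_hazard). */
	__asm__ __volatile__(
	"	.set	push				\n"
	"	.set	noat				\n"
	"	mfc0	$1, $12				\n"
	"	ori	$1, 0x1f			\n"
	"	xori	$1, 0x1f			\n"
	"	mtc0	$1, $12				\n"
	"	" __sketch_str(SKETCH_HAZARD) "		\n"
	"	.set	pop				\n"
	: /* no outputs */ : /* no inputs */ : "memory");
}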
Diffstat (limited to 'arch/mips/lib')
-rw-r--r--	arch/mips/lib/mips-atomic.c	149
1 file changed, 84 insertions(+), 65 deletions(-)
diff --git a/arch/mips/lib/mips-atomic.c b/arch/mips/lib/mips-atomic.c
index cd160be3ce4d..6807f7172eaf 100644
--- a/arch/mips/lib/mips-atomic.c
+++ b/arch/mips/lib/mips-atomic.c
@@ -13,6 +13,7 @@
 #include <linux/compiler.h>
 #include <linux/preempt.h>
 #include <linux/export.h>
+#include <linux/stringify.h>
 
 #if !defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_MIPS_MT_SMTC)
 
@@ -34,8 +35,11 @@
  *
  * Workaround: mask EXL bit of the result or place a nop before mfc0.
  */
-__asm__(
-	"	.macro	arch_local_irq_disable			\n"
+notrace void arch_local_irq_disable(void)
+{
+	preempt_disable();
+
+	__asm__ __volatile__(
 	"	.set	push						\n"
 	"	.set	noat						\n"
 #ifdef CONFIG_MIPS_MT_SMTC
@@ -52,108 +56,98 @@ __asm__(
 	"	.set	noreorder					\n"
 	"	mtc0	$1,$12						\n"
 #endif
-	"	irq_disable_hazard					\n"
+	"	" __stringify(__irq_disable_hazard) "			\n"
 	"	.set	pop						\n"
-	"	.endm							\n");
+	: /* no outputs */
+	: /* no inputs */
+	: "memory");
 
-notrace void arch_local_irq_disable(void)
-{
-	preempt_disable();
-	__asm__ __volatile__(
-		"arch_local_irq_disable"
-		: /* no outputs */
-		: /* no inputs */
-		: "memory");
 	preempt_enable();
 }
 EXPORT_SYMBOL(arch_local_irq_disable);
 
 
-__asm__(
-	"	.macro	arch_local_irq_save result		\n"
+notrace unsigned long arch_local_irq_save(void)
+{
+	unsigned long flags;
+
+	preempt_disable();
+
+	__asm__ __volatile__(
 	"	.set	push						\n"
 	"	.set	reorder						\n"
 	"	.set	noat						\n"
 #ifdef CONFIG_MIPS_MT_SMTC
-	"	mfc0	\\result, $2, 1					\n"
-	"	ori	$1, \\result, 0x400				\n"
+	"	mfc0	%[flags], $2, 1					\n"
+	"	ori	$1, %[flags], 0x400				\n"
 	"	.set	noreorder					\n"
 	"	mtc0	$1, $2, 1					\n"
-	"	andi	\\result, \\result, 0x400			\n"
+	"	andi	%[flags], %[flags], 0x400			\n"
 #elif defined(CONFIG_CPU_MIPSR2)
 	/* see irqflags.h for inline function */
 #else
-	"	mfc0	\\result, $12					\n"
-	"	ori	$1, \\result, 0x1f				\n"
+	"	mfc0	%[flags], $12					\n"
+	"	ori	$1, %[flags], 0x1f				\n"
 	"	xori	$1, 0x1f					\n"
 	"	.set	noreorder					\n"
 	"	mtc0	$1, $12						\n"
 #endif
-	"	irq_disable_hazard					\n"
+	"	" __stringify(__irq_disable_hazard) "			\n"
 	"	.set	pop						\n"
-	"	.endm							\n");
+	: [flags] "=r" (flags)
+	: /* no inputs */
+	: "memory");
 
-notrace unsigned long arch_local_irq_save(void)
-{
-	unsigned long flags;
-	preempt_disable();
-	asm volatile("arch_local_irq_save\t%0"
-		     : "=r" (flags)
-		     : /* no inputs */
-		     : "memory");
 	preempt_enable();
+
 	return flags;
 }
 EXPORT_SYMBOL(arch_local_irq_save);
 
+notrace void arch_local_irq_restore(unsigned long flags)
+{
+	unsigned long __tmp1;
+
+#ifdef CONFIG_MIPS_MT_SMTC
+	/*
+	 * SMTC kernel needs to do a software replay of queued
+	 * IPIs, at the cost of branch and call overhead on each
+	 * local_irq_restore()
+	 */
+	if (unlikely(!(flags & 0x0400)))
+		smtc_ipi_replay();
+#endif
+	preempt_disable();
 
-__asm__(
-	"	.macro	arch_local_irq_restore flags		\n"
+	__asm__ __volatile__(
 	"	.set	push						\n"
 	"	.set	noreorder					\n"
 	"	.set	noat						\n"
 #ifdef CONFIG_MIPS_MT_SMTC
-	"mfc0	$1, $2, 1						\n"
-	"andi	\\flags, 0x400						\n"
-	"ori	$1, 0x400						\n"
-	"xori	$1, 0x400						\n"
-	"or	\\flags, $1						\n"
-	"mtc0	\\flags, $2, 1						\n"
+	"	mfc0	$1, $2, 1					\n"
+	"	andi	%[flags], 0x400					\n"
+	"	ori	$1, 0x400					\n"
+	"	xori	$1, 0x400					\n"
+	"	or	%[flags], $1					\n"
+	"	mtc0	%[flags], $2, 1					\n"
 #elif defined(CONFIG_CPU_MIPSR2) && defined(CONFIG_IRQ_CPU)
 	/* see irqflags.h for inline function */
 #elif defined(CONFIG_CPU_MIPSR2)
 	/* see irqflags.h for inline function */
 #else
 	"	mfc0	$1, $12						\n"
-	"	andi	\\flags, 1					\n"
+	"	andi	%[flags], 1					\n"
 	"	ori	$1, 0x1f					\n"
 	"	xori	$1, 0x1f					\n"
-	"	or	\\flags, $1					\n"
-	"	mtc0	\\flags, $12					\n"
+	"	or	%[flags], $1					\n"
+	"	mtc0	%[flags], $12					\n"
 #endif
-	"	irq_disable_hazard					\n"
+	"	" __stringify(__irq_disable_hazard) "			\n"
 	"	.set	pop						\n"
-	"	.endm							\n");
+	: [flags] "=r" (__tmp1)
+	: "0" (flags)
+	: "memory");
 
-notrace void arch_local_irq_restore(unsigned long flags)
-{
-	unsigned long __tmp1;
-
-#ifdef CONFIG_MIPS_MT_SMTC
-	/*
-	 * SMTC kernel needs to do a software replay of queued
-	 * IPIs, at the cost of branch and call overhead on each
-	 * local_irq_restore()
-	 */
-	if (unlikely(!(flags & 0x0400)))
-		smtc_ipi_replay();
-#endif
-	preempt_disable();
-	__asm__ __volatile__(
-		"arch_local_irq_restore\t%0"
-		: "=r" (__tmp1)
-		: "0" (flags)
-		: "memory");
 	preempt_enable();
 }
 EXPORT_SYMBOL(arch_local_irq_restore);
@@ -164,11 +158,36 @@ notrace void __arch_local_irq_restore(unsigned long flags)
 	unsigned long __tmp1;
 
 	preempt_disable();
+
 	__asm__ __volatile__(
-		"arch_local_irq_restore\t%0"
-		: "=r" (__tmp1)
-		: "0" (flags)
-		: "memory");
+	"	.set	push						\n"
+	"	.set	noreorder					\n"
+	"	.set	noat						\n"
+#ifdef CONFIG_MIPS_MT_SMTC
+	"	mfc0	$1, $2, 1					\n"
+	"	andi	%[flags], 0x400					\n"
+	"	ori	$1, 0x400					\n"
+	"	xori	$1, 0x400					\n"
+	"	or	%[flags], $1					\n"
+	"	mtc0	%[flags], $2, 1					\n"
+#elif defined(CONFIG_CPU_MIPSR2) && defined(CONFIG_IRQ_CPU)
+	/* see irqflags.h for inline function */
+#elif defined(CONFIG_CPU_MIPSR2)
+	/* see irqflags.h for inline function */
+#else
+	"	mfc0	$1, $12						\n"
+	"	andi	%[flags], 1					\n"
+	"	ori	$1, 0x1f					\n"
+	"	xori	$1, 0x1f					\n"
+	"	or	%[flags], $1					\n"
+	"	mtc0	%[flags], $12					\n"
+#endif
+	"	" __stringify(__irq_disable_hazard) "			\n"
+	"	.set	pop						\n"
+	: [flags] "=r" (__tmp1)
+	: "0" (flags)
+	: "memory");
+
 	preempt_enable();
 }
 EXPORT_SYMBOL(__arch_local_irq_restore);
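A closing note on the constraint idiom the new functions rely on, since it replaces the old macro-argument plumbing (\flags): a named output [flags] "=r" (__tmp1) paired with the matching-constraint input "0" (flags) tells the compiler the input must arrive in the same register as operand 0, so the asm body can read and rewrite one register in place. A minimal, arch-neutral sketch of just that mechanic; roundtrip is a hypothetical name, not kernel code:

#include <stdio.h>

static unsigned long roundtrip(unsigned long flags)
{
	unsigned long tmp;

	/* Empty template: no instructions are emitted, but the named
	 * operand and the "0" matching constraint still force flags and
	 * tmp into the same register, as in the patched functions. */
	__asm__ __volatile__(
		""
		: [flags] "=r" (tmp)
		: "0" (flags)
		: "memory");

	return tmp;
}

int main(void)
{
	printf("0x%lx\n", roundtrip(0x1fUL));	/* prints 0x1f */
	return 0;
}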