Diffstat (limited to 'include/asm-mips/hazards.h')

 -rw-r--r--  include/asm-mips/hazards.h | 360
 1 file changed, 125 insertions(+), 235 deletions(-)
diff --git a/include/asm-mips/hazards.h b/include/asm-mips/hazards.h
index 25f5e8a4177d..0fe02945feba 100644
--- a/include/asm-mips/hazards.h
+++ b/include/asm-mips/hazards.h
@@ -12,102 +12,95 @@
 
 
 #ifdef __ASSEMBLY__
-
-	.macro	_ssnop
-	sll	$0, $0, 1
-	.endm
-
-	.macro	_ehb
-	sll	$0, $0, 3
-	.endm
-
-/*
- * RM9000 hazards.  When the JTLB is updated by tlbwi or tlbwr, a subsequent
- * use of the JTLB for instructions should not occur for 4 cpu cycles and use
- * for data translations should not occur for 3 cpu cycles.
- */
-#ifdef CONFIG_CPU_RM9000
-
-	.macro	mtc0_tlbw_hazard
-	.set	push
-	.set	mips32
-	_ssnop; _ssnop; _ssnop; _ssnop
-	.set	pop
-	.endm
-
-	.macro	tlbw_eret_hazard
-	.set	push
-	.set	mips32
-	_ssnop; _ssnop; _ssnop; _ssnop
-	.set	pop
-	.endm
-
+#define ASMMACRO(name, code...) .macro name; code; .endm
 #else
 
-/*
- * The taken branch will result in a two cycle penalty for the two killed
- * instructions on R4000 / R4400.  Other processors only have a single cycle
- * hazard so this is nice trick to have an optimal code for a range of
- * processors.
- */
-	.macro	mtc0_tlbw_hazard
-	b	. + 8
-	.endm
+#define ASMMACRO(name, code...)						\
+__asm__(".macro " #name "; " #code "; .endm");				\
+									\
+static inline void name(void)						\
+{									\
+	__asm__ __volatile__ (#name);					\
+}
 
-	.macro	tlbw_eret_hazard
-	.endm
 #endif
 
+ASMMACRO(_ssnop,
+	 sll	$0, $0, 1
+	)
+
+ASMMACRO(_ehb,
+	 sll	$0, $0, 3
+	)
+
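
Note: the ASMMACRO() helper added above is the heart of this patch. One definition per hazard barrier now serves both worlds: in .S files it becomes a plain assembler macro, while on the C side it emits the same assembler macro through a top-level __asm__() and wraps it in an inline function so C code keeps its function-call syntax. Expanding by hand (my illustration, not part of the patch), ASMMACRO(_ehb, sll $0, $0, 3) preprocesses on the C side to roughly:

__asm__(".macro _ehb; sll $0, $0, 3; .endm");

static inline void _ehb(void)
{
	/* the assembler, not the compiler, expands the _ehb macro */
	__asm__ __volatile__ ("_ehb");
}

This is what lets the patch delete the duplicated .macro/#define pairs for _ssnop (a superscalar-safe nop, encoded as sll $0, $0, 1) and _ehb (the MIPS32R2 execution hazard barrier, encoded as sll $0, $0, 3) that the old header carried.
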
 /*
- * mtc0->mfc0 hazard
- * The 24K has a 2 cycle mtc0/mfc0 execution hazard.
- * It is a MIPS32R2 processor so ehb will clear the hazard.
+ * TLB hazards
  */
+#if defined(CONFIG_CPU_MIPSR2)
 
-#ifdef CONFIG_CPU_MIPSR2
 /*
- * Use a macro for ehb unless explicit support for MIPSR2 is enabled
+ * MIPSR2 defines ehb for hazard avoidance
  */
 
-#define irq_enable_hazard						\
+ASMMACRO(mtc0_tlbw_hazard,
+	 _ehb
+	)
+ASMMACRO(tlbw_use_hazard,
+	 _ehb
+	)
+ASMMACRO(tlb_probe_hazard,
+	 _ehb
+	)
+ASMMACRO(irq_enable_hazard,
+	)
+ASMMACRO(irq_disable_hazard,
 	_ehb
-
-#define irq_disable_hazard						\
+	)
+ASMMACRO(back_to_back_c0_hazard,
 	_ehb
-
-#elif defined(CONFIG_CPU_R10000) || defined(CONFIG_CPU_RM9000)
-
+	)
 /*
- * R10000 rocks - all hazards handled in hardware, so this becomes a nobrainer.
+ * gcc has a tradition of misscompiling the previous construct using the
+ * address of a label as argument to inline assembler.  Gas otoh has the
+ * annoying difference between la and dla which are only usable for 32-bit
+ * rsp. 64-bit code, so can't be used without conditional compilation.
+ * The alterantive is switching the assembler to 64-bit code which happens
+ * to work right even for 32-bit code ...
  */
+#define instruction_hazard()						\
+do {									\
+	unsigned long tmp;						\
+									\
+	__asm__ __volatile__(						\
+	"	.set	mips64r2				\n"	\
+	"	dla	%0, 1f					\n"	\
+	"	jr.hb	%0					\n"	\
+	"	.set	mips0					\n"	\
+	"1:							\n"	\
+	: "=r" (tmp));							\
+} while (0)
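
Note: on MIPSR2 the explicit barrier for instruction hazards is jr.hb (jump register with hazard barrier), which is why instruction_hazard() above loads the address of the local label 1: with dla and jumps to it. A hedged sketch of the kind of caller this serves; patch_insn() is a hypothetical name, while flush_icache_range() is the kernel's usual cache-maintenance interface and not something this patch touches:

#include <asm/cacheflush.h>
#include <asm/hazards.h>

/* Sketch: make a runtime-patched instruction visible to instruction fetch. */
static void patch_insn(unsigned int *p, unsigned int insn)
{
	*p = insn;
	/* write the new word back from the D-cache and invalidate the I-cache */
	flush_icache_range((unsigned long)p, (unsigned long)(p + 1));
	/* clear the pipeline's stale view: jr.hb here on MIPSR2 */
	instruction_hazard();
}
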
 
-#define irq_enable_hazard
-
-#define irq_disable_hazard
-
-#else
+#elif defined(CONFIG_CPU_R10000)
 
 /*
- * Classic MIPS needs 1 - 3 nops or ssnops
+ * R10000 rocks - all hazards handled in hardware, so this becomes a nobrainer.
  */
-#define irq_enable_hazard
-#define irq_disable_hazard						\
-	_ssnop; _ssnop; _ssnop
 
-#endif
-
-#else /* __ASSEMBLY__ */
-
-__asm__(
-	"	.macro	_ssnop					\n"
-	"	sll	$0, $0, 1				\n"
-	"	.endm						\n"
-	"							\n"
-	"	.macro	_ehb					\n"
-	"	sll	$0, $0, 3				\n"
-	"	.endm						\n");
+ASMMACRO(mtc0_tlbw_hazard,
+	)
+ASMMACRO(tlbw_use_hazard,
+	)
+ASMMACRO(tlb_probe_hazard,
+	)
+ASMMACRO(irq_enable_hazard,
+	)
+ASMMACRO(irq_disable_hazard,
+	)
+ASMMACRO(back_to_back_c0_hazard,
+	)
+#define instruction_hazard() do { } while (0)
 
-#ifdef CONFIG_CPU_RM9000
+#elif defined(CONFIG_CPU_RM9000)
 
 /*
  * RM9000 hazards.  When the JTLB is updated by tlbwi or tlbwr, a subsequent
@@ -115,176 +108,73 @@ __asm__(
  * for data translations should not occur for 3 cpu cycles.
  */
 
-#define mtc0_tlbw_hazard()						\
-	__asm__ __volatile__(						\
-	"	.set	mips32					\n"	\
-	"	_ssnop						\n"	\
-	"	_ssnop						\n"	\
-	"	_ssnop						\n"	\
-	"	_ssnop						\n"	\
-	"	.set	mips0					\n")
-
-#define tlbw_use_hazard()						\
-	__asm__ __volatile__(						\
-	"	.set	mips32					\n"	\
-	"	_ssnop						\n"	\
-	"	_ssnop						\n"	\
-	"	_ssnop						\n"	\
-	"	_ssnop						\n"	\
-	"	.set	mips0					\n")
-
-#else
-
-/*
- * Overkill warning ...
- */
-#define mtc0_tlbw_hazard()						\
-	__asm__ __volatile__(						\
-	"	.set	noreorder				\n"	\
-	"	nop						\n"	\
-	"	nop						\n"	\
-	"	nop						\n"	\
-	"	nop						\n"	\
-	"	nop						\n"	\
-	"	nop						\n"	\
-	"	.set	reorder					\n")
-
-#define tlbw_use_hazard()						\
-	__asm__ __volatile__(						\
-	"	.set	noreorder				\n"	\
-	"	nop						\n"	\
-	"	nop						\n"	\
-	"	nop						\n"	\
-	"	nop						\n"	\
-	"	nop						\n"	\
-	"	nop						\n"	\
-	"	.set	reorder					\n")
-
-#endif
-
-/*
- * Interrupt enable/disable hazards
- * Some processors have hazards when modifying
- * the status register to change the interrupt state
- */
-
-#ifdef CONFIG_CPU_MIPSR2
-
-__asm__("	.macro	irq_enable_hazard			\n"
-	"	_ehb						\n"
-	"	.endm						\n"
-	"							\n"
-	"	.macro	irq_disable_hazard			\n"
-	"	_ehb						\n"
-	"	.endm						\n");
+ASMMACRO(mtc0_tlbw_hazard,
+	 _ssnop; _ssnop; _ssnop; _ssnop
+	)
+ASMMACRO(tlbw_use_hazard,
+	 _ssnop; _ssnop; _ssnop; _ssnop
+	)
+ASMMACRO(tlb_probe_hazard,
+	 _ssnop; _ssnop; _ssnop; _ssnop
+	)
+ASMMACRO(irq_enable_hazard,
+	)
+ASMMACRO(irq_disable_hazard,
+	)
+ASMMACRO(back_to_back_c0_hazard,
+	)
+#define instruction_hazard() do { } while (0)
 
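
Note: the calling convention is unchanged by the conversion: a hazard macro sits between a TLB/cp0 operation and the first instruction that may depend on its result, and each CPU branch of this header decides how much padding that takes (ehb, ssnops, nops, or nothing at all). A minimal sketch of a typical caller in the style of arch/mips/mm/tlb-r4k.c; write_one_tlb() is a hypothetical name, the cp0 accessors and tlb_write_indexed() come from <asm/mipsregs.h>:

#include <asm/mipsregs.h>
#include <asm/hazards.h>

/* Sketch: replace one indexed TLB entry. */
static void write_one_tlb(int index, unsigned long entryhi,
			  unsigned long entrylo0, unsigned long entrylo1)
{
	write_c0_index(index);
	write_c0_entryhi(entryhi);
	write_c0_entrylo0(entrylo0);
	write_c0_entrylo1(entrylo1);
	mtc0_tlbw_hazard();	/* mtc0 results must be visible to the TLB write */
	tlb_write_indexed();
	tlbw_use_hazard();	/* the new entry needs a few cycles before any use */
}
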
-#elif defined(CONFIG_CPU_R10000) || defined(CONFIG_CPU_RM9000)
+#elif defined(CONFIG_CPU_SB1)
 
 /*
- * R10000 rocks - all hazards handled in hardware, so this becomes a nobrainer.
+ * Mostly like R4000 for historic reasons
  */
-
-__asm__(
-	"	.macro	irq_enable_hazard			\n"
-	"	.endm						\n"
-	"							\n"
-	"	.macro	irq_disable_hazard			\n"
-	"	.endm						\n");
+ASMMACRO(mtc0_tlbw_hazard,
+	)
+ASMMACRO(tlbw_use_hazard,
+	)
+ASMMACRO(tlb_probe_hazard,
+	)
+ASMMACRO(irq_enable_hazard,
+	)
+ASMMACRO(irq_disable_hazard,
+	 _ssnop; _ssnop; _ssnop
+	)
+ASMMACRO(back_to_back_c0_hazard,
+	)
+#define instruction_hazard() do { } while (0)
 
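
Note: the irq macros keep their old asymmetry, which the comment removed below spells out: irq_enable_hazard() may stay empty because it only matters that interrupts come back on eventually, while irq_disable_hazard() must guarantee no interrupt is taken once it returns. A sketch of how the disable side pairs with a Status register update; local_irq_disable_sketch() is a hypothetical name, read_c0_status()/write_c0_status() and ST0_IE are the standard <asm/mipsregs.h> definitions:

#include <asm/mipsregs.h>
#include <asm/hazards.h>

/* Sketch: hard-disable interrupts on a CPU with a Status update hazard. */
static inline void local_irq_disable_sketch(void)
{
	write_c0_status(read_c0_status() & ~ST0_IE);
	irq_disable_hazard();	/* don't proceed until the mtc0 has landed */
}
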
 #else
 
 /*
- * Default for classic MIPS processors.  Assume worst case hazards but don't
- * care about the irq_enable_hazard - sooner or later the hardware will
- * enable it and we don't care when exactly.
- */
-
-__asm__(
-	"	#						\n"
-	"	# There is a hazard but we do not care		\n"
-	"	#						\n"
-	"	.macro\tirq_enable_hazard			\n"
-	"	.endm						\n"
-	"							\n"
-	"	.macro\tirq_disable_hazard			\n"
-	"	_ssnop						\n"
-	"	_ssnop						\n"
-	"	_ssnop						\n"
-	"	.endm						\n");
-
-#endif
-
-#define irq_enable_hazard()						\
-	__asm__ __volatile__("irq_enable_hazard")
-#define irq_disable_hazard()						\
-	__asm__ __volatile__("irq_disable_hazard")
-
-
-/*
- * Back-to-back hazards -
+ * Finally the catchall case for all other processors including R4000, R4400,
+ * R4600, R4700, R5000, RM7000, NEC VR41xx etc.
  *
- * What is needed to separate a move to cp0 from a subsequent read from the
- * same cp0 register?
- */
-#ifdef CONFIG_CPU_MIPSR2
-
-__asm__("	.macro	back_to_back_c0_hazard			\n"
-	"	_ehb						\n"
-	"	.endm						\n");
-
-#elif defined(CONFIG_CPU_R10000) || defined(CONFIG_CPU_RM9000) || \
-      defined(CONFIG_CPU_SB1)
-
-__asm__("	.macro	back_to_back_c0_hazard			\n"
-	"	.endm						\n");
-
-#else
-
-__asm__("	.macro	back_to_back_c0_hazard			\n"
-	"	.set	noreorder				\n"
-	"	_ssnop						\n"
-	"	_ssnop						\n"
-	"	_ssnop						\n"
-	"	.set	reorder					\n"
-	"	.endm");
-
-#endif
-
-#define back_to_back_c0_hazard()					\
-	__asm__ __volatile__("back_to_back_c0_hazard")
-
-
-/*
- * Instruction execution hazard
- */
-#ifdef CONFIG_CPU_MIPSR2
-/*
- * gcc has a tradition of misscompiling the previous construct using the
- * address of a label as argument to inline assembler.  Gas otoh has the
- * annoying difference between la and dla which are only usable for 32-bit
- * rsp. 64-bit code, so can't be used without conditional compilation.
- * The alterantive is switching the assembler to 64-bit code which happens
- * to work right even for 32-bit code ...
+ * The taken branch will result in a two cycle penalty for the two killed
+ * instructions on R4000 / R4400.  Other processors only have a single cycle
+ * hazard so this is nice trick to have an optimal code for a range of
+ * processors.
  */
-#define instruction_hazard()						\
-do {									\
-	unsigned long tmp;						\
-									\
-	__asm__ __volatile__(						\
-	"	.set	mips64r2				\n"	\
-	"	dla	%0, 1f					\n"	\
-	"	jr.hb	%0					\n"	\
-	"	.set	mips0					\n"	\
-	"1:							\n"	\
-	: "=r" (tmp));							\
-} while (0)
-
-#else
+ASMMACRO(mtc0_tlbw_hazard,
+	 nop
+	)
+ASMMACRO(tlbw_use_hazard,
+	 nop; nop; nop
+	)
+ASMMACRO(tlb_probe_hazard,
+	 nop; nop; nop
+	)
+ASMMACRO(irq_enable_hazard,
+	)
+ASMMACRO(irq_disable_hazard,
+	 nop; nop; nop
+	)
+ASMMACRO(back_to_back_c0_hazard,
+	 _ssnop; _ssnop; _ssnop;
+	)
 #define instruction_hazard() do { } while (0)
-#endif
-
-extern void mips_ihb(void);
 
-#endif /* __ASSEMBLY__ */
+#endif
 
 #endif /* _ASM_HAZARDS_H */
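
Note: back_to_back_c0_hazard() answers the question the deleted comment asked, namely what is needed to separate a move to cp0 from a subsequent read of the same cp0 register: ehb on MIPSR2, three ssnops in the catchall case, nothing on R10000 and SB1. A hedged sketch of the classic consumer, modelled on the cp0 timer code; rearm_timer() is a hypothetical name, the accessors are from <asm/mipsregs.h>:

#include <asm/mipsregs.h>
#include <asm/hazards.h>

/* Sketch: reprogram CP0 Compare, then immediately read the timer back. */
static int rearm_timer(unsigned int delta)
{
	unsigned int cnt = read_c0_count();

	write_c0_compare(cnt + delta);
	back_to_back_c0_hazard();	/* settle the mtc0 before touching cp0 again */
	return (int)(read_c0_count() - cnt) < (int)delta;	/* nonzero: armed in time */
}

Net effect of the patch: a single ASMMACRO() definition per barrier replaces three parallel copies (assembler .macro blocks, C #defines, and __asm__ macro strings), tlbw_eret_hazard and the extern mips_ihb() declaration are dropped, and every CPU branch now provides the same six hazard macros plus instruction_hazard().
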