Diffstat (limited to 'include/asm-mips/hazards.h')
-rw-r--r--	include/asm-mips/hazards.h	271
1 file changed, 0 insertions(+), 271 deletions(-)
diff --git a/include/asm-mips/hazards.h b/include/asm-mips/hazards.h
deleted file mode 100644
index 2de638f84c86..000000000000
--- a/include/asm-mips/hazards.h
+++ /dev/null
@@ -1,271 +0,0 @@
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 04, 07 Ralf Baechle <ralf@linux-mips.org>
 * Copyright (C) MIPS Technologies, Inc.
 *   written by Ralf Baechle <ralf@linux-mips.org>
 */
#ifndef _ASM_HAZARDS_H
#define _ASM_HAZARDS_H

#ifdef __ASSEMBLY__
#define ASMMACRO(name, code...) .macro name; code; .endm
#else

#include <asm/cpu-features.h>

#define ASMMACRO(name, code...)					\
__asm__(".macro " #name "; " #code "; .endm");			\
								\
static inline void name(void)					\
{								\
	__asm__ __volatile__ (#name);				\
}

/*
 * MIPS R2 instruction hazard barrier.  Needs to be called as a subroutine.
 */
extern void mips_ihb(void);

#endif

ASMMACRO(_ssnop,
	 sll	$0, $0, 1
	)

ASMMACRO(_ehb,
	 sll	$0, $0, 3
	)
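/*
 * As an illustration, the ASMMACRO(_ehb, ...) definition above expands,
 * when included from C, to roughly:
 *
 *	__asm__(".macro _ehb; sll $0, $0, 3; .endm");
 *
 *	static inline void _ehb(void)
 *	{
 *		__asm__ __volatile__("_ehb");
 *	}
 *
 * so the same barrier name is usable from both assembly and C callers.
 */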

/*
 * TLB hazards
 */
#if defined(CONFIG_CPU_MIPSR2)

/*
 * MIPSR2 defines ehb for hazard avoidance
 */

ASMMACRO(mtc0_tlbw_hazard,
	 _ehb
	)
ASMMACRO(tlbw_use_hazard,
	 _ehb
	)
ASMMACRO(tlb_probe_hazard,
	 _ehb
	)
ASMMACRO(irq_enable_hazard,
	 _ehb
	)
ASMMACRO(irq_disable_hazard,
	 _ehb
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ehb
	)
/*
 * gcc has a tradition of miscompiling the previous construct using the
 * address of a label as an argument to inline assembler.  Gas, on the other
 * hand, has the annoying difference between la and dla, which are only
 * usable for 32-bit resp. 64-bit code, so they can't be used without
 * conditional compilation.  The alternative is switching the assembler to
 * 64-bit code, which happens to work right even for 32-bit code ...
 */
#define instruction_hazard()						\
do {									\
	unsigned long tmp;						\
									\
	__asm__ __volatile__(						\
	"	.set	mips64r2				\n"	\
	"	dla	%0, 1f					\n"	\
	"	jr.hb	%0					\n"	\
	"	.set	mips0					\n"	\
	"1:							\n"	\
	: "=r" (tmp));							\
} while (0)

#elif defined(CONFIG_CPU_MIPSR1)

/*
 * These are slightly complicated by the fact that we guarantee that R1
 * kernels will run fine on R2 processors.
 */
ASMMACRO(mtc0_tlbw_hazard,
	 _ssnop; _ssnop; _ehb
	)
ASMMACRO(tlbw_use_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(tlb_probe_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(irq_enable_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(irq_disable_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
/*
 * gcc has a tradition of miscompiling the previous construct using the
 * address of a label as an argument to inline assembler.  Gas, on the other
 * hand, has the annoying difference between la and dla, which are only
 * usable for 32-bit resp. 64-bit code, so they can't be used without
 * conditional compilation.  The alternative is switching the assembler to
 * 64-bit code, which happens to work right even for 32-bit code ...
 */
#define __instruction_hazard()						\
do {									\
	unsigned long tmp;						\
									\
	__asm__ __volatile__(						\
	"	.set	mips64r2				\n"	\
	"	dla	%0, 1f					\n"	\
	"	jr.hb	%0					\n"	\
	"	.set	mips0					\n"	\
	"1:							\n"	\
	: "=r" (tmp));							\
} while (0)

#define instruction_hazard()						\
do {									\
	if (cpu_has_mips_r2)						\
		__instruction_hazard();					\
} while (0)

#elif defined(CONFIG_CPU_R10000)

/*
 * R10000 rocks - all hazards are handled in hardware, so this becomes a
 * no-brainer.
 */

ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#elif defined(CONFIG_CPU_RM9000)

/*
 * RM9000 hazards.  When the JTLB is updated by tlbwi or tlbwr, a subsequent
 * use of the JTLB for instructions should not occur for 4 cpu cycles and use
 * for data translations should not occur for 3 cpu cycles.
 */

ASMMACRO(mtc0_tlbw_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(tlbw_use_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(tlb_probe_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#elif defined(CONFIG_CPU_SB1)

/*
 * Mostly like R4000 for historical reasons
 */
ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	 _ssnop; _ssnop; _ssnop
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#else

/*
 * Finally the catchall case for all other processors including R4000, R4400,
 * R4600, R4700, R5000, RM7000, NEC VR41xx etc.
 *
 * The taken branch will result in a two cycle penalty for the two killed
 * instructions on R4000 / R4400.  Other processors only have a single cycle
 * hazard, so this is a nice trick to have optimal code for a range of
 * processors.
 */
ASMMACRO(mtc0_tlbw_hazard,
	 nop; nop
	)
ASMMACRO(tlbw_use_hazard,
	 nop; nop; nop
	)
ASMMACRO(tlb_probe_hazard,
	 nop; nop; nop
	)
ASMMACRO(irq_enable_hazard,
	 _ssnop; _ssnop; _ssnop;
	)
ASMMACRO(irq_disable_hazard,
	 nop; nop; nop
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ssnop; _ssnop; _ssnop;
	)
#define instruction_hazard() do { } while (0)

#endif


/* FPU hazards */

#if defined(CONFIG_CPU_SB1)
ASMMACRO(enable_fpu_hazard,
	 .set	push;
	 .set	mips64;
	 .set	noreorder;
	 _ssnop;
	 bnezl	$0, .+4;
	 _ssnop;
	 .set	pop
	)
ASMMACRO(disable_fpu_hazard,
	)

#elif defined(CONFIG_CPU_MIPSR2)
ASMMACRO(enable_fpu_hazard,
	 _ehb
	)
ASMMACRO(disable_fpu_hazard,
	 _ehb
	)
#else
ASMMACRO(enable_fpu_hazard,
	 nop; nop; nop; nop
	)
ASMMACRO(disable_fpu_hazard,
	 _ehb
	)
#endif

#endif /* _ASM_HAZARDS_H */
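
The barriers above are intended to sit between a coprocessor 0 update and the
first instruction that depends on it.  As a rough sketch of the calling
pattern (assuming the usual CP0 accessors and tlb_write_indexed() helper from
<asm/mipsregs.h>; the surrounding code is illustrative, not taken from this
header):

	/* Write one TLB entry, with hazard barriers around the tlbwi. */
	write_c0_index(idx);
	write_c0_entryhi(entryhi);
	write_c0_entrylo0(entrylo0);
	write_c0_entrylo1(entrylo1);
	mtc0_tlbw_hazard();	/* CP0 writes must settle before tlbwi */
	tlb_write_indexed();
	tlbw_use_hazard();	/* new entry must be visible before any use */

On a CONFIG_CPU_MIPSR2 kernel each of these calls emits a single ehb; on the
generic R4000-style configuration they fall back to the nop/ssnop sequences
defined in the catchall branch above.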