Diff overview
File affected: arch/arm/kvm/interrupts_head.S (new file, mode -rw-r--r--, blob index 000000000000..6a95d341e9c5)
Diffstat: 1 file changed, 441 insertions(+), 0 deletions(-)
diff --git a/arch/arm/kvm/interrupts_head.S b/arch/arm/kvm/interrupts_head.S
@@ -0,0 +1,441 @@
/* Byte offset of guest usr-mode register _reg_nr inside the vcpu struct:
 * the usr regs are an array of 32-bit words starting at VCPU_USR_REGS. */
#define VCPU_USR_REG(_reg_nr) (VCPU_USR_REGS + (_reg_nr * 4))
#define VCPU_USR_SP (VCPU_USR_REG(13))	/* r13 is SP in usr mode */
#define VCPU_USR_LR (VCPU_USR_REG(14))	/* r14 is LR in usr mode */
/* Byte offset of saved cp15 register _cp15_reg_idx inside the vcpu struct */
#define CP15_OFFSET(_cp15_reg_idx) (VCPU_CP15 + (_cp15_reg_idx * 4))
5 | |||
/*
 * Many of these macros need to access the VCPU structure, which is always
 * held in r0. These macros should never clobber r1, as it is used to hold the
 * exception code on the return path (except of course the macro that switches
 * all the registers before the final jump to the VM).
 */
vcpu	.req	r0		@ vcpu pointer always in r0
13 | |||
/*
 * Save the VFP/FP state to the area pointed to by \vfp_base:
 * the d-registers first (via VFPFSTMIA), then FPEXC, FPSCR, FPINST, FPINST2.
 * Clobbers {r2-r6}.
 */
.macro store_vfp_state vfp_base
	@ The VFPFMRX and VFPFMXR macros are the VMRS and VMSR instructions
	VFPFMRX	r2, FPEXC
	@ Make sure VFP is enabled so we can touch the registers.
	orr	r6, r2, #FPEXC_EN
	VFPFMXR	FPEXC, r6

	VFPFMRX	r3, FPSCR
	tst	r2, #FPEXC_EX		@ Check for VFP Subarchitecture
	beq	1f
	@ If FPEXC_EX is 0, then FPINST/FPINST2 reads are unpredictable, so
	@ we only need to save them if FPEXC_EX is set.
	VFPFMRX	r4, FPINST
	tst	r2, #FPEXC_FP2V		@ FPINST2 only valid if FPEXC.FP2V set
	VFPFMRX	r5, FPINST2, ne		@ vmrsne
	bic	r6, r2, #FPEXC_EX	@ FPEXC_EX disable
	VFPFMXR	FPEXC, r6
1:
	VFPFSTMIA \vfp_base, r6		@ Save VFP registers
	stm	\vfp_base, {r2-r5}	@ Save FPEXC, FPSCR, FPINST, FPINST2
.endm
36 | |||
/*
 * Restore the VFP/FP state from the area pointed to by \vfp_base, mirroring
 * the layout written by store_vfp_state.
 * Assume FPEXC_EN is on and FPEXC_EX is off, clobbers {r2-r6}.
 */
.macro restore_vfp_state vfp_base
	VFPFLDMIA \vfp_base, r6		@ Load VFP registers
	ldm	\vfp_base, {r2-r5}	@ Load FPEXC, FPSCR, FPINST, FPINST2

	VFPFMXR	FPSCR, r3
	tst	r2, #FPEXC_EX		@ Check for VFP Subarchitecture
	beq	1f
	VFPFMXR	FPINST, r4
	tst	r2, #FPEXC_FP2V		@ FPINST2 only valid if FPEXC.FP2V set
	VFPFMXR	FPINST2, r5, ne
1:
	@ Restore the real FPEXC value last: if its EN bit is clear, any
	@ later VFP access would trap, so nothing may touch VFP after this.
	VFPFMXR	FPEXC, r2		@ FPEXC	(last, in case !EN)
.endm
51 | |||
/* These are simply for the macros to work - values don't have meaning.
 * They let mode names be passed as macro arguments and compared with .if. */
.equ usr, 0
.equ svc, 1
.equ abt, 2
.equ und, 3
.equ irq, 4
.equ fiq, 5
59 | |||
/*
 * Push the banked SP, LR and SPSR of \mode onto the stack (3 words).
 * Uses the virtualization extensions' banked-register mrs encodings,
 * which are only available from Hyp mode. Clobbers r2-r4.
 * Must be matched by pop_host_regs_mode in the reverse mode order.
 */
.macro push_host_regs_mode mode
	mrs	r2, SP_\mode
	mrs	r3, LR_\mode
	mrs	r4, SPSR_\mode
	push	{r2, r3, r4}
.endm
66 | |||
/*
 * Store all host persistent registers on the stack.
 * Clobbers all registers, in all modes, except r0 and r1.
 * The push order here defines the stack layout that restore_host_regs
 * pops in exact reverse; keep the two macros in sync.
 */
.macro save_host_regs
	/* Hyp regs. Only ELR_hyp (SPSR_hyp already saved) */
	mrs	r2, ELR_hyp
	push	{r2}

	/* usr regs */
	push	{r4-r12}	@ r0-r3 are always clobbered
	mrs	r2, SP_usr
	mov	r3, lr		@ lr here is the Hyp-mode LR (host return addr)
	push	{r2, r3}

	push_host_regs_mode svc
	push_host_regs_mode abt
	push_host_regs_mode und
	push_host_regs_mode irq

	/* fiq regs: fiq mode banks r8-r12 in addition to SP/LR/SPSR */
	mrs	r2, r8_fiq
	mrs	r3, r9_fiq
	mrs	r4, r10_fiq
	mrs	r5, r11_fiq
	mrs	r6, r12_fiq
	mrs	r7, SP_fiq
	mrs	r8, LR_fiq
	mrs	r9, SPSR_fiq
	push	{r2-r9}
.endm
98 | |||
/*
 * Pop the banked SP, LR and SPSR of \mode from the stack (3 words).
 * Mirror of push_host_regs_mode; pop order matches its push order.
 * Clobbers r2-r4.
 */
.macro pop_host_regs_mode mode
	pop	{r2, r3, r4}
	msr	SP_\mode, r2
	msr	LR_\mode, r3
	msr	SPSR_\mode, r4
.endm
105 | |||
/*
 * Restore all host registers from the stack.
 * Clobbers all registers, in all modes, except r0 and r1.
 * Pops in the exact reverse order of save_host_regs' pushes.
 */
.macro restore_host_regs
	pop	{r2-r9}
	msr	r8_fiq, r2
	msr	r9_fiq, r3
	msr	r10_fiq, r4
	msr	r11_fiq, r5
	msr	r12_fiq, r6
	msr	SP_fiq, r7
	msr	LR_fiq, r8
	msr	SPSR_fiq, r9

	pop_host_regs_mode irq
	pop_host_regs_mode und
	pop_host_regs_mode abt
	pop_host_regs_mode svc

	pop	{r2, r3}
	msr	SP_usr, r2
	mov	lr, r3		@ restore Hyp-mode LR (host return addr)
	pop	{r4-r12}

	/* Hyp regs. Only ELR_hyp (SPSR_hyp restored by exception return) */
	pop	{r2}
	msr	ELR_hyp, r2
.endm
134 | |||
/*
 * Restore SP, LR and SPSR for a given mode. offset is the offset of
 * this mode's registers from the VCPU base, stored as three consecutive
 * 32-bit words: SP, LR, SPSR.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r1, r2, r3, r4.
 */
.macro restore_guest_regs_mode mode, offset
	add	r1, vcpu, \offset
	ldm	r1, {r2, r3, r4}
	msr	SP_\mode, r2
	msr	LR_\mode, r3
	msr	SPSR_\mode, r4
.endm
150 | |||
/*
 * Restore all guest registers from the vcpu struct.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers *all* registers — including r0 (the vcpu pointer) and r1,
 * because the final ldm loads the guest's r0-r12. Nothing that needs
 * the vcpu pointer may run after this macro.
 */
.macro restore_guest_regs
	restore_guest_regs_mode svc, #VCPU_SVC_REGS
	restore_guest_regs_mode abt, #VCPU_ABT_REGS
	restore_guest_regs_mode und, #VCPU_UND_REGS
	restore_guest_regs_mode irq, #VCPU_IRQ_REGS

	/* fiq mode additionally banks r8-r12 */
	add	r1, vcpu, #VCPU_FIQ_REGS
	ldm	r1, {r2-r9}
	msr	r8_fiq, r2
	msr	r9_fiq, r3
	msr	r10_fiq, r4
	msr	r11_fiq, r5
	msr	r12_fiq, r6
	msr	SP_fiq, r7
	msr	LR_fiq, r8
	msr	SPSR_fiq, r9

	@ Load return state: guest resumes at PC with CPSR on exception return
	ldr	r2, [vcpu, #VCPU_PC]
	ldr	r3, [vcpu, #VCPU_CPSR]
	msr	ELR_hyp, r2
	msr	SPSR_cxsf, r3

	@ Load user registers
	ldr	r2, [vcpu, #VCPU_USR_SP]
	ldr	r3, [vcpu, #VCPU_USR_LR]
	msr	SP_usr, r2
	mov	lr, r3
	@ Last step: this ldm overwrites r0-r12, destroying the vcpu pointer.
	add	vcpu, vcpu, #(VCPU_USR_REGS)
	ldm	vcpu, {r0-r12}
.endm
189 | |||
/*
 * Save SP, LR and SPSR for a given mode. offset is the offset of
 * this mode's registers from the VCPU base; they are stored as three
 * consecutive 32-bit words: SP, LR, SPSR.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r2, r3, r4, r5.
 */
.macro save_guest_regs_mode mode, offset
	add	r2, vcpu, \offset
	mrs	r3, SP_\mode
	mrs	r4, LR_\mode
	mrs	r5, SPSR_\mode
	stm	r2, {r3, r4, r5}
.endm
205 | |||
/*
 * Save all guest registers to the vcpu struct
 * Expects guest's r0, r1, r2 on the stack (pushed by the exception entry
 * code before it needed scratch registers).
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r2, r3, r4, r5.
 */
.macro save_guest_regs
	@ Store usr registers r3-r12 directly, then recover the guest's
	@ r0-r2 from the stack and store them into slots 0-2.
	add	r2, vcpu, #VCPU_USR_REG(3)
	stm	r2, {r3-r12}
	add	r2, vcpu, #VCPU_USR_REG(0)
	pop	{r3, r4, r5}		@ r0, r1, r2
	stm	r2, {r3, r4, r5}
	mrs	r2, SP_usr
	mov	r3, lr			@ lr still holds the guest's usr LR here
	str	r2, [vcpu, #VCPU_USR_SP]
	str	r3, [vcpu, #VCPU_USR_LR]

	@ Store return state: where the guest trapped and its CPSR
	mrs	r2, ELR_hyp
	mrs	r3, spsr
	str	r2, [vcpu, #VCPU_PC]
	str	r3, [vcpu, #VCPU_CPSR]

	@ Store other guest registers (banked SP/LR/SPSR per mode)
	save_guest_regs_mode svc, #VCPU_SVC_REGS
	save_guest_regs_mode abt, #VCPU_ABT_REGS
	save_guest_regs_mode und, #VCPU_UND_REGS
	save_guest_regs_mode irq, #VCPU_IRQ_REGS
.endm
238 | |||
/* Reads cp15 registers from hardware and stores them in memory
 * @store_to_vcpu: If 0, registers are written in-order to the stack,
 * 		   otherwise to the VCPU struct pointed to by vcpup
 *
 * Done in two r2-r12 batches because there are more registers than free
 * scratch registers. The stack push order must match the pop order in
 * write_cp15_state's read_from_vcpu == 0 path.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r2 - r12
 */
.macro read_cp15_state store_to_vcpu
	@ Batch 1: MMU/translation related state
	mrc	p15, 0, r2, c1, c0, 0	@ SCTLR
	mrc	p15, 0, r3, c1, c0, 2	@ CPACR
	mrc	p15, 0, r4, c2, c0, 2	@ TTBCR
	mrc	p15, 0, r5, c3, c0, 0	@ DACR
	mrrc	p15, 0, r6, r7, c2	@ TTBR 0 (64-bit read)
	mrrc	p15, 1, r8, r9, c2	@ TTBR 1 (64-bit read)
	mrc	p15, 0, r10, c10, c2, 0	@ PRRR
	mrc	p15, 0, r11, c10, c2, 1	@ NMRR
	mrc	p15, 2, r12, c0, c0, 0	@ CSSELR

	.if \store_to_vcpu == 0
	push	{r2-r12}		@ Push CP15 registers
	.else
	str	r2, [vcpu, #CP15_OFFSET(c1_SCTLR)]
	str	r3, [vcpu, #CP15_OFFSET(c1_CPACR)]
	str	r4, [vcpu, #CP15_OFFSET(c2_TTBCR)]
	str	r5, [vcpu, #CP15_OFFSET(c3_DACR)]
	add	r2, vcpu, #CP15_OFFSET(c2_TTBR0)
	strd	r6, r7, [r2]		@ 64-bit TTBR0 needs strd via address reg
	add	r2, vcpu, #CP15_OFFSET(c2_TTBR1)
	strd	r8, r9, [r2]
	str	r10, [vcpu, #CP15_OFFSET(c10_PRRR)]
	str	r11, [vcpu, #CP15_OFFSET(c10_NMRR)]
	str	r12, [vcpu, #CP15_OFFSET(c0_CSSELR)]
	.endif

	@ Batch 2: thread ID and fault status/address state
	mrc	p15, 0, r2, c13, c0, 1	@ CID
	mrc	p15, 0, r3, c13, c0, 2	@ TID_URW
	mrc	p15, 0, r4, c13, c0, 3	@ TID_URO
	mrc	p15, 0, r5, c13, c0, 4	@ TID_PRIV
	mrc	p15, 0, r6, c5, c0, 0	@ DFSR
	mrc	p15, 0, r7, c5, c0, 1	@ IFSR
	mrc	p15, 0, r8, c5, c1, 0	@ ADFSR
	mrc	p15, 0, r9, c5, c1, 1	@ AIFSR
	mrc	p15, 0, r10, c6, c0, 0	@ DFAR
	mrc	p15, 0, r11, c6, c0, 2	@ IFAR
	mrc	p15, 0, r12, c12, c0, 0	@ VBAR

	.if \store_to_vcpu == 0
	push	{r2-r12}		@ Push CP15 registers
	.else
	str	r2, [vcpu, #CP15_OFFSET(c13_CID)]
	str	r3, [vcpu, #CP15_OFFSET(c13_TID_URW)]
	str	r4, [vcpu, #CP15_OFFSET(c13_TID_URO)]
	str	r5, [vcpu, #CP15_OFFSET(c13_TID_PRIV)]
	str	r6, [vcpu, #CP15_OFFSET(c5_DFSR)]
	str	r7, [vcpu, #CP15_OFFSET(c5_IFSR)]
	str	r8, [vcpu, #CP15_OFFSET(c5_ADFSR)]
	str	r9, [vcpu, #CP15_OFFSET(c5_AIFSR)]
	str	r10, [vcpu, #CP15_OFFSET(c6_DFAR)]
	str	r11, [vcpu, #CP15_OFFSET(c6_IFAR)]
	str	r12, [vcpu, #CP15_OFFSET(c12_VBAR)]
	.endif
.endm
302 | |||
/*
 * Reads cp15 registers from memory and writes them to hardware
 * @read_from_vcpu: If 0, registers are read in-order from the stack,
 *		    otherwise from the VCPU struct pointed to by vcpup
 *
 * The two batches are handled in the reverse order of read_cp15_state,
 * so the stack path's pops match the earlier pushes.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r2 - r12
 */
.macro write_cp15_state read_from_vcpu
	@ Batch 2 first: thread ID and fault status/address state
	.if \read_from_vcpu == 0
	pop	{r2-r12}
	.else
	ldr	r2, [vcpu, #CP15_OFFSET(c13_CID)]
	ldr	r3, [vcpu, #CP15_OFFSET(c13_TID_URW)]
	ldr	r4, [vcpu, #CP15_OFFSET(c13_TID_URO)]
	ldr	r5, [vcpu, #CP15_OFFSET(c13_TID_PRIV)]
	ldr	r6, [vcpu, #CP15_OFFSET(c5_DFSR)]
	ldr	r7, [vcpu, #CP15_OFFSET(c5_IFSR)]
	ldr	r8, [vcpu, #CP15_OFFSET(c5_ADFSR)]
	ldr	r9, [vcpu, #CP15_OFFSET(c5_AIFSR)]
	ldr	r10, [vcpu, #CP15_OFFSET(c6_DFAR)]
	ldr	r11, [vcpu, #CP15_OFFSET(c6_IFAR)]
	ldr	r12, [vcpu, #CP15_OFFSET(c12_VBAR)]
	.endif

	mcr	p15, 0, r2, c13, c0, 1	@ CID
	mcr	p15, 0, r3, c13, c0, 2	@ TID_URW
	mcr	p15, 0, r4, c13, c0, 3	@ TID_URO
	mcr	p15, 0, r5, c13, c0, 4	@ TID_PRIV
	mcr	p15, 0, r6, c5, c0, 0	@ DFSR
	mcr	p15, 0, r7, c5, c0, 1	@ IFSR
	mcr	p15, 0, r8, c5, c1, 0	@ ADFSR
	mcr	p15, 0, r9, c5, c1, 1	@ AIFSR
	mcr	p15, 0, r10, c6, c0, 0	@ DFAR
	mcr	p15, 0, r11, c6, c0, 2	@ IFAR
	mcr	p15, 0, r12, c12, c0, 0	@ VBAR

	@ Batch 1: MMU/translation related state
	.if \read_from_vcpu == 0
	pop	{r2-r12}
	.else
	ldr	r2, [vcpu, #CP15_OFFSET(c1_SCTLR)]
	ldr	r3, [vcpu, #CP15_OFFSET(c1_CPACR)]
	ldr	r4, [vcpu, #CP15_OFFSET(c2_TTBCR)]
	ldr	r5, [vcpu, #CP15_OFFSET(c3_DACR)]
	@ r12 doubles as scratch for the ldrd addresses; CSSELR is loaded
	@ into r12 last, after it is no longer needed as an address reg.
	add	r12, vcpu, #CP15_OFFSET(c2_TTBR0)
	ldrd	r6, r7, [r12]
	add	r12, vcpu, #CP15_OFFSET(c2_TTBR1)
	ldrd	r8, r9, [r12]
	ldr	r10, [vcpu, #CP15_OFFSET(c10_PRRR)]
	ldr	r11, [vcpu, #CP15_OFFSET(c10_NMRR)]
	ldr	r12, [vcpu, #CP15_OFFSET(c0_CSSELR)]
	.endif

	mcr	p15, 0, r2, c1, c0, 0	@ SCTLR
	mcr	p15, 0, r3, c1, c0, 2	@ CPACR
	mcr	p15, 0, r4, c2, c0, 2	@ TTBCR
	mcr	p15, 0, r5, c3, c0, 0	@ DACR
	mcrr	p15, 0, r6, r7, c2	@ TTBR 0 (64-bit write)
	mcrr	p15, 1, r8, r9, c2	@ TTBR 1 (64-bit write)
	mcr	p15, 0, r10, c10, c2, 0	@ PRRR
	mcr	p15, 0, r11, c10, c2, 1	@ NMRR
	mcr	p15, 2, r12, c0, c0, 0	@ CSSELR
.endm
365 | |||
/*
 * Save the VGIC CPU state into memory
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Intentionally empty: placeholder until VGIC support is wired up.
 */
.macro save_vgic_state
.endm
373 | |||
/*
 * Restore the VGIC CPU state from memory
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Intentionally empty: placeholder until VGIC support is wired up.
 */
.macro restore_vgic_state
.endm
381 | |||
/* Direction selectors for the set_* / configure_* macros below:
 * values are arbitrary, only compared with .if inside the macros. */
.equ vmentry,	0
.equ vmexit,	1
384 | |||
/* Configures the HSTR (Hyp System Trap Register) on entry/return
 * (hardware reset value is 0).
 * On vmentry, traps guest accesses to cp15 CRn/CRm 15; on vmexit, clears
 * all CRx traps for the host. Clobbers r2, r3. */
.macro set_hstr operation
	mrc	p15, 4, r2, c1, c1, 3
	ldr	r3, =HSTR_T(15)
	.if \operation == vmentry
	orr	r2, r2, r3		@ Trap CR{15}
	.else
	bic	r2, r2, r3		@ Don't trap any CRx accesses
	.endif
	mcr	p15, 4, r2, c1, c1, 3
.endm
397 | |||
/* Configures the HCPTR (Hyp Coprocessor Trap Register) on entry/return
 * (hardware reset value is 0). Keeps previous value in r2 so the caller
 * can inspect/restore it. Clobbers r3.
 * @mask selects which coprocessor trap bits to set (vmentry) or
 * clear (vmexit). */
.macro set_hcptr operation, mask
	mrc	p15, 4, r2, c1, c1, 2
	ldr	r3, =\mask
	.if \operation == vmentry
	orr	r3, r2, r3		@ Trap coproc-accesses defined in mask
	.else
	bic	r3, r2, r3		@ Don't trap defined coproc-accesses
	.endif
	mcr	p15, 4, r3, c1, c1, 2
.endm
410 | |||
/* Configures the HDCR (Hyp Debug Configuration Register) on entry/return
 * (hardware reset value is 0).
 * Enables (vmentry) or disables (vmexit) trapping of guest performance
 * monitor accesses (TPM/TPMCR). Clobbers r2, r3. */
.macro set_hdcr operation
	mrc	p15, 4, r2, c1, c1, 1
	ldr	r3, =(HDCR_TPM|HDCR_TPMCR)
	.if \operation == vmentry
	orr	r2, r2, r3		@ Trap some perfmon accesses
	.else
	bic	r2, r2, r3		@ Don't trap any perfmon accesses
	.endif
	mcr	p15, 4, r2, c1, c1, 1
.endm
423 | |||
/* Enable/Disable: stage-2 trans., trap interrupts, trap wfi, trap smc
 * by setting/clearing HCR_GUEST_MASK in the HCR. On vmentry, also OR in
 * the pending virtual interrupt lines from the vcpu struct. The virtual
 * exception bits are always cleared first to start from a known state.
 *
 * Assumes vcpu pointer in vcpu reg. Clobbers r2, r3. */
.macro configure_hyp_role operation
	mrc	p15, 4, r2, c1, c1, 0	@ HCR
	bic	r2, r2, #HCR_VIRT_EXCP_MASK
	ldr	r3, =HCR_GUEST_MASK
	.if \operation == vmentry
	orr	r2, r2, r3
	ldr	r3, [vcpu, #VCPU_IRQ_LINES]	@ inject pending VI/VF/VA
	orr	r2, r2, r3
	.else
	bic	r2, r2, r3
	.endif
	mcr	p15, 4, r2, c1, c1, 0
.endm
438 | |||
/* Load the current vcpu pointer (stashed in the Hyp-mode software thread
 * ID register on world switch) back into the vcpu reg (r0). */
.macro load_vcpu
	mrc	p15, 4, vcpu, c13, c0, 2	@ HTPIDR
.endm