Diffstat (limited to 'arch/powerpc/include/asm/exception-64s.h')
-rw-r--r--	arch/powerpc/include/asm/exception-64s.h	97
1 file changed, 95 insertions, 2 deletions
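
A quick aside on the LOAD_HANDLER change in the second hunk below: switching addi to ori only works because every handler lives within 64K of the 64K-aligned kernel base, so OR-ing the offset into the base is the same as adding it, and ori's 16-bit unsigned immediate covers the full 0-64K range where addi's signed immediate would not. A minimal sketch of that arithmetic, with hypothetical kbase and offset values (illustration only, not part of the patch):

#include <assert.h>
#include <stdint.h>

int main(void)
{
	/* Hypothetical values for illustration: kbase is 64K-aligned,
	 * and the handler offset (label - _stext) is below 64K. */
	uint64_t kbase  = 0xc000000000010000ULL;
	uint64_t offset = 0x4f00;

	assert((kbase & 0xffffULL) == 0);		/* 64K alignment */
	assert(offset < 0x10000);			/* handler within 64K of kbase */
	assert((kbase | offset) == kbase + offset);	/* so ori == addi here */
	return 0;
}
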
diff --git a/arch/powerpc/include/asm/exception-64s.h b/arch/powerpc/include/asm/exception-64s.h
index a43c1473915f..ad708dda3ba3 100644
--- a/arch/powerpc/include/asm/exception-64s.h
+++ b/arch/powerpc/include/asm/exception-64s.h
@@ -48,6 +48,35 @@
 #define EX_LR		72
 #define EX_CFAR		80
 
+#ifdef CONFIG_RELOCATABLE
+#define EXCEPTION_RELON_PROLOG_PSERIES_1(label, h)			\
+	ld	r12,PACAKBASE(r13);	/* get high part of &label */	\
+	mfspr	r11,SPRN_##h##SRR0;	/* save SRR0 */			\
+	LOAD_HANDLER(r12,label);					\
+	mtlr	r12;							\
+	mfspr	r12,SPRN_##h##SRR1;	/* and SRR1 */			\
+	li	r10,MSR_RI;						\
+	mtmsrd	r10,1;			/* Set RI (EE=0) */		\
+	blr;
+#else
+/* If not relocatable, we can jump directly -- and save messing with LR */
+#define EXCEPTION_RELON_PROLOG_PSERIES_1(label, h)			\
+	mfspr	r11,SPRN_##h##SRR0;	/* save SRR0 */			\
+	mfspr	r12,SPRN_##h##SRR1;	/* and SRR1 */			\
+	li	r10,MSR_RI;						\
+	mtmsrd	r10,1;			/* Set RI (EE=0) */		\
+	b	label;
+#endif
+
+/*
+ * As EXCEPTION_PROLOG_PSERIES(), except we've already got relocation on
+ * so no need to rfid. Save lr in case we're CONFIG_RELOCATABLE, in which
+ * case EXCEPTION_RELON_PROLOG_PSERIES_1 will be using lr.
+ */
+#define EXCEPTION_RELON_PROLOG_PSERIES(area, label, h, extra, vec)	\
+	EXCEPTION_PROLOG_1(area, extra, vec);				\
+	EXCEPTION_RELON_PROLOG_PSERIES_1(label, h)
+
 /*
  * We're short on space and time in the exception prolog, so we can't
  * use the normal SET_REG_IMMEDIATE macro. Normally we just need the
@@ -55,12 +84,29 @@
  * word.
  */
 #define LOAD_HANDLER(reg, label)					\
-	addi	reg,reg,(label)-_stext;	/* virt addr of handler ... */
+	/* Handlers must be within 64K of kbase, which must be 64k aligned */ \
+	ori	reg,reg,(label)-_stext;	/* virt addr of handler ... */
 
 /* Exception register prefixes */
 #define EXC_HV	H
 #define EXC_STD
 
+#if defined(CONFIG_RELOCATABLE)
+/*
+ * If we support interrupts with relocation on AND we're a relocatable
+ * kernel, we need to use LR to get to the 2nd level handler. So, save/restore
+ * it when required.
+ */
+#define SAVE_LR(reg, area)	mflr	reg ;	std	reg,area+EX_LR(r13)
+#define GET_LR(reg, area)	ld	reg,area+EX_LR(r13)
+#define RESTORE_LR(reg, area)	ld	reg,area+EX_LR(r13) ; mtlr reg
+#else
+/* ...else LR is unused and in register. */
+#define SAVE_LR(reg, area)
+#define GET_LR(reg, area)	mflr	reg
+#define RESTORE_LR(reg, area)
+#endif
+
 #define __EXCEPTION_PROLOG_1(area, extra, vec)				\
 	GET_PACA(r13);							\
 	std	r9,area+EX_R9(r13);	/* save r9 - r12 */		\
@@ -69,6 +115,7 @@
 	mfspr	r10,SPRN_CFAR;						\
 	std	r10,area+EX_CFAR(r13);					\
 	END_FTR_SECTION_NESTED(CPU_FTR_CFAR, CPU_FTR_CFAR, 66);	\
+	SAVE_LR(r10, area);						\
 	mfcr	r9;							\
 	extra(vec);							\
 	std	r11,area+EX_R11(r13);					\
@@ -169,6 +216,7 @@ do_kvm_##n: \
 	sth	r1,PACA_TRAP_SAVE(r13);					\
 	std	r3,area+EX_R3(r13);					\
 	addi	r3,r13,area;		/* r3 -> where regs are saved*/	\
+	RESTORE_LR(r1, area);						\
 	b	bad_stack;						\
 3:	std	r9,_CCR(r1);		/* save CR in stackframe	*/ \
 	std	r11,_NIP(r1);		/* save SRR0 in stackframe	*/ \
@@ -194,8 +242,8 @@ do_kvm_##n: \
 	ld	r10,area+EX_CFAR(r13);					\
 	std	r10,ORIG_GPR3(r1);					\
 	END_FTR_SECTION_NESTED(CPU_FTR_CFAR, CPU_FTR_CFAR, 66);	\
+	GET_LR(r9,area);		/* Get LR, later save to stack	*/ \
 	ld	r2,PACATOC(r13);	/* get kernel TOC into r2	*/ \
-	mflr	r9;			/* save LR in stackframe	*/ \
 	std	r9,_LINK(r1);						\
 	mfctr	r10;			/* save CTR in stackframe	*/ \
 	std	r10,_CTR(r1);						\
@@ -232,6 +280,26 @@ label##_hv: \
 	EXCEPTION_PROLOG_PSERIES(PACA_EXGEN, label##_common,		\
 				 EXC_HV, KVMTEST, vec)
 
+#define STD_RELON_EXCEPTION_PSERIES(loc, vec, label)		\
+	. = loc;					\
+	.globl label##_relon_pSeries;			\
+label##_relon_pSeries:					\
+	HMT_MEDIUM;					\
+	/* No guest interrupts come through here */	\
+	SET_SCRATCH0(r13);		/* save r13 */	\
+	EXCEPTION_RELON_PROLOG_PSERIES(PACA_EXGEN, label##_common,	\
+				       EXC_STD, KVMTEST_PR, vec)
+
+#define STD_RELON_EXCEPTION_HV(loc, vec, label)			\
+	. = loc;						\
+	.globl label##_relon_hv;				\
+label##_relon_hv:						\
+	HMT_MEDIUM;						\
+	/* No guest interrupts come through here */		\
+	SET_SCRATCH0(r13);	/* save r13 */			\
+	EXCEPTION_RELON_PROLOG_PSERIES(PACA_EXGEN, label##_common,	\
+				       EXC_HV, KVMTEST, vec)
+
 /* This associate vector numbers with bits in paca->irq_happened */
 #define SOFTEN_VALUE_0x500	PACA_IRQ_EE
 #define SOFTEN_VALUE_0x502	PACA_IRQ_EE
@@ -257,6 +325,9 @@ label##_hv: \
 	KVMTEST(vec);							\
 	_SOFTEN_TEST(EXC_STD, vec)
 
+#define SOFTEN_NOTEST_PR(vec)		_SOFTEN_TEST(EXC_STD, vec)
+#define SOFTEN_NOTEST_HV(vec)		_SOFTEN_TEST(EXC_HV, vec)
+
 #define __MASKABLE_EXCEPTION_PSERIES(vec, label, h, extra)		\
 	HMT_MEDIUM;							\
 	SET_SCRATCH0(r13);    /* save r13 */				\
@@ -279,6 +350,28 @@ label##_hv: \
 	_MASKABLE_EXCEPTION_PSERIES(vec, label,				\
 				    EXC_HV, SOFTEN_TEST_HV)
 
+#define __MASKABLE_RELON_EXCEPTION_PSERIES(vec, label, h, extra)	\
+	HMT_MEDIUM;							\
+	SET_SCRATCH0(r13);    /* save r13 */				\
+	__EXCEPTION_PROLOG_1(PACA_EXGEN, extra, vec);			\
+	EXCEPTION_RELON_PROLOG_PSERIES_1(label##_common, h);
+#define _MASKABLE_RELON_EXCEPTION_PSERIES(vec, label, h, extra)	\
+	__MASKABLE_RELON_EXCEPTION_PSERIES(vec, label, h, extra)
+
+#define MASKABLE_RELON_EXCEPTION_PSERIES(loc, vec, label)		\
+	. = loc;							\
+	.globl label##_relon_pSeries;					\
+label##_relon_pSeries:							\
+	_MASKABLE_RELON_EXCEPTION_PSERIES(vec, label,			\
+					  EXC_STD, SOFTEN_NOTEST_PR)
+
+#define MASKABLE_RELON_EXCEPTION_HV(loc, vec, label)			\
+	. = loc;							\
+	.globl label##_relon_hv;					\
+label##_relon_hv:							\
+	_MASKABLE_RELON_EXCEPTION_PSERIES(vec, label,			\
+					  EXC_HV, SOFTEN_NOTEST_HV)
+
 /*
  * Our exception common code can be passed various "additions"
  * to specify the behaviour of interrupts, whether to kick the
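
For context, the macros added above are meant to be invoked from the exception vector assembly to lay down relocation-on variants of existing vectors. A minimal sketch of such call sites, with hypothetical (loc, vec, label) values (illustration only, not part of this patch):

	/* Relocation-on variants of a standard and a maskable vector;
	 * 0x4300/0x300 and 0x4900/0x900 are hypothetical (loc, vec) pairs. */
	STD_RELON_EXCEPTION_PSERIES(0x4300, 0x300, data_access)
	MASKABLE_RELON_EXCEPTION_PSERIES(0x4900, 0x900, decrementer)

Each macro already emits the ". = loc" directive and the label##_relon_pSeries (or _relon_hv) entry point, so the call site only supplies the vector location, the vector number passed to the KVM/soften test, and the label prefix whose ##_common handler the prolog branches to.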