-rw-r--r--	arch/powerpc/kernel/fpu.S	| 54
-rw-r--r--	arch/powerpc/kernel/vector.S	| 51
2 files changed, 105 insertions, 0 deletions
diff --git a/arch/powerpc/kernel/fpu.S b/arch/powerpc/kernel/fpu.S
index adb155195394..caeaabf11a2f 100644
--- a/arch/powerpc/kernel/fpu.S
+++ b/arch/powerpc/kernel/fpu.S
@@ -62,6 +62,60 @@ END_FTR_SECTION_IFSET(CPU_FTR_VSX);					\
 	__REST_32FPVSRS_TRANSACT(n,__REG_##c,__REG_##base)
 #define SAVE_32FPVSRS(n,c,base) __SAVE_32FPVSRS(n,__REG_##c,__REG_##base)
 
+#ifdef CONFIG_PPC_TRANSACTIONAL_MEM
+/*
+ * Wrapper to call load_up_fpu from C.
+ * void do_load_up_fpu(struct pt_regs *regs);
+ */
+_GLOBAL(do_load_up_fpu)
+	mflr	r0
+	std	r0, 16(r1)
+	stdu	r1, -112(r1)
+
+	subi	r6, r3, STACK_FRAME_OVERHEAD
+	/* load_up_fpu expects r12=MSR, r13=PACA, and returns
+	 * with r12 = new MSR.
+	 */
+	ld	r12,_MSR(r6)
+	GET_PACA(r13)
+
+	bl	load_up_fpu
+	std	r12,_MSR(r6)
+
+	ld	r0, 112+16(r1)
+	addi	r1, r1, 112
+	mtlr	r0
+	blr
+
+
+/* void do_load_up_transact_fpu(struct thread_struct *thread)
+ *
+ * This is similar to load_up_fpu but for the transactional version of the FP
+ * register set.  It doesn't mess with the task MSR or valid flags.
+ * Furthermore, we don't do lazy FP with TM currently.
+ */
+_GLOBAL(do_load_up_transact_fpu)
+	mfmsr	r6
+	ori	r5,r6,MSR_FP
+#ifdef CONFIG_VSX
+BEGIN_FTR_SECTION
+	oris	r5,r5,MSR_VSX@h
+END_FTR_SECTION_IFSET(CPU_FTR_VSX)
+#endif
+	SYNC
+	MTMSRD(r5)
+
+	lfd	fr0,THREAD_TRANSACT_FPSCR(r3)
+	MTFSF_L(fr0)
+	REST_32FPVSRS_TRANSACT(0, R4, R3)
+
+	/* FP/VSX off again */
+	MTMSRD(r6)
+	SYNC
+
+	blr
+#endif /* CONFIG_PPC_TRANSACTIONAL_MEM */
+
 /*
  * This task wants to use the FPU now.
  * On UP, disable FP for the task which had the FPU previously,
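
For context, a minimal C sketch of how these two wrappers might be paired by a caller. Only the two do_load_up_* prototypes come from the comments in the diff above; the handler name fp_unavailable_tm and the call sequence are assumptions for illustration, not part of this patch.

	#include <linux/sched.h>
	#include <asm/ptrace.h>

	/* Prototypes as documented in the asm comments above. */
	extern void do_load_up_fpu(struct pt_regs *regs);
	extern void do_load_up_transact_fpu(struct thread_struct *thread);

	/* Hypothetical FP-unavailable-in-transaction handler (name assumed). */
	void fp_unavailable_tm(struct pt_regs *regs)
	{
		/* Load the checkpointed FP state; the asm wrapper passes regs
		 * down to load_up_fpu and stores the returned MSR (now with
		 * MSR_FP set) back into regs->msr. */
		do_load_up_fpu(regs);

		/* Load the transactional FPRs and FPSCR from the thread
		 * struct; per the comment above, this leaves the task MSR
		 * and valid flags alone. */
		do_load_up_transact_fpu(&current->thread);
	}
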
diff --git a/arch/powerpc/kernel/vector.S b/arch/powerpc/kernel/vector.S
index e830289d2e48..9e20999aaef2 100644
--- a/arch/powerpc/kernel/vector.S
+++ b/arch/powerpc/kernel/vector.S
@@ -7,6 +7,57 @@
 #include <asm/page.h>
 #include <asm/ptrace.h>
 
+#ifdef CONFIG_PPC_TRANSACTIONAL_MEM
+/*
+ * Wrapper to call load_up_altivec from C.
+ * void do_load_up_altivec(struct pt_regs *regs);
+ */
+_GLOBAL(do_load_up_altivec)
+	mflr	r0
+	std	r0, 16(r1)
+	stdu	r1, -112(r1)
+
+	subi	r6, r3, STACK_FRAME_OVERHEAD
+	/* load_up_altivec expects r12=MSR, r13=PACA, and returns
+	 * with r12 = new MSR.
+	 */
+	ld	r12,_MSR(r6)
+	GET_PACA(r13)
+	bl	load_up_altivec
+	std	r12,_MSR(r6)
+
+	ld	r0, 112+16(r1)
+	addi	r1, r1, 112
+	mtlr	r0
+	blr
+
+/* void do_load_up_transact_altivec(struct thread_struct *thread)
+ *
+ * This is similar to load_up_altivec but for the transactional version of the
+ * vector regs.  It doesn't mess with the task MSR or valid flags.
+ * Furthermore, VEC laziness is not supported with TM currently.
+ */
+_GLOBAL(do_load_up_transact_altivec)
+	mfmsr	r6
+	oris	r5,r6,MSR_VEC@h
+	MTMSRD(r5)
+	isync
+
+	li	r4,1
+	stw	r4,THREAD_USED_VR(r3)
+
+	li	r10,THREAD_TRANSACT_VSCR
+	lvx	vr0,r10,r3
+	mtvscr	vr0
+	REST_32VRS_TRANSACT(0,r4,r3)
+
+	/* Disable VEC again. */
+	MTMSRD(r6)
+	isync
+
+	blr
+#endif
+
 /*
  * load_up_altivec(unused, unused, tsk)
  * Disable VMX for the task which had it previously,
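
The vector wrappers would pair the same way; a matching sketch under the same caveats (the handler name altivec_unavailable_tm is assumed, not defined in this diff):

	#include <linux/sched.h>
	#include <asm/ptrace.h>

	/* Prototypes as documented in the asm comments above. */
	extern void do_load_up_altivec(struct pt_regs *regs);
	extern void do_load_up_transact_altivec(struct thread_struct *thread);

	/* Hypothetical VMX-unavailable-in-transaction handler (name assumed). */
	void altivec_unavailable_tm(struct pt_regs *regs)
	{
		/* Load the checkpointed VMX state; the asm wrapper writes the
		 * returned MSR (with MSR_VEC set) back into regs->msr. */
		do_load_up_altivec(regs);

		/* Load the transactional VRs and VSCR; the asm also marks
		 * THREAD_USED_VR but leaves the task MSR untouched. */
		do_load_up_transact_altivec(&current->thread);
	}
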