diff options
author | Anton Blanchard <anton@samba.org> | 2015-10-28 20:44:11 -0400 |
---|---|---|
committer | Michael Ellerman <mpe@ellerman.id.au> | 2015-12-02 03:34:41 -0500 |
commit | d1e1cf2e38def301fde42c1a33f896f974941d7b (patch) | |
tree | 637ae6411bdf02dd8e7e29a57bf416beb52deca4 /arch/powerpc/include/asm/switch_to.h | |
parent | f3d885ccba8539f62e8be3ba29ecf91687120252 (diff) |
powerpc: clean up asm/switch_to.h
Remove a bunch of unnecessary fallback functions and group
things in a more logical way.
Signed-off-by: Anton Blanchard <anton@samba.org>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Diffstat (limited to 'arch/powerpc/include/asm/switch_to.h')
-rw-r--r-- | arch/powerpc/include/asm/switch_to.h | 35 |
1 file changed, 10 insertions, 25 deletions
diff --git a/arch/powerpc/include/asm/switch_to.h b/arch/powerpc/include/asm/switch_to.h index 81d46a433c03..5b268b6be74c 100644 --- a/arch/powerpc/include/asm/switch_to.h +++ b/arch/powerpc/include/asm/switch_to.h | |||
@@ -14,23 +14,18 @@ extern struct task_struct *__switch_to(struct task_struct *, | |||
14 | struct task_struct *); | 14 | struct task_struct *); |
15 | #define switch_to(prev, next, last) ((last) = __switch_to((prev), (next))) | 15 | #define switch_to(prev, next, last) ((last) = __switch_to((prev), (next))) |
16 | 16 | ||
17 | struct thread_struct; | ||
18 | extern struct task_struct *_switch(struct thread_struct *prev, | 17 | extern struct task_struct *_switch(struct thread_struct *prev, |
19 | struct thread_struct *next); | 18 | struct thread_struct *next); |
20 | 19 | ||
21 | extern void enable_kernel_fp(void); | 20 | extern void switch_booke_debug_regs(struct debug_reg *new_debug); |
22 | extern void enable_kernel_altivec(void); | 21 | |
23 | extern void enable_kernel_vsx(void); | ||
24 | extern int emulate_altivec(struct pt_regs *); | 22 | extern int emulate_altivec(struct pt_regs *); |
25 | extern void __giveup_vsx(struct task_struct *); | 23 | |
26 | extern void giveup_vsx(struct task_struct *); | ||
27 | extern void enable_kernel_spe(void); | ||
28 | extern void load_up_spe(struct task_struct *); | ||
29 | extern void giveup_all(struct task_struct *); | ||
30 | extern void flush_all_to_thread(struct task_struct *); | 24 | extern void flush_all_to_thread(struct task_struct *); |
31 | extern void switch_booke_debug_regs(struct debug_reg *new_debug); | 25 | extern void giveup_all(struct task_struct *); |
32 | 26 | ||
33 | #ifdef CONFIG_PPC_FPU | 27 | #ifdef CONFIG_PPC_FPU |
28 | extern void enable_kernel_fp(void); | ||
34 | extern void flush_fp_to_thread(struct task_struct *); | 29 | extern void flush_fp_to_thread(struct task_struct *); |
35 | extern void giveup_fpu(struct task_struct *); | 30 | extern void giveup_fpu(struct task_struct *); |
36 | extern void __giveup_fpu(struct task_struct *); | 31 | extern void __giveup_fpu(struct task_struct *); |
@@ -38,14 +33,12 @@ static inline void disable_kernel_fp(void) | |||
38 | { | 33 | { |
39 | msr_check_and_clear(MSR_FP); | 34 | msr_check_and_clear(MSR_FP); |
40 | } | 35 | } |
41 | |||
42 | #else | 36 | #else |
43 | static inline void flush_fp_to_thread(struct task_struct *t) { } | 37 | static inline void flush_fp_to_thread(struct task_struct *t) { } |
44 | static inline void giveup_fpu(struct task_struct *t) { } | ||
45 | static inline void __giveup_fpu(struct task_struct *t) { } | ||
46 | #endif | 38 | #endif |
47 | 39 | ||
48 | #ifdef CONFIG_ALTIVEC | 40 | #ifdef CONFIG_ALTIVEC |
41 | extern void enable_kernel_altivec(void); | ||
49 | extern void flush_altivec_to_thread(struct task_struct *); | 42 | extern void flush_altivec_to_thread(struct task_struct *); |
50 | extern void giveup_altivec(struct task_struct *); | 43 | extern void giveup_altivec(struct task_struct *); |
51 | extern void __giveup_altivec(struct task_struct *); | 44 | extern void __giveup_altivec(struct task_struct *); |
@@ -53,25 +46,21 @@ static inline void disable_kernel_altivec(void) | |||
53 | { | 46 | { |
54 | msr_check_and_clear(MSR_VEC); | 47 | msr_check_and_clear(MSR_VEC); |
55 | } | 48 | } |
56 | #else | ||
57 | static inline void flush_altivec_to_thread(struct task_struct *t) { } | ||
58 | static inline void giveup_altivec(struct task_struct *t) { } | ||
59 | static inline void __giveup_altivec(struct task_struct *t) { } | ||
60 | #endif | 49 | #endif |
61 | 50 | ||
62 | #ifdef CONFIG_VSX | 51 | #ifdef CONFIG_VSX |
52 | extern void enable_kernel_vsx(void); | ||
63 | extern void flush_vsx_to_thread(struct task_struct *); | 53 | extern void flush_vsx_to_thread(struct task_struct *); |
54 | extern void giveup_vsx(struct task_struct *); | ||
55 | extern void __giveup_vsx(struct task_struct *); | ||
64 | static inline void disable_kernel_vsx(void) | 56 | static inline void disable_kernel_vsx(void) |
65 | { | 57 | { |
66 | msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX); | 58 | msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX); |
67 | } | 59 | } |
68 | #else | ||
69 | static inline void flush_vsx_to_thread(struct task_struct *t) | ||
70 | { | ||
71 | } | ||
72 | #endif | 60 | #endif |
73 | 61 | ||
74 | #ifdef CONFIG_SPE | 62 | #ifdef CONFIG_SPE |
63 | extern void enable_kernel_spe(void); | ||
75 | extern void flush_spe_to_thread(struct task_struct *); | 64 | extern void flush_spe_to_thread(struct task_struct *); |
76 | extern void giveup_spe(struct task_struct *); | 65 | extern void giveup_spe(struct task_struct *); |
77 | extern void __giveup_spe(struct task_struct *); | 66 | extern void __giveup_spe(struct task_struct *); |
@@ -79,10 +68,6 @@ static inline void disable_kernel_spe(void) | |||
79 | { | 68 | { |
80 | msr_check_and_clear(MSR_SPE); | 69 | msr_check_and_clear(MSR_SPE); |
81 | } | 70 | } |
82 | #else | ||
83 | static inline void flush_spe_to_thread(struct task_struct *t) { } | ||
84 | static inline void giveup_spe(struct task_struct *t) { } | ||
85 | static inline void __giveup_spe(struct task_struct *t) { } | ||
86 | #endif | 71 | #endif |
87 | 72 | ||
88 | static inline void clear_task_ebb(struct task_struct *t) | 73 | static inline void clear_task_ebb(struct task_struct *t) |