Diffstat (limited to 'arch/x86/include/asm/switch_to.h')
-rw-r--r--   arch/x86/include/asm/switch_to.h | 129
1 file changed, 129 insertions, 0 deletions
diff --git a/arch/x86/include/asm/switch_to.h b/arch/x86/include/asm/switch_to.h
new file mode 100644
index 000000000000..4ec45b3abba1
--- /dev/null
+++ b/arch/x86/include/asm/switch_to.h
@@ -0,0 +1,129 @@
#ifndef _ASM_X86_SWITCH_TO_H
#define _ASM_X86_SWITCH_TO_H

struct task_struct; /* one of the stranger aspects of C forward declarations */
struct task_struct *__switch_to(struct task_struct *prev,
				struct task_struct *next);
struct tss_struct;
void __switch_to_xtra(struct task_struct *prev_p, struct task_struct *next_p,
		      struct tss_struct *tss);

#ifdef CONFIG_X86_32

#ifdef CONFIG_CC_STACKPROTECTOR
#define __switch_canary						\
	"movl %P[task_canary](%[next]), %%ebx\n\t"		\
	"movl %%ebx, "__percpu_arg([stack_canary])"\n\t"
#define __switch_canary_oparam					\
	, [stack_canary] "=m" (stack_canary.canary)
#define __switch_canary_iparam					\
	, [task_canary] "i" (offsetof(struct task_struct, stack_canary))
#else	/* CC_STACKPROTECTOR */
#define __switch_canary
#define __switch_canary_oparam
#define __switch_canary_iparam
#endif	/* CC_STACKPROTECTOR */

/*
 * Saving eflags is important. It not only switches IOPL between tasks,
 * but also protects other tasks from the NT flag leaking in through
 * sysenter etc.
 */
#define switch_to(prev, next, last)					\
do {									\
	/*								\
	 * Context-switching clobbers all registers, so we clobber	\
	 * them explicitly, via unused output variables.		\
	 * (EAX and EBP are not listed because EBP is saved/restored	\
	 * explicitly for wchan access and EAX is the return value of	\
	 * __switch_to())						\
	 */								\
	unsigned long ebx, ecx, edx, esi, edi;				\
									\
	asm volatile("pushfl\n\t"		/* save    flags */	\
		     "pushl %%ebp\n\t"		/* save    EBP   */	\
		     "movl %%esp,%[prev_sp]\n\t"  /* save    ESP   */	\
		     "movl %[next_sp],%%esp\n\t"  /* restore ESP   */	\
		     "movl $1f,%[prev_ip]\n\t"	/* save    EIP   */	\
		     "pushl %[next_ip]\n\t"	/* restore EIP   */	\
		     __switch_canary					\
		     "jmp __switch_to\n"	/* regparm call  */	\
		     "1:\t"						\
		     "popl %%ebp\n\t"		/* restore EBP   */	\
		     "popfl\n"			/* restore flags */	\
									\
		     /* output parameters */				\
		     : [prev_sp] "=m" (prev->thread.sp),		\
		       [prev_ip] "=m" (prev->thread.ip),		\
		       "=a" (last),					\
									\
		       /* clobbered output registers: */		\
		       "=b" (ebx), "=c" (ecx), "=d" (edx),		\
		       "=S" (esi), "=D" (edi)				\
									\
		       __switch_canary_oparam				\
									\
		       /* input parameters: */				\
		     : [next_sp]  "m" (next->thread.sp),		\
		       [next_ip]  "m" (next->thread.ip),		\
									\
		       /* regparm parameters for __switch_to(): */	\
		       [prev]     "a" (prev),				\
		       [next]     "d" (next)				\
									\
		       __switch_canary_iparam				\
									\
		     : /* reloaded segment registers */			\
		       "memory");					\
} while (0)

#else /* CONFIG_X86_32 */

/* frame pointer must be last for get_wchan */
#define SAVE_CONTEXT    "pushf ; pushq %%rbp ; movq %%rsi,%%rbp\n\t"
#define RESTORE_CONTEXT "movq %%rbp,%%rsi ; popq %%rbp ; popf\t"

#define __EXTRA_CLOBBER						\
	, "rcx", "rbx", "rdx", "r8", "r9", "r10", "r11",	\
	  "r12", "r13", "r14", "r15"

#ifdef CONFIG_CC_STACKPROTECTOR
#define __switch_canary						\
	"movq %P[task_canary](%%rsi),%%r8\n\t"			\
	"movq %%r8,"__percpu_arg([gs_canary])"\n\t"
#define __switch_canary_oparam					\
	, [gs_canary] "=m" (irq_stack_union.stack_canary)
#define __switch_canary_iparam					\
	, [task_canary] "i" (offsetof(struct task_struct, stack_canary))
#else	/* CC_STACKPROTECTOR */
#define __switch_canary
#define __switch_canary_oparam
#define __switch_canary_iparam
#endif	/* CC_STACKPROTECTOR */

/* Save and restore flags so a set NT flag cannot leak from one task into the next */
#define switch_to(prev, next, last) \
	asm volatile(SAVE_CONTEXT					\
	     "movq %%rsp,%P[threadrsp](%[prev])\n\t" /* save RSP */	\
	     "movq %P[threadrsp](%[next]),%%rsp\n\t" /* restore RSP */	\
	     "call __switch_to\n\t"					\
	     "movq "__percpu_arg([current_task])",%%rsi\n\t"		\
	     __switch_canary						\
	     "movq %P[thread_info](%%rsi),%%r8\n\t"			\
	     "movq %%rax,%%rdi\n\t"					\
	     "testl %[_tif_fork],%P[ti_flags](%%r8)\n\t"		\
	     "jnz ret_from_fork\n\t"					\
	     RESTORE_CONTEXT						\
	     : "=a" (last)						\
	       __switch_canary_oparam					\
	     : [next] "S" (next), [prev] "D" (prev),			\
	       [threadrsp] "i" (offsetof(struct task_struct, thread.sp)), \
	       [ti_flags] "i" (offsetof(struct thread_info, flags)),	\
	       [_tif_fork] "i" (_TIF_FORK),				\
	       [thread_info] "i" (offsetof(struct task_struct, stack)), \
	       [current_task] "m" (current_task)			\
	       __switch_canary_iparam					\
	     : "memory", "cc" __EXTRA_CLOBBER)

#endif /* CONFIG_X86_32 */

#endif /* _ASM_X86_SWITCH_TO_H */
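
For context, switch_to() takes three arguments because the stack switch means the code that resumes after the macro may be running on behalf of a different task than the one that entered it; the scheduler passes prev twice and reads back the task it actually switched away from (the return value of __switch_to(), delivered via the "=a" (last) constraint). The sketch below is illustrative only, not the kernel's real context_switch() (which also handles mm switching, locking and architecture hooks); it assumes the usual kernel headers are in scope.

/*
 * Illustrative sketch of the prev/next/last calling convention of the
 * switch_to() macro defined above.  Hypothetical helper, not scheduler code.
 */
static void context_switch_sketch(struct task_struct *prev,
				  struct task_struct *next)
{
	/*
	 * After switch_to() this CPU runs on next's stack, so locals
	 * captured before the switch describe a stale context.  The
	 * third argument writes back the task we actually came from.
	 */
	switch_to(prev, next, prev);
	barrier();

	/* 'prev' now names the task this CPU just switched away from. */
}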