Diffstat (limited to 'arch/x86/kernel/mcount_64.S')
-rw-r--r-- | arch/x86/kernel/mcount_64.S | 217
1 file changed, 217 insertions, 0 deletions
diff --git a/arch/x86/kernel/mcount_64.S b/arch/x86/kernel/mcount_64.S
new file mode 100644
index 000000000000..c050a0153168
--- /dev/null
+++ b/arch/x86/kernel/mcount_64.S
@@ -0,0 +1,217 @@
/*
 * linux/arch/x86_64/mcount_64.S
 *
 * Copyright (C) 2014 Steven Rostedt, Red Hat Inc
 */

#include <linux/linkage.h>
#include <asm/ptrace.h>
#include <asm/ftrace.h>


        .code64
        .section .entry.text, "ax"


#ifdef CONFIG_FUNCTION_TRACER

#ifdef CC_USING_FENTRY
# define function_hook __fentry__
#else
# define function_hook mcount
#endif

#ifdef CONFIG_DYNAMIC_FTRACE

ENTRY(function_hook)
        retq
END(function_hook)

/* skip is set if stack has been adjusted */
.macro ftrace_caller_setup skip=0
        MCOUNT_SAVE_FRAME \skip

        /* Load the ftrace_ops into the 3rd parameter */
        movq function_trace_op(%rip), %rdx

        /* Load ip into the first parameter */
        movq RIP(%rsp), %rdi
        subq $MCOUNT_INSN_SIZE, %rdi
        /* Load the parent_ip into the second parameter */
#ifdef CC_USING_FENTRY
        movq SS+16(%rsp), %rsi
#else
        movq 8(%rbp), %rsi
#endif
.endm

ENTRY(ftrace_caller)
        /* Check if tracing was disabled (quick check) */
        cmpl $0, function_trace_stop
        jne ftrace_stub

        ftrace_caller_setup
        /* regs go into 4th parameter (but make it NULL) */
        movq $0, %rcx

GLOBAL(ftrace_call)
        call ftrace_stub

        MCOUNT_RESTORE_FRAME
ftrace_return:

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
GLOBAL(ftrace_graph_call)
        jmp ftrace_stub
#endif

GLOBAL(ftrace_stub)
        retq
END(ftrace_caller)

ENTRY(ftrace_regs_caller)
        /* Save the current flags before compare (in SS location) */
        pushfq

        /* Check if tracing was disabled (quick check) */
        cmpl $0, function_trace_stop
        jne ftrace_restore_flags

        /* skip=8 to skip flags saved in SS */
        ftrace_caller_setup 8

        /* Save the rest of pt_regs */
        movq %r15, R15(%rsp)
        movq %r14, R14(%rsp)
        movq %r13, R13(%rsp)
        movq %r12, R12(%rsp)
        movq %r11, R11(%rsp)
        movq %r10, R10(%rsp)
        movq %rbp, RBP(%rsp)
        movq %rbx, RBX(%rsp)
        /* Copy saved flags */
        movq SS(%rsp), %rcx
        movq %rcx, EFLAGS(%rsp)
        /* Kernel segments */
        movq $__KERNEL_DS, %rcx
        movq %rcx, SS(%rsp)
        movq $__KERNEL_CS, %rcx
        movq %rcx, CS(%rsp)
        /* Stack - skipping return address */
        leaq SS+16(%rsp), %rcx
        movq %rcx, RSP(%rsp)

        /* regs go into 4th parameter */
        leaq (%rsp), %rcx

GLOBAL(ftrace_regs_call)
        call ftrace_stub

        /* Copy flags back to SS, to restore them */
        movq EFLAGS(%rsp), %rax
        movq %rax, SS(%rsp)

        /* Handlers can change the RIP */
        movq RIP(%rsp), %rax
        movq %rax, SS+8(%rsp)

        /* restore the rest of pt_regs */
        movq R15(%rsp), %r15
        movq R14(%rsp), %r14
        movq R13(%rsp), %r13
        movq R12(%rsp), %r12
        movq R10(%rsp), %r10
        movq RBP(%rsp), %rbp
        movq RBX(%rsp), %rbx

        /* skip=8 to skip flags saved in SS */
        MCOUNT_RESTORE_FRAME 8

        /* Restore flags */
        popfq

        jmp ftrace_return
ftrace_restore_flags:
        popfq
        jmp ftrace_stub

END(ftrace_regs_caller)


#else /* ! CONFIG_DYNAMIC_FTRACE */

ENTRY(function_hook)
        cmpl $0, function_trace_stop
        jne ftrace_stub

        cmpq $ftrace_stub, ftrace_trace_function
        jnz trace

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
        cmpq $ftrace_stub, ftrace_graph_return
        jnz ftrace_graph_caller

        cmpq $ftrace_graph_entry_stub, ftrace_graph_entry
        jnz ftrace_graph_caller
#endif

GLOBAL(ftrace_stub)
        retq

trace:
        MCOUNT_SAVE_FRAME

        movq RIP(%rsp), %rdi
#ifdef CC_USING_FENTRY
        movq SS+16(%rsp), %rsi
#else
        movq 8(%rbp), %rsi
#endif
        subq $MCOUNT_INSN_SIZE, %rdi

        call *ftrace_trace_function

        MCOUNT_RESTORE_FRAME

        jmp ftrace_stub
END(function_hook)
#endif /* CONFIG_DYNAMIC_FTRACE */
#endif /* CONFIG_FUNCTION_TRACER */

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
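/*
 * ftrace_graph_caller hands the location of the saved return address to
 * prepare_ftrace_return(), which replaces it with return_to_handler so
 * the function's exit can be traced as well.
 */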
ENTRY(ftrace_graph_caller)
        MCOUNT_SAVE_FRAME

#ifdef CC_USING_FENTRY
        leaq SS+16(%rsp), %rdi
        movq $0, %rdx   /* No framepointers needed */
#else
        leaq 8(%rbp), %rdi
        movq (%rbp), %rdx
#endif
        movq RIP(%rsp), %rsi
        subq $MCOUNT_INSN_SIZE, %rsi

        call prepare_ftrace_return

        MCOUNT_RESTORE_FRAME

        retq
END(ftrace_graph_caller)

GLOBAL(return_to_handler)
        subq $24, %rsp

        /* Save the return values */
        movq %rax, (%rsp)
        movq %rdx, 8(%rsp)
        movq %rbp, %rdi

        call ftrace_return_to_handler

        movq %rax, %rdi
        movq 8(%rsp), %rdx
        movq (%rsp), %rax
        addq $24, %rsp
        jmp *%rdi
#endif
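
For reference, the entry points above marshal their arguments to match the
C-side ftrace callback: %rdi, %rsi, %rdx and %rcx are loaded exactly as the
comments in ftrace_caller_setup and ftrace_regs_caller describe. The sketch
below is not part of the patch; it is a minimal illustration, assuming the
ftrace_func_t prototype of this kernel generation, and my_trace_func is a
hypothetical handler name used only for the example.

    #include <linux/ftrace.h>   /* struct ftrace_ops, ftrace_func_t */
    #include <linux/ptrace.h>   /* struct pt_regs */

    /*
     * Illustrative callback only (not from the patch).  ftrace_caller
     * passes regs == NULL; ftrace_regs_caller passes the pt_regs it built
     * on the stack, so a handler registered with FTRACE_OPS_FL_SAVE_REGS
     * may inspect or modify regs->ip and the other saved registers.
     */
    static void my_trace_func(unsigned long ip,        /* %rdi: traced function   */
                              unsigned long parent_ip, /* %rsi: its call site     */
                              struct ftrace_ops *op,   /* %rdx: function_trace_op */
                              struct pt_regs *regs)    /* %rcx: NULL or full regs */
    {
            /* e.g. inspect ip/parent_ip, or regs when SAVE_REGS is set */
    }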