-rw-r--r--	arch/x86/Kconfig           |  2
-rw-r--r--	arch/x86/kernel/Makefile   |  1
-rw-r--r--	arch/x86/kernel/entry_64.S | 74
-rw-r--r--	arch/x86/kernel/ftrace.c   | 11
-rw-r--r--	kernel/trace/ftrace.c      |  4
5 files changed, 89 insertions, 3 deletions
diff --git a/arch/x86/Kconfig b/arch/x86/Kconfig
index 0842b1127684..45c86fb94132 100644
--- a/arch/x86/Kconfig
+++ b/arch/x86/Kconfig
@@ -29,7 +29,7 @@ config X86
 	select HAVE_FTRACE_MCOUNT_RECORD
 	select HAVE_DYNAMIC_FTRACE
 	select HAVE_FUNCTION_TRACER
-	select HAVE_FUNCTION_GRAPH_TRACER if X86_32
+	select HAVE_FUNCTION_GRAPH_TRACER
 	select HAVE_FUNCTION_TRACE_MCOUNT_TEST
 	select HAVE_KVM if ((X86_32 && !X86_VOYAGER && !X86_VISWS && !X86_NUMAQ) || X86_64)
 	select HAVE_ARCH_KGDB if !X86_VOYAGER
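With HAVE_FUNCTION_GRAPH_TRACER now selected for 64-bit as well, the tracer is driven on x86-64 exactly as it already was on x86-32. For reference, on a kernel built with CONFIG_FUNCTION_GRAPH_TRACER and debugfs mounted at the usual location:

	# echo function_graph > /sys/kernel/debug/tracing/current_tracer
	# cat /sys/kernel/debug/tracing/trace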
diff --git a/arch/x86/kernel/Makefile b/arch/x86/kernel/Makefile
index 64939a0c3986..d274425fb076 100644
--- a/arch/x86/kernel/Makefile
+++ b/arch/x86/kernel/Makefile
@@ -17,6 +17,7 @@ endif
 ifdef CONFIG_FUNCTION_GRAPH_TRACER
 # Don't trace __switch_to() but let it for function tracer
 CFLAGS_REMOVE_process_32.o = -pg
+CFLAGS_REMOVE_process_64.o = -pg
 endif
 
 #
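The new CFLAGS_REMOVE_process_64.o line mirrors the existing 32-bit rule. process_64.c contains __switch_to(), and the likely reason for the exclusion is that the graph tracer's shadow return stack is per-task while __switch_to() switches tasks in mid-function, so an entry pushed in one task's context could be popped in another's. Building the file without -pg keeps mcount calls out of it altogether when the graph tracer is configured.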
diff --git a/arch/x86/kernel/entry_64.S b/arch/x86/kernel/entry_64.S
index 08aa6b10933c..2aa0526ac30e 100644
--- a/arch/x86/kernel/entry_64.S
+++ b/arch/x86/kernel/entry_64.S
@@ -98,6 +98,12 @@ ftrace_call:
 	movq (%rsp), %rax
 	addq $0x38, %rsp
 
+#ifdef CONFIG_FUNCTION_GRAPH_TRACER
+.globl ftrace_graph_call
+ftrace_graph_call:
+	jmp ftrace_stub
+#endif
+
 .globl ftrace_stub
 ftrace_stub:
 	retq
@@ -110,6 +116,12 @@ ENTRY(mcount)
 
 	cmpq $ftrace_stub, ftrace_trace_function
 	jnz trace
+
+#ifdef CONFIG_FUNCTION_GRAPH_TRACER
+	cmpq $ftrace_stub, ftrace_graph_return
+	jnz ftrace_graph_caller
+#endif
+
 .globl ftrace_stub
 ftrace_stub:
 	retq
@@ -145,6 +157,68 @@ END(mcount)
 #endif /* CONFIG_DYNAMIC_FTRACE */
 #endif /* CONFIG_FUNCTION_TRACER */
 
+#ifdef CONFIG_FUNCTION_GRAPH_TRACER
+ENTRY(ftrace_graph_caller)
+	cmpl $0, function_trace_stop
+	jne ftrace_stub
+
+	subq $0x38, %rsp
+	movq %rax, (%rsp)
+	movq %rcx, 8(%rsp)
+	movq %rdx, 16(%rsp)
+	movq %rsi, 24(%rsp)
+	movq %rdi, 32(%rsp)
+	movq %r8, 40(%rsp)
+	movq %r9, 48(%rsp)
+
+	leaq 8(%rbp), %rdi
+	movq 0x38(%rsp), %rsi
+
+	call prepare_ftrace_return
+
+	movq 48(%rsp), %r9
+	movq 40(%rsp), %r8
+	movq 32(%rsp), %rdi
+	movq 24(%rsp), %rsi
+	movq 16(%rsp), %rdx
+	movq 8(%rsp), %rcx
+	movq (%rsp), %rax
+	addq $0x38, %rsp
+	retq
+END(ftrace_graph_caller)
+
+
+.globl return_to_handler
+return_to_handler:
+	subq $80, %rsp
+
+	movq %rax, (%rsp)
+	movq %rcx, 8(%rsp)
+	movq %rdx, 16(%rsp)
+	movq %rsi, 24(%rsp)
+	movq %rdi, 32(%rsp)
+	movq %r8, 40(%rsp)
+	movq %r9, 48(%rsp)
+	movq %r10, 56(%rsp)
+	movq %r11, 64(%rsp)
+
+	call ftrace_return_to_handler
+
+	movq %rax, 72(%rsp)
+	movq 64(%rsp), %r11
+	movq 56(%rsp), %r10
+	movq 48(%rsp), %r9
+	movq 40(%rsp), %r8
+	movq 32(%rsp), %rdi
+	movq 24(%rsp), %rsi
+	movq 16(%rsp), %rdx
+	movq 8(%rsp), %rcx
+	movq (%rsp), %rax
+	addq $72, %rsp
+	retq
+#endif
+
+
 #ifndef CONFIG_PREEMPT
 #define retint_kernel retint_restore_args
 #endif
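Taken together, the three entry_64.S hunks add the entry and exit hooks. On entry, mcount (or the ftrace_graph_call site under dynamic ftrace) branches to ftrace_graph_caller, which saves the argument registers, hands prepare_ftrace_return() the location of the traced function's saved return address (8(%rbp)) plus the traced function's own address, and restores. On exit, control lands in return_to_handler, which saves the caller-clobbered registers (including the return value in %rax) around a call into the tracer. A rough C model of the bookkeeping those stubs drive — field and helper names here are illustrative, loosely following the kernel's ftrace_ret_stack, not its exact API:

	/*
	 * Illustrative model of the per-task shadow return stack that
	 * the assembly stubs above maintain.
	 */
	struct shadow_entry {
		unsigned long ret;	/* real return address of the callee */
		unsigned long func;	/* callee, to match up the exit event */
	};

	/* Entry side, reached from ftrace_graph_caller: divert the
	 * on-stack return address to return_to_handler. */
	static void hook_return(unsigned long *parent, unsigned long self_addr)
	{
		int idx = ++current->curr_ret_stack;		/* push */

		current->ret_stack[idx].ret  = *parent;
		current->ret_stack[idx].func = self_addr;
		*parent = (unsigned long)&return_to_handler;	/* divert */
	}

	/* Exit side, reached from return_to_handler: hand the stub the
	 * real address to return to. */
	static unsigned long unhook_return(void)
	{
		int idx = current->curr_ret_stack--;		/* pop */

		return current->ret_stack[idx].ret;
	}

Note the stack trick at the end of return_to_handler: ftrace_return_to_handler() returns the real return address in %rax, the stub stores it at 72(%rsp), restores the saved registers, then releases only 72 of the 80 bytes it allocated — leaving that one slot on top of the stack so the final retq pops it and jumps straight back to the original caller.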
diff --git a/arch/x86/kernel/ftrace.c b/arch/x86/kernel/ftrace.c
index 7ef914e6a2f6..58832478b94e 100644
--- a/arch/x86/kernel/ftrace.c
+++ b/arch/x86/kernel/ftrace.c
@@ -467,8 +467,13 @@ void prepare_ftrace_return(unsigned long *parent, unsigned long self_addr)
 	 * ignore such a protection.
 	 */
 	asm volatile(
+#ifdef CONFIG_X86_64
+		"1: movq (%[parent_old]), %[old]\n"
+		"2: movq %[return_hooker], (%[parent_replaced])\n"
+#else
 		"1: movl (%[parent_old]), %[old]\n"
 		"2: movl %[return_hooker], (%[parent_replaced])\n"
+#endif
 		"   movl $0, %[faulted]\n"
 
 		".section .fixup, \"ax\"\n"
@@ -476,8 +481,13 @@ void prepare_ftrace_return(unsigned long *parent, unsigned long self_addr)
 		".previous\n"
 
 		".section __ex_table, \"a\"\n"
+#ifdef CONFIG_X86_64
+		"   .quad 1b, 3b\n"
+		"   .quad 2b, 3b\n"
+#else
 		"   .long 1b, 3b\n"
 		"   .long 2b, 3b\n"
+#endif
 		".previous\n"
 
 		: [parent_replaced] "=r" (parent), [old] "=r" (old),
@@ -509,5 +519,4 @@ void prepare_ftrace_return(unsigned long *parent, unsigned long self_addr)
 	ftrace_graph_entry(&trace);
 
 }
-
 #endif /* CONFIG_FUNCTION_GRAPH_TRACER */
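The ftrace.c hunks make the fault-guarded swap in prepare_ftrace_return() word-size clean: the stack slot holding the return address is 8 bytes on x86-64, so the accesses become movq, and the __ex_table entries — which hold pointers — become .quad. Stripped of the fixup/__ex_table machinery, the guarded region amounts to a sketch like the following, here written with the kernel's probe_kernel_read()/probe_kernel_write() fault-safe accessors as a stand-in (the commit itself keeps the raw asm):

	/* Sketch of the guarded swap: read the real return address,
	 * then overwrite it with return_hooker, bailing out if the
	 * stack page faults under us. */
	static int swap_return_address(unsigned long *parent,
				       unsigned long return_hooker,
				       unsigned long *old)
	{
		if (probe_kernel_read(old, parent, sizeof(*old)))
			return 1;	/* faulted on the load ("3:" path) */
		if (probe_kernel_write(parent, &return_hooker,
				       sizeof(return_hooker)))
			return 1;	/* faulted on the store */
		return 0;		/* the "movl $0, %[faulted]" path */
	}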
diff --git a/kernel/trace/ftrace.c b/kernel/trace/ftrace.c
index 08b536a2614e..f72499627525 100644
--- a/kernel/trace/ftrace.c
+++ b/kernel/trace/ftrace.c
@@ -1673,8 +1673,10 @@ static int alloc_retstack_tasklist(struct ftrace_ret_stack **ret_stack_list)
 		}
 
 		if (t->ret_stack == NULL) {
-			t->ret_stack = ret_stack_list[start++];
 			t->curr_ret_stack = -1;
+			/* Make sure IRQs see the -1 first: */
+			barrier();
+			t->ret_stack = ret_stack_list[start++];
 			atomic_set(&t->trace_overrun, 0);
 		}
 	} while_each_thread(g, t);
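Finally, the kernel/trace/ftrace.c hunk fixes the publication order when shadow stacks are handed out to already-running tasks: the entry hook uses t->ret_stack != NULL as its "tracing is live" test and t->curr_ret_stack as its push index, so the index must be visibly -1 before the pointer becomes non-NULL, or a first push from an interrupt could land at a stale offset. The consumer side of that ordering, modeled loosely on ftrace_push_return_trace() (simplified, illustrative names):

	/* Consumer side of the barrier() ordering, as seen from the
	 * entry hook; a simplified model of ftrace_push_return_trace(). */
	static int push_return_trace(unsigned long ret, unsigned long func)
	{
		int index;

		if (!current->ret_stack)
			return -EBUSY;	/* tracing not live for this task */

		/* A non-NULL ret_stack implies curr_ret_stack was already
		 * published as -1 (that is what the barrier() guarantees),
		 * so the first push lands at index 0. */
		index = ++current->curr_ret_stack;
		current->ret_stack[index].ret = ret;
		current->ret_stack[index].func = func;
		return 0;
	}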