author:    Steven Rostedt <srostedt@redhat.com>  2012-11-02 17:47:21 -0400
committer: Steven Rostedt <rostedt@goodmis.org>  2013-01-22 23:38:01 -0500
commit:    edc15cafcbfa3d73f819cae99885a2e35e4cbce5
tree:      964e0de8816e6d4b602318d90718770921e301d2 /kernel
parent:    e46cbf75c621725964fe1f6e7013e8bcd86a0e3d
tracing: Avoid unnecessary multiple recursion checks
When function tracing occurs, the following steps are made:

  If arch does not support an ftrace feature:
   call internal function (uses INTERNAL bits) which calls...
  If callback is registered to the "global" list, the list
   function is called and recursion checks the GLOBAL bits.
   then this function calls...
  The function callback, which can use the FTRACE bits to
   check for recursion.

Now if the arch does not support a feature, and it calls the global
list function which calls the ftrace callback, all three of these
steps will do a recursion protection. There's no reason to do one if
the previous caller already did. The recursion that we are protecting
against will go through the same steps again.

To prevent the multiple recursion checks, if a recursion bit is set
that is higher than the MAX bit of the current check, then we know
that the check was made by the previous caller, and we can skip the
current check.

Signed-off-by: Steven Rostedt <rostedt@goodmis.org>
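To make the layering concrete, here is a minimal userspace model of the
scheme (a sketch with toy names and bit positions, not the kernel code):
each tracing layer owns a 4-bit range of per-context recursion bits,
outer layers sit at higher bits, and a layer skips its own test-and-set
when a higher bit is already set, because the outer caller has already
done the recursion check for this context.

#include <assert.h>

static unsigned int recursion;	/* stands in for current->trace_recursion */

/* Per-layer start bits; outer layers use higher bits (toy values). */
enum { FTRACE_START = 0, GLOBAL_START = 4, LIST_START = 8 };
#define CONTEXT_BITS		4
#define LAYER_MAX(start)	((1u << ((start) + CONTEXT_BITS)) - 1)
#define CONTEXT_MASK		LAYER_MAX(LIST_START)

static int test_and_set(int start, unsigned int max, int ctx)
{
	if ((recursion & CONTEXT_MASK) > max)
		return 0;			/* a higher layer already checked */
	if (recursion & (1u << (start + ctx)))
		return -1;			/* recursion detected */
	recursion |= 1u << (start + ctx);
	return start + ctx;
}

int main(void)
{
	/* The outermost (list) layer claims its bit in normal context (3). */
	assert(test_and_set(LIST_START, LAYER_MAX(LIST_START), 3) == LIST_START + 3);

	/* The inner global layer now skips its check entirely. */
	assert(test_and_set(GLOBAL_START, LAYER_MAX(GLOBAL_START), 3) == 0);
	return 0;
}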
Diffstat (limited to 'kernel')
-rw-r--r--	kernel/trace/ftrace.c	 40
-rw-r--r--	kernel/trace/trace.h	106
2 files changed, 110 insertions(+), 36 deletions(-)
diff --git a/kernel/trace/ftrace.c b/kernel/trace/ftrace.c
index 639b6ab1f04c..ce8c3d68292f 100644
--- a/kernel/trace/ftrace.c
+++ b/kernel/trace/ftrace.c
@@ -158,25 +158,15 @@ ftrace_global_list_func(unsigned long ip, unsigned long parent_ip,
 {
 	int bit;
 
-	if (in_interrupt()) {
-		if (in_nmi())
-			bit = TRACE_GLOBAL_NMI_BIT;
-
-		else if (in_irq())
-			bit = TRACE_GLOBAL_IRQ_BIT;
-		else
-			bit = TRACE_GLOBAL_SIRQ_BIT;
-	} else
-		bit = TRACE_GLOBAL_BIT;
-
-	if (unlikely(trace_recursion_test(bit)))
+	bit = trace_test_and_set_recursion(TRACE_GLOBAL_START, TRACE_GLOBAL_MAX);
+	if (bit < 0)
 		return;
 
-	trace_recursion_set(bit);
 	do_for_each_ftrace_op(op, ftrace_global_list) {
 		op->func(ip, parent_ip, op, regs);
 	} while_for_each_ftrace_op(op);
-	trace_recursion_clear(bit);
+
+	trace_clear_recursion(bit);
 }
 
 static void ftrace_pid_func(unsigned long ip, unsigned long parent_ip,
@@ -4145,26 +4135,14 @@ __ftrace_ops_list_func(unsigned long ip, unsigned long parent_ip,
 		       struct ftrace_ops *ignored, struct pt_regs *regs)
 {
 	struct ftrace_ops *op;
-	unsigned int bit;
+	int bit;
 
 	if (function_trace_stop)
 		return;
 
-	if (in_interrupt()) {
-		if (in_nmi())
-			bit = TRACE_INTERNAL_NMI_BIT;
-
-		else if (in_irq())
-			bit = TRACE_INTERNAL_IRQ_BIT;
-		else
-			bit = TRACE_INTERNAL_SIRQ_BIT;
-	} else
-		bit = TRACE_INTERNAL_BIT;
-
-	if (unlikely(trace_recursion_test(bit)))
-		return;
-
-	trace_recursion_set(bit);
+	bit = trace_test_and_set_recursion(TRACE_LIST_START, TRACE_LIST_MAX);
+	if (bit < 0)
+		return;
 
 	/*
 	 * Some of the ops may be dynamically allocated,
@@ -4176,7 +4154,7 @@ __ftrace_ops_list_func(unsigned long ip, unsigned long parent_ip,
 		op->func(ip, parent_ip, op, regs);
 	} while_for_each_ftrace_op(op);
 	preempt_enable_notrace();
-	trace_recursion_clear(bit);
+	trace_clear_recursion(bit);
 }
 
 /*
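The same helpers (added to trace.h below) are intended for individual
function callbacks as well, using the low FTRACE bit range. A
hypothetical callback, not part of this patch, would follow the same
pattern; when it is invoked through __ftrace_ops_list_func() or the
global list function, which hold higher bits, trace_test_and_set_recursion()
returns 0, the check is skipped, and trace_clear_recursion(0) is a no-op:

/* Hypothetical callback sketch (illustrative only; kernel context). */
static void my_callback_func(unsigned long ip, unsigned long parent_ip,
			     struct ftrace_ops *op, struct pt_regs *regs)
{
	int bit;

	/* Uses the lowest bit range; returns 0 (skip) if a caller
	 * already checked with a higher GLOBAL or INTERNAL bit. */
	bit = trace_test_and_set_recursion(TRACE_FTRACE_START, TRACE_FTRACE_MAX);
	if (bit < 0)
		return;		/* recursion detected in this context */

	/* ... actual tracing work ... */

	trace_clear_recursion(bit);	/* no-op when bit == 0 */
}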
diff --git a/kernel/trace/trace.h b/kernel/trace/trace.h
index 5a095d6f088d..c203a51dd412 100644
--- a/kernel/trace/trace.h
+++ b/kernel/trace/trace.h
@@ -297,18 +297,49 @@ struct tracer {
 /* Ring buffer has the 10 LSB bits to count */
 #define trace_recursion_buffer() ((current)->trace_recursion & 0x3ff)
 
-/* for function tracing recursion */
+/*
+ * For function tracing recursion:
+ *  The order of these bits is important.
+ *
+ *  When function tracing occurs, the following steps are made:
+ *   If arch does not support an ftrace feature:
+ *    call internal function (uses INTERNAL bits) which calls...
+ *   If callback is registered to the "global" list, the list
+ *    function is called and recursion checks the GLOBAL bits.
+ *    then this function calls...
+ *   The function callback, which can use the FTRACE bits to
+ *    check for recursion.
+ *
+ * Now if the arch does not support a feature, and it calls
+ * the global list function which calls the ftrace callback,
+ * all three of these steps will do a recursion protection.
+ * There's no reason to do one if the previous caller already
+ * did. The recursion that we are protecting against will
+ * go through the same steps again.
+ *
+ * To prevent the multiple recursion checks, if a recursion
+ * bit is set that is higher than the MAX bit of the current
+ * check, then we know that the check was made by the previous
+ * caller, and we can skip the current check.
+ */
 enum {
-	TRACE_INTERNAL_BIT = 11,
-	TRACE_INTERNAL_NMI_BIT,
-	TRACE_INTERNAL_IRQ_BIT,
-	TRACE_INTERNAL_SIRQ_BIT,
+	TRACE_FTRACE_BIT = 11,
+	TRACE_FTRACE_NMI_BIT,
+	TRACE_FTRACE_IRQ_BIT,
+	TRACE_FTRACE_SIRQ_BIT,
 
+	/* GLOBAL_BITs must be greater than FTRACE_BITs */
 	TRACE_GLOBAL_BIT,
 	TRACE_GLOBAL_NMI_BIT,
 	TRACE_GLOBAL_IRQ_BIT,
 	TRACE_GLOBAL_SIRQ_BIT,
 
+	/* INTERNAL_BITs must be greater than GLOBAL_BITs */
+	TRACE_INTERNAL_BIT,
+	TRACE_INTERNAL_NMI_BIT,
+	TRACE_INTERNAL_IRQ_BIT,
+	TRACE_INTERNAL_SIRQ_BIT,
+
 	TRACE_CONTROL_BIT,
 
 /*
@@ -325,6 +356,71 @@ enum {
 #define trace_recursion_clear(bit)	do { (current)->trace_recursion &= ~(1<<(bit)); } while (0)
 #define trace_recursion_test(bit)	((current)->trace_recursion & (1<<(bit)))
 
+#define TRACE_CONTEXT_BITS	4
+
+#define TRACE_FTRACE_START	TRACE_FTRACE_BIT
+#define TRACE_FTRACE_MAX	((1 << (TRACE_FTRACE_START + TRACE_CONTEXT_BITS)) - 1)
+
+#define TRACE_GLOBAL_START	TRACE_GLOBAL_BIT
+#define TRACE_GLOBAL_MAX	((1 << (TRACE_GLOBAL_START + TRACE_CONTEXT_BITS)) - 1)
+
+#define TRACE_LIST_START	TRACE_INTERNAL_BIT
+#define TRACE_LIST_MAX		((1 << (TRACE_LIST_START + TRACE_CONTEXT_BITS)) - 1)
+
+#define TRACE_CONTEXT_MASK	TRACE_LIST_MAX
+
+static __always_inline int trace_get_context_bit(void)
+{
+	int bit;
+
+	if (in_interrupt()) {
+		if (in_nmi())
+			bit = 0;
+
+		else if (in_irq())
+			bit = 1;
+		else
+			bit = 2;
+	} else
+		bit = 3;
+
+	return bit;
+}
+
+static __always_inline int trace_test_and_set_recursion(int start, int max)
+{
+	unsigned int val = current->trace_recursion;
+	int bit;
+
+	/* A previous recursion check was made */
+	if ((val & TRACE_CONTEXT_MASK) > max)
+		return 0;
+
+	bit = trace_get_context_bit() + start;
+	if (unlikely(val & (1 << bit)))
+		return -1;
+
+	val |= 1 << bit;
+	current->trace_recursion = val;
+	barrier();
+
+	return bit;
+}
+
+static __always_inline void trace_clear_recursion(int bit)
+{
+	unsigned int val = current->trace_recursion;
+
+	if (!bit)
+		return;
+
+	bit = 1 << bit;
+	val &= ~bit;
+
+	barrier();
+	current->trace_recursion = val;
+}
+
 #define TRACE_PIPE_ALL_CPU	-1
 
 static inline struct ring_buffer_iter *
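Working the arithmetic through: the FTRACE bits occupy positions 11-14,
GLOBAL 15-18, and INTERNAL 19-22, so TRACE_FTRACE_MAX = 0x7fff,
TRACE_GLOBAL_MAX = 0x7ffff, and TRACE_LIST_MAX = TRACE_CONTEXT_MASK =
0x7fffff. A standalone userspace check of the skip condition (it mirrors
the macro values rather than including the kernel header):

#include <assert.h>

int main(void)
{
	/* MAX values per the macros: ((1 << (START + 4)) - 1) */
	assert(((1 << (11 + 4)) - 1) == 0x7fff);	/* TRACE_FTRACE_MAX */
	assert(((1 << (15 + 4)) - 1) == 0x7ffff);	/* TRACE_GLOBAL_MAX */
	assert(((1 << (19 + 4)) - 1) == 0x7fffff);	/* TRACE_LIST_MAX   */

	/* __ftrace_ops_list_func() in normal context (offset 3) sets
	 * bit TRACE_LIST_START + 3 = 22; that value exceeds both lower
	 * MAXes, so the GLOBAL and FTRACE checks return 0 and skip. */
	unsigned int val = 1u << 22;
	assert((val & 0x7fffff) > 0x7ffff);	/* GLOBAL check skipped */
	assert((val & 0x7fffff) > 0x7fff);	/* FTRACE check skipped */
	return 0;
}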