diff options
Diffstat (limited to 'kernel/trace/trace.h')
-rw-r--r-- | kernel/trace/trace.h | 140 |
1 files changed, 127 insertions, 13 deletions
diff --git a/kernel/trace/trace.h b/kernel/trace/trace.h index c75d7988902c..2081971367ea 100644 --- a/kernel/trace/trace.h +++ b/kernel/trace/trace.h | |||
@@ -283,24 +283,70 @@ struct tracer { | |||
283 | enum print_line_t (*print_line)(struct trace_iterator *iter); | 283 | enum print_line_t (*print_line)(struct trace_iterator *iter); |
284 | /* If you handled the flag setting, return 0 */ | 284 | /* If you handled the flag setting, return 0 */ |
285 | int (*set_flag)(u32 old_flags, u32 bit, int set); | 285 | int (*set_flag)(u32 old_flags, u32 bit, int set); |
286 | /* Return 0 if OK with change, else return non-zero */ | ||
287 | int (*flag_changed)(struct tracer *tracer, | ||
288 | u32 mask, int set); | ||
286 | struct tracer *next; | 289 | struct tracer *next; |
287 | struct tracer_flags *flags; | 290 | struct tracer_flags *flags; |
288 | bool print_max; | 291 | bool print_max; |
289 | bool use_max_tr; | 292 | bool use_max_tr; |
293 | bool allocated_snapshot; | ||
294 | bool enabled; | ||
290 | }; | 295 | }; |
291 | 296 | ||
292 | 297 | ||
293 | /* Only current can touch trace_recursion */ | 298 | /* Only current can touch trace_recursion */ |
294 | #define trace_recursion_inc() do { (current)->trace_recursion++; } while (0) | ||
295 | #define trace_recursion_dec() do { (current)->trace_recursion--; } while (0) | ||
296 | 299 | ||
/*
 * For function tracing recursion:
 *  The order of these bits is important.
 *
 *  When function tracing occurs, the following steps are made:
 *   If arch does not support a ftrace feature:
 *    call internal function (uses INTERNAL bits) which calls...
 *   If callback is registered to the "global" list, the list
 *    function is called and recursion checks the GLOBAL bits.
 *    then this function calls...
 *   The function callback, which can use the FTRACE bits to
 *    check for recursion.
 *
 * Now if the arch does not support a feature, and it calls
 * the global list function which calls the ftrace callback
 * all three of these steps will do a recursion protection.
 * There's no reason to do one if the previous caller already
 * did. The recursion that we are protecting against will
 * go through the same steps again.
 *
 * To prevent the multiple recursion checks, if a recursion
 * bit is set that is higher than the MAX bit of the current
 * check, then we know that the check was made by the previous
 * caller, and we can skip the current check.
 */
enum {
	/* Ring buffer recursion protection, one bit per context */
	TRACE_BUFFER_BIT,
	TRACE_BUFFER_NMI_BIT,
	TRACE_BUFFER_IRQ_BIT,
	TRACE_BUFFER_SIRQ_BIT,

	/* Start of function recursion bits */
	TRACE_FTRACE_BIT,
	TRACE_FTRACE_NMI_BIT,
	TRACE_FTRACE_IRQ_BIT,
	TRACE_FTRACE_SIRQ_BIT,

	/* GLOBAL_BITs must be greater than FTRACE_BITs */
	TRACE_GLOBAL_BIT,
	TRACE_GLOBAL_NMI_BIT,
	TRACE_GLOBAL_IRQ_BIT,
	TRACE_GLOBAL_SIRQ_BIT,

	/* INTERNAL_BITs must be greater than GLOBAL_BITs */
	TRACE_INTERNAL_BIT,
	TRACE_INTERNAL_NMI_BIT,
	TRACE_INTERNAL_IRQ_BIT,
	TRACE_INTERNAL_SIRQ_BIT,

	TRACE_CONTROL_BIT,

	/*
	 * Abuse of the trace_recursion:
	 * Remembers that a function was called in irq context while irq
	 * tracing is off.  Since trace_recursion can only be modified by
	 * current, reusing it for this flag is safe.  (Part of the
	 * original comment is outside the visible hunk of this diff.)
	 */
	TRACE_IRQ_BIT,
};
360 | |||
/*
 * Per-task recursion flags.  Only current touches its own
 * trace_recursion, so no atomics or locking are needed here.
 * @bit is a TRACE_*_BIT enum value, not a pre-shifted mask.
 */
#define trace_recursion_set(bit)	do { (current)->trace_recursion |= (1<<(bit)); } while (0)
#define trace_recursion_clear(bit)	do { (current)->trace_recursion &= ~(1<<(bit)); } while (0)
#define trace_recursion_test(bit)	((current)->trace_recursion & (1<<(bit)))
364 | |||
/* Each recursion-check level reserves one bit per context (NMI/IRQ/SIRQ/normal) */
#define TRACE_CONTEXT_BITS	4

/* Function callback recursion bits */
#define TRACE_FTRACE_START	TRACE_FTRACE_BIT
#define TRACE_FTRACE_MAX	((1 << (TRACE_FTRACE_START + TRACE_CONTEXT_BITS)) - 1)

/* "Global" list-function recursion bits */
#define TRACE_GLOBAL_START	TRACE_GLOBAL_BIT
#define TRACE_GLOBAL_MAX	((1 << (TRACE_GLOBAL_START + TRACE_CONTEXT_BITS)) - 1)

/* Internal (arch fallback) recursion bits */
#define TRACE_LIST_START	TRACE_INTERNAL_BIT
#define TRACE_LIST_MAX		((1 << (TRACE_LIST_START + TRACE_CONTEXT_BITS)) - 1)

/* Mask covering every recursion bit up to and including the INTERNAL set */
#define TRACE_CONTEXT_MASK	TRACE_LIST_MAX
377 | |||
378 | static __always_inline int trace_get_context_bit(void) | ||
379 | { | ||
380 | int bit; | ||
313 | 381 | ||
314 | #define trace_recursion_set(bit) do { (current)->trace_recursion |= (bit); } while (0) | 382 | if (in_interrupt()) { |
315 | #define trace_recursion_clear(bit) do { (current)->trace_recursion &= ~(bit); } while (0) | 383 | if (in_nmi()) |
316 | #define trace_recursion_test(bit) ((current)->trace_recursion & (bit)) | 384 | bit = 0; |
385 | |||
386 | else if (in_irq()) | ||
387 | bit = 1; | ||
388 | else | ||
389 | bit = 2; | ||
390 | } else | ||
391 | bit = 3; | ||
392 | |||
393 | return bit; | ||
394 | } | ||
395 | |||
396 | static __always_inline int trace_test_and_set_recursion(int start, int max) | ||
397 | { | ||
398 | unsigned int val = current->trace_recursion; | ||
399 | int bit; | ||
400 | |||
401 | /* A previous recursion check was made */ | ||
402 | if ((val & TRACE_CONTEXT_MASK) > max) | ||
403 | return 0; | ||
404 | |||
405 | bit = trace_get_context_bit() + start; | ||
406 | if (unlikely(val & (1 << bit))) | ||
407 | return -1; | ||
408 | |||
409 | val |= 1 << bit; | ||
410 | current->trace_recursion = val; | ||
411 | barrier(); | ||
412 | |||
413 | return bit; | ||
414 | } | ||
415 | |||
416 | static __always_inline void trace_clear_recursion(int bit) | ||
417 | { | ||
418 | unsigned int val = current->trace_recursion; | ||
419 | |||
420 | if (!bit) | ||
421 | return; | ||
422 | |||
423 | bit = 1 << bit; | ||
424 | val &= ~bit; | ||
425 | |||
426 | barrier(); | ||
427 | current->trace_recursion = val; | ||
428 | } | ||
317 | 429 | ||
/* NOTE(review): -1 appears to select "all CPUs" for trace_pipe — confirm against users */
#define TRACE_PIPE_ALL_CPU	-1
319 | 431 | ||
@@ -835,6 +947,8 @@ extern const char *__stop___trace_bprintk_fmt[]; | |||
835 | 947 | ||
836 | void trace_printk_init_buffers(void); | 948 | void trace_printk_init_buffers(void); |
837 | void trace_printk_start_comm(void); | 949 | void trace_printk_start_comm(void); |
950 | int trace_keep_overwrite(struct tracer *tracer, u32 mask, int set); | ||
951 | int set_tracer_flag(unsigned int mask, int enabled); | ||
838 | 952 | ||
839 | #undef FTRACE_ENTRY | 953 | #undef FTRACE_ENTRY |
840 | #define FTRACE_ENTRY(call, struct_name, id, tstruct, print, filter) \ | 954 | #define FTRACE_ENTRY(call, struct_name, id, tstruct, print, filter) \ |