path: root/kernel/trace
author     Steven Rostedt <srostedt@redhat.com>  2011-01-27 23:16:51 -0500
committer  Steven Rostedt <rostedt@goodmis.org>  2011-02-07 20:56:19 -0500
commit     43cd414552d8137157e926e46361678ea867e476 (patch)
tree       cbb50e071a8149e2da7be995166f487bbcb5172a /kernel/trace
parent     ec126cac23945de12eb2d103374e1f7ee97c5595 (diff)
tracing/filter: Optimize filter by folding the tree
There are many cases where a filter will contain multiple ORs or ANDs together near the leaves. Walking up and down the tree to get to the next compare can be a waste.

If there are several ORs or ANDs together, fold them into a single pred and allocate an array of the conditions that they check. This speeds up the filter by walking an array linearly, while still breaking out early if a short-circuit condition is met.

Cc: Tom Zanussi <tzanussi@gmail.com>
Signed-off-by: Steven Rostedt <rostedt@goodmis.org>
Diffstat (limited to 'kernel/trace')
-rw-r--r--  kernel/trace/trace.h               |  12
-rw-r--r--  kernel/trace/trace_events_filter.c | 233
2 files changed, 235 insertions(+), 10 deletions(-)
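The commit message describes the core idea: a run of predicates joined by the same operator is kept as a flat array and scanned linearly, short-circuiting as soon as one result decides the whole run. Below is a minimal, standalone C sketch of that idea (not part of the patch); the names sample_pred, eval_folded, OP_AND/OP_OR and the toy leaf callbacks are simplified stand-ins, not the kernel's real interfaces.

#include <stdio.h>

enum { OP_AND, OP_OR };

struct sample_pred {
        int (*fn)(void *rec);           /* leaf compare callback */
};

/*
 * Evaluate a folded run of predicates that are all joined by 'op'.
 * For OR, the first true child decides the result; for AND, the
 * first false child does.  This mirrors the short-circuit test used
 * by process_ops() in the patch below.
 */
static int eval_folded(int op, struct sample_pred *preds, int nr, void *rec)
{
        int type = (op == OP_OR);
        int match = !type;              /* identity value if the run is empty */
        int i;

        for (i = 0; i < nr; i++) {
                match = preds[i].fn(rec);
                if (!!match == type)
                        return match;   /* short circuit */
        }
        return match;
}

/* Toy leaf predicates standing in for field compares. */
static int is_positive(void *rec) { return *(int *)rec > 0; }
static int is_even(void *rec)     { return (*(int *)rec & 1) == 0; }

int main(void)
{
        struct sample_pred preds[] = { { is_positive }, { is_even } };
        int rec = 6;

        printf("AND of both: %d\n", eval_folded(OP_AND, preds, 2, &rec)); /* 1 */
        printf("OR of both:  %d\n", eval_folded(OP_OR, preds, 2, &rec));  /* 1, after one check */
        return 0;
}

The win is that the hot path becomes a linear scan over an index array instead of repeated parent/child pointer chasing through the tree, while keeping the same early-exit behaviour.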
diff --git a/kernel/trace/trace.h b/kernel/trace/trace.h
index bba34a72c780..d754330934bb 100644
--- a/kernel/trace/trace.h
+++ b/kernel/trace/trace.h
@@ -678,6 +678,7 @@ struct event_subsystem {
 
 #define FILTER_PRED_INVALID     ((unsigned short)-1)
 #define FILTER_PRED_IS_RIGHT    (1 << 15)
+#define FILTER_PRED_FOLD        (1 << 15)
 
 struct filter_pred;
 struct regex;
@@ -704,7 +705,16 @@ struct filter_pred {
         filter_pred_fn_t        fn;
         u64                     val;
         struct regex            regex;
-        char                    *field_name;
+        /*
+         * Leaf nodes use field_name, ops is used by AND and OR
+         * nodes. The field_name is always freed when freeing a pred.
+         * We can overload field_name for ops and have it freed
+         * as well.
+         */
+        union {
+                char            *field_name;
+                unsigned short  *ops;
+        };
         int                     offset;
         int                     not;
         int                     op;
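A side note on the union added to struct filter_pred above: because field_name and ops share storage, the existing kfree() of field_name also releases the ops array of a folded node, exactly as the comment says. A small userspace illustration of that layout (struct node, free_node_storage and the sample values are made up for this sketch):

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct node {
        union {                         /* anonymous union, as in the patch */
                char *field_name;       /* used by leaf nodes */
                unsigned short *ops;    /* used by folded AND/OR nodes */
        };
};

static void free_node_storage(struct node *n)
{
        /* Whichever member was allocated, it is the same pointer. */
        free(n->field_name);
}

int main(void)
{
        struct node leaf, op;

        leaf.field_name = malloc(16);
        strcpy(leaf.field_name, "common_pid");

        op.ops = calloc(4, sizeof(unsigned short));
        op.ops[0] = 3;                  /* index of a child predicate */

        printf("leaf uses field_name: %s\n", leaf.field_name);
        printf("op uses ops[0]: %hu\n", op.ops[0]);

        free_node_storage(&leaf);
        free_node_storage(&op);
        return 0;
}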
diff --git a/kernel/trace/trace_events_filter.c b/kernel/trace/trace_events_filter.c
index 91c9cdcb040b..2403ce5b6507 100644
--- a/kernel/trace/trace_events_filter.c
+++ b/kernel/trace/trace_events_filter.c
@@ -381,6 +381,42 @@ get_pred_parent(struct filter_pred *pred, struct filter_pred *preds,
         return pred;
 }
 
+/*
+ * A series of AND or ORs were found together. Instead of
+ * climbing up and down the tree branches, an array of the
+ * ops was made in order of checks. We can just move across
+ * the array and short circuit if needed.
+ */
+static int process_ops(struct filter_pred *preds,
+                       struct filter_pred *op, void *rec)
+{
+        struct filter_pred *pred;
+        int type;
+        int match;
+        int i;
+
+        /*
+         * Micro-optimization: We set type to true if op
+         * is an OR and false otherwise (AND). Then we
+         * just need to test if the match is equal to
+         * the type, and if it is, we can short circuit the
+         * rest of the checks:
+         *
+         *  if ((match && op->op == OP_OR) ||
+         *      (!match && op->op == OP_AND))
+         *          return match;
+         */
+        type = op->op == OP_OR;
+
+        for (i = 0; i < op->val; i++) {
+                pred = &preds[op->ops[i]];
+                match = pred->fn(pred, rec);
+                if (!!match == type)
+                        return match;
+        }
+        return match;
+}
+
 /* return 1 if event matches, 0 otherwise (discard) */
 int filter_match_preds(struct event_filter *filter, void *rec)
 {
@@ -414,11 +450,16 @@ int filter_match_preds(struct event_filter *filter, void *rec)
                 case MOVE_DOWN:
                         /* only AND and OR have children */
                         if (pred->left != FILTER_PRED_INVALID) {
-                                /* keep going to leaf node */
-                                pred = &preds[pred->left];
-                                continue;
-                        }
-                        match = pred->fn(pred, rec);
+                                /* If ops is set, then it was folded. */
+                                if (!pred->ops) {
+                                        /* keep going down the left side */
+                                        pred = &preds[pred->left];
+                                        continue;
+                                }
+                                /* We can treat folded ops as a leaf node */
+                                match = process_ops(preds, pred, rec);
+                        } else
+                                match = pred->fn(pred, rec);
                         /* If this pred is the only pred */
                         if (pred == root)
                                 break;
@@ -659,17 +700,34 @@ static int filter_set_pred(struct event_filter *filter,
                 left = __pop_pred_stack(stack);
                 if (!left || !right)
                         return -EINVAL;
-                dest->left = left->index;
-                dest->right = right->index;
-                left->parent = dest->index;
+                /*
+                 * If both children can be folded
+                 * and they are the same op as this op or a leaf,
+                 * then this op can be folded.
+                 */
+                if (left->index & FILTER_PRED_FOLD &&
+                    (left->op == dest->op ||
+                     left->left == FILTER_PRED_INVALID) &&
+                    right->index & FILTER_PRED_FOLD &&
+                    (right->op == dest->op ||
+                     right->left == FILTER_PRED_INVALID))
+                        dest->index |= FILTER_PRED_FOLD;
+
+                dest->left = left->index & ~FILTER_PRED_FOLD;
+                dest->right = right->index & ~FILTER_PRED_FOLD;
+                left->parent = dest->index & ~FILTER_PRED_FOLD;
                 right->parent = dest->index | FILTER_PRED_IS_RIGHT;
-        } else
+        } else {
                 /*
                  * Make dest->left invalid to be used as a quick
                  * way to know this is a leaf node.
                  */
                 dest->left = FILTER_PRED_INVALID;
 
+                /* All leafs allow folding the parent ops. */
+                dest->index |= FILTER_PRED_FOLD;
+        }
+
         return __push_pred_stack(stack, dest);
 }
 
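The fold flag set in filter_set_pred() above follows a simple bottom-up rule: every leaf is foldable, and an AND/OR node is foldable when both of its children are foldable and each child is either a leaf or uses the same operator. A standalone sketch of just that rule, with toy types (toy_pred, can_fold) that are not the kernel's:

#include <stdio.h>

enum { OP_AND, OP_OR };

struct toy_pred {
        int op;                         /* OP_AND or OP_OR; ignored for leaves */
        struct toy_pred *left;          /* NULL marks a leaf */
        struct toy_pred *right;
};

static int can_fold(struct toy_pred *p)
{
        if (!p->left)                   /* all leaves allow folding their parent */
                return 1;
        return can_fold(p->left) && can_fold(p->right) &&
               (!p->left->left  || p->left->op  == p->op) &&
               (!p->right->left || p->right->op == p->op);
}

int main(void)
{
        struct toy_pred a = { 0 }, b = { 0 }, c = { 0 };        /* leaves */
        struct toy_pred or1  = { OP_OR,  &a,   &b };            /* a || b */
        struct toy_pred or2  = { OP_OR,  &or1, &c };            /* (a || b) || c */
        struct toy_pred and1 = { OP_AND, &or1, &c };            /* (a || b) && c */

        printf("or2 foldable:  %d\n", can_fold(&or2));          /* 1: one flat OR run */
        printf("and1 foldable: %d\n", can_fold(&and1));         /* 0: mixes AND and OR */
        return 0;
}

The kernel computes this flag incrementally while building the postfix predicate list rather than with a recursive pass, but the condition being tested is the same.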
@@ -1420,6 +1478,158 @@ static int check_pred_tree(struct event_filter *filter,
         return 0;
 }
 
+static int count_leafs(struct filter_pred *preds, struct filter_pred *root)
+{
+        struct filter_pred *pred;
+        enum move_type move = MOVE_DOWN;
+        int count = 0;
+        int done = 0;
+
+        pred = root;
+
+        do {
+                switch (move) {
+                case MOVE_DOWN:
+                        if (pred->left != FILTER_PRED_INVALID) {
+                                pred = &preds[pred->left];
+                                continue;
+                        }
+                        /* A leaf at the root is just a leaf in the tree */
+                        if (pred == root)
+                                return 1;
+                        count++;
+                        pred = get_pred_parent(pred, preds,
+                                               pred->parent, &move);
+                        continue;
+                case MOVE_UP_FROM_LEFT:
+                        pred = &preds[pred->right];
+                        move = MOVE_DOWN;
+                        continue;
+                case MOVE_UP_FROM_RIGHT:
+                        if (pred == root)
+                                break;
+                        pred = get_pred_parent(pred, preds,
+                                               pred->parent, &move);
+                        continue;
+                }
+                done = 1;
+        } while (!done);
+
+        return count;
+}
+
+static int fold_pred(struct filter_pred *preds, struct filter_pred *root)
+{
+        struct filter_pred *pred;
+        enum move_type move = MOVE_DOWN;
+        int count = 0;
+        int children;
+        int done = 0;
+
+        /* No need to keep the fold flag */
+        root->index &= ~FILTER_PRED_FOLD;
+
+        /* If the root is a leaf then do nothing */
+        if (root->left == FILTER_PRED_INVALID)
+                return 0;
+
+        /* count the children */
+        children = count_leafs(preds, &preds[root->left]);
+        children += count_leafs(preds, &preds[root->right]);
+
+        root->ops = kzalloc(sizeof(*root->ops) * children, GFP_KERNEL);
+        if (!root->ops)
+                return -ENOMEM;
+
+        root->val = children;
+
+        pred = root;
+        do {
+                switch (move) {
+                case MOVE_DOWN:
+                        if (pred->left != FILTER_PRED_INVALID) {
+                                pred = &preds[pred->left];
+                                continue;
+                        }
+                        if (WARN_ON(count == children))
+                                return -EINVAL;
+                        pred->index &= ~FILTER_PRED_FOLD;
+                        root->ops[count++] = pred->index;
+                        pred = get_pred_parent(pred, preds,
+                                               pred->parent, &move);
+                        continue;
+                case MOVE_UP_FROM_LEFT:
+                        pred = &preds[pred->right];
+                        move = MOVE_DOWN;
+                        continue;
+                case MOVE_UP_FROM_RIGHT:
+                        if (pred == root)
+                                break;
+                        pred = get_pred_parent(pred, preds,
+                                               pred->parent, &move);
+                        continue;
+                }
+                done = 1;
+        } while (!done);
+
+        return 0;
+}
+
+/*
+ * To optimize the processing of the ops, if we have several "ors" or
+ * "ands" together, we can put them in an array and process them all
+ * together speeding up the filter logic.
+ */
+static int fold_pred_tree(struct event_filter *filter,
+                          struct filter_pred *root)
+{
+        struct filter_pred *preds;
+        struct filter_pred *pred;
+        enum move_type move = MOVE_DOWN;
+        int done = 0;
+        int err;
+
+        preds = filter->preds;
+        if (!preds)
+                return -EINVAL;
+        pred = root;
+
+        do {
+                switch (move) {
+                case MOVE_DOWN:
+                        if (pred->index & FILTER_PRED_FOLD) {
+                                err = fold_pred(preds, pred);
+                                if (err)
+                                        return err;
+                                /* Folded nodes are like leafs */
+                        } else if (pred->left != FILTER_PRED_INVALID) {
+                                pred = &preds[pred->left];
+                                continue;
+                        }
+
+                        /* A leaf at the root is just a leaf in the tree */
+                        if (pred == root)
+                                break;
+                        pred = get_pred_parent(pred, preds,
+                                               pred->parent, &move);
+                        continue;
+                case MOVE_UP_FROM_LEFT:
+                        pred = &preds[pred->right];
+                        move = MOVE_DOWN;
+                        continue;
+                case MOVE_UP_FROM_RIGHT:
+                        if (pred == root)
+                                break;
+                        pred = get_pred_parent(pred, preds,
+                                               pred->parent, &move);
+                        continue;
+                }
+                done = 1;
+        } while (!done);
+
+        return 0;
+}
+
 static int replace_preds(struct ftrace_event_call *call,
                          struct event_filter *filter,
                          struct filter_parse_state *ps,
@@ -1517,6 +1727,11 @@ add_pred:
         if (err)
                 goto fail;
 
+        /* Optimize the tree */
+        err = fold_pred_tree(filter, root);
+        if (err)
+                goto fail;
+
         /* We don't set root until we know it works */
         barrier();
         filter->root = root;
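Finally, a quick standalone check (again, not kernel code) of the micro-optimization documented in process_ops() above: with type = (op == OP_OR), the single comparison (!!match == type) agrees with the longer (match && OP_OR) || (!match && OP_AND) form in every case.

#include <assert.h>
#include <stdio.h>

enum { OP_AND, OP_OR };

int main(void)
{
        for (int op = OP_AND; op <= OP_OR; op++) {
                for (int match = 0; match <= 1; match++) {
                        int type = (op == OP_OR);
                        int fast = (!!match == type);
                        int slow = (match && op == OP_OR) ||
                                   (!match && op == OP_AND);
                        assert(fast == slow);
                }
        }
        printf("short-circuit test agrees in all four cases\n");
        return 0;
}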