author	Patrick McHardy <kaber@trash.net>	2007-07-03 01:46:07 -0400
committer	David S. Miller <davem@sunset.davemloft.net>	2007-07-11 01:16:37 -0400
commit	876d48aabf30e4981653f1a0a7ae1e262b8c8b6f (patch)
tree	49dace46f70bc243605ecf73af4a3f06e607a2be /net/sched/sch_cbq.c
parent	a553e4a6317b2cfc7659542c10fe43184ffe53da (diff)
[NET_SCHED]: Remove CONFIG_NET_ESTIMATOR option
The generic estimator is always built in anyway; all the config option does is avoid including a minimal amount of code for setting it up. Additionally, the option is already automatically selected for most cases.

Signed-off-by: Patrick McHardy <kaber@trash.net>
Signed-off-by: David S. Miller <davem@davemloft.net>
Diffstat (limited to 'net/sched/sch_cbq.c')
-rw-r--r--	net/sched/sch_cbq.c	8
1 file changed, 0 insertions(+), 8 deletions(-)
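The hunks below all follow one pattern: estimator calls that were wrapped in #ifdef CONFIG_NET_ESTIMATOR now run unconditionally. Here is a minimal sketch of the lifecycle that remains, assuming the 2.6.22-era API: only the gen_*_estimator() calls and their arguments are taken from this diff, while the struct and function names around them are illustrative stand-ins.

/*
 * Estimator lifecycle after CONFIG_NET_ESTIMATOR removal (sketch).
 * The gen_*_estimator() calls mirror sch_cbq.c; everything else here
 * is a hypothetical skeleton for illustration.
 */
#include <linux/spinlock.h>
#include <linux/rtnetlink.h>
#include <net/gen_stats.h>

struct sketch_class {
	struct gnet_stats_basic		bstats;		/* byte/packet counters */
	struct gnet_stats_rate_est	rate_est;	/* estimator output */
	spinlock_t			*stats_lock;	/* protects bstats */
};

/* Class setup: attach a rate estimator only when userspace supplied
 * a TCA_RATE attribute; no build-time option guards this any more. */
static void sketch_class_init(struct sketch_class *cl, struct rtattr **tca)
{
	if (tca[TCA_RATE-1])
		gen_new_estimator(&cl->bstats, &cl->rate_est,
				  cl->stats_lock, tca[TCA_RATE-1]);
}

/* Class teardown: called unconditionally, as in cbq_destroy_class();
 * gen_kill_estimator() simply finds nothing to remove if no estimator
 * was ever attached. */
static void sketch_class_destroy(struct sketch_class *cl)
{
	gen_kill_estimator(&cl->bstats, &cl->rate_est);
}

With the option gone, a qdisc whose classes never receive TCA_RATE pays only for the attribute test and the small setup path, which is the "minimal amount of code" the commit message refers to.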
diff --git a/net/sched/sch_cbq.c b/net/sched/sch_cbq.c
index ee2d5967d109..bf1ea9e75cd9 100644
--- a/net/sched/sch_cbq.c
+++ b/net/sched/sch_cbq.c
@@ -1653,9 +1653,7 @@ cbq_dump_class_stats(struct Qdisc *sch, unsigned long arg,
 	cl->xstats.undertime = cl->undertime - q->now;
 
 	if (gnet_stats_copy_basic(d, &cl->bstats) < 0 ||
-#ifdef CONFIG_NET_ESTIMATOR
 	    gnet_stats_copy_rate_est(d, &cl->rate_est) < 0 ||
-#endif
 	    gnet_stats_copy_queue(d, &cl->qstats) < 0)
 		return -1;
 
@@ -1726,9 +1724,7 @@ static void cbq_destroy_class(struct Qdisc *sch, struct cbq_class *cl)
 	tcf_destroy_chain(cl->filter_list);
 	qdisc_destroy(cl->q);
 	qdisc_put_rtab(cl->R_tab);
-#ifdef CONFIG_NET_ESTIMATOR
 	gen_kill_estimator(&cl->bstats, &cl->rate_est);
-#endif
 	if (cl != &q->link)
 		kfree(cl);
 }
@@ -1873,11 +1869,9 @@ cbq_change_class(struct Qdisc *sch, u32 classid, u32 parentid, struct rtattr **t
 
 		sch_tree_unlock(sch);
 
-#ifdef CONFIG_NET_ESTIMATOR
 		if (tca[TCA_RATE-1])
 			gen_replace_estimator(&cl->bstats, &cl->rate_est,
 					      cl->stats_lock, tca[TCA_RATE-1]);
-#endif
 		return 0;
 	}
 
@@ -1963,11 +1957,9 @@ cbq_change_class(struct Qdisc *sch, u32 classid, u32 parentid, struct rtattr **t
 		cbq_set_fopt(cl, RTA_DATA(tb[TCA_CBQ_FOPT-1]));
 	sch_tree_unlock(sch);
 
-#ifdef CONFIG_NET_ESTIMATOR
 	if (tca[TCA_RATE-1])
 		gen_new_estimator(&cl->bstats, &cl->rate_est,
 				  cl->stats_lock, tca[TCA_RATE-1]);
-#endif
 
 	*arg = (unsigned long)cl;
 	return 0;