author		Patrick McHardy <kaber@trash.net>	2008-01-23 01:11:33 -0500
committer	David S. Miller <davem@davemloft.net>	2008-01-28 18:11:11 -0500
commit		add93b610a4e66d36d0cf0b2596c3d3bcfdaee39 (patch)
tree		073873879eb3b87981ee015f0f1ca48da8f1c696 /net/sched/ematch.c
parent		1e90474c377e92db7262a8968a45c1dd980ca9e5 (diff)
[NET_SCHED]: Convert classifiers from rtnetlink to new netlink API
Signed-off-by: Patrick McHardy <kaber@trash.net>
Signed-off-by: David S. Miller <davem@davemloft.net>
Diffstat (limited to 'net/sched/ematch.c')
-rw-r--r--	net/sched/ematch.c | 74
1 file changed, 37 insertions(+), 37 deletions(-)
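The patch is a one-for-one conversion of the attribute handling in ematch.c from the old rtnetlink macros to the new netlink attribute helpers. As a rough sketch of the mapping applied throughout the diff below (parse_old() and parse_new() are hypothetical names made up for this note, not functions in the patch), the same ematch header access reads as follows before and after:

#include <linux/errno.h>
#include <linux/string.h>
#include <linux/rtnetlink.h>
#include <linux/pkt_cls.h>
#include <net/netlink.h>

/* Old API: payload pointer and length via the RTA_* macros. */
static int parse_old(struct rtattr *rta, struct tcf_ematch_hdr *hdr)
{
	if (RTA_PAYLOAD(rta) < sizeof(*hdr))
		return -EINVAL;
	memcpy(hdr, RTA_DATA(rta), sizeof(*hdr));
	return RTA_PAYLOAD(rta) - sizeof(*hdr);	/* bytes left after the header */
}

/* New API: the same access through nla_len()/nla_data(). */
static int parse_new(struct nlattr *nla, struct tcf_ematch_hdr *hdr)
{
	if (nla_len(nla) < sizeof(*hdr))
		return -EINVAL;
	memcpy(hdr, nla_data(nla), sizeof(*hdr));
	return nla_len(nla) - sizeof(*hdr);
}

The rest of the patch follows the same substitutions: rtattr_parse_nested() becomes nla_parse_nested(), RTA_OK()/RTA_NEXT() become nla_ok()/nla_next(), and RTA_PUT()/RTA_PUT_NOHDR() become NLA_PUT()/nla_put_nohdr() on the dump side.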
diff --git a/net/sched/ematch.c b/net/sched/ematch.c
index 27941cfc0ab5..72d9b2735245 100644
--- a/net/sched/ematch.c
+++ b/net/sched/ematch.c
@@ -183,11 +183,11 @@ static inline struct tcf_ematch * tcf_em_get_match(struct tcf_ematch_tree *tree,
 
 static int tcf_em_validate(struct tcf_proto *tp,
 			   struct tcf_ematch_tree_hdr *tree_hdr,
-			   struct tcf_ematch *em, struct rtattr *rta, int idx)
+			   struct tcf_ematch *em, struct nlattr *nla, int idx)
 {
 	int err = -EINVAL;
-	struct tcf_ematch_hdr *em_hdr = RTA_DATA(rta);
-	int data_len = RTA_PAYLOAD(rta) - sizeof(*em_hdr);
+	struct tcf_ematch_hdr *em_hdr = nla_data(nla);
+	int data_len = nla_len(nla) - sizeof(*em_hdr);
 	void *data = (void *) em_hdr + sizeof(*em_hdr);
 
 	if (!TCF_EM_REL_VALID(em_hdr->flags))
@@ -286,11 +286,11 @@ errout:
  * tcf_em_tree_validate - validate ematch config TLV and build ematch tree
  *
  * @tp: classifier kind handle
- * @rta: ematch tree configuration TLV
+ * @nla: ematch tree configuration TLV
  * @tree: destination ematch tree variable to store the resulting
  *        ematch tree.
  *
- * This function validates the given configuration TLV @rta and builds an
+ * This function validates the given configuration TLV @nla and builds an
  * ematch tree in @tree. The resulting tree must later be copied into
  * the private classifier data using tcf_em_tree_change(). You MUST NOT
  * provide the ematch tree variable of the private classifier data directly,
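The kernel-doc above spells out the intended calling convention: validate into a local tree first, then copy the result into the classifier's private data with tcf_em_tree_change(). A minimal sketch of that pattern from a classifier's point of view (the classifier name, its filter struct and the ematch_attr parameter are invented for illustration; only the tcf_em_tree_* calls come from this file, and the exact tcf_em_tree_change() signature should be checked against include/net/pkt_cls.h):

#include <net/pkt_cls.h>

/* Hypothetical private per-filter data of an example classifier. */
struct example_filter {
	struct tcf_ematch_tree	ematches;
	/* ... other classifier state ... */
};

static int example_change(struct tcf_proto *tp, struct example_filter *f,
			  struct nlattr *ematch_attr)
{
	struct tcf_ematch_tree tree;	/* local tree, NOT &f->ematches */
	int err;

	err = tcf_em_tree_validate(tp, ematch_attr, &tree);
	if (err < 0)
		return err;

	/* Only once validation succeeded is the result installed into the
	 * private data, as the comment above demands. */
	tcf_em_tree_change(tp, &f->ematches, &tree);
	return 0;
}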
@@ -298,45 +298,45 @@ errout:
  *
  * Returns a negative error code if the configuration TLV contains errors.
  */
-int tcf_em_tree_validate(struct tcf_proto *tp, struct rtattr *rta,
+int tcf_em_tree_validate(struct tcf_proto *tp, struct nlattr *nla,
 			 struct tcf_ematch_tree *tree)
 {
 	int idx, list_len, matches_len, err = -EINVAL;
-	struct rtattr *tb[TCA_EMATCH_TREE_MAX];
-	struct rtattr *rt_match, *rt_hdr, *rt_list;
+	struct nlattr *tb[TCA_EMATCH_TREE_MAX + 1];
+	struct nlattr *rt_match, *rt_hdr, *rt_list;
 	struct tcf_ematch_tree_hdr *tree_hdr;
 	struct tcf_ematch *em;
 
-	if (!rta) {
+	if (!nla) {
 		memset(tree, 0, sizeof(*tree));
 		return 0;
 	}
 
-	if (rtattr_parse_nested(tb, TCA_EMATCH_TREE_MAX, rta) < 0)
+	if (nla_parse_nested(tb, TCA_EMATCH_TREE_MAX, nla, NULL) < 0)
 		goto errout;
 
-	rt_hdr = tb[TCA_EMATCH_TREE_HDR-1];
-	rt_list = tb[TCA_EMATCH_TREE_LIST-1];
+	rt_hdr = tb[TCA_EMATCH_TREE_HDR];
+	rt_list = tb[TCA_EMATCH_TREE_LIST];
 
 	if (rt_hdr == NULL || rt_list == NULL)
 		goto errout;
 
-	if (RTA_PAYLOAD(rt_hdr) < sizeof(*tree_hdr) ||
-	    RTA_PAYLOAD(rt_list) < sizeof(*rt_match))
+	if (nla_len(rt_hdr) < sizeof(*tree_hdr) ||
+	    nla_len(rt_list) < sizeof(*rt_match))
 		goto errout;
 
-	tree_hdr = RTA_DATA(rt_hdr);
+	tree_hdr = nla_data(rt_hdr);
 	memcpy(&tree->hdr, tree_hdr, sizeof(*tree_hdr));
 
-	rt_match = RTA_DATA(rt_list);
-	list_len = RTA_PAYLOAD(rt_list);
+	rt_match = nla_data(rt_list);
+	list_len = nla_len(rt_list);
 	matches_len = tree_hdr->nmatches * sizeof(*em);
 
 	tree->matches = kzalloc(matches_len, GFP_KERNEL);
 	if (tree->matches == NULL)
 		goto errout;
 
-	/* We do not use rtattr_parse_nested here because the maximum
+	/* We do not use nla_parse_nested here because the maximum
 	 * number of attributes is unknown. This saves us the allocation
 	 * for a tb buffer which would serve no purpose at all.
 	 *
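Two details of the new parsing API show up in the hunk above: the tb array is sized TCA_EMATCH_TREE_MAX + 1 and indexed directly by attribute type (the rtnetlink variant used MAX entries with a "- 1" offset), and nla_parse_nested() takes an optional policy argument, passed as NULL here. A stand-alone sketch of the same pattern (parse_tree_hdr() is a made-up helper, not part of the patch):

#include <linux/errno.h>
#include <linux/string.h>
#include <linux/pkt_cls.h>
#include <net/netlink.h>

/* Hypothetical helper: pull TCA_EMATCH_TREE_HDR out of a nested TLV. */
static int parse_tree_hdr(struct nlattr *opt, struct tcf_ematch_tree_hdr *hdr)
{
	struct nlattr *tb[TCA_EMATCH_TREE_MAX + 1];	/* indexed by type, slot 0 unused */

	if (nla_parse_nested(tb, TCA_EMATCH_TREE_MAX, opt, NULL) < 0)
		return -EINVAL;

	if (tb[TCA_EMATCH_TREE_HDR] == NULL ||
	    nla_len(tb[TCA_EMATCH_TREE_HDR]) < sizeof(*hdr))
		return -EINVAL;

	memcpy(hdr, nla_data(tb[TCA_EMATCH_TREE_HDR]), sizeof(*hdr));
	return 0;
}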
@@ -344,16 +344,16 @@ int tcf_em_tree_validate(struct tcf_proto *tp, struct rtattr *rta,
 	 * provided, their type must be incremental from 1 to n. Even
 	 * if it does not serve any real purpose, a failure of sticking
 	 * to this policy will result in parsing failure. */
-	for (idx = 0; RTA_OK(rt_match, list_len); idx++) {
+	for (idx = 0; nla_ok(rt_match, list_len); idx++) {
 		err = -EINVAL;
 
-		if (rt_match->rta_type != (idx + 1))
+		if (rt_match->nla_type != (idx + 1))
 			goto errout_abort;
 
 		if (idx >= tree_hdr->nmatches)
 			goto errout_abort;
 
-		if (RTA_PAYLOAD(rt_match) < sizeof(struct tcf_ematch_hdr))
+		if (nla_len(rt_match) < sizeof(struct tcf_ematch_hdr))
 			goto errout_abort;
 
 		em = tcf_em_get_match(tree, idx);
@@ -362,7 +362,7 @@ int tcf_em_tree_validate(struct tcf_proto *tp, struct rtattr *rta,
 		if (err < 0)
 			goto errout_abort;
 
-		rt_match = RTA_NEXT(rt_match, list_len);
+		rt_match = nla_next(rt_match, &list_len);
 	}
 
 	/* Check if the number of matches provided by userspace actually
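Because the number of nested match attributes is only known from the tree header, the loop above walks the attribute stream by hand with nla_ok()/nla_next() rather than going through nla_parse_nested() a second time. The shape of that walk, isolated (count_matches() is a hypothetical helper written for this note):

#include <linux/errno.h>
#include <net/netlink.h>

/* Count the nested attributes in a list TLV, enforcing the 1..n typing
 * rule described in the comment above. */
static int count_matches(struct nlattr *list)
{
	struct nlattr *a = nla_data(list);	/* first nested attribute */
	int rem = nla_len(list);		/* bytes remaining in the list */
	int n = 0;

	for (; nla_ok(a, rem); a = nla_next(a, &rem)) {
		if (a->nla_type != n + 1)	/* types must run 1, 2, ..., n */
			return -EINVAL;
		n++;
	}
	return n;
}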
@@ -434,18 +434,18 @@ int tcf_em_tree_dump(struct sk_buff *skb, struct tcf_ematch_tree *tree, int tlv)
 {
 	int i;
 	u8 *tail;
-	struct rtattr *top_start = (struct rtattr *)skb_tail_pointer(skb);
-	struct rtattr *list_start;
+	struct nlattr *top_start = (struct nlattr *)skb_tail_pointer(skb);
+	struct nlattr *list_start;
 
-	RTA_PUT(skb, tlv, 0, NULL);
-	RTA_PUT(skb, TCA_EMATCH_TREE_HDR, sizeof(tree->hdr), &tree->hdr);
+	NLA_PUT(skb, tlv, 0, NULL);
+	NLA_PUT(skb, TCA_EMATCH_TREE_HDR, sizeof(tree->hdr), &tree->hdr);
 
-	list_start = (struct rtattr *)skb_tail_pointer(skb);
-	RTA_PUT(skb, TCA_EMATCH_TREE_LIST, 0, NULL);
+	list_start = (struct nlattr *)skb_tail_pointer(skb);
+	NLA_PUT(skb, TCA_EMATCH_TREE_LIST, 0, NULL);
 
 	tail = skb_tail_pointer(skb);
 	for (i = 0; i < tree->hdr.nmatches; i++) {
-		struct rtattr *match_start = (struct rtattr *)tail;
+		struct nlattr *match_start = (struct nlattr *)tail;
 		struct tcf_ematch *em = tcf_em_get_match(tree, i);
 		struct tcf_ematch_hdr em_hdr = {
 			.kind = em->ops ? em->ops->kind : TCF_EM_CONTAINER,
@@ -453,27 +453,27 @@ int tcf_em_tree_dump(struct sk_buff *skb, struct tcf_ematch_tree *tree, int tlv)
 			.flags = em->flags
 		};
 
-		RTA_PUT(skb, i+1, sizeof(em_hdr), &em_hdr);
+		NLA_PUT(skb, i+1, sizeof(em_hdr), &em_hdr);
 
 		if (em->ops && em->ops->dump) {
 			if (em->ops->dump(skb, em) < 0)
-				goto rtattr_failure;
+				goto nla_put_failure;
 		} else if (tcf_em_is_container(em) || tcf_em_is_simple(em)) {
 			u32 u = em->data;
-			RTA_PUT_NOHDR(skb, sizeof(u), &u);
+			nla_put_nohdr(skb, sizeof(u), &u);
 		} else if (em->datalen > 0)
-			RTA_PUT_NOHDR(skb, em->datalen, (void *) em->data);
+			nla_put_nohdr(skb, em->datalen, (void *) em->data);
 
 		tail = skb_tail_pointer(skb);
-		match_start->rta_len = tail - (u8 *)match_start;
+		match_start->nla_len = tail - (u8 *)match_start;
 	}
 
-	list_start->rta_len = tail - (u8 *)list_start;
-	top_start->rta_len = tail - (u8 *)top_start;
+	list_start->nla_len = tail - (u8 *)list_start;
+	top_start->nla_len = tail - (u8 *)top_start;
 
 	return 0;
 
-rtattr_failure:
+nla_put_failure:
 	return -1;
 }
 EXPORT_SYMBOL(tcf_em_tree_dump);
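On the dump side the conversion swaps RTA_PUT()/RTA_PUT_NOHDR() for NLA_PUT()/nla_put_nohdr() and the error label rtattr_failure for nla_put_failure, but keeps the existing style of opening zero-length attributes and patching their nla_len once the payload is in place. A reduced sketch of that pattern (dump_blob() and its attr parameter are invented for illustration):

#include <linux/skbuff.h>
#include <net/netlink.h>

/* Emit one attribute whose payload is appended headerless and whose
 * length is fixed up by hand afterwards, as tcf_em_tree_dump() does. */
static int dump_blob(struct sk_buff *skb, int attr, const void *payload, int len)
{
	struct nlattr *start = (struct nlattr *)skb_tail_pointer(skb);

	NLA_PUT(skb, attr, 0, NULL);		/* empty attribute, length 0 */
	nla_put_nohdr(skb, len, payload);	/* raw payload, no nested header */

	/* backfill the real attribute length now that the payload is there */
	start->nla_len = skb_tail_pointer(skb) - (unsigned char *)start;
	return 0;

nla_put_failure:
	return -1;
}

nla_nest_start()/nla_nest_end() would achieve the same nesting, but the patch stays with the manual length fix-up the rtnetlink code already used.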