Diffstat (limited to 'arch/sparc/net')
-rw-r--r--	arch/sparc/net/bpf_jit_comp.c	154
1 file changed, 73 insertions, 81 deletions
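
The diff below switches the sparc BPF JIT from matching on the kernel-internal BPF_S_* instruction enum to matching on raw classic-BPF opcodes, decoded once per instruction via bpf_anc_helper(). As a minimal illustrative sketch (not part of this patch; check_add_x() is a hypothetical helper), the opcode values the new case labels compare against are simply the OR of the class, operation and source fields from <linux/filter.h>:

/* Illustrative only: how a classic BPF opcode such as
 * BPF_ALU | BPF_ADD | BPF_X decomposes into its standard fields.
 */
#include <stdbool.h>
#include <linux/filter.h>

static bool check_add_x(const struct sock_filter *insn)
{
	/* "A += X" is encoded as class ALU, op ADD, source X */
	return insn->code == (BPF_ALU | BPF_ADD | BPF_X) &&
	       BPF_CLASS(insn->code) == BPF_ALU &&
	       BPF_OP(insn->code) == BPF_ADD &&
	       BPF_SRC(insn->code) == BPF_X;
}

Because the old BPF_S_* values lived in a separate internal enum rather than in this opcode space, every case label in the JIT's dispatch switches is rewritten rather than merely renamed.
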
diff --git a/arch/sparc/net/bpf_jit_comp.c b/arch/sparc/net/bpf_jit_comp.c
index 49cee4af16f4..892a102671ad 100644
--- a/arch/sparc/net/bpf_jit_comp.c
+++ b/arch/sparc/net/bpf_jit_comp.c
@@ -415,20 +415,11 @@ void bpf_jit_compile(struct sk_filter *fp)
 		emit_reg_move(O7, r_saved_O7);

 		switch (filter[0].code) {
-		case BPF_S_RET_K:
-		case BPF_S_LD_W_LEN:
-		case BPF_S_ANC_PROTOCOL:
-		case BPF_S_ANC_PKTTYPE:
-		case BPF_S_ANC_IFINDEX:
-		case BPF_S_ANC_MARK:
-		case BPF_S_ANC_RXHASH:
-		case BPF_S_ANC_VLAN_TAG:
-		case BPF_S_ANC_VLAN_TAG_PRESENT:
-		case BPF_S_ANC_CPU:
-		case BPF_S_ANC_QUEUE:
-		case BPF_S_LD_W_ABS:
-		case BPF_S_LD_H_ABS:
-		case BPF_S_LD_B_ABS:
+		case BPF_RET | BPF_K:
+		case BPF_LD | BPF_W | BPF_LEN:
+		case BPF_LD | BPF_W | BPF_ABS:
+		case BPF_LD | BPF_H | BPF_ABS:
+		case BPF_LD | BPF_B | BPF_ABS:
 			/* The first instruction sets the A register (or is
 			 * a "RET 'constant'")
 			 */
@@ -445,59 +436,60 @@ void bpf_jit_compile(struct sk_filter *fp)
 			unsigned int t_offset;
 			unsigned int f_offset;
 			u32 t_op, f_op;
+			u16 code = bpf_anc_helper(&filter[i]);
 			int ilen;

-			switch (filter[i].code) {
-			case BPF_S_ALU_ADD_X:	/* A += X; */
+			switch (code) {
+			case BPF_ALU | BPF_ADD | BPF_X:	/* A += X; */
 				emit_alu_X(ADD);
 				break;
-			case BPF_S_ALU_ADD_K:	/* A += K; */
+			case BPF_ALU | BPF_ADD | BPF_K:	/* A += K; */
 				emit_alu_K(ADD, K);
 				break;
-			case BPF_S_ALU_SUB_X:	/* A -= X; */
+			case BPF_ALU | BPF_SUB | BPF_X:	/* A -= X; */
 				emit_alu_X(SUB);
 				break;
-			case BPF_S_ALU_SUB_K:	/* A -= K */
+			case BPF_ALU | BPF_SUB | BPF_K:	/* A -= K */
 				emit_alu_K(SUB, K);
 				break;
-			case BPF_S_ALU_AND_X:	/* A &= X */
+			case BPF_ALU | BPF_AND | BPF_X:	/* A &= X */
 				emit_alu_X(AND);
 				break;
-			case BPF_S_ALU_AND_K:	/* A &= K */
+			case BPF_ALU | BPF_AND | BPF_K:	/* A &= K */
 				emit_alu_K(AND, K);
 				break;
-			case BPF_S_ALU_OR_X:	/* A |= X */
+			case BPF_ALU | BPF_OR | BPF_X:	/* A |= X */
 				emit_alu_X(OR);
 				break;
-			case BPF_S_ALU_OR_K:	/* A |= K */
+			case BPF_ALU | BPF_OR | BPF_K:	/* A |= K */
 				emit_alu_K(OR, K);
 				break;
-			case BPF_S_ANC_ALU_XOR_X:	/* A ^= X; */
-			case BPF_S_ALU_XOR_X:
+			case BPF_ANC | SKF_AD_ALU_XOR_X:	/* A ^= X; */
+			case BPF_ALU | BPF_XOR | BPF_X:
 				emit_alu_X(XOR);
 				break;
-			case BPF_S_ALU_XOR_K:	/* A ^= K */
+			case BPF_ALU | BPF_XOR | BPF_K:	/* A ^= K */
 				emit_alu_K(XOR, K);
 				break;
-			case BPF_S_ALU_LSH_X:	/* A <<= X */
+			case BPF_ALU | BPF_LSH | BPF_X:	/* A <<= X */
 				emit_alu_X(SLL);
 				break;
-			case BPF_S_ALU_LSH_K:	/* A <<= K */
+			case BPF_ALU | BPF_LSH | BPF_K:	/* A <<= K */
 				emit_alu_K(SLL, K);
 				break;
-			case BPF_S_ALU_RSH_X:	/* A >>= X */
+			case BPF_ALU | BPF_RSH | BPF_X:	/* A >>= X */
 				emit_alu_X(SRL);
 				break;
-			case BPF_S_ALU_RSH_K:	/* A >>= K */
+			case BPF_ALU | BPF_RSH | BPF_K:	/* A >>= K */
 				emit_alu_K(SRL, K);
 				break;
-			case BPF_S_ALU_MUL_X:	/* A *= X; */
+			case BPF_ALU | BPF_MUL | BPF_X:	/* A *= X; */
 				emit_alu_X(MUL);
 				break;
-			case BPF_S_ALU_MUL_K:	/* A *= K */
+			case BPF_ALU | BPF_MUL | BPF_K:	/* A *= K */
 				emit_alu_K(MUL, K);
 				break;
-			case BPF_S_ALU_DIV_K:	/* A /= K with K != 0*/
+			case BPF_ALU | BPF_DIV | BPF_K:	/* A /= K with K != 0*/
 				if (K == 1)
 					break;
 				emit_write_y(G0);
@@ -512,7 +504,7 @@ void bpf_jit_compile(struct sk_filter *fp)
 #endif
 				emit_alu_K(DIV, K);
 				break;
-			case BPF_S_ALU_DIV_X:	/* A /= X; */
+			case BPF_ALU | BPF_DIV | BPF_X:	/* A /= X; */
 				emit_cmpi(r_X, 0);
 				if (pc_ret0 > 0) {
 					t_offset = addrs[pc_ret0 - 1];
@@ -544,10 +536,10 @@ void bpf_jit_compile(struct sk_filter *fp)
 #endif
 				emit_alu_X(DIV);
 				break;
-			case BPF_S_ALU_NEG:
+			case BPF_ALU | BPF_NEG:
 				emit_neg();
 				break;
-			case BPF_S_RET_K:
+			case BPF_RET | BPF_K:
 				if (!K) {
 					if (pc_ret0 == -1)
 						pc_ret0 = i;
@@ -556,7 +548,7 @@ void bpf_jit_compile(struct sk_filter *fp)
 					emit_loadimm(K, r_A);
 				}
 				/* Fallthrough */
-			case BPF_S_RET_A:
+			case BPF_RET | BPF_A:
 				if (seen_or_pass0) {
 					if (i != flen - 1) {
 						emit_jump(cleanup_addr);
@@ -573,18 +565,18 @@ void bpf_jit_compile(struct sk_filter *fp)
 				emit_jmpl(r_saved_O7, 8, G0);
 				emit_reg_move(r_A, O0); /* delay slot */
 				break;
-			case BPF_S_MISC_TAX:
+			case BPF_MISC | BPF_TAX:
 				seen |= SEEN_XREG;
 				emit_reg_move(r_A, r_X);
 				break;
-			case BPF_S_MISC_TXA:
+			case BPF_MISC | BPF_TXA:
 				seen |= SEEN_XREG;
 				emit_reg_move(r_X, r_A);
 				break;
-			case BPF_S_ANC_CPU:
+			case BPF_ANC | SKF_AD_CPU:
 				emit_load_cpu(r_A);
 				break;
-			case BPF_S_ANC_PROTOCOL:
+			case BPF_ANC | SKF_AD_PROTOCOL:
 				emit_skb_load16(protocol, r_A);
 				break;
 #if 0
@@ -592,38 +584,38 @@ void bpf_jit_compile(struct sk_filter *fp)
 			 * a bit field even though we very much
 			 * know what we are doing here.
 			 */
-			case BPF_S_ANC_PKTTYPE:
+			case BPF_ANC | SKF_AD_PKTTYPE:
 				__emit_skb_load8(pkt_type, r_A);
 				emit_alu_K(SRL, 5);
 				break;
 #endif
-			case BPF_S_ANC_IFINDEX:
+			case BPF_ANC | SKF_AD_IFINDEX:
 				emit_skb_loadptr(dev, r_A);
 				emit_cmpi(r_A, 0);
 				emit_branch(BE_PTR, cleanup_addr + 4);
 				emit_nop();
 				emit_load32(r_A, struct net_device, ifindex, r_A);
 				break;
-			case BPF_S_ANC_MARK:
+			case BPF_ANC | SKF_AD_MARK:
 				emit_skb_load32(mark, r_A);
 				break;
-			case BPF_S_ANC_QUEUE:
+			case BPF_ANC | SKF_AD_QUEUE:
 				emit_skb_load16(queue_mapping, r_A);
 				break;
-			case BPF_S_ANC_HATYPE:
+			case BPF_ANC | SKF_AD_HATYPE:
 				emit_skb_loadptr(dev, r_A);
 				emit_cmpi(r_A, 0);
 				emit_branch(BE_PTR, cleanup_addr + 4);
 				emit_nop();
 				emit_load16(r_A, struct net_device, type, r_A);
 				break;
-			case BPF_S_ANC_RXHASH:
+			case BPF_ANC | SKF_AD_RXHASH:
 				emit_skb_load32(hash, r_A);
 				break;
-			case BPF_S_ANC_VLAN_TAG:
-			case BPF_S_ANC_VLAN_TAG_PRESENT:
+			case BPF_ANC | SKF_AD_VLAN_TAG:
+			case BPF_ANC | SKF_AD_VLAN_TAG_PRESENT:
 				emit_skb_load16(vlan_tci, r_A);
-				if (filter[i].code == BPF_S_ANC_VLAN_TAG) {
+				if (code == (BPF_ANC | SKF_AD_VLAN_TAG)) {
 					emit_andi(r_A, VLAN_VID_MASK, r_A);
 				} else {
 					emit_loadimm(VLAN_TAG_PRESENT, r_TMP);
@@ -631,44 +623,44 @@ void bpf_jit_compile(struct sk_filter *fp)
 				}
 				break;

-			case BPF_S_LD_IMM:
+			case BPF_LD | BPF_IMM:
 				emit_loadimm(K, r_A);
 				break;
-			case BPF_S_LDX_IMM:
+			case BPF_LDX | BPF_IMM:
 				emit_loadimm(K, r_X);
 				break;
-			case BPF_S_LD_MEM:
+			case BPF_LD | BPF_MEM:
 				emit_ldmem(K * 4, r_A);
 				break;
-			case BPF_S_LDX_MEM:
+			case BPF_LDX | BPF_MEM:
 				emit_ldmem(K * 4, r_X);
 				break;
-			case BPF_S_ST:
+			case BPF_ST:
 				emit_stmem(K * 4, r_A);
 				break;
-			case BPF_S_STX:
+			case BPF_STX:
 				emit_stmem(K * 4, r_X);
 				break;

 #define CHOOSE_LOAD_FUNC(K, func) \
 	((int)K < 0 ? ((int)K >= SKF_LL_OFF ? func##_negative_offset : func) : func##_positive_offset)

-			case BPF_S_LD_W_ABS:
+			case BPF_LD | BPF_W | BPF_ABS:
 				func = CHOOSE_LOAD_FUNC(K, bpf_jit_load_word);
 common_load:			seen |= SEEN_DATAREF;
 				emit_loadimm(K, r_OFF);
 				emit_call(func);
 				break;
-			case BPF_S_LD_H_ABS:
+			case BPF_LD | BPF_H | BPF_ABS:
 				func = CHOOSE_LOAD_FUNC(K, bpf_jit_load_half);
 				goto common_load;
-			case BPF_S_LD_B_ABS:
+			case BPF_LD | BPF_B | BPF_ABS:
 				func = CHOOSE_LOAD_FUNC(K, bpf_jit_load_byte);
 				goto common_load;
-			case BPF_S_LDX_B_MSH:
+			case BPF_LDX | BPF_B | BPF_MSH:
 				func = CHOOSE_LOAD_FUNC(K, bpf_jit_load_byte_msh);
 				goto common_load;
-			case BPF_S_LD_W_IND:
+			case BPF_LD | BPF_W | BPF_IND:
 				func = bpf_jit_load_word;
 common_load_ind:		seen |= SEEN_DATAREF | SEEN_XREG;
 				if (K) {
@@ -683,13 +675,13 @@ common_load_ind: seen |= SEEN_DATAREF | SEEN_XREG;
 				}
 				emit_call(func);
 				break;
-			case BPF_S_LD_H_IND:
+			case BPF_LD | BPF_H | BPF_IND:
 				func = bpf_jit_load_half;
 				goto common_load_ind;
-			case BPF_S_LD_B_IND:
+			case BPF_LD | BPF_B | BPF_IND:
 				func = bpf_jit_load_byte;
 				goto common_load_ind;
-			case BPF_S_JMP_JA:
+			case BPF_JMP | BPF_JA:
 				emit_jump(addrs[i + K]);
 				emit_nop();
 				break;
@@ -700,14 +692,14 @@ common_load_ind: seen |= SEEN_DATAREF | SEEN_XREG;
 		f_op = FOP;		\
 		goto cond_branch

-			COND_SEL(BPF_S_JMP_JGT_K, BGU, BLEU);
-			COND_SEL(BPF_S_JMP_JGE_K, BGEU, BLU);
-			COND_SEL(BPF_S_JMP_JEQ_K, BE, BNE);
-			COND_SEL(BPF_S_JMP_JSET_K, BNE, BE);
-			COND_SEL(BPF_S_JMP_JGT_X, BGU, BLEU);
-			COND_SEL(BPF_S_JMP_JGE_X, BGEU, BLU);
-			COND_SEL(BPF_S_JMP_JEQ_X, BE, BNE);
-			COND_SEL(BPF_S_JMP_JSET_X, BNE, BE);
+			COND_SEL(BPF_JMP | BPF_JGT | BPF_K, BGU, BLEU);
+			COND_SEL(BPF_JMP | BPF_JGE | BPF_K, BGEU, BLU);
+			COND_SEL(BPF_JMP | BPF_JEQ | BPF_K, BE, BNE);
+			COND_SEL(BPF_JMP | BPF_JSET | BPF_K, BNE, BE);
+			COND_SEL(BPF_JMP | BPF_JGT | BPF_X, BGU, BLEU);
+			COND_SEL(BPF_JMP | BPF_JGE | BPF_X, BGEU, BLU);
+			COND_SEL(BPF_JMP | BPF_JEQ | BPF_X, BE, BNE);
+			COND_SEL(BPF_JMP | BPF_JSET | BPF_X, BNE, BE);

 cond_branch:			f_offset = addrs[i + filter[i].jf];
 				t_offset = addrs[i + filter[i].jt];
@@ -719,20 +711,20 @@ cond_branch: f_offset = addrs[i + filter[i].jf];
 					break;
 				}

-				switch (filter[i].code) {
-				case BPF_S_JMP_JGT_X:
-				case BPF_S_JMP_JGE_X:
-				case BPF_S_JMP_JEQ_X:
+				switch (code) {
+				case BPF_JMP | BPF_JGT | BPF_X:
+				case BPF_JMP | BPF_JGE | BPF_X:
+				case BPF_JMP | BPF_JEQ | BPF_X:
 					seen |= SEEN_XREG;
 					emit_cmp(r_A, r_X);
 					break;
-				case BPF_S_JMP_JSET_X:
+				case BPF_JMP | BPF_JSET | BPF_X:
 					seen |= SEEN_XREG;
 					emit_btst(r_A, r_X);
 					break;
-				case BPF_S_JMP_JEQ_K:
-				case BPF_S_JMP_JGT_K:
-				case BPF_S_JMP_JGE_K:
+				case BPF_JMP | BPF_JEQ | BPF_K:
+				case BPF_JMP | BPF_JGT | BPF_K:
+				case BPF_JMP | BPF_JGE | BPF_K:
 					if (is_simm13(K)) {
 						emit_cmpi(r_A, K);
 					} else {
@@ -740,7 +732,7 @@ cond_branch: f_offset = addrs[i + filter[i].jf];
 						emit_cmp(r_A, r_TMP);
 					}
 					break;
-				case BPF_S_JMP_JSET_K:
+				case BPF_JMP | BPF_JSET | BPF_K:
 					if (is_simm13(K)) {
 						emit_btsti(r_A, K);
 					} else {