Diffstat (limited to 'arch/mips/net/bpf_jit.c')
-rw-r--r--	arch/mips/net/bpf_jit.c	266
1 file changed, 149 insertions, 117 deletions
diff --git a/arch/mips/net/bpf_jit.c b/arch/mips/net/bpf_jit.c
index a67b9753330b..b87390a56a2f 100644
--- a/arch/mips/net/bpf_jit.c
+++ b/arch/mips/net/bpf_jit.c
@@ -119,8 +119,6 @@
 /* Arguments used by JIT */
 #define ARGS_USED_BY_JIT 2 /* only applicable to 64-bit */
 
-#define FLAG_NEED_X_RESET (1 << 0)
-
 #define SBIT(x) (1 << (x)) /* Signed version of BIT() */
 
 /**
@@ -153,6 +151,8 @@ static inline int optimize_div(u32 *k)
 	return 0;
 }
 
+static inline void emit_jit_reg_move(ptr dst, ptr src, struct jit_ctx *ctx);
+
 /* Simply emit the instruction if the JIT memory space has been allocated */
 #define emit_instr(ctx, func, ...)			\
 do {							\
@@ -166,9 +166,7 @@ do { \
 /* Determine if immediate is within the 16-bit signed range */
 static inline bool is_range16(s32 imm)
 {
-	if (imm >= SBIT(15) || imm < -SBIT(15))
-		return true;
-	return false;
+	return !(imm >= SBIT(15) || imm < -SBIT(15));
 }
 
 static inline void emit_addu(unsigned int dst, unsigned int src1,
@@ -187,7 +185,7 @@ static inline void emit_load_imm(unsigned int dst, u32 imm, struct jit_ctx *ctx)
 {
 	if (ctx->target != NULL) {
 		/* addiu can only handle s16 */
-		if (is_range16(imm)) {
+		if (!is_range16(imm)) {
 			u32 *p = &ctx->target[ctx->idx];
 			uasm_i_lui(&p, r_tmp_imm, (s32)imm >> 16);
 			p = &ctx->target[ctx->idx + 1];
@@ -199,7 +197,7 @@ static inline void emit_load_imm(unsigned int dst, u32 imm, struct jit_ctx *ctx)
 	}
 	ctx->idx++;
 
-	if (is_range16(imm))
+	if (!is_range16(imm))
 		ctx->idx++;
 }
 
@@ -240,7 +238,7 @@ static inline void emit_daddiu(unsigned int dst, unsigned int src,
 static inline void emit_addiu(unsigned int dst, unsigned int src,
 			      u32 imm, struct jit_ctx *ctx)
 {
-	if (is_range16(imm)) {
+	if (!is_range16(imm)) {
 		emit_load_imm(r_tmp, imm, ctx);
 		emit_addu(dst, r_tmp, src, ctx);
 	} else {
@@ -313,8 +311,11 @@ static inline void emit_sll(unsigned int dst, unsigned int src,
 			    unsigned int sa, struct jit_ctx *ctx)
 {
 	/* sa is 5-bits long */
-	BUG_ON(sa >= BIT(5));
-	emit_instr(ctx, sll, dst, src, sa);
+	if (sa >= BIT(5))
+		/* Shifting >= 32 results in zero */
+		emit_jit_reg_move(dst, r_zero, ctx);
+	else
+		emit_instr(ctx, sll, dst, src, sa);
 }
 
 static inline void emit_srlv(unsigned int dst, unsigned int src,
@@ -327,8 +328,17 @@ static inline void emit_srl(unsigned int dst, unsigned int src,
 			    unsigned int sa, struct jit_ctx *ctx)
 {
 	/* sa is 5-bits long */
-	BUG_ON(sa >= BIT(5));
-	emit_instr(ctx, srl, dst, src, sa);
+	if (sa >= BIT(5))
+		/* Shifting >= 32 results in zero */
+		emit_jit_reg_move(dst, r_zero, ctx);
+	else
+		emit_instr(ctx, srl, dst, src, sa);
+}
+
+static inline void emit_slt(unsigned int dst, unsigned int src1,
+			    unsigned int src2, struct jit_ctx *ctx)
+{
+	emit_instr(ctx, slt, dst, src1, src2);
 }
 
 static inline void emit_sltu(unsigned int dst, unsigned int src1,
@@ -341,7 +351,7 @@ static inline void emit_sltiu(unsigned dst, unsigned int src,
 			      unsigned int imm, struct jit_ctx *ctx)
 {
 	/* 16 bit immediate */
-	if (is_range16((s32)imm)) {
+	if (!is_range16((s32)imm)) {
 		emit_load_imm(r_tmp, imm, ctx);
 		emit_sltu(dst, src, r_tmp, ctx);
 	} else {
@@ -408,7 +418,7 @@ static inline void emit_div(unsigned int dst, unsigned int src,
 		u32 *p = &ctx->target[ctx->idx];
 		uasm_i_divu(&p, dst, src);
 		p = &ctx->target[ctx->idx + 1];
-		uasm_i_mfhi(&p, dst);
+		uasm_i_mflo(&p, dst);
 	}
 	ctx->idx += 2; /* 2 insts */
 }
@@ -443,6 +453,17 @@ static inline void emit_wsbh(unsigned int dst, unsigned int src,
 	emit_instr(ctx, wsbh, dst, src);
 }
 
+/* load pointer to register */
+static inline void emit_load_ptr(unsigned int dst, unsigned int src,
+				 int imm, struct jit_ctx *ctx)
+{
+	/* src contains the base addr of the 32/64-pointer */
+	if (config_enabled(CONFIG_64BIT))
+		emit_instr(ctx, ld, dst, imm, src);
+	else
+		emit_instr(ctx, lw, dst, imm, src);
+}
+
 /* load a function pointer to register */
 static inline void emit_load_func(unsigned int reg, ptr imm,
 				  struct jit_ctx *ctx)
@@ -545,29 +566,13 @@ static inline u16 align_sp(unsigned int num)
 	return num;
 }
 
-static inline void update_on_xread(struct jit_ctx *ctx)
-{
-	if (!(ctx->flags & SEEN_X))
-		ctx->flags |= FLAG_NEED_X_RESET;
-
-	ctx->flags |= SEEN_X;
-}
-
 static bool is_load_to_a(u16 inst)
 {
 	switch (inst) {
-	case BPF_S_LD_W_LEN:
-	case BPF_S_LD_W_ABS:
-	case BPF_S_LD_H_ABS:
-	case BPF_S_LD_B_ABS:
-	case BPF_S_ANC_CPU:
-	case BPF_S_ANC_IFINDEX:
-	case BPF_S_ANC_MARK:
-	case BPF_S_ANC_PROTOCOL:
-	case BPF_S_ANC_RXHASH:
-	case BPF_S_ANC_VLAN_TAG:
-	case BPF_S_ANC_VLAN_TAG_PRESENT:
-	case BPF_S_ANC_QUEUE:
+	case BPF_LD | BPF_W | BPF_LEN:
+	case BPF_LD | BPF_W | BPF_ABS:
+	case BPF_LD | BPF_H | BPF_ABS:
+	case BPF_LD | BPF_B | BPF_ABS:
 		return true;
 	default:
 		return false;
@@ -618,7 +623,10 @@ static void save_bpf_jit_regs(struct jit_ctx *ctx, unsigned offset)
 	if (ctx->flags & SEEN_MEM) {
 		if (real_off % (RSIZE * 2))
 			real_off += RSIZE;
-		emit_addiu(r_M, r_sp, real_off, ctx);
+		if (config_enabled(CONFIG_64BIT))
+			emit_daddiu(r_M, r_sp, real_off, ctx);
+		else
+			emit_addiu(r_M, r_sp, real_off, ctx);
 	}
 }
 
@@ -705,11 +713,11 @@ static void build_prologue(struct jit_ctx *ctx)
 	if (ctx->flags & SEEN_SKB)
 		emit_reg_move(r_skb, MIPS_R_A0, ctx);
 
-	if (ctx->flags & FLAG_NEED_X_RESET)
+	if (ctx->flags & SEEN_X)
 		emit_jit_reg_move(r_X, r_zero, ctx);
 
 	/* Do not leak kernel data to userspace */
-	if ((first_inst != BPF_S_RET_K) && !(is_load_to_a(first_inst)))
+	if ((first_inst != (BPF_RET | BPF_K)) && !(is_load_to_a(first_inst)))
 		emit_jit_reg_move(r_A, r_zero, ctx);
 }
 
@@ -757,13 +765,17 @@ static u64 jit_get_skb_w(struct sk_buff *skb, unsigned offset)
 	return (u64)err << 32 | ntohl(ret);
 }
 
-#define PKT_TYPE_MAX 7
+#ifdef __BIG_ENDIAN_BITFIELD
+#define PKT_TYPE_MAX (7 << 5)
+#else
+#define PKT_TYPE_MAX 7
+#endif
 static int pkt_type_offset(void)
 {
 	struct sk_buff skb_probe = {
 		.pkt_type = ~0,
 	};
-	char *ct = (char *)&skb_probe;
+	u8 *ct = (u8 *)&skb_probe;
 	unsigned int off;
 
 	for (off = 0; off < sizeof(struct sk_buff); off++) {
@@ -783,46 +795,62 @@ static int build_body(struct jit_ctx *ctx)
 	u32 k, b_off __maybe_unused;
 
 	for (i = 0; i < prog->len; i++) {
+		u16 code;
+
 		inst = &(prog->insns[i]);
 		pr_debug("%s: code->0x%02x, jt->0x%x, jf->0x%x, k->0x%x\n",
 			 __func__, inst->code, inst->jt, inst->jf, inst->k);
 		k = inst->k;
+		code = bpf_anc_helper(inst);
 
 		if (ctx->target == NULL)
 			ctx->offsets[i] = ctx->idx * 4;
 
-		switch (inst->code) {
-		case BPF_S_LD_IMM:
+		switch (code) {
+		case BPF_LD | BPF_IMM:
 			/* A <- k ==> li r_A, k */
 			ctx->flags |= SEEN_A;
 			emit_load_imm(r_A, k, ctx);
 			break;
-		case BPF_S_LD_W_LEN:
+		case BPF_LD | BPF_W | BPF_LEN:
 			BUILD_BUG_ON(FIELD_SIZEOF(struct sk_buff, len) != 4);
 			/* A <- len ==> lw r_A, offset(skb) */
 			ctx->flags |= SEEN_SKB | SEEN_A;
 			off = offsetof(struct sk_buff, len);
 			emit_load(r_A, r_skb, off, ctx);
 			break;
-		case BPF_S_LD_MEM:
+		case BPF_LD | BPF_MEM:
 			/* A <- M[k] ==> lw r_A, offset(M) */
 			ctx->flags |= SEEN_MEM | SEEN_A;
 			emit_load(r_A, r_M, SCRATCH_OFF(k), ctx);
 			break;
-		case BPF_S_LD_W_ABS:
+		case BPF_LD | BPF_W | BPF_ABS:
 			/* A <- P[k:4] */
 			load_order = 2;
 			goto load;
-		case BPF_S_LD_H_ABS:
+		case BPF_LD | BPF_H | BPF_ABS:
 			/* A <- P[k:2] */
 			load_order = 1;
 			goto load;
-		case BPF_S_LD_B_ABS:
+		case BPF_LD | BPF_B | BPF_ABS:
 			/* A <- P[k:1] */
 			load_order = 0;
 load:
+			/* the interpreter will deal with the negative K */
+			if ((int)k < 0)
+				return -ENOTSUPP;
+
 			emit_load_imm(r_off, k, ctx);
 load_common:
+			/*
+			 * We may got here from the indirect loads so
+			 * return if offset is negative.
+			 */
+			emit_slt(r_s0, r_off, r_zero, ctx);
+			emit_bcond(MIPS_COND_NE, r_s0, r_zero,
+				   b_imm(prog->len, ctx), ctx);
+			emit_reg_move(r_ret, r_zero, ctx);
+
 			ctx->flags |= SEEN_CALL | SEEN_OFF | SEEN_S0 |
 				SEEN_SKB | SEEN_A;
 
@@ -852,39 +880,42 @@ load_common:
 			emit_b(b_imm(prog->len, ctx), ctx);
 			emit_reg_move(r_ret, r_zero, ctx);
 			break;
-		case BPF_S_LD_W_IND:
+		case BPF_LD | BPF_W | BPF_IND:
 			/* A <- P[X + k:4] */
 			load_order = 2;
 			goto load_ind;
-		case BPF_S_LD_H_IND:
+		case BPF_LD | BPF_H | BPF_IND:
 			/* A <- P[X + k:2] */
 			load_order = 1;
 			goto load_ind;
-		case BPF_S_LD_B_IND:
+		case BPF_LD | BPF_B | BPF_IND:
 			/* A <- P[X + k:1] */
 			load_order = 0;
 load_ind:
-			update_on_xread(ctx);
 			ctx->flags |= SEEN_OFF | SEEN_X;
 			emit_addiu(r_off, r_X, k, ctx);
 			goto load_common;
-		case BPF_S_LDX_IMM:
+		case BPF_LDX | BPF_IMM:
 			/* X <- k */
 			ctx->flags |= SEEN_X;
 			emit_load_imm(r_X, k, ctx);
 			break;
-		case BPF_S_LDX_MEM:
+		case BPF_LDX | BPF_MEM:
 			/* X <- M[k] */
 			ctx->flags |= SEEN_X | SEEN_MEM;
 			emit_load(r_X, r_M, SCRATCH_OFF(k), ctx);
 			break;
-		case BPF_S_LDX_W_LEN:
+		case BPF_LDX | BPF_W | BPF_LEN:
 			/* X <- len */
 			ctx->flags |= SEEN_X | SEEN_SKB;
 			off = offsetof(struct sk_buff, len);
 			emit_load(r_X, r_skb, off, ctx);
 			break;
-		case BPF_S_LDX_B_MSH:
+		case BPF_LDX | BPF_B | BPF_MSH:
+			/* the interpreter will deal with the negative K */
+			if ((int)k < 0)
+				return -ENOTSUPP;
+
 			/* X <- 4 * (P[k:1] & 0xf) */
 			ctx->flags |= SEEN_X | SEEN_CALL | SEEN_S0 | SEEN_SKB;
 			/* Load offset to a1 */
@@ -917,50 +948,49 @@ load_ind:
 			emit_b(b_imm(prog->len, ctx), ctx);
 			emit_load_imm(r_ret, 0, ctx); /* delay slot */
 			break;
-		case BPF_S_ST:
+		case BPF_ST:
 			/* M[k] <- A */
 			ctx->flags |= SEEN_MEM | SEEN_A;
 			emit_store(r_A, r_M, SCRATCH_OFF(k), ctx);
 			break;
-		case BPF_S_STX:
+		case BPF_STX:
 			/* M[k] <- X */
 			ctx->flags |= SEEN_MEM | SEEN_X;
 			emit_store(r_X, r_M, SCRATCH_OFF(k), ctx);
 			break;
-		case BPF_S_ALU_ADD_K:
+		case BPF_ALU | BPF_ADD | BPF_K:
 			/* A += K */
 			ctx->flags |= SEEN_A;
 			emit_addiu(r_A, r_A, k, ctx);
 			break;
-		case BPF_S_ALU_ADD_X:
+		case BPF_ALU | BPF_ADD | BPF_X:
 			/* A += X */
 			ctx->flags |= SEEN_A | SEEN_X;
 			emit_addu(r_A, r_A, r_X, ctx);
 			break;
-		case BPF_S_ALU_SUB_K:
+		case BPF_ALU | BPF_SUB | BPF_K:
 			/* A -= K */
 			ctx->flags |= SEEN_A;
 			emit_addiu(r_A, r_A, -k, ctx);
 			break;
-		case BPF_S_ALU_SUB_X:
+		case BPF_ALU | BPF_SUB | BPF_X:
 			/* A -= X */
 			ctx->flags |= SEEN_A | SEEN_X;
 			emit_subu(r_A, r_A, r_X, ctx);
 			break;
-		case BPF_S_ALU_MUL_K:
+		case BPF_ALU | BPF_MUL | BPF_K:
 			/* A *= K */
 			/* Load K to scratch register before MUL */
 			ctx->flags |= SEEN_A | SEEN_S0;
 			emit_load_imm(r_s0, k, ctx);
 			emit_mul(r_A, r_A, r_s0, ctx);
 			break;
-		case BPF_S_ALU_MUL_X:
+		case BPF_ALU | BPF_MUL | BPF_X:
 			/* A *= X */
-			update_on_xread(ctx);
 			ctx->flags |= SEEN_A | SEEN_X;
 			emit_mul(r_A, r_A, r_X, ctx);
 			break;
-		case BPF_S_ALU_DIV_K:
+		case BPF_ALU | BPF_DIV | BPF_K:
 			/* A /= k */
 			if (k == 1)
 				break;
@@ -973,7 +1003,7 @@ load_ind:
 			emit_load_imm(r_s0, k, ctx);
 			emit_div(r_A, r_s0, ctx);
 			break;
-		case BPF_S_ALU_MOD_K:
+		case BPF_ALU | BPF_MOD | BPF_K:
 			/* A %= k */
 			if (k == 1 || optimize_div(&k)) {
 				ctx->flags |= SEEN_A;
@@ -984,9 +1014,8 @@ load_ind:
 				emit_mod(r_A, r_s0, ctx);
 			}
 			break;
-		case BPF_S_ALU_DIV_X:
+		case BPF_ALU | BPF_DIV | BPF_X:
 			/* A /= X */
-			update_on_xread(ctx);
 			ctx->flags |= SEEN_X | SEEN_A;
 			/* Check if r_X is zero */
 			emit_bcond(MIPS_COND_EQ, r_X, r_zero,
@@ -994,9 +1023,8 @@ load_ind:
 			emit_load_imm(r_val, 0, ctx); /* delay slot */
 			emit_div(r_A, r_X, ctx);
 			break;
-		case BPF_S_ALU_MOD_X:
+		case BPF_ALU | BPF_MOD | BPF_X:
 			/* A %= X */
-			update_on_xread(ctx);
 			ctx->flags |= SEEN_X | SEEN_A;
 			/* Check if r_X is zero */
 			emit_bcond(MIPS_COND_EQ, r_X, r_zero,
@@ -1004,94 +1032,89 @@ load_ind:
 			emit_load_imm(r_val, 0, ctx); /* delay slot */
 			emit_mod(r_A, r_X, ctx);
 			break;
-		case BPF_S_ALU_OR_K:
+		case BPF_ALU | BPF_OR | BPF_K:
 			/* A |= K */
 			ctx->flags |= SEEN_A;
 			emit_ori(r_A, r_A, k, ctx);
 			break;
-		case BPF_S_ALU_OR_X:
+		case BPF_ALU | BPF_OR | BPF_X:
 			/* A |= X */
-			update_on_xread(ctx);
 			ctx->flags |= SEEN_A;
 			emit_ori(r_A, r_A, r_X, ctx);
 			break;
-		case BPF_S_ALU_XOR_K:
+		case BPF_ALU | BPF_XOR | BPF_K:
 			/* A ^= k */
 			ctx->flags |= SEEN_A;
 			emit_xori(r_A, r_A, k, ctx);
 			break;
-		case BPF_S_ANC_ALU_XOR_X:
-		case BPF_S_ALU_XOR_X:
+		case BPF_ANC | SKF_AD_ALU_XOR_X:
+		case BPF_ALU | BPF_XOR | BPF_X:
 			/* A ^= X */
-			update_on_xread(ctx);
 			ctx->flags |= SEEN_A;
 			emit_xor(r_A, r_A, r_X, ctx);
 			break;
-		case BPF_S_ALU_AND_K:
+		case BPF_ALU | BPF_AND | BPF_K:
 			/* A &= K */
 			ctx->flags |= SEEN_A;
 			emit_andi(r_A, r_A, k, ctx);
 			break;
-		case BPF_S_ALU_AND_X:
+		case BPF_ALU | BPF_AND | BPF_X:
 			/* A &= X */
-			update_on_xread(ctx);
 			ctx->flags |= SEEN_A | SEEN_X;
 			emit_and(r_A, r_A, r_X, ctx);
 			break;
-		case BPF_S_ALU_LSH_K:
+		case BPF_ALU | BPF_LSH | BPF_K:
 			/* A <<= K */
 			ctx->flags |= SEEN_A;
 			emit_sll(r_A, r_A, k, ctx);
 			break;
-		case BPF_S_ALU_LSH_X:
+		case BPF_ALU | BPF_LSH | BPF_X:
 			/* A <<= X */
 			ctx->flags |= SEEN_A | SEEN_X;
-			update_on_xread(ctx);
 			emit_sllv(r_A, r_A, r_X, ctx);
 			break;
-		case BPF_S_ALU_RSH_K:
+		case BPF_ALU | BPF_RSH | BPF_K:
 			/* A >>= K */
 			ctx->flags |= SEEN_A;
 			emit_srl(r_A, r_A, k, ctx);
 			break;
-		case BPF_S_ALU_RSH_X:
+		case BPF_ALU | BPF_RSH | BPF_X:
 			ctx->flags |= SEEN_A | SEEN_X;
-			update_on_xread(ctx);
 			emit_srlv(r_A, r_A, r_X, ctx);
 			break;
-		case BPF_S_ALU_NEG:
+		case BPF_ALU | BPF_NEG:
 			/* A = -A */
 			ctx->flags |= SEEN_A;
 			emit_neg(r_A, ctx);
 			break;
-		case BPF_S_JMP_JA:
+		case BPF_JMP | BPF_JA:
 			/* pc += K */
 			emit_b(b_imm(i + k + 1, ctx), ctx);
 			emit_nop(ctx);
 			break;
-		case BPF_S_JMP_JEQ_K:
+		case BPF_JMP | BPF_JEQ | BPF_K:
 			/* pc += ( A == K ) ? pc->jt : pc->jf */
 			condt = MIPS_COND_EQ | MIPS_COND_K;
 			goto jmp_cmp;
-		case BPF_S_JMP_JEQ_X:
+		case BPF_JMP | BPF_JEQ | BPF_X:
 			ctx->flags |= SEEN_X;
 			/* pc += ( A == X ) ? pc->jt : pc->jf */
 			condt = MIPS_COND_EQ | MIPS_COND_X;
 			goto jmp_cmp;
-		case BPF_S_JMP_JGE_K:
+		case BPF_JMP | BPF_JGE | BPF_K:
 			/* pc += ( A >= K ) ? pc->jt : pc->jf */
 			condt = MIPS_COND_GE | MIPS_COND_K;
 			goto jmp_cmp;
-		case BPF_S_JMP_JGE_X:
+		case BPF_JMP | BPF_JGE | BPF_X:
 			ctx->flags |= SEEN_X;
 			/* pc += ( A >= X ) ? pc->jt : pc->jf */
 			condt = MIPS_COND_GE | MIPS_COND_X;
 			goto jmp_cmp;
-		case BPF_S_JMP_JGT_K:
+		case BPF_JMP | BPF_JGT | BPF_K:
 			/* pc += ( A > K ) ? pc->jt : pc->jf */
 			condt = MIPS_COND_GT | MIPS_COND_K;
 			goto jmp_cmp;
-		case BPF_S_JMP_JGT_X:
+		case BPF_JMP | BPF_JGT | BPF_X:
 			ctx->flags |= SEEN_X;
 			/* pc += ( A > X ) ? pc->jt : pc->jf */
 			condt = MIPS_COND_GT | MIPS_COND_X;
@@ -1109,7 +1132,7 @@ jmp_cmp:
 				}
 				/* A < (K|X) ? r_scrach = 1 */
 				b_off = b_imm(i + inst->jf + 1, ctx);
-				emit_bcond(MIPS_COND_GT, r_s0, r_zero, b_off,
+				emit_bcond(MIPS_COND_NE, r_s0, r_zero, b_off,
 					   ctx);
 				emit_nop(ctx);
 				/* A > (K|X) ? scratch = 0 */
@@ -1167,7 +1190,7 @@ jmp_cmp:
 				}
 			}
 			break;
-		case BPF_S_JMP_JSET_K:
+		case BPF_JMP | BPF_JSET | BPF_K:
 			ctx->flags |= SEEN_S0 | SEEN_S1 | SEEN_A;
 			/* pc += (A & K) ? pc -> jt : pc -> jf */
 			emit_load_imm(r_s1, k, ctx);
@@ -1181,7 +1204,7 @@ jmp_cmp:
 			emit_b(b_off, ctx);
 			emit_nop(ctx);
 			break;
-		case BPF_S_JMP_JSET_X:
+		case BPF_JMP | BPF_JSET | BPF_X:
 			ctx->flags |= SEEN_S0 | SEEN_X | SEEN_A;
 			/* pc += (A & X) ? pc -> jt : pc -> jf */
 			emit_and(r_s0, r_A, r_X, ctx);
@@ -1194,7 +1217,7 @@ jmp_cmp:
 			emit_b(b_off, ctx);
 			emit_nop(ctx);
 			break;
-		case BPF_S_RET_A:
+		case BPF_RET | BPF_A:
 			ctx->flags |= SEEN_A;
 			if (i != prog->len - 1)
 				/*
@@ -1204,7 +1227,7 @@ jmp_cmp:
 				emit_b(b_imm(prog->len, ctx), ctx);
 			emit_reg_move(r_ret, r_A, ctx); /* delay slot */
 			break;
-		case BPF_S_RET_K:
+		case BPF_RET | BPF_K:
 			/*
 			 * It can emit two instructions so it does not fit on
 			 * the delay slot.
@@ -1219,19 +1242,18 @@ jmp_cmp:
 				emit_nop(ctx);
 			}
 			break;
-		case BPF_S_MISC_TAX:
+		case BPF_MISC | BPF_TAX:
 			/* X = A */
 			ctx->flags |= SEEN_X | SEEN_A;
 			emit_jit_reg_move(r_X, r_A, ctx);
 			break;
-		case BPF_S_MISC_TXA:
+		case BPF_MISC | BPF_TXA:
 			/* A = X */
 			ctx->flags |= SEEN_A | SEEN_X;
-			update_on_xread(ctx);
 			emit_jit_reg_move(r_A, r_X, ctx);
 			break;
 		/* AUX */
-		case BPF_S_ANC_PROTOCOL:
+		case BPF_ANC | SKF_AD_PROTOCOL:
 			/* A = ntohs(skb->protocol */
 			ctx->flags |= SEEN_SKB | SEEN_OFF | SEEN_A;
 			BUILD_BUG_ON(FIELD_SIZEOF(struct sk_buff,
@@ -1256,7 +1278,7 @@ jmp_cmp:
 			}
 #endif
 			break;
-		case BPF_S_ANC_CPU:
+		case BPF_ANC | SKF_AD_CPU:
 			ctx->flags |= SEEN_A | SEEN_OFF;
 			/* A = current_thread_info()->cpu */
 			BUILD_BUG_ON(FIELD_SIZEOF(struct thread_info,
@@ -1265,11 +1287,12 @@ jmp_cmp:
 			/* $28/gp points to the thread_info struct */
 			emit_load(r_A, 28, off, ctx);
 			break;
-		case BPF_S_ANC_IFINDEX:
+		case BPF_ANC | SKF_AD_IFINDEX:
 			/* A = skb->dev->ifindex */
 			ctx->flags |= SEEN_SKB | SEEN_A | SEEN_S0;
 			off = offsetof(struct sk_buff, dev);
-			emit_load(r_s0, r_skb, off, ctx);
+			/* Load *dev pointer */
+			emit_load_ptr(r_s0, r_skb, off, ctx);
 			/* error (0) in the delay slot */
 			emit_bcond(MIPS_COND_EQ, r_s0, r_zero,
 				   b_imm(prog->len, ctx), ctx);
@@ -1279,31 +1302,36 @@ jmp_cmp:
 			off = offsetof(struct net_device, ifindex);
 			emit_load(r_A, r_s0, off, ctx);
 			break;
-		case BPF_S_ANC_MARK:
+		case BPF_ANC | SKF_AD_MARK:
 			ctx->flags |= SEEN_SKB | SEEN_A;
 			BUILD_BUG_ON(FIELD_SIZEOF(struct sk_buff, mark) != 4);
 			off = offsetof(struct sk_buff, mark);
 			emit_load(r_A, r_skb, off, ctx);
 			break;
-		case BPF_S_ANC_RXHASH:
+		case BPF_ANC | SKF_AD_RXHASH:
 			ctx->flags |= SEEN_SKB | SEEN_A;
 			BUILD_BUG_ON(FIELD_SIZEOF(struct sk_buff, hash) != 4);
 			off = offsetof(struct sk_buff, hash);
 			emit_load(r_A, r_skb, off, ctx);
 			break;
-		case BPF_S_ANC_VLAN_TAG:
-		case BPF_S_ANC_VLAN_TAG_PRESENT:
+		case BPF_ANC | SKF_AD_VLAN_TAG:
+		case BPF_ANC | SKF_AD_VLAN_TAG_PRESENT:
 			ctx->flags |= SEEN_SKB | SEEN_S0 | SEEN_A;
 			BUILD_BUG_ON(FIELD_SIZEOF(struct sk_buff,
 						  vlan_tci) != 2);
 			off = offsetof(struct sk_buff, vlan_tci);
 			emit_half_load(r_s0, r_skb, off, ctx);
-			if (inst->code == BPF_S_ANC_VLAN_TAG)
-				emit_and(r_A, r_s0, VLAN_VID_MASK, ctx);
-			else
-				emit_and(r_A, r_s0, VLAN_TAG_PRESENT, ctx);
+			if (code == (BPF_ANC | SKF_AD_VLAN_TAG)) {
+				emit_andi(r_A, r_s0, (u16)~VLAN_TAG_PRESENT, ctx);
+			} else {
+				emit_andi(r_A, r_s0, VLAN_TAG_PRESENT, ctx);
+				/* return 1 if present */
+				emit_sltu(r_A, r_zero, r_A, ctx);
+			}
 			break;
-		case BPF_S_ANC_PKTTYPE:
+		case BPF_ANC | SKF_AD_PKTTYPE:
+			ctx->flags |= SEEN_SKB;
+
 			off = pkt_type_offset();
 
 			if (off < 0)
@@ -1311,8 +1339,12 @@ jmp_cmp:
 			emit_load_byte(r_tmp, r_skb, off, ctx);
 			/* Keep only the last 3 bits */
 			emit_andi(r_A, r_tmp, PKT_TYPE_MAX, ctx);
+#ifdef __BIG_ENDIAN_BITFIELD
+			/* Get the actual packet type to the lower 3 bits */
+			emit_srl(r_A, r_A, 5, ctx);
+#endif
 			break;
-		case BPF_S_ANC_QUEUE:
+		case BPF_ANC | SKF_AD_QUEUE:
 			ctx->flags |= SEEN_SKB | SEEN_A;
 			BUILD_BUG_ON(FIELD_SIZEOF(struct sk_buff,
 						  queue_mapping) != 2);
@@ -1322,8 +1354,8 @@ jmp_cmp:
 			emit_half_load(r_A, r_skb, off, ctx);
 			break;
 		default:
-			pr_warn("%s: Unhandled opcode: 0x%02x\n", __FILE__,
-				inst->code);
+			pr_debug("%s: Unhandled opcode: 0x%02x\n", __FILE__,
+				 inst->code);
 			return -1;
 		}
 	}