This patch adds unit tests for new JMP32 instructions. Signed-off-by: Jiong Wang <jiong.w...@netronome.com> --- include/linux/filter.h | 19 +++ lib/test_bpf.c | 321 ++++++++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 335 insertions(+), 5 deletions(-)
diff --git a/include/linux/filter.h b/include/linux/filter.h index 537e9e7..94e1000 100644 --- a/include/linux/filter.h +++ b/include/linux/filter.h @@ -271,6 +271,15 @@ struct sock_reuseport; .off = OFF, \ .imm = 0 }) +/* Likewise, but with 32-bit wide operands for comparison. */ +#define BPF_JMP32_REG(OP, DST, SRC, OFF) \ + ((struct bpf_insn) { \ + .code = BPF_JMP32 | BPF_OP(OP) | BPF_X, \ + .dst_reg = DST, \ + .src_reg = SRC, \ + .off = OFF, \ + .imm = 0 }) + /* Conditional jumps against immediates, if (dst_reg 'op' imm32) goto pc + off16 */ #define BPF_JMP_IMM(OP, DST, IMM, OFF) \ @@ -281,6 +290,16 @@ struct sock_reuseport; .off = OFF, \ .imm = IMM }) +/* Likewise, but with 32-bit wide operands for comparison. */ + +#define BPF_JMP32_IMM(OP, DST, IMM, OFF) \ + ((struct bpf_insn) { \ + .code = BPF_JMP32 | BPF_OP(OP) | BPF_K, \ + .dst_reg = DST, \ + .src_reg = 0, \ + .off = OFF, \ + .imm = IMM }) + /* Unconditional jumps, goto pc + off16 */ #define BPF_JMP_A(OFF) \ diff --git a/lib/test_bpf.c b/lib/test_bpf.c index f3e5707..c17f08b 100644 --- a/lib/test_bpf.c +++ b/lib/test_bpf.c @@ -4447,6 +4447,21 @@ static struct bpf_test tests[] = { { }, { { 0, 1 } }, }, + /* BPF_JMP | BPF_JSLT | BPF_K (32-bit variant) */ + { + "JMP32_JSLT_K: Signed jump: if (-2 < -1) return 1", + .u.insns_int = { + BPF_ALU32_IMM(BPF_MOV, R0, 0), + BPF_LD_IMM64(R1, 0x1fffffffeLL), + BPF_JMP32_IMM(BPF_JSLT, R1, -1, 1), + BPF_EXIT_INSN(), + BPF_ALU32_IMM(BPF_MOV, R0, 1), + BPF_EXIT_INSN(), + }, + INTERNAL, + { }, + { { 0, 1 } }, + }, { "JMP_JSLT_K: Signed jump: if (-1 < -1) return 0", .u.insns_int = { @@ -4476,6 +4491,21 @@ static struct bpf_test tests[] = { { }, { { 0, 1 } }, }, + /* BPF_JMP | BPF_JSGT | BPF_K (32-bit variant) */ + { + "JMP32_JSGT_K: Signed jump: if (-1 > -2) return 1", + .u.insns_int = { + BPF_ALU32_IMM(BPF_MOV, R0, 0), + BPF_LD_IMM64(R1, 0xfffffff7ffffffffLL), + BPF_JMP32_IMM(BPF_JSGT, R1, -2, 1), + BPF_EXIT_INSN(), + BPF_ALU32_IMM(BPF_MOV, R0, 1), + BPF_EXIT_INSN(), + }, + INTERNAL, + { }, + { { 0, 1 } }, + }, { 
"JMP_JSGT_K: Signed jump: if (-1 > -1) return 0", .u.insns_int = { @@ -4505,6 +4535,21 @@ static struct bpf_test tests[] = { { }, { { 0, 1 } }, }, + /* BPF_JMP | BPF_JSLE | BPF_K (32-bit variant) */ + { + "JMP32_JSLE_K: Signed jump: if (-2 <= -1) return 1", + .u.insns_int = { + BPF_ALU32_IMM(BPF_MOV, R0, 0), + BPF_LD_IMM64(R1, 0x7ffffffffffffffeLL), + BPF_JMP32_IMM(BPF_JSLE, R1, -1, 1), + BPF_EXIT_INSN(), + BPF_ALU32_IMM(BPF_MOV, R0, 1), + BPF_EXIT_INSN(), + }, + INTERNAL, + { }, + { { 0, 1 } }, + }, { "JMP_JSLE_K: Signed jump: if (-1 <= -1) return 1", .u.insns_int = { @@ -4572,6 +4617,21 @@ static struct bpf_test tests[] = { { }, { { 0, 1 } }, }, + /* BPF_JMP | BPF_JSGE | BPF_K (32-bit variant) */ + { + "JMP32_JSGE_K: Signed jump: if (-1 >= -2) return 1", + .u.insns_int = { + BPF_ALU32_IMM(BPF_MOV, R0, 0), + BPF_LD_IMM64(R1, 0xfffffff7ffffffffLL), + BPF_JMP32_IMM(BPF_JSGE, R1, -2, 1), + BPF_EXIT_INSN(), + BPF_ALU32_IMM(BPF_MOV, R0, 1), + BPF_EXIT_INSN(), + }, + INTERNAL, + { }, + { { 0, 1 } }, + }, { "JMP_JSGE_K: Signed jump: if (-1 >= -1) return 1", .u.insns_int = { @@ -4639,6 +4699,21 @@ static struct bpf_test tests[] = { { }, { { 0, 1 } }, }, + /* BPF_JMP | BPF_JGT | BPF_K (32-bit variant) */ + { + "JMP32_JGT_K: if (2 > 3) return 1", + .u.insns_int = { + BPF_ALU32_IMM(BPF_MOV, R0, 0), + BPF_LD_IMM64(R1, 0x100000002), + BPF_JMP32_IMM(BPF_JGT, R1, 3, 1), + BPF_EXIT_INSN(), + BPF_ALU32_IMM(BPF_MOV, R0, 1), + BPF_EXIT_INSN(), + }, + INTERNAL, + { }, + { { 0, 0 } }, + }, { "JMP_JGT_K: Unsigned jump: if (-1 > 1) return 1", .u.insns_int = { @@ -4668,8 +4743,23 @@ static struct bpf_test tests[] = { { }, { { 0, 1 } }, }, + /* BPF_JMP | BPF_JLT | BPF_K (32-bit variant) */ { - "JMP_JGT_K: Unsigned jump: if (1 < -1) return 1", + "JMP32_JLT_K: if (2 < 3) return 1", + .u.insns_int = { + BPF_ALU32_IMM(BPF_MOV, R0, 0), + BPF_LD_IMM64(R1, 0x100000002), + BPF_JMP32_IMM(BPF_JLT, R1, 3, 1), + BPF_EXIT_INSN(), + BPF_ALU32_IMM(BPF_MOV, R0, 1), + BPF_EXIT_INSN(), + }, + INTERNAL, + { 
}, + { { 0, 1 } }, + }, + { + "JMP_JLT_K: Unsigned jump: if (1 < -1) return 1", .u.insns_int = { BPF_ALU32_IMM(BPF_MOV, R0, 0), BPF_LD_IMM64(R1, 1), @@ -4697,6 +4787,21 @@ static struct bpf_test tests[] = { { }, { { 0, 1 } }, }, + /* BPF_JMP | BPF_JGE | BPF_K (32-bit variant) */ + { + "JMP32_JGE_K: if (2 >= 3) return 1", + .u.insns_int = { + BPF_ALU32_IMM(BPF_MOV, R0, 0), + BPF_LD_IMM64(R1, 0x100000002), + BPF_JMP32_IMM(BPF_JGE, R1, 3, 1), + BPF_EXIT_INSN(), + BPF_ALU32_IMM(BPF_MOV, R0, 1), + BPF_EXIT_INSN(), + }, + INTERNAL, + { }, + { { 0, 0 } }, + }, /* BPF_JMP | BPF_JLE | BPF_K */ { "JMP_JLE_K: if (2 <= 3) return 1", @@ -4712,6 +4817,21 @@ static struct bpf_test tests[] = { { }, { { 0, 1 } }, }, + /* BPF_JMP | BPF_JLE | BPF_K (32-bit variant) */ + { + "JMP32_JLE_K: if (2 <= 3) return 1", + .u.insns_int = { + BPF_ALU32_IMM(BPF_MOV, R0, 0), + BPF_LD_IMM64(R1, 0x100000002), + BPF_JMP32_IMM(BPF_JLE, R1, 3, 1), + BPF_EXIT_INSN(), + BPF_ALU32_IMM(BPF_MOV, R0, 1), + BPF_EXIT_INSN(), + }, + INTERNAL, + { }, + { { 0, 1 } }, + }, /* BPF_JMP | BPF_JGT | BPF_K jump backwards */ { "JMP_JGT_K: if (3 > 2) return 1 (jump backwards)", @@ -4787,6 +4907,21 @@ static struct bpf_test tests[] = { { }, { { 0, 1 } }, }, + /* BPF_JMP | BPF_JNE | BPF_K (32-bit variant) */ + { + "JMP32_JNE_K: if (3 != 2) return 1", + .u.insns_int = { + BPF_ALU32_IMM(BPF_MOV, R0, 0), + BPF_LD_IMM64(R1, 0x100000003ULL), + BPF_JMP32_IMM(BPF_JNE, R1, 2, 1), + BPF_EXIT_INSN(), + BPF_ALU32_IMM(BPF_MOV, R0, 1), + BPF_EXIT_INSN(), + }, + INTERNAL, + { }, + { { 0, 1 } }, + }, /* BPF_JMP | BPF_JEQ | BPF_K */ { "JMP_JEQ_K: if (3 == 3) return 1", @@ -4802,6 +4937,21 @@ static struct bpf_test tests[] = { { }, { { 0, 1 } }, }, + /* BPF_JMP | BPF_JEQ | BPF_K (32-bit variant) */ + { + "JMP32_JEQ_K: if (3 == 3) return 1", + .u.insns_int = { + BPF_ALU32_IMM(BPF_MOV, R0, 0), + BPF_LD_IMM64(R1, 0x100000003ULL), + BPF_JMP32_IMM(BPF_JEQ, R1, 3, 1), + BPF_EXIT_INSN(), + BPF_ALU32_IMM(BPF_MOV, R0, 1), + BPF_EXIT_INSN(), + }, + 
INTERNAL, + { }, + { { 0, 1 } }, + }, /* BPF_JMP | BPF_JSET | BPF_K */ { "JMP_JSET_K: if (0x3 & 0x2) return 1", @@ -4847,6 +4997,22 @@ static struct bpf_test tests[] = { { }, { { 0, 1 } }, }, + /* BPF_JMP | BPF_JSGT | BPF_X (32-bit variant) */ + { + "JMP32_JSGT_X: Signed jump: if (-1 > -2) return 1", + .u.insns_int = { + BPF_ALU32_IMM(BPF_MOV, R0, 0), + BPF_LD_IMM64(R1, 0x7fffffffffffffffULL), + BPF_LD_IMM64(R2, -2), + BPF_JMP32_REG(BPF_JSGT, R1, R2, 1), + BPF_EXIT_INSN(), + BPF_ALU32_IMM(BPF_MOV, R0, 1), + BPF_EXIT_INSN(), + }, + INTERNAL, + { }, + { { 0, 1 } }, + }, { "JMP_JSGT_X: Signed jump: if (-1 > -1) return 0", .u.insns_int = { @@ -4878,13 +5044,30 @@ static struct bpf_test tests[] = { { }, { { 0, 1 } }, }, + /* BPF_JMP | BPF_JSLT | BPF_X */ { - "JMP_JSLT_X: Signed jump: if (-1 < -1) return 0", + "JMP_JSLT_X: Signed jump: if (-2 < -1) return 1", .u.insns_int = { - BPF_ALU32_IMM(BPF_MOV, R0, 1), + BPF_ALU32_IMM(BPF_MOV, R0, 0), BPF_LD_IMM64(R1, -1), - BPF_LD_IMM64(R2, -1), - BPF_JMP_REG(BPF_JSLT, R1, R2, 1), + BPF_LD_IMM64(R2, -2), + BPF_JMP_REG(BPF_JSLT, R2, R1, 1), + BPF_EXIT_INSN(), + BPF_ALU32_IMM(BPF_MOV, R0, 1), + BPF_EXIT_INSN(), + }, + INTERNAL, + { }, + { { 0, 1 } }, + }, + /* BPF_JMP | BPF_JSLT | BPF_X (32-bit variant) */ + { + "JMP32_JSLT_X: Signed jump: if (-1 < -1) return 0", + .u.insns_int = { + BPF_ALU32_IMM(BPF_MOV, R0, 1), + BPF_LD_IMM64(R1, 0x1ffffffffULL), + BPF_LD_IMM64(R2, 0x2ffffffffULL), + BPF_JMP32_REG(BPF_JSLT, R1, R2, 1), BPF_EXIT_INSN(), BPF_ALU32_IMM(BPF_MOV, R0, 0), BPF_EXIT_INSN(), @@ -4909,6 +5092,22 @@ static struct bpf_test tests[] = { { }, { { 0, 1 } }, }, + /* BPF_JMP | BPF_JSGE | BPF_X (32-bit variant) */ + { + "JMP32_JSGE_X: Signed jump: if (-1 >= -2) return 1", + .u.insns_int = { + BPF_ALU32_IMM(BPF_MOV, R0, 0), + BPF_LD_IMM64(R1, 0xfffffff7ffffffffULL), + BPF_LD_IMM64(R2, -2), + BPF_JMP32_REG(BPF_JSGE, R1, R2, 1), + BPF_EXIT_INSN(), + BPF_ALU32_IMM(BPF_MOV, R0, 1), + BPF_EXIT_INSN(), + }, + INTERNAL, + { }, + { { 0, 1 } 
}, + }, { "JMP_JSGE_X: Signed jump: if (-1 >= -1) return 1", .u.insns_int = { @@ -4940,6 +5139,22 @@ static struct bpf_test tests[] = { { }, { { 0, 1 } }, }, + /* BPF_JMP | BPF_JSLE | BPF_X (32-bit variant) */ + { + "JMP32_JSLE_X: Signed jump: if (-2 <= -1) return 1", + .u.insns_int = { + BPF_ALU32_IMM(BPF_MOV, R0, 0), + BPF_LD_IMM64(R1, 0x1ffffffffULL), + BPF_LD_IMM64(R2, -2), + BPF_JMP32_REG(BPF_JSLE, R2, R1, 1), + BPF_EXIT_INSN(), + BPF_ALU32_IMM(BPF_MOV, R0, 1), + BPF_EXIT_INSN(), + }, + INTERNAL, + { }, + { { 0, 1 } }, + }, { "JMP_JSLE_X: Signed jump: if (-1 <= -1) return 1", .u.insns_int = { @@ -4971,6 +5186,22 @@ static struct bpf_test tests[] = { { }, { { 0, 1 } }, }, + /* BPF_JMP | BPF_JGT | BPF_X (32-bit variant) */ + { + "JMP32_JGT_X: if (3 > 2) return 1", + .u.insns_int = { + BPF_ALU32_IMM(BPF_MOV, R0, 0), + BPF_LD_IMM64(R1, 0x7fffffff3ULL), + BPF_LD_IMM64(R2, 0xffffffff2ULL), + BPF_JMP32_REG(BPF_JGT, R1, R2, 1), + BPF_EXIT_INSN(), + BPF_ALU32_IMM(BPF_MOV, R0, 1), + BPF_EXIT_INSN(), + }, + INTERNAL, + { }, + { { 0, 1 } }, + }, { "JMP_JGT_X: Unsigned jump: if (-1 > 1) return 1", .u.insns_int = { @@ -5002,6 +5233,22 @@ static struct bpf_test tests[] = { { }, { { 0, 1 } }, }, + /* BPF_JMP | BPF_JLT | BPF_X (32-bit variant) */ + { + "JMP32_JLT_X: if (2 < 3) return 1", + .u.insns_int = { + BPF_ALU32_IMM(BPF_MOV, R0, 0), + BPF_LD_IMM64(R1, 0x100000003ULL), + BPF_LD_IMM64(R2, 0x200000002ULL), + BPF_JMP32_REG(BPF_JLT, R2, R1, 1), + BPF_EXIT_INSN(), + BPF_ALU32_IMM(BPF_MOV, R0, 1), + BPF_EXIT_INSN(), + }, + INTERNAL, + { }, + { { 0, 1 } }, + }, { "JMP_JLT_X: Unsigned jump: if (1 < -1) return 1", .u.insns_int = { @@ -5033,6 +5280,22 @@ static struct bpf_test tests[] = { { }, { { 0, 1 } }, }, + /* BPF_JMP | BPF_JGE | BPF_X (32-bit variant) */ + { + "JMP32_JGE_X: if (3 >= 2) return 1", + .u.insns_int = { + BPF_ALU32_IMM(BPF_MOV, R0, 0), + BPF_LD_IMM64(R1, 0x123456783ULL), + BPF_LD_IMM64(R2, 0xf23456782ULL), + BPF_JMP32_REG(BPF_JGE, R1, R2, 1), + BPF_EXIT_INSN(), + 
BPF_ALU32_IMM(BPF_MOV, R0, 1), + BPF_EXIT_INSN(), + }, + INTERNAL, + { }, + { { 0, 1 } }, + }, { "JMP_JGE_X: if (3 >= 3) return 1", .u.insns_int = { @@ -5064,6 +5327,22 @@ static struct bpf_test tests[] = { { }, { { 0, 1 } }, }, + /* BPF_JMP | BPF_JLE | BPF_X (32-bit variant) */ + { + "JMP32_JLE_X: if (2 <= 3) return 1", + .u.insns_int = { + BPF_ALU32_IMM(BPF_MOV, R0, 0), + BPF_LD_IMM64(R1, 0x7fffffff3ULL), + BPF_LD_IMM64(R2, 0x8fffffff2ULL), + BPF_JMP32_REG(BPF_JLE, R2, R1, 1), + BPF_EXIT_INSN(), + BPF_ALU32_IMM(BPF_MOV, R0, 1), + BPF_EXIT_INSN(), + }, + INTERNAL, + { }, + { { 0, 1 } }, + }, { "JMP_JLE_X: if (3 <= 3) return 1", .u.insns_int = { @@ -5184,6 +5463,22 @@ static struct bpf_test tests[] = { { }, { { 0, 1 } }, }, + /* BPF_JMP | BPF_JNE | BPF_X (32-bit variant) */ + { + "JMP32_JNE_X: if (3 != 2) return 1", + .u.insns_int = { + BPF_ALU32_IMM(BPF_MOV, R0, 0), + BPF_LD_IMM64(R1, 0x10000003ULL), + BPF_LD_IMM64(R2, 0x10000002ULL), + BPF_JMP32_REG(BPF_JNE, R1, R2, 1), + BPF_EXIT_INSN(), + BPF_ALU32_IMM(BPF_MOV, R0, 1), + BPF_EXIT_INSN(), + }, + INTERNAL, + { }, + { { 0, 1 } }, + }, /* BPF_JMP | BPF_JEQ | BPF_X */ { "JMP_JEQ_X: if (3 == 3) return 1", @@ -5200,6 +5495,22 @@ static struct bpf_test tests[] = { { }, { { 0, 1 } }, }, + /* BPF_JMP | BPF_JEQ | BPF_X (32-bit variant) */ + { + "JMP32_JEQ_X: if (3 == 3) return 1", + .u.insns_int = { + BPF_ALU32_IMM(BPF_MOV, R0, 0), + BPF_LD_IMM64(R1, 0x100000003ULL), + BPF_LD_IMM64(R2, 0x200000003ULL), + BPF_JMP32_REG(BPF_JEQ, R1, R2, 1), + BPF_EXIT_INSN(), + BPF_ALU32_IMM(BPF_MOV, R0, 1), + BPF_EXIT_INSN(), + }, + INTERNAL, + { }, + { { 0, 1 } }, + }, /* BPF_JMP | BPF_JSET | BPF_X */ { "JMP_JSET_X: if (0x3 & 0x2) return 1", -- 2.7.4