Commit 791caeb0 authored by David Daney, committed by David S. Miller

test_bpf: Add a couple of tests for BPF_JSGE.

Some JITs can optimize comparisons with zero.  Add a couple of
BPF_JSGE tests against immediate zero.
Signed-off-by: David Daney <david.daney@cavium.com>
Acked-by: Daniel Borkmann <daniel@iogearbox.net>
Signed-off-by: David S. Miller <davem@davemloft.net>
parent ae08ea97
@@ -4504,6 +4504,44 @@ static struct bpf_test tests[] = {
		{ },
		{ { 0, 1 } },
	},
	{
		"JMP_JSGE_K: Signed jump: value walk 1",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_LD_IMM64(R1, -3),
			BPF_JMP_IMM(BPF_JSGE, R1, 0, 6),
			BPF_ALU64_IMM(BPF_ADD, R1, 1),
			BPF_JMP_IMM(BPF_JSGE, R1, 0, 4),
			BPF_ALU64_IMM(BPF_ADD, R1, 1),
			BPF_JMP_IMM(BPF_JSGE, R1, 0, 2),
			BPF_ALU64_IMM(BPF_ADD, R1, 1),
			BPF_JMP_IMM(BPF_JSGE, R1, 0, 1),
			BPF_EXIT_INSN(),		/* bad exit */
			BPF_ALU32_IMM(BPF_MOV, R0, 1),	/* good exit */
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1 } },
	},
	{
		"JMP_JSGE_K: Signed jump: value walk 2",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 0),
			BPF_LD_IMM64(R1, -3),
			BPF_JMP_IMM(BPF_JSGE, R1, 0, 4),
			BPF_ALU64_IMM(BPF_ADD, R1, 2),
			BPF_JMP_IMM(BPF_JSGE, R1, 0, 2),
			BPF_ALU64_IMM(BPF_ADD, R1, 2),
			BPF_JMP_IMM(BPF_JSGE, R1, 0, 1),
			BPF_EXIT_INSN(),		/* bad exit */
			BPF_ALU32_IMM(BPF_MOV, R0, 1),	/* good exit */
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1 } },
	},
	/* BPF_JMP | BPF_JGT | BPF_K */
	{
		"JMP_JGT_K: if (3 > 2) return 1",
...
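For reference, below is a minimal C sketch of the control flow exercised by the "JMP_JSGE_K: Signed jump: value walk 1" test, assuming BPF_JSGE behaves as a signed >= comparison of the register against the immediate. It is not part of the commit, and the function name jsge_value_walk_1() is invented for illustration.

/*
 * Hedged sketch (not from the commit): C equivalent of the
 * "value walk 1" test above.  The name jsge_value_walk_1() is
 * hypothetical.  R1 starts at -3 and is walked up by 1; each
 * BPF_JSGE against immediate 0 must stay not-taken while R1 is
 * negative and must be taken once R1 reaches 0, otherwise the
 * program returns 0 instead of the expected 1.
 */
static long long jsge_value_walk_1(void)
{
	long long r0 = 0;	/* BPF_ALU32_IMM(BPF_MOV, R0, 0) */
	long long r1 = -3;	/* BPF_LD_IMM64(R1, -3) */

	if (r1 >= 0)		/* -3 >= 0: must not be taken */
		return r0;	/* bad exit, returns 0 */
	r1 += 1;		/* r1 == -2 */
	if (r1 >= 0)
		return r0;
	r1 += 1;		/* r1 == -1 */
	if (r1 >= 0)
		return r0;
	r1 += 1;		/* r1 == 0 */
	if (r1 >= 0) {		/* 0 >= 0: must be taken */
		r0 = 1;		/* good exit */
		return r0;
	}
	return r0;		/* bad exit if the last compare is missed */
}

The "value walk 2" variant walks R1 in steps of 2 (-3, -1, +1), so its final comparison sees a value strictly greater than zero; together the two tests cover both the equality case and the greater-than case of a signed >= against immediate 0.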