/*
 * Verifier test cases for BPF_XADD (fetch-and-add) instructions.
 * Source: tools/testing/selftests/bpf/verifier/xadd.c (linux/fpc-iii.git)
 * From commit: "treewide: remove redundant IS_ERR() before error code check"
 * (blob c5de2e62cc8bbc919e913da12b8de428ebfcd46a)
 */
2 "xadd/w check unaligned stack",
3 .insns = {
4 BPF_MOV64_IMM(BPF_REG_0, 1),
5 BPF_STX_MEM(BPF_DW, BPF_REG_10, BPF_REG_0, -8),
6 BPF_STX_XADD(BPF_W, BPF_REG_10, BPF_REG_0, -7),
7 BPF_LDX_MEM(BPF_DW, BPF_REG_0, BPF_REG_10, -8),
8 BPF_EXIT_INSN(),
9 },
10 .result = REJECT,
11 .errstr = "misaligned stack access off",
12 .prog_type = BPF_PROG_TYPE_SCHED_CLS,
15 "xadd/w check unaligned map",
16 .insns = {
17 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
18 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
19 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
20 BPF_LD_MAP_FD(BPF_REG_1, 0),
21 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
22 BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0, 1),
23 BPF_EXIT_INSN(),
24 BPF_MOV64_IMM(BPF_REG_1, 1),
25 BPF_STX_XADD(BPF_W, BPF_REG_0, BPF_REG_1, 3),
26 BPF_LDX_MEM(BPF_W, BPF_REG_0, BPF_REG_0, 3),
27 BPF_EXIT_INSN(),
29 .fixup_map_hash_8b = { 3 },
30 .result = REJECT,
31 .errstr = "misaligned value access off",
32 .prog_type = BPF_PROG_TYPE_SCHED_CLS,
35 "xadd/w check unaligned pkt",
36 .insns = {
37 BPF_LDX_MEM(BPF_W, BPF_REG_2, BPF_REG_1, offsetof(struct xdp_md, data)),
38 BPF_LDX_MEM(BPF_W, BPF_REG_3, BPF_REG_1,
39 offsetof(struct xdp_md, data_end)),
40 BPF_MOV64_REG(BPF_REG_1, BPF_REG_2),
41 BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, 8),
42 BPF_JMP_REG(BPF_JLT, BPF_REG_1, BPF_REG_3, 2),
43 BPF_MOV64_IMM(BPF_REG_0, 99),
44 BPF_JMP_IMM(BPF_JA, 0, 0, 6),
45 BPF_MOV64_IMM(BPF_REG_0, 1),
46 BPF_ST_MEM(BPF_W, BPF_REG_2, 0, 0),
47 BPF_ST_MEM(BPF_W, BPF_REG_2, 3, 0),
48 BPF_STX_XADD(BPF_W, BPF_REG_2, BPF_REG_0, 1),
49 BPF_STX_XADD(BPF_W, BPF_REG_2, BPF_REG_0, 2),
50 BPF_LDX_MEM(BPF_W, BPF_REG_0, BPF_REG_2, 1),
51 BPF_EXIT_INSN(),
53 .result = REJECT,
54 .errstr = "BPF_XADD stores into R2 pkt is not allowed",
55 .prog_type = BPF_PROG_TYPE_XDP,
56 .flags = F_NEEDS_EFFICIENT_UNALIGNED_ACCESS,
59 "xadd/w check whether src/dst got mangled, 1",
60 .insns = {
61 BPF_MOV64_IMM(BPF_REG_0, 1),
62 BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
63 BPF_MOV64_REG(BPF_REG_7, BPF_REG_10),
64 BPF_STX_MEM(BPF_DW, BPF_REG_10, BPF_REG_0, -8),
65 BPF_STX_XADD(BPF_DW, BPF_REG_10, BPF_REG_0, -8),
66 BPF_STX_XADD(BPF_DW, BPF_REG_10, BPF_REG_0, -8),
67 BPF_JMP_REG(BPF_JNE, BPF_REG_6, BPF_REG_0, 3),
68 BPF_JMP_REG(BPF_JNE, BPF_REG_7, BPF_REG_10, 2),
69 BPF_LDX_MEM(BPF_DW, BPF_REG_0, BPF_REG_10, -8),
70 BPF_EXIT_INSN(),
71 BPF_MOV64_IMM(BPF_REG_0, 42),
72 BPF_EXIT_INSN(),
74 .result = ACCEPT,
75 .prog_type = BPF_PROG_TYPE_SCHED_CLS,
76 .retval = 3,
79 "xadd/w check whether src/dst got mangled, 2",
80 .insns = {
81 BPF_MOV64_IMM(BPF_REG_0, 1),
82 BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
83 BPF_MOV64_REG(BPF_REG_7, BPF_REG_10),
84 BPF_STX_MEM(BPF_W, BPF_REG_10, BPF_REG_0, -8),
85 BPF_STX_XADD(BPF_W, BPF_REG_10, BPF_REG_0, -8),
86 BPF_STX_XADD(BPF_W, BPF_REG_10, BPF_REG_0, -8),
87 BPF_JMP_REG(BPF_JNE, BPF_REG_6, BPF_REG_0, 3),
88 BPF_JMP_REG(BPF_JNE, BPF_REG_7, BPF_REG_10, 2),
89 BPF_LDX_MEM(BPF_W, BPF_REG_0, BPF_REG_10, -8),
90 BPF_EXIT_INSN(),
91 BPF_MOV64_IMM(BPF_REG_0, 42),
92 BPF_EXIT_INSN(),
94 .result = ACCEPT,
95 .prog_type = BPF_PROG_TYPE_SCHED_CLS,
96 .retval = 3,