2 "xadd/w check unaligned stack",
4 BPF_MOV64_IMM(BPF_REG_0
, 1),
5 BPF_STX_MEM(BPF_DW
, BPF_REG_10
, BPF_REG_0
, -8),
6 BPF_STX_XADD(BPF_W
, BPF_REG_10
, BPF_REG_0
, -7),
7 BPF_LDX_MEM(BPF_DW
, BPF_REG_0
, BPF_REG_10
, -8),
11 .errstr
= "misaligned stack access off",
12 .prog_type
= BPF_PROG_TYPE_SCHED_CLS
,
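/* Below: after a map_lookup_elem() on an 8-byte hash map value, a 4-byte
 * XADD at value offset 3 is misaligned; the verifier is expected to
 * reject it with the .errstr in the entry.
 */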
15 "xadd/w check unaligned map",
17 BPF_ST_MEM(BPF_DW
, BPF_REG_10
, -8, 0),
18 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_10
),
19 BPF_ALU64_IMM(BPF_ADD
, BPF_REG_2
, -8),
20 BPF_LD_MAP_FD(BPF_REG_1
, 0),
21 BPF_RAW_INSN(BPF_JMP
| BPF_CALL
, 0, 0, 0, BPF_FUNC_map_lookup_elem
),
22 BPF_JMP_IMM(BPF_JNE
, BPF_REG_0
, 0, 1),
24 BPF_MOV64_IMM(BPF_REG_1
, 1),
25 BPF_STX_XADD(BPF_W
, BPF_REG_0
, BPF_REG_1
, 3),
26 BPF_LDX_MEM(BPF_W
, BPF_REG_0
, BPF_REG_0
, 3),
29 .fixup_map_hash_8b
= { 3 },
31 .errstr
= "misaligned value access off",
32 .prog_type
= BPF_PROG_TYPE_SCHED_CLS
,
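/* Below: an XDP program that bounds-checks the packet pointer and then
 * issues 4-byte XADDs into packet data. Per the .errstr in the entry,
 * the verifier rejects XADD stores into packet memory outright.
 */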
35 "xadd/w check unaligned pkt",
37 BPF_LDX_MEM(BPF_W
, BPF_REG_2
, BPF_REG_1
, offsetof(struct xdp_md
, data
)),
38 BPF_LDX_MEM(BPF_W
, BPF_REG_3
, BPF_REG_1
,
39 offsetof(struct xdp_md
, data_end
)),
40 BPF_MOV64_REG(BPF_REG_1
, BPF_REG_2
),
41 BPF_ALU64_IMM(BPF_ADD
, BPF_REG_1
, 8),
42 BPF_JMP_REG(BPF_JLT
, BPF_REG_1
, BPF_REG_3
, 2),
43 BPF_MOV64_IMM(BPF_REG_0
, 99),
44 BPF_JMP_IMM(BPF_JA
, 0, 0, 6),
45 BPF_MOV64_IMM(BPF_REG_0
, 1),
46 BPF_ST_MEM(BPF_W
, BPF_REG_2
, 0, 0),
47 BPF_ST_MEM(BPF_W
, BPF_REG_2
, 3, 0),
48 BPF_STX_XADD(BPF_W
, BPF_REG_2
, BPF_REG_0
, 1),
49 BPF_STX_XADD(BPF_W
, BPF_REG_2
, BPF_REG_0
, 2),
50 BPF_LDX_MEM(BPF_W
, BPF_REG_0
, BPF_REG_2
, 1),
54 .errstr
= "BPF_XADD stores into R2 pkt is not allowed",
55 .prog_type
= BPF_PROG_TYPE_XDP
,
56 .flags
= F_NEEDS_EFFICIENT_UNALIGNED_ACCESS
,
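/* Below: two 8-byte XADDs on a stack slot initialized to 1, followed by
 * checks that neither the copy of the source register (r6 == r0) nor the
 * copy of the destination pointer (r7 == r10) was clobbered by the XADD;
 * the slot's final value is loaded on the success path, otherwise 42 is
 * returned.
 */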
59 "xadd/w check whether src/dst got mangled, 1",
61 BPF_MOV64_IMM(BPF_REG_0
, 1),
62 BPF_MOV64_REG(BPF_REG_6
, BPF_REG_0
),
63 BPF_MOV64_REG(BPF_REG_7
, BPF_REG_10
),
64 BPF_STX_MEM(BPF_DW
, BPF_REG_10
, BPF_REG_0
, -8),
65 BPF_STX_XADD(BPF_DW
, BPF_REG_10
, BPF_REG_0
, -8),
66 BPF_STX_XADD(BPF_DW
, BPF_REG_10
, BPF_REG_0
, -8),
67 BPF_JMP_REG(BPF_JNE
, BPF_REG_6
, BPF_REG_0
, 3),
68 BPF_JMP_REG(BPF_JNE
, BPF_REG_7
, BPF_REG_10
, 2),
69 BPF_LDX_MEM(BPF_DW
, BPF_REG_0
, BPF_REG_10
, -8),
71 BPF_MOV64_IMM(BPF_REG_0
, 42),
75 .prog_type
= BPF_PROG_TYPE_SCHED_CLS
,
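/* Below: same src/dst-mangling check as above, but with 4-byte (BPF_W)
 * store, XADD, and load instead of 8-byte (BPF_DW) ones.
 */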
79 "xadd/w check whether src/dst got mangled, 2",
81 BPF_MOV64_IMM(BPF_REG_0
, 1),
82 BPF_MOV64_REG(BPF_REG_6
, BPF_REG_0
),
83 BPF_MOV64_REG(BPF_REG_7
, BPF_REG_10
),
84 BPF_STX_MEM(BPF_W
, BPF_REG_10
, BPF_REG_0
, -8),
85 BPF_STX_XADD(BPF_W
, BPF_REG_10
, BPF_REG_0
, -8),
86 BPF_STX_XADD(BPF_W
, BPF_REG_10
, BPF_REG_0
, -8),
87 BPF_JMP_REG(BPF_JNE
, BPF_REG_6
, BPF_REG_0
, 3),
88 BPF_JMP_REG(BPF_JNE
, BPF_REG_7
, BPF_REG_10
, 2),
89 BPF_LDX_MEM(BPF_W
, BPF_REG_0
, BPF_REG_10
, -8),
91 BPF_MOV64_IMM(BPF_REG_0
, 42),
95 .prog_type
= BPF_PROG_TYPE_SCHED_CLS
,