2 "multiple registers share map_lookup_elem result",
4 BPF_MOV64_IMM(BPF_REG_1
, 10),
5 BPF_STX_MEM(BPF_DW
, BPF_REG_10
, BPF_REG_1
, -8),
6 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_10
),
7 BPF_ALU64_IMM(BPF_ADD
, BPF_REG_2
, -8),
8 BPF_LD_MAP_FD(BPF_REG_1
, 0),
9 BPF_RAW_INSN(BPF_JMP
| BPF_CALL
, 0, 0, 0, BPF_FUNC_map_lookup_elem
),
10 BPF_MOV64_REG(BPF_REG_4
, BPF_REG_0
),
11 BPF_JMP_IMM(BPF_JEQ
, BPF_REG_0
, 0, 1),
12 BPF_ST_MEM(BPF_DW
, BPF_REG_4
, 0, 0),
15 .fixup_map_hash_8b
= { 4 },
17 .prog_type
= BPF_PROG_TYPE_SCHED_CLS
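/*
 * The next three tests perform arithmetic on r4 while it still holds the raw
 * lookup result (PTR_TO_MAP_VALUE_OR_NULL), i.e. before the NULL check on r0.
 * Each variant is expected to be rejected with "R4 pointer arithmetic on
 * map_value_or_null".
 */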
20 "alu ops on ptr_to_map_value_or_null, 1",
22 BPF_MOV64_IMM(BPF_REG_1
, 10),
23 BPF_STX_MEM(BPF_DW
, BPF_REG_10
, BPF_REG_1
, -8),
24 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_10
),
25 BPF_ALU64_IMM(BPF_ADD
, BPF_REG_2
, -8),
26 BPF_LD_MAP_FD(BPF_REG_1
, 0),
27 BPF_RAW_INSN(BPF_JMP
| BPF_CALL
, 0, 0, 0, BPF_FUNC_map_lookup_elem
),
28 BPF_MOV64_REG(BPF_REG_4
, BPF_REG_0
),
29 BPF_ALU64_IMM(BPF_ADD
, BPF_REG_4
, -2),
30 BPF_ALU64_IMM(BPF_ADD
, BPF_REG_4
, 2),
31 BPF_JMP_IMM(BPF_JEQ
, BPF_REG_0
, 0, 1),
32 BPF_ST_MEM(BPF_DW
, BPF_REG_4
, 0, 0),
35 .fixup_map_hash_8b
= { 4 },
36 .errstr
= "R4 pointer arithmetic on map_value_or_null",
38 .prog_type
= BPF_PROG_TYPE_SCHED_CLS
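/* Same pattern as above, with a premature bitwise AND on r4. */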
41 "alu ops on ptr_to_map_value_or_null, 2",
43 BPF_MOV64_IMM(BPF_REG_1
, 10),
44 BPF_STX_MEM(BPF_DW
, BPF_REG_10
, BPF_REG_1
, -8),
45 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_10
),
46 BPF_ALU64_IMM(BPF_ADD
, BPF_REG_2
, -8),
47 BPF_LD_MAP_FD(BPF_REG_1
, 0),
48 BPF_RAW_INSN(BPF_JMP
| BPF_CALL
, 0, 0, 0, BPF_FUNC_map_lookup_elem
),
49 BPF_MOV64_REG(BPF_REG_4
, BPF_REG_0
),
50 BPF_ALU64_IMM(BPF_AND
, BPF_REG_4
, -1),
51 BPF_JMP_IMM(BPF_JEQ
, BPF_REG_0
, 0, 1),
52 BPF_ST_MEM(BPF_DW
, BPF_REG_4
, 0, 0),
55 .fixup_map_hash_8b
= { 4 },
56 .errstr
= "R4 pointer arithmetic on map_value_or_null",
58 .prog_type
= BPF_PROG_TYPE_SCHED_CLS
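/* Same pattern as above, with a premature left shift on r4. */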
61 "alu ops on ptr_to_map_value_or_null, 3",
63 BPF_MOV64_IMM(BPF_REG_1
, 10),
64 BPF_STX_MEM(BPF_DW
, BPF_REG_10
, BPF_REG_1
, -8),
65 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_10
),
66 BPF_ALU64_IMM(BPF_ADD
, BPF_REG_2
, -8),
67 BPF_LD_MAP_FD(BPF_REG_1
, 0),
68 BPF_RAW_INSN(BPF_JMP
| BPF_CALL
, 0, 0, 0, BPF_FUNC_map_lookup_elem
),
69 BPF_MOV64_REG(BPF_REG_4
, BPF_REG_0
),
70 BPF_ALU64_IMM(BPF_LSH
, BPF_REG_4
, 1),
71 BPF_JMP_IMM(BPF_JEQ
, BPF_REG_0
, 0, 1),
72 BPF_ST_MEM(BPF_DW
, BPF_REG_4
, 0, 0),
75 .fixup_map_hash_8b
= { 4 },
76 .errstr
= "R4 pointer arithmetic on map_value_or_null",
78 .prog_type
= BPF_PROG_TYPE_SCHED_CLS
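/*
 * r4 is copied from the first lookup's r0, but the second helper call clobbers
 * the caller-saved registers r1-r5, so the store through r4 after that call is
 * expected to fail with "R4 !read_ok".
 */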
81 "invalid memory access with multiple map_lookup_elem calls",
83 BPF_MOV64_IMM(BPF_REG_1
, 10),
84 BPF_STX_MEM(BPF_DW
, BPF_REG_10
, BPF_REG_1
, -8),
85 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_10
),
86 BPF_ALU64_IMM(BPF_ADD
, BPF_REG_2
, -8),
87 BPF_LD_MAP_FD(BPF_REG_1
, 0),
88 BPF_MOV64_REG(BPF_REG_8
, BPF_REG_1
),
89 BPF_MOV64_REG(BPF_REG_7
, BPF_REG_2
),
90 BPF_RAW_INSN(BPF_JMP
| BPF_CALL
, 0, 0, 0, BPF_FUNC_map_lookup_elem
),
91 BPF_MOV64_REG(BPF_REG_4
, BPF_REG_0
),
92 BPF_MOV64_REG(BPF_REG_1
, BPF_REG_8
),
93 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_7
),
94 BPF_RAW_INSN(BPF_JMP
| BPF_CALL
, 0, 0, 0, BPF_FUNC_map_lookup_elem
),
95 BPF_JMP_IMM(BPF_JEQ
, BPF_REG_0
, 0, 1),
96 BPF_ST_MEM(BPF_DW
, BPF_REG_4
, 0, 0),
99 .fixup_map_hash_8b
= { 4 },
101 .errstr
= "R4 !read_ok",
102 .prog_type
= BPF_PROG_TYPE_SCHED_CLS
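/*
 * The second lookup sits behind a conditional branch; on every path r4 is
 * copied from r0 after the last call executed on that path and is NULL-checked
 * before the store, so the program is expected to be accepted.
 */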
105 "valid indirect map_lookup_elem access with 2nd lookup in branch",
107 BPF_MOV64_IMM(BPF_REG_1
, 10),
108 BPF_STX_MEM(BPF_DW
, BPF_REG_10
, BPF_REG_1
, -8),
109 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_10
),
110 BPF_ALU64_IMM(BPF_ADD
, BPF_REG_2
, -8),
111 BPF_LD_MAP_FD(BPF_REG_1
, 0),
112 BPF_MOV64_REG(BPF_REG_8
, BPF_REG_1
),
113 BPF_MOV64_REG(BPF_REG_7
, BPF_REG_2
),
114 BPF_RAW_INSN(BPF_JMP
| BPF_CALL
, 0, 0, 0, BPF_FUNC_map_lookup_elem
),
115 BPF_MOV64_IMM(BPF_REG_2
, 10),
116 BPF_JMP_IMM(BPF_JNE
, BPF_REG_2
, 0, 3),
117 BPF_MOV64_REG(BPF_REG_1
, BPF_REG_8
),
118 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_7
),
119 BPF_RAW_INSN(BPF_JMP
| BPF_CALL
, 0, 0, 0, BPF_FUNC_map_lookup_elem
),
120 BPF_MOV64_REG(BPF_REG_4
, BPF_REG_0
),
121 BPF_JMP_IMM(BPF_JEQ
, BPF_REG_0
, 0, 1),
122 BPF_ST_MEM(BPF_DW
, BPF_REG_4
, 0, 0),
125 .fixup_map_hash_8b
= { 4 },
127 .prog_type
= BPF_PROG_TYPE_SCHED_CLS
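/*
 * The index loaded from the map value is only usefully bounded on the path
 * that performs the increment; on the branch that skips it the index keeps no
 * upper bound, so the store through r0 + (index << 2) is rejected as
 * "R0 unbounded memory access" ("R0 leaks addr" for unprivileged users).
 */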
130 "invalid map access from else condition",
132 BPF_ST_MEM(BPF_DW
, BPF_REG_10
, -8, 0),
133 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_10
),
134 BPF_ALU64_IMM(BPF_ADD
, BPF_REG_2
, -8),
135 BPF_LD_MAP_FD(BPF_REG_1
, 0),
136 BPF_RAW_INSN(BPF_JMP
| BPF_CALL
, 0, 0, 0, BPF_FUNC_map_lookup_elem
),
137 BPF_JMP_IMM(BPF_JEQ
, BPF_REG_0
, 0, 6),
138 BPF_LDX_MEM(BPF_W
, BPF_REG_1
, BPF_REG_0
, 0),
139 BPF_JMP_IMM(BPF_JGE
, BPF_REG_1
, MAX_ENTRIES
-1, 1),
140 BPF_ALU64_IMM(BPF_ADD
, BPF_REG_1
, 1),
141 BPF_ALU64_IMM(BPF_LSH
, BPF_REG_1
, 2),
142 BPF_ALU64_REG(BPF_ADD
, BPF_REG_0
, BPF_REG_1
),
143 BPF_ST_MEM(BPF_DW
, BPF_REG_0
, 0, offsetof(struct test_val
, foo
)),
146 .fixup_map_hash_48b
= { 3 },
147 .errstr
= "R0 unbounded memory access",
149 .errstr_unpriv
= "R0 leaks addr",
150 .result_unpriv
= REJECT
,
151 .flags
= F_NEEDS_EFFICIENT_UNALIGNED_ACCESS
,
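/*
 * After the JEQ NULL check, r6 is a known non-NULL map value pointer, so the
 * verifier should predict that the following JNE r6, 0 branch is always taken
 * and never examine the (invalid) add to the read-only frame pointer behind it,
 * letting the program load.
 */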
154 "map lookup and null branch prediction",
156 BPF_MOV64_IMM(BPF_REG_1
, 10),
157 BPF_STX_MEM(BPF_DW
, BPF_REG_10
, BPF_REG_1
, -8),
158 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_10
),
159 BPF_ALU64_IMM(BPF_ADD
, BPF_REG_2
, -8),
160 BPF_LD_MAP_FD(BPF_REG_1
, 0),
161 BPF_RAW_INSN(BPF_JMP
| BPF_CALL
, 0, 0, 0, BPF_FUNC_map_lookup_elem
),
162 BPF_MOV64_REG(BPF_REG_6
, BPF_REG_0
),
163 BPF_JMP_IMM(BPF_JEQ
, BPF_REG_6
, 0, 2),
164 BPF_JMP_IMM(BPF_JNE
, BPF_REG_6
, 0, 1),
165 BPF_ALU64_IMM(BPF_ADD
, BPF_REG_10
, 10),
168 .fixup_map_hash_8b
= { 4 },
169 .prog_type
= BPF_PROG_TYPE_SCHED_CLS
,