/*
 * Fragment of a BPF verifier register-allocation test (name/struct opener is
 * outside this chunk). Pattern: look up a hash-map value, bound a random
 * scalar, add it (twice, via r0 and its copy r2) to the value pointer, then
 * do an 8-byte load. With r0 <= 20 (signed) and r2 >= 0, the access stays
 * within the 48-byte value (20 + 20 + 8 = 48) — presumably ACCEPT; the
 * .result line is not visible in this chunk.
 */
/* r6 = ctx (r1 is clobbered by the helper calls below) */
4 BPF_MOV64_REG(BPF_REG_6
, BPF_REG_1
),
/* zero an 8-byte key at fp-8 and point r2 at it */
5 BPF_ST_MEM(BPF_DW
, BPF_REG_10
, -8, 0),
6 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_10
),
7 BPF_ALU64_IMM(BPF_ADD
, BPF_REG_2
, -8),
/* map fd is patched in by the fixup_map_hash_48b entry below */
8 BPF_LD_MAP_FD(BPF_REG_1
, 0),
9 BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem
),
/* bail out (jump over the rest) if the lookup returned NULL */
10 BPF_JMP_IMM(BPF_JEQ
, BPF_REG_0
, 0, 8),
/* r7 = map value pointer */
11 BPF_MOV64_REG(BPF_REG_7
, BPF_REG_0
),
/* r0 = unknown scalar; r2 gets a copy of it */
12 BPF_EMIT_CALL(BPF_FUNC_get_prandom_u32
),
13 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_0
),
/* skip the access unless r0 <= 20 (signed) ... */
14 BPF_JMP_IMM(BPF_JSGT
, BPF_REG_0
, 20, 4),
/* ... and r2 >= 0 (signed) */
15 BPF_JMP_IMM(BPF_JSLT
, BPF_REG_2
, 0, 3),
/* r7 += r0 + r2: at most 40 past the value start */
16 BPF_ALU64_REG(BPF_ADD
, BPF_REG_7
, BPF_REG_0
),
17 BPF_ALU64_REG(BPF_ADD
, BPF_REG_7
, BPF_REG_2
),
/* 8-byte load at offset <= 40 within the 48-byte value */
18 BPF_LDX_MEM(BPF_DW
, BPF_REG_0
, BPF_REG_7
, 0),
/* patch insn 4 (BPF_LD_MAP_FD) with a 48-byte-value hash map */
21 .fixup_map_hash_48b
= { 4 },
23 .prog_type
= BPF_PROG_TYPE_TRACEPOINT
,
24 .flags
= F_NEEDS_EFFICIENT_UNALIGNED_ACCESS
,
/*
 * Negative counterpart of the previous test (its name line is outside this
 * chunk). Same lookup + bounded-random-offset pattern, but the upper bound
 * is 24, so r0 + r2 can reach 48 and the 1-byte load lands exactly at
 * offset 48 — one past the end of the 48-byte value. Must be rejected with
 * the errstr below.
 */
29 BPF_MOV64_REG(BPF_REG_6
, BPF_REG_1
),
/* zeroed 8-byte key at fp-8 */
30 BPF_ST_MEM(BPF_DW
, BPF_REG_10
, -8, 0),
31 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_10
),
32 BPF_ALU64_IMM(BPF_ADD
, BPF_REG_2
, -8),
33 BPF_LD_MAP_FD(BPF_REG_1
, 0),
34 BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem
),
/* skip everything on NULL */
35 BPF_JMP_IMM(BPF_JEQ
, BPF_REG_0
, 0, 8),
36 BPF_MOV64_REG(BPF_REG_7
, BPF_REG_0
),
37 BPF_EMIT_CALL(BPF_FUNC_get_prandom_u32
),
38 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_0
),
/* bound is 24 here (vs 20 above): 24 + 24 = 48 is out of range */
39 BPF_JMP_IMM(BPF_JSGT
, BPF_REG_0
, 24, 4),
40 BPF_JMP_IMM(BPF_JSLT
, BPF_REG_2
, 0, 3),
41 BPF_ALU64_REG(BPF_ADD
, BPF_REG_7
, BPF_REG_0
),
42 BPF_ALU64_REG(BPF_ADD
, BPF_REG_7
, BPF_REG_2
),
/* single-byte load can sit at offset 48 == value_size: invalid */
43 BPF_LDX_MEM(BPF_B
, BPF_REG_0
, BPF_REG_7
, 0),
46 .fixup_map_hash_48b
= { 4 },
48 .errstr
= "invalid access to map value, value_size=48 off=48 size=1",
49 .prog_type
= BPF_PROG_TYPE_TRACEPOINT
,
/*
 * "regalloc src_reg mark": like the basic test, but the lower bound on r2
 * comes from a register-register compare (JSGE r3, r2 with r3 == 0), so the
 * verifier must propagate bounds through the *source* register of the jump.
 * Bounds: r0 <= 20, r2 >= 1 hm — strictly, the fall-through of
 * "if r3 >= r2 goto" gives r2 > 0; 20 + 20 + 8 <= 48 keeps the DW load in
 * range. The .insns/.result framing lines are not visible in this chunk.
 */
52 "regalloc src_reg mark",
54 BPF_MOV64_REG(BPF_REG_6
, BPF_REG_1
),
/* zeroed 8-byte key at fp-8 */
55 BPF_ST_MEM(BPF_DW
, BPF_REG_10
, -8, 0),
56 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_10
),
57 BPF_ALU64_IMM(BPF_ADD
, BPF_REG_2
, -8),
58 BPF_LD_MAP_FD(BPF_REG_1
, 0),
59 BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem
),
/* skip on NULL lookup result */
60 BPF_JMP_IMM(BPF_JEQ
, BPF_REG_0
, 0, 9),
61 BPF_MOV64_REG(BPF_REG_7
, BPF_REG_0
),
62 BPF_EMIT_CALL(BPF_FUNC_get_prandom_u32
),
63 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_0
),
/* upper bound: r0 <= 20 (signed) */
64 BPF_JMP_IMM(BPF_JSGT
, BPF_REG_0
, 20, 5),
/* lower bound via reg-reg compare: fall through only if 0 < r2 */
65 BPF_MOV64_IMM(BPF_REG_3
, 0),
66 BPF_JMP_REG(BPF_JSGE
, BPF_REG_3
, BPF_REG_2
, 3),
67 BPF_ALU64_REG(BPF_ADD
, BPF_REG_7
, BPF_REG_0
),
68 BPF_ALU64_REG(BPF_ADD
, BPF_REG_7
, BPF_REG_2
),
/* in-bounds 8-byte load from the 48-byte value */
69 BPF_LDX_MEM(BPF_DW
, BPF_REG_0
, BPF_REG_7
, 0),
72 .fixup_map_hash_48b
= { 4 },
74 .prog_type
= BPF_PROG_TYPE_TRACEPOINT
,
75 .flags
= F_NEEDS_EFFICIENT_UNALIGNED_ACCESS
,
/*
 * "regalloc src_reg negative": same src_reg-marking shape as above but the
 * upper bound is 22, so r0 + r2 can reach 44 and the 8-byte load then spans
 * bytes 44..51 of a 48-byte value. Must be rejected with the errstr below
 * (off=44 size=8).
 */
78 "regalloc src_reg negative",
80 BPF_MOV64_REG(BPF_REG_6
, BPF_REG_1
),
/* zeroed 8-byte key at fp-8 */
81 BPF_ST_MEM(BPF_DW
, BPF_REG_10
, -8, 0),
82 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_10
),
83 BPF_ALU64_IMM(BPF_ADD
, BPF_REG_2
, -8),
84 BPF_LD_MAP_FD(BPF_REG_1
, 0),
85 BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem
),
/* skip on NULL lookup result */
86 BPF_JMP_IMM(BPF_JEQ
, BPF_REG_0
, 0, 9),
87 BPF_MOV64_REG(BPF_REG_7
, BPF_REG_0
),
88 BPF_EMIT_CALL(BPF_FUNC_get_prandom_u32
),
89 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_0
),
/* bound is 22 here: 22 + 22 = 44, and 44 + 8 > 48 */
90 BPF_JMP_IMM(BPF_JSGT
, BPF_REG_0
, 22, 5),
91 BPF_MOV64_IMM(BPF_REG_3
, 0),
92 BPF_JMP_REG(BPF_JSGE
, BPF_REG_3
, BPF_REG_2
, 3),
93 BPF_ALU64_REG(BPF_ADD
, BPF_REG_7
, BPF_REG_0
),
94 BPF_ALU64_REG(BPF_ADD
, BPF_REG_7
, BPF_REG_2
),
/* 8-byte load that can start at offset 44: invalid */
95 BPF_LDX_MEM(BPF_DW
, BPF_REG_0
, BPF_REG_7
, 0),
98 .fixup_map_hash_48b
= { 4 },
100 .errstr
= "invalid access to map value, value_size=48 off=44 size=8",
101 .prog_type
= BPF_PROG_TYPE_TRACEPOINT
,
102 .flags
= F_NEEDS_EFFICIENT_UNALIGNED_ACCESS
,
/*
 * "regalloc and spill": the bounded scalar r2 is spilled to the stack, the
 * live registers are clobbered, and the value is filled back into r3. The
 * verifier must keep the bounds (r0's upper bound propagated into r2 before
 * the spill, r3's lower bound from the JSGE after the fill) attached across
 * the spill/fill so the final DW load is provably in range.
 */
105 "regalloc and spill",
107 BPF_MOV64_REG(BPF_REG_6
, BPF_REG_1
),
/* zeroed 8-byte key at fp-8 */
108 BPF_ST_MEM(BPF_DW
, BPF_REG_10
, -8, 0),
109 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_10
),
110 BPF_ALU64_IMM(BPF_ADD
, BPF_REG_2
, -8),
111 BPF_LD_MAP_FD(BPF_REG_1
, 0),
112 BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem
),
/* skip on NULL lookup result */
113 BPF_JMP_IMM(BPF_JEQ
, BPF_REG_0
, 0, 11),
114 BPF_MOV64_REG(BPF_REG_7
, BPF_REG_0
),
115 BPF_EMIT_CALL(BPF_FUNC_get_prandom_u32
),
116 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_0
),
/* skip the access unless r0 <= 20 (signed) */
117 BPF_JMP_IMM(BPF_JSGT
, BPF_REG_0
, 20, 7),
118 /* r0 has upper bound that should propagate into r2 */
119 BPF_STX_MEM(BPF_DW
, BPF_REG_10
, BPF_REG_2
, -8), /* spill r2 */
120 BPF_MOV64_IMM(BPF_REG_0
, 0),
121 BPF_MOV64_IMM(BPF_REG_2
, 0), /* clear r0 and r2 */
122 BPF_LDX_MEM(BPF_DW
, BPF_REG_3
, BPF_REG_10
, -8), /* fill r3 */
/* fall through only if 0 < r3 (signed) */
123 BPF_JMP_REG(BPF_JSGE
, BPF_REG_0
, BPF_REG_3
, 2),
124 /* r3 has lower and upper bounds */
125 BPF_ALU64_REG(BPF_ADD
, BPF_REG_7
, BPF_REG_3
),
/* 8-byte load at offset <= 20 within the 48-byte value */
126 BPF_LDX_MEM(BPF_DW
, BPF_REG_0
, BPF_REG_7
, 0),
129 .fixup_map_hash_48b
= { 4 },
131 .prog_type
= BPF_PROG_TYPE_TRACEPOINT
,
132 .flags
= F_NEEDS_EFFICIENT_UNALIGNED_ACCESS
,
/*
 * "regalloc and spill negative": identical to "regalloc and spill" except
 * the upper bound is 48, so after the spill/fill r3 can be 48 and the 8-byte
 * load spans bytes 48..55 of a 48-byte value. Must be rejected with the
 * errstr below (off=48 size=8) — i.e. the bound must survive the spill/fill
 * well enough to catch the violation.
 */
135 "regalloc and spill negative",
137 BPF_MOV64_REG(BPF_REG_6
, BPF_REG_1
),
/* zeroed 8-byte key at fp-8 */
138 BPF_ST_MEM(BPF_DW
, BPF_REG_10
, -8, 0),
139 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_10
),
140 BPF_ALU64_IMM(BPF_ADD
, BPF_REG_2
, -8),
141 BPF_LD_MAP_FD(BPF_REG_1
, 0),
142 BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem
),
/* skip on NULL lookup result */
143 BPF_JMP_IMM(BPF_JEQ
, BPF_REG_0
, 0, 11),
144 BPF_MOV64_REG(BPF_REG_7
, BPF_REG_0
),
145 BPF_EMIT_CALL(BPF_FUNC_get_prandom_u32
),
146 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_0
),
/* bound is 48 here: offset 48 is one past the end of the value */
147 BPF_JMP_IMM(BPF_JSGT
, BPF_REG_0
, 48, 7),
148 /* r0 has upper bound that should propagate into r2 */
149 BPF_STX_MEM(BPF_DW
, BPF_REG_10
, BPF_REG_2
, -8), /* spill r2 */
150 BPF_MOV64_IMM(BPF_REG_0
, 0),
151 BPF_MOV64_IMM(BPF_REG_2
, 0), /* clear r0 and r2 */
152 BPF_LDX_MEM(BPF_DW
, BPF_REG_3
, BPF_REG_10
, -8), /* fill r3 */
153 BPF_JMP_REG(BPF_JSGE
, BPF_REG_0
, BPF_REG_3
, 2),
154 /* r3 has lower and upper bounds */
155 BPF_ALU64_REG(BPF_ADD
, BPF_REG_7
, BPF_REG_3
),
/* 8-byte load that can start at offset 48: invalid */
156 BPF_LDX_MEM(BPF_DW
, BPF_REG_0
, BPF_REG_7
, 0),
159 .fixup_map_hash_48b
= { 4 },
161 .errstr
= "invalid access to map value, value_size=48 off=48 size=8",
162 .prog_type
= BPF_PROG_TYPE_TRACEPOINT
,
163 .flags
= F_NEEDS_EFFICIENT_UNALIGNED_ACCESS
,
/*
 * "regalloc three regs": three registers (r0, r2, r4) all alias the same
 * random scalar; bounding r0 (<= 12) and r2 (>= 0) must be understood to
 * bound all three copies, so r7 + r0 + r2 + r4 stays within 12*3 + 8 = 44
 * <= 48 bytes for the final DW load.
 */
166 "regalloc three regs",
168 BPF_MOV64_REG(BPF_REG_6
, BPF_REG_1
),
/* zeroed 8-byte key at fp-8 */
169 BPF_ST_MEM(BPF_DW
, BPF_REG_10
, -8, 0),
170 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_10
),
171 BPF_ALU64_IMM(BPF_ADD
, BPF_REG_2
, -8),
172 BPF_LD_MAP_FD(BPF_REG_1
, 0),
173 BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem
),
/* skip on NULL lookup result */
174 BPF_JMP_IMM(BPF_JEQ
, BPF_REG_0
, 0, 10),
175 BPF_MOV64_REG(BPF_REG_7
, BPF_REG_0
),
176 BPF_EMIT_CALL(BPF_FUNC_get_prandom_u32
),
/* r2 and r4 are both copies of the random scalar in r0 */
177 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_0
),
178 BPF_MOV64_REG(BPF_REG_4
, BPF_REG_2
),
/* bounds: r0 <= 12 (signed), r2 >= 0 (signed) */
179 BPF_JMP_IMM(BPF_JSGT
, BPF_REG_0
, 12, 5),
180 BPF_JMP_IMM(BPF_JSLT
, BPF_REG_2
, 0, 4),
/* r7 += r0 + r2 + r4: at most 36 past the value start */
181 BPF_ALU64_REG(BPF_ADD
, BPF_REG_7
, BPF_REG_0
),
182 BPF_ALU64_REG(BPF_ADD
, BPF_REG_7
, BPF_REG_2
),
183 BPF_ALU64_REG(BPF_ADD
, BPF_REG_7
, BPF_REG_4
),
/* in-bounds 8-byte load from the 48-byte value */
184 BPF_LDX_MEM(BPF_DW
, BPF_REG_0
, BPF_REG_7
, 0),
187 .fixup_map_hash_48b
= { 4 },
189 .prog_type
= BPF_PROG_TYPE_TRACEPOINT
,
190 .flags
= F_NEEDS_EFFICIENT_UNALIGNED_ACCESS
,
/*
 * "regalloc after call": copies of the random scalar are held in the
 * callee-saved registers r8 and r9 across a BPF-to-BPF call (the BPF_RAW_INSN
 * with BPF_JMP | BPF_CALL and src_reg 1 is a subprog call; its target is
 * outside this chunk). Bounds established on r8/r9 after the call must make
 * the r7 + r8 + r9 DW load provably in range (20 + 20 + 8 <= 48).
 */
193 "regalloc after call",
195 BPF_MOV64_REG(BPF_REG_6
, BPF_REG_1
),
/* zeroed 8-byte key at fp-8 */
196 BPF_ST_MEM(BPF_DW
, BPF_REG_10
, -8, 0),
197 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_10
),
198 BPF_ALU64_IMM(BPF_ADD
, BPF_REG_2
, -8),
199 BPF_LD_MAP_FD(BPF_REG_1
, 0),
200 BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem
),
/* skip on NULL lookup result */
201 BPF_JMP_IMM(BPF_JEQ
, BPF_REG_0
, 0, 10),
202 BPF_MOV64_REG(BPF_REG_7
, BPF_REG_0
),
203 BPF_EMIT_CALL(BPF_FUNC_get_prandom_u32
),
/* stash the scalar in callee-saved r8/r9 so it survives the call */
204 BPF_MOV64_REG(BPF_REG_8
, BPF_REG_0
),
205 BPF_MOV64_REG(BPF_REG_9
, BPF_REG_0
),
/* BPF-to-BPF call (src_reg == 1), relative target +6 insns */
206 BPF_RAW_INSN(BPF_JMP
| BPF_CALL
, 0, 1, 0, 6),
/* bounds after the call: r8 <= 20 (signed), r9 >= 0 (signed) */
207 BPF_JMP_IMM(BPF_JSGT
, BPF_REG_8
, 20, 4),
208 BPF_JMP_IMM(BPF_JSLT
, BPF_REG_9
, 0, 3),
209 BPF_ALU64_REG(BPF_ADD
, BPF_REG_7
, BPF_REG_8
),
210 BPF_ALU64_REG(BPF_ADD
, BPF_REG_7
, BPF_REG_9
),
/* in-bounds 8-byte load from the 48-byte value */
211 BPF_LDX_MEM(BPF_DW
, BPF_REG_0
, BPF_REG_7
, 0),
213 BPF_MOV64_IMM(BPF_REG_0
, 0),
216 .fixup_map_hash_48b
= { 4 },
218 .prog_type
= BPF_PROG_TYPE_TRACEPOINT
,
219 .flags
= F_NEEDS_EFFICIENT_UNALIGNED_ACCESS
,
/*
 * "regalloc in callee": the caller passes the random scalar (r1, r2) and the
 * map value pointer (r3) into a BPF-to-BPF callee; the bound checks and the
 * pointer arithmetic + DW load happen inside the callee (the insns after the
 * call). Bounds: r1 <= 20 (signed), r2 >= 0 (signed), so r3 + r1 + r2 stays
 * within 20 + 20 + 8 <= 48 bytes.
 */
222 "regalloc in callee",
224 BPF_MOV64_REG(BPF_REG_6
, BPF_REG_1
),
/* zeroed 8-byte key at fp-8 */
225 BPF_ST_MEM(BPF_DW
, BPF_REG_10
, -8, 0),
226 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_10
),
227 BPF_ALU64_IMM(BPF_ADD
, BPF_REG_2
, -8),
228 BPF_LD_MAP_FD(BPF_REG_1
, 0),
229 BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem
),
/* skip on NULL lookup result */
230 BPF_JMP_IMM(BPF_JEQ
, BPF_REG_0
, 0, 6),
231 BPF_MOV64_REG(BPF_REG_7
, BPF_REG_0
),
232 BPF_EMIT_CALL(BPF_FUNC_get_prandom_u32
),
/* callee args: r1 = r2 = random scalar, r3 = map value pointer */
233 BPF_MOV64_REG(BPF_REG_1
, BPF_REG_0
),
234 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_0
),
235 BPF_MOV64_REG(BPF_REG_3
, BPF_REG_7
),
/* BPF-to-BPF call (src_reg == 1), relative target +1 insn */
236 BPF_RAW_INSN(BPF_JMP
| BPF_CALL
, 0, 1, 0, 1),
/* callee body: bound the scalar args, then access via r3 */
238 BPF_JMP_IMM(BPF_JSGT
, BPF_REG_1
, 20, 5),
239 BPF_JMP_IMM(BPF_JSLT
, BPF_REG_2
, 0, 4),
240 BPF_ALU64_REG(BPF_ADD
, BPF_REG_3
, BPF_REG_1
),
241 BPF_ALU64_REG(BPF_ADD
, BPF_REG_3
, BPF_REG_2
),
/* in-bounds 8-byte load from the 48-byte value */
242 BPF_LDX_MEM(BPF_DW
, BPF_REG_0
, BPF_REG_3
, 0),
244 BPF_MOV64_IMM(BPF_REG_0
, 0),
247 .fixup_map_hash_48b
= { 4 },
249 .prog_type
= BPF_PROG_TYPE_TRACEPOINT
,
250 .flags
= F_NEEDS_EFFICIENT_UNALIGNED_ACCESS
,
/*
 * "regalloc, spill, JEQ": the maybe-NULL lookup result is spilled to the
 * stack, then a zero-offset JEQ forks verifier states (both branches fall
 * through at runtime). Later the spilled value is filled into r3 and
 * NULL-checked before the load. The test guards against the verifier
 * confusing the filled map-value/NULL in r3 with the scalar that was
 * compared against 20 (see the inline comments). The trailing .result/brace
 * lines fall outside this chunk.
 */
253 "regalloc, spill, JEQ",
255 BPF_MOV64_REG(BPF_REG_6
, BPF_REG_1
),
/* zeroed 8-byte key at fp-8 */
256 BPF_ST_MEM(BPF_DW
, BPF_REG_10
, -8, 0),
257 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_10
),
258 BPF_ALU64_IMM(BPF_ADD
, BPF_REG_2
, -8),
259 BPF_LD_MAP_FD(BPF_REG_1
, 0),
260 BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem
),
261 BPF_STX_MEM(BPF_DW
, BPF_REG_10
, BPF_REG_0
, -8), /* spill r0 */
/* offset-0 jump: both outcomes continue, but states are forked */
262 BPF_JMP_IMM(BPF_JEQ
, BPF_REG_0
, 0, 0),
263 /* The verifier will walk the rest twice with r0 == 0 and r0 == map_value */
264 BPF_EMIT_CALL(BPF_FUNC_get_prandom_u32
),
265 BPF_MOV64_REG(BPF_REG_2
, BPF_REG_0
),
/* another offset-0 fork, this time on the scalar r2 vs 20 */
266 BPF_JMP_IMM(BPF_JEQ
, BPF_REG_2
, 20, 0),
267 /* The verifier will walk the rest two more times with r0 == 20 and r0 == unknown */
268 BPF_LDX_MEM(BPF_DW
, BPF_REG_3
, BPF_REG_10
, -8), /* fill r3 with map_value */
269 BPF_JMP_IMM(BPF_JEQ
, BPF_REG_3
, 0, 1), /* skip ldx if map_value == NULL */
270 /* Buggy verifier will think that r3 == 20 here */
271 BPF_LDX_MEM(BPF_DW
, BPF_REG_0
, BPF_REG_3
, 0), /* read from map_value */
274 .fixup_map_hash_48b
= { 4 },
276 .prog_type
= BPF_PROG_TYPE_TRACEPOINT
,