# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=riscv32 -mattr=+v -run-pass=instruction-select -simplify-mir -verify-machineinstrs %s -o - | FileCheck -check-prefix=RV32I %s
# RUN: llc -mtriple=riscv64 -mattr=+v -run-pass=instruction-select -simplify-mir -verify-machineinstrs %s -o - | FileCheck -check-prefix=RV64I %s

---
name: zext_nxv1i16_nxv1i8
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0.entry:
    liveins: $v8

    ; RV32I-LABEL: name: zext_nxv1i16_nxv1i8
    ; RV32I: liveins: $v8
    ; RV32I-NEXT: {{  $}}
    ; RV32I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV32I-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
    ; RV32I-NEXT: early-clobber %1:vr = PseudoVZEXT_VF2_MF4 [[DEF]], [[COPY]], -1, 4 /* e16 */, 3 /* ta, ma */
    ; RV32I-NEXT: $v8 = COPY %1
    ; RV32I-NEXT: PseudoRET implicit $v8
    ;
    ; RV64I-LABEL: name: zext_nxv1i16_nxv1i8
    ; RV64I: liveins: $v8
    ; RV64I-NEXT: {{  $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV64I-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
    ; RV64I-NEXT: early-clobber %1:vr = PseudoVZEXT_VF2_MF4 [[DEF]], [[COPY]], -1, 4 /* e16 */, 3 /* ta, ma */
    ; RV64I-NEXT: $v8 = COPY %1
    ; RV64I-NEXT: PseudoRET implicit $v8
    %0:vrb(<vscale x 1 x s8>) = COPY $v8
    %1:vrb(<vscale x 1 x s16>) = G_ZEXT %0(<vscale x 1 x s8>)
    $v8 = COPY %1(<vscale x 1 x s16>)
    PseudoRET implicit $v8

...
---
name: zext_nxv1i32_nxv1i8
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0.entry:
    liveins: $v8

    ; RV32I-LABEL: name: zext_nxv1i32_nxv1i8
    ; RV32I: liveins: $v8
    ; RV32I-NEXT: {{  $}}
    ; RV32I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV32I-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
    ; RV32I-NEXT: early-clobber %1:vr = PseudoVZEXT_VF4_MF2 [[DEF]], [[COPY]], -1, 5 /* e32 */, 3 /* ta, ma */
    ; RV32I-NEXT: $v8 = COPY %1
    ; RV32I-NEXT: PseudoRET implicit $v8
    ;
    ; RV64I-LABEL: name: zext_nxv1i32_nxv1i8
    ; RV64I: liveins: $v8
    ; RV64I-NEXT: {{  $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV64I-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
    ; RV64I-NEXT: early-clobber %1:vr = PseudoVZEXT_VF4_MF2 [[DEF]], [[COPY]], -1, 5 /* e32 */, 3 /* ta, ma */
    ; RV64I-NEXT: $v8 = COPY %1
    ; RV64I-NEXT: PseudoRET implicit $v8
    %0:vrb(<vscale x 1 x s8>) = COPY $v8
    %1:vrb(<vscale x 1 x s32>) = G_ZEXT %0(<vscale x 1 x s8>)
    $v8 = COPY %1(<vscale x 1 x s32>)
    PseudoRET implicit $v8

...
---
name: zext_nxv1i64_nxv1i8
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0.entry:
    liveins: $v8

    ; RV32I-LABEL: name: zext_nxv1i64_nxv1i8
    ; RV32I: liveins: $v8
    ; RV32I-NEXT: {{  $}}
    ; RV32I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV32I-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
    ; RV32I-NEXT: early-clobber %1:vr = PseudoVZEXT_VF8_M1 [[DEF]], [[COPY]], -1, 6 /* e64 */, 3 /* ta, ma */
    ; RV32I-NEXT: $v8 = COPY %1
    ; RV32I-NEXT: PseudoRET implicit $v8
    ;
    ; RV64I-LABEL: name: zext_nxv1i64_nxv1i8
    ; RV64I: liveins: $v8
    ; RV64I-NEXT: {{  $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV64I-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
    ; RV64I-NEXT: early-clobber %1:vr = PseudoVZEXT_VF8_M1 [[DEF]], [[COPY]], -1, 6 /* e64 */, 3 /* ta, ma */
    ; RV64I-NEXT: $v8 = COPY %1
    ; RV64I-NEXT: PseudoRET implicit $v8
    %0:vrb(<vscale x 1 x s8>) = COPY $v8
    %1:vrb(<vscale x 1 x s64>) = G_ZEXT %0(<vscale x 1 x s8>)
    $v8 = COPY %1(<vscale x 1 x s64>)
    PseudoRET implicit $v8

...
---
name: zext_nxv2i16_nxv2i8
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0.entry:
    liveins: $v8

    ; RV32I-LABEL: name: zext_nxv2i16_nxv2i8
    ; RV32I: liveins: $v8
    ; RV32I-NEXT: {{  $}}
    ; RV32I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV32I-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
    ; RV32I-NEXT: early-clobber %1:vr = PseudoVZEXT_VF2_MF2 [[DEF]], [[COPY]], -1, 4 /* e16 */, 3 /* ta, ma */
    ; RV32I-NEXT: $v8 = COPY %1
    ; RV32I-NEXT: PseudoRET implicit $v8
    ;
    ; RV64I-LABEL: name: zext_nxv2i16_nxv2i8
    ; RV64I: liveins: $v8
    ; RV64I-NEXT: {{  $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV64I-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
    ; RV64I-NEXT: early-clobber %1:vr = PseudoVZEXT_VF2_MF2 [[DEF]], [[COPY]], -1, 4 /* e16 */, 3 /* ta, ma */
    ; RV64I-NEXT: $v8 = COPY %1
    ; RV64I-NEXT: PseudoRET implicit $v8
    %0:vrb(<vscale x 2 x s8>) = COPY $v8
    %1:vrb(<vscale x 2 x s16>) = G_ZEXT %0(<vscale x 2 x s8>)
    $v8 = COPY %1(<vscale x 2 x s16>)
    PseudoRET implicit $v8

...
---
name: zext_nxv2i32_nxv2i8
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0.entry:
    liveins: $v8

    ; RV32I-LABEL: name: zext_nxv2i32_nxv2i8
    ; RV32I: liveins: $v8
    ; RV32I-NEXT: {{  $}}
    ; RV32I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV32I-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
    ; RV32I-NEXT: early-clobber %1:vr = PseudoVZEXT_VF4_M1 [[DEF]], [[COPY]], -1, 5 /* e32 */, 3 /* ta, ma */
    ; RV32I-NEXT: $v8 = COPY %1
    ; RV32I-NEXT: PseudoRET implicit $v8
    ;
    ; RV64I-LABEL: name: zext_nxv2i32_nxv2i8
    ; RV64I: liveins: $v8
    ; RV64I-NEXT: {{  $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV64I-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
    ; RV64I-NEXT: early-clobber %1:vr = PseudoVZEXT_VF4_M1 [[DEF]], [[COPY]], -1, 5 /* e32 */, 3 /* ta, ma */
    ; RV64I-NEXT: $v8 = COPY %1
    ; RV64I-NEXT: PseudoRET implicit $v8
    %0:vrb(<vscale x 2 x s8>) = COPY $v8
    %1:vrb(<vscale x 2 x s32>) = G_ZEXT %0(<vscale x 2 x s8>)
    $v8 = COPY %1(<vscale x 2 x s32>)
    PseudoRET implicit $v8

...
---
name: zext_nxv2i64_nxv2i8
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0.entry:
    liveins: $v8

    ; RV32I-LABEL: name: zext_nxv2i64_nxv2i8
    ; RV32I: liveins: $v8
    ; RV32I-NEXT: {{  $}}
    ; RV32I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV32I-NEXT: [[DEF:%[0-9]+]]:vrm2 = IMPLICIT_DEF
    ; RV32I-NEXT: early-clobber %1:vrm2 = PseudoVZEXT_VF8_M2 [[DEF]], [[COPY]], -1, 6 /* e64 */, 3 /* ta, ma */
    ; RV32I-NEXT: $v8m2 = COPY %1
    ; RV32I-NEXT: PseudoRET implicit $v8m2
    ;
    ; RV64I-LABEL: name: zext_nxv2i64_nxv2i8
    ; RV64I: liveins: $v8
    ; RV64I-NEXT: {{  $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV64I-NEXT: [[DEF:%[0-9]+]]:vrm2 = IMPLICIT_DEF
    ; RV64I-NEXT: early-clobber %1:vrm2 = PseudoVZEXT_VF8_M2 [[DEF]], [[COPY]], -1, 6 /* e64 */, 3 /* ta, ma */
    ; RV64I-NEXT: $v8m2 = COPY %1
    ; RV64I-NEXT: PseudoRET implicit $v8m2
    %0:vrb(<vscale x 2 x s8>) = COPY $v8
    %1:vrb(<vscale x 2 x s64>) = G_ZEXT %0(<vscale x 2 x s8>)
    $v8m2 = COPY %1(<vscale x 2 x s64>)
    PseudoRET implicit $v8m2

...
---
name: zext_nxv4i16_nxv4i8
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0.entry:
    liveins: $v8

    ; RV32I-LABEL: name: zext_nxv4i16_nxv4i8
    ; RV32I: liveins: $v8
    ; RV32I-NEXT: {{  $}}
    ; RV32I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV32I-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
    ; RV32I-NEXT: early-clobber %1:vr = PseudoVZEXT_VF2_M1 [[DEF]], [[COPY]], -1, 4 /* e16 */, 3 /* ta, ma */
    ; RV32I-NEXT: $v8 = COPY %1
    ; RV32I-NEXT: PseudoRET implicit $v8
    ;
    ; RV64I-LABEL: name: zext_nxv4i16_nxv4i8
    ; RV64I: liveins: $v8
    ; RV64I-NEXT: {{  $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV64I-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
    ; RV64I-NEXT: early-clobber %1:vr = PseudoVZEXT_VF2_M1 [[DEF]], [[COPY]], -1, 4 /* e16 */, 3 /* ta, ma */
    ; RV64I-NEXT: $v8 = COPY %1
    ; RV64I-NEXT: PseudoRET implicit $v8
    %0:vrb(<vscale x 4 x s8>) = COPY $v8
    %1:vrb(<vscale x 4 x s16>) = G_ZEXT %0(<vscale x 4 x s8>)
    $v8 = COPY %1(<vscale x 4 x s16>)
    PseudoRET implicit $v8

...
---
name: zext_nxv4i32_nxv4i8
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0.entry:
    liveins: $v8

    ; RV32I-LABEL: name: zext_nxv4i32_nxv4i8
    ; RV32I: liveins: $v8
    ; RV32I-NEXT: {{  $}}
    ; RV32I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV32I-NEXT: [[DEF:%[0-9]+]]:vrm2 = IMPLICIT_DEF
    ; RV32I-NEXT: early-clobber %1:vrm2 = PseudoVZEXT_VF4_M2 [[DEF]], [[COPY]], -1, 5 /* e32 */, 3 /* ta, ma */
    ; RV32I-NEXT: $v8m2 = COPY %1
    ; RV32I-NEXT: PseudoRET implicit $v8m2
    ;
    ; RV64I-LABEL: name: zext_nxv4i32_nxv4i8
    ; RV64I: liveins: $v8
    ; RV64I-NEXT: {{  $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV64I-NEXT: [[DEF:%[0-9]+]]:vrm2 = IMPLICIT_DEF
    ; RV64I-NEXT: early-clobber %1:vrm2 = PseudoVZEXT_VF4_M2 [[DEF]], [[COPY]], -1, 5 /* e32 */, 3 /* ta, ma */
    ; RV64I-NEXT: $v8m2 = COPY %1
    ; RV64I-NEXT: PseudoRET implicit $v8m2
    %0:vrb(<vscale x 4 x s8>) = COPY $v8
    %1:vrb(<vscale x 4 x s32>) = G_ZEXT %0(<vscale x 4 x s8>)
    $v8m2 = COPY %1(<vscale x 4 x s32>)
    PseudoRET implicit $v8m2

...
---
name: zext_nxv4i64_nxv4i8
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0.entry:
    liveins: $v8

    ; RV32I-LABEL: name: zext_nxv4i64_nxv4i8
    ; RV32I: liveins: $v8
    ; RV32I-NEXT: {{  $}}
    ; RV32I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV32I-NEXT: [[DEF:%[0-9]+]]:vrm4 = IMPLICIT_DEF
    ; RV32I-NEXT: early-clobber %1:vrm4 = PseudoVZEXT_VF8_M4 [[DEF]], [[COPY]], -1, 6 /* e64 */, 3 /* ta, ma */
    ; RV32I-NEXT: $v8m4 = COPY %1
    ; RV32I-NEXT: PseudoRET implicit $v8m4
    ;
    ; RV64I-LABEL: name: zext_nxv4i64_nxv4i8
    ; RV64I: liveins: $v8
    ; RV64I-NEXT: {{  $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV64I-NEXT: [[DEF:%[0-9]+]]:vrm4 = IMPLICIT_DEF
    ; RV64I-NEXT: early-clobber %1:vrm4 = PseudoVZEXT_VF8_M4 [[DEF]], [[COPY]], -1, 6 /* e64 */, 3 /* ta, ma */
    ; RV64I-NEXT: $v8m4 = COPY %1
    ; RV64I-NEXT: PseudoRET implicit $v8m4
    %0:vrb(<vscale x 4 x s8>) = COPY $v8
    %1:vrb(<vscale x 4 x s64>) = G_ZEXT %0(<vscale x 4 x s8>)
    $v8m4 = COPY %1(<vscale x 4 x s64>)
    PseudoRET implicit $v8m4

...
---
name: zext_nxv8i16_nxv8i8
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0.entry:
    liveins: $v8

    ; RV32I-LABEL: name: zext_nxv8i16_nxv8i8
    ; RV32I: liveins: $v8
    ; RV32I-NEXT: {{  $}}
    ; RV32I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV32I-NEXT: [[DEF:%[0-9]+]]:vrm2 = IMPLICIT_DEF
    ; RV32I-NEXT: early-clobber %1:vrm2 = PseudoVZEXT_VF2_M2 [[DEF]], [[COPY]], -1, 4 /* e16 */, 3 /* ta, ma */
    ; RV32I-NEXT: $v8m2 = COPY %1
    ; RV32I-NEXT: PseudoRET implicit $v8m2
    ;
    ; RV64I-LABEL: name: zext_nxv8i16_nxv8i8
    ; RV64I: liveins: $v8
    ; RV64I-NEXT: {{  $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV64I-NEXT: [[DEF:%[0-9]+]]:vrm2 = IMPLICIT_DEF
    ; RV64I-NEXT: early-clobber %1:vrm2 = PseudoVZEXT_VF2_M2 [[DEF]], [[COPY]], -1, 4 /* e16 */, 3 /* ta, ma */
    ; RV64I-NEXT: $v8m2 = COPY %1
    ; RV64I-NEXT: PseudoRET implicit $v8m2
    %0:vrb(<vscale x 8 x s8>) = COPY $v8
    %1:vrb(<vscale x 8 x s16>) = G_ZEXT %0(<vscale x 8 x s8>)
    $v8m2 = COPY %1(<vscale x 8 x s16>)
    PseudoRET implicit $v8m2

...
---
name: zext_nxv8i32_nxv8i8
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0.entry:
    liveins: $v8

    ; RV32I-LABEL: name: zext_nxv8i32_nxv8i8
    ; RV32I: liveins: $v8
    ; RV32I-NEXT: {{  $}}
    ; RV32I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV32I-NEXT: [[DEF:%[0-9]+]]:vrm4 = IMPLICIT_DEF
    ; RV32I-NEXT: early-clobber %1:vrm4 = PseudoVZEXT_VF4_M4 [[DEF]], [[COPY]], -1, 5 /* e32 */, 3 /* ta, ma */
    ; RV32I-NEXT: $v8m4 = COPY %1
    ; RV32I-NEXT: PseudoRET implicit $v8m4
    ;
    ; RV64I-LABEL: name: zext_nxv8i32_nxv8i8
    ; RV64I: liveins: $v8
    ; RV64I-NEXT: {{  $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV64I-NEXT: [[DEF:%[0-9]+]]:vrm4 = IMPLICIT_DEF
    ; RV64I-NEXT: early-clobber %1:vrm4 = PseudoVZEXT_VF4_M4 [[DEF]], [[COPY]], -1, 5 /* e32 */, 3 /* ta, ma */
    ; RV64I-NEXT: $v8m4 = COPY %1
    ; RV64I-NEXT: PseudoRET implicit $v8m4
    %0:vrb(<vscale x 8 x s8>) = COPY $v8
    %1:vrb(<vscale x 8 x s32>) = G_ZEXT %0(<vscale x 8 x s8>)
    $v8m4 = COPY %1(<vscale x 8 x s32>)
    PseudoRET implicit $v8m4

...
---
name: zext_nxv8i64_nxv8i8
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0.entry:
    liveins: $v8

    ; RV32I-LABEL: name: zext_nxv8i64_nxv8i8
    ; RV32I: liveins: $v8
    ; RV32I-NEXT: {{  $}}
    ; RV32I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV32I-NEXT: [[DEF:%[0-9]+]]:vrm8 = IMPLICIT_DEF
    ; RV32I-NEXT: early-clobber %1:vrm8 = PseudoVZEXT_VF8_M8 [[DEF]], [[COPY]], -1, 6 /* e64 */, 3 /* ta, ma */
    ; RV32I-NEXT: $v8m8 = COPY %1
    ; RV32I-NEXT: PseudoRET implicit $v8m8
    ;
    ; RV64I-LABEL: name: zext_nxv8i64_nxv8i8
    ; RV64I: liveins: $v8
    ; RV64I-NEXT: {{  $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV64I-NEXT: [[DEF:%[0-9]+]]:vrm8 = IMPLICIT_DEF
    ; RV64I-NEXT: early-clobber %1:vrm8 = PseudoVZEXT_VF8_M8 [[DEF]], [[COPY]], -1, 6 /* e64 */, 3 /* ta, ma */
    ; RV64I-NEXT: $v8m8 = COPY %1
    ; RV64I-NEXT: PseudoRET implicit $v8m8
    %0:vrb(<vscale x 8 x s8>) = COPY $v8
    %1:vrb(<vscale x 8 x s64>) = G_ZEXT %0(<vscale x 8 x s8>)
    $v8m8 = COPY %1(<vscale x 8 x s64>)
    PseudoRET implicit $v8m8

...
---
name: zext_nxv16i16_nxv16i8
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0.entry:
    liveins: $v8m2

    ; RV32I-LABEL: name: zext_nxv16i16_nxv16i8
    ; RV32I: liveins: $v8m2
    ; RV32I-NEXT: {{  $}}
    ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrm2 = COPY $v8m2
    ; RV32I-NEXT: [[DEF:%[0-9]+]]:vrm4 = IMPLICIT_DEF
    ; RV32I-NEXT: early-clobber %1:vrm4 = PseudoVZEXT_VF2_M4 [[DEF]], [[COPY]], -1, 4 /* e16 */, 3 /* ta, ma */
    ; RV32I-NEXT: $v8m4 = COPY %1
    ; RV32I-NEXT: PseudoRET implicit $v8m4
    ;
    ; RV64I-LABEL: name: zext_nxv16i16_nxv16i8
    ; RV64I: liveins: $v8m2
    ; RV64I-NEXT: {{  $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrm2 = COPY $v8m2
    ; RV64I-NEXT: [[DEF:%[0-9]+]]:vrm4 = IMPLICIT_DEF
    ; RV64I-NEXT: early-clobber %1:vrm4 = PseudoVZEXT_VF2_M4 [[DEF]], [[COPY]], -1, 4 /* e16 */, 3 /* ta, ma */
    ; RV64I-NEXT: $v8m4 = COPY %1
    ; RV64I-NEXT: PseudoRET implicit $v8m4
    %0:vrb(<vscale x 16 x s8>) = COPY $v8m2
    %1:vrb(<vscale x 16 x s16>) = G_ZEXT %0(<vscale x 16 x s8>)
    $v8m4 = COPY %1(<vscale x 16 x s16>)
    PseudoRET implicit $v8m4

...
---
name: zext_nxv16i32_nxv16i8
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0.entry:
    liveins: $v8m2

    ; RV32I-LABEL: name: zext_nxv16i32_nxv16i8
    ; RV32I: liveins: $v8m2
    ; RV32I-NEXT: {{  $}}
    ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrm2 = COPY $v8m2
    ; RV32I-NEXT: [[DEF:%[0-9]+]]:vrm8 = IMPLICIT_DEF
    ; RV32I-NEXT: early-clobber %1:vrm8 = PseudoVZEXT_VF4_M8 [[DEF]], [[COPY]], -1, 5 /* e32 */, 3 /* ta, ma */
    ; RV32I-NEXT: $v8m8 = COPY %1
    ; RV32I-NEXT: PseudoRET implicit $v8m8
    ;
    ; RV64I-LABEL: name: zext_nxv16i32_nxv16i8
    ; RV64I: liveins: $v8m2
    ; RV64I-NEXT: {{  $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrm2 = COPY $v8m2
    ; RV64I-NEXT: [[DEF:%[0-9]+]]:vrm8 = IMPLICIT_DEF
    ; RV64I-NEXT: early-clobber %1:vrm8 = PseudoVZEXT_VF4_M8 [[DEF]], [[COPY]], -1, 5 /* e32 */, 3 /* ta, ma */
    ; RV64I-NEXT: $v8m8 = COPY %1
    ; RV64I-NEXT: PseudoRET implicit $v8m8
    %0:vrb(<vscale x 16 x s8>) = COPY $v8m2
    %1:vrb(<vscale x 16 x s32>) = G_ZEXT %0(<vscale x 16 x s8>)
    $v8m8 = COPY %1(<vscale x 16 x s32>)
    PseudoRET implicit $v8m8

...
---
name: zext_nxv32i16_nxv32i8
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0.entry:
    liveins: $v8m4

    ; RV32I-LABEL: name: zext_nxv32i16_nxv32i8
    ; RV32I: liveins: $v8m4
    ; RV32I-NEXT: {{  $}}
    ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrm4 = COPY $v8m4
    ; RV32I-NEXT: [[DEF:%[0-9]+]]:vrm8 = IMPLICIT_DEF
    ; RV32I-NEXT: early-clobber %1:vrm8 = PseudoVZEXT_VF2_M8 [[DEF]], [[COPY]], -1, 4 /* e16 */, 3 /* ta, ma */
    ; RV32I-NEXT: $v8m8 = COPY %1
    ; RV32I-NEXT: PseudoRET implicit $v8m8
    ;
    ; RV64I-LABEL: name: zext_nxv32i16_nxv32i8
    ; RV64I: liveins: $v8m4
    ; RV64I-NEXT: {{  $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrm4 = COPY $v8m4
    ; RV64I-NEXT: [[DEF:%[0-9]+]]:vrm8 = IMPLICIT_DEF
    ; RV64I-NEXT: early-clobber %1:vrm8 = PseudoVZEXT_VF2_M8 [[DEF]], [[COPY]], -1, 4 /* e16 */, 3 /* ta, ma */
    ; RV64I-NEXT: $v8m8 = COPY %1
    ; RV64I-NEXT: PseudoRET implicit $v8m8
    %0:vrb(<vscale x 32 x s8>) = COPY $v8m4
    %1:vrb(<vscale x 32 x s16>) = G_ZEXT %0(<vscale x 32 x s8>)
    $v8m8 = COPY %1(<vscale x 32 x s16>)
    PseudoRET implicit $v8m8

...
---
name: zext_nxv1i32_nxv1i16
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0.entry:
    liveins: $v8

    ; RV32I-LABEL: name: zext_nxv1i32_nxv1i16
    ; RV32I: liveins: $v8
    ; RV32I-NEXT: {{  $}}
    ; RV32I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV32I-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
    ; RV32I-NEXT: early-clobber %1:vr = PseudoVZEXT_VF2_MF2 [[DEF]], [[COPY]], -1, 5 /* e32 */, 3 /* ta, ma */
    ; RV32I-NEXT: $v8 = COPY %1
    ; RV32I-NEXT: PseudoRET implicit $v8
    ;
    ; RV64I-LABEL: name: zext_nxv1i32_nxv1i16
    ; RV64I: liveins: $v8
    ; RV64I-NEXT: {{  $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV64I-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
    ; RV64I-NEXT: early-clobber %1:vr = PseudoVZEXT_VF2_MF2 [[DEF]], [[COPY]], -1, 5 /* e32 */, 3 /* ta, ma */
    ; RV64I-NEXT: $v8 = COPY %1
    ; RV64I-NEXT: PseudoRET implicit $v8
    %0:vrb(<vscale x 1 x s16>) = COPY $v8
    %1:vrb(<vscale x 1 x s32>) = G_ZEXT %0(<vscale x 1 x s16>)
    $v8 = COPY %1(<vscale x 1 x s32>)
    PseudoRET implicit $v8

...
---
name: zext_nxv1i64_nxv1i16
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0.entry:
    liveins: $v8

    ; RV32I-LABEL: name: zext_nxv1i64_nxv1i16
    ; RV32I: liveins: $v8
    ; RV32I-NEXT: {{  $}}
    ; RV32I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV32I-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
    ; RV32I-NEXT: early-clobber %1:vr = PseudoVZEXT_VF4_M1 [[DEF]], [[COPY]], -1, 6 /* e64 */, 3 /* ta, ma */
    ; RV32I-NEXT: $v8 = COPY %1
    ; RV32I-NEXT: PseudoRET implicit $v8
    ;
    ; RV64I-LABEL: name: zext_nxv1i64_nxv1i16
    ; RV64I: liveins: $v8
    ; RV64I-NEXT: {{  $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV64I-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
    ; RV64I-NEXT: early-clobber %1:vr = PseudoVZEXT_VF4_M1 [[DEF]], [[COPY]], -1, 6 /* e64 */, 3 /* ta, ma */
    ; RV64I-NEXT: $v8 = COPY %1
    ; RV64I-NEXT: PseudoRET implicit $v8
    %0:vrb(<vscale x 1 x s16>) = COPY $v8
    %1:vrb(<vscale x 1 x s64>) = G_ZEXT %0(<vscale x 1 x s16>)
    $v8 = COPY %1(<vscale x 1 x s64>)
    PseudoRET implicit $v8

...
---
name: zext_nxv2i32_nxv2i16
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0.entry:
    liveins: $v8

    ; RV32I-LABEL: name: zext_nxv2i32_nxv2i16
    ; RV32I: liveins: $v8
    ; RV32I-NEXT: {{  $}}
    ; RV32I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV32I-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
    ; RV32I-NEXT: early-clobber %1:vr = PseudoVZEXT_VF2_M1 [[DEF]], [[COPY]], -1, 5 /* e32 */, 3 /* ta, ma */
    ; RV32I-NEXT: $v8 = COPY %1
    ; RV32I-NEXT: PseudoRET implicit $v8
    ;
    ; RV64I-LABEL: name: zext_nxv2i32_nxv2i16
    ; RV64I: liveins: $v8
    ; RV64I-NEXT: {{  $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV64I-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
    ; RV64I-NEXT: early-clobber %1:vr = PseudoVZEXT_VF2_M1 [[DEF]], [[COPY]], -1, 5 /* e32 */, 3 /* ta, ma */
    ; RV64I-NEXT: $v8 = COPY %1
    ; RV64I-NEXT: PseudoRET implicit $v8
    %0:vrb(<vscale x 2 x s16>) = COPY $v8
    %1:vrb(<vscale x 2 x s32>) = G_ZEXT %0(<vscale x 2 x s16>)
    $v8 = COPY %1(<vscale x 2 x s32>)
    PseudoRET implicit $v8

...
---
name: zext_nxv2i64_nxv2i16
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0.entry:
    liveins: $v8

    ; RV32I-LABEL: name: zext_nxv2i64_nxv2i16
    ; RV32I: liveins: $v8
    ; RV32I-NEXT: {{  $}}
    ; RV32I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV32I-NEXT: [[DEF:%[0-9]+]]:vrm2 = IMPLICIT_DEF
    ; RV32I-NEXT: early-clobber %1:vrm2 = PseudoVZEXT_VF4_M2 [[DEF]], [[COPY]], -1, 6 /* e64 */, 3 /* ta, ma */
    ; RV32I-NEXT: $v8m2 = COPY %1
    ; RV32I-NEXT: PseudoRET implicit $v8m2
    ;
    ; RV64I-LABEL: name: zext_nxv2i64_nxv2i16
    ; RV64I: liveins: $v8
    ; RV64I-NEXT: {{  $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV64I-NEXT: [[DEF:%[0-9]+]]:vrm2 = IMPLICIT_DEF
    ; RV64I-NEXT: early-clobber %1:vrm2 = PseudoVZEXT_VF4_M2 [[DEF]], [[COPY]], -1, 6 /* e64 */, 3 /* ta, ma */
    ; RV64I-NEXT: $v8m2 = COPY %1
    ; RV64I-NEXT: PseudoRET implicit $v8m2
    %0:vrb(<vscale x 2 x s16>) = COPY $v8
    %1:vrb(<vscale x 2 x s64>) = G_ZEXT %0(<vscale x 2 x s16>)
    $v8m2 = COPY %1(<vscale x 2 x s64>)
    PseudoRET implicit $v8m2

...
---
name: zext_nxv4i32_nxv4i16
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0.entry:
    liveins: $v8

    ; RV32I-LABEL: name: zext_nxv4i32_nxv4i16
    ; RV32I: liveins: $v8
    ; RV32I-NEXT: {{  $}}
    ; RV32I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV32I-NEXT: [[DEF:%[0-9]+]]:vrm2 = IMPLICIT_DEF
    ; RV32I-NEXT: early-clobber %1:vrm2 = PseudoVZEXT_VF2_M2 [[DEF]], [[COPY]], -1, 5 /* e32 */, 3 /* ta, ma */
    ; RV32I-NEXT: $v8m2 = COPY %1
    ; RV32I-NEXT: PseudoRET implicit $v8m2
    ;
    ; RV64I-LABEL: name: zext_nxv4i32_nxv4i16
    ; RV64I: liveins: $v8
    ; RV64I-NEXT: {{  $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV64I-NEXT: [[DEF:%[0-9]+]]:vrm2 = IMPLICIT_DEF
    ; RV64I-NEXT: early-clobber %1:vrm2 = PseudoVZEXT_VF2_M2 [[DEF]], [[COPY]], -1, 5 /* e32 */, 3 /* ta, ma */
    ; RV64I-NEXT: $v8m2 = COPY %1
    ; RV64I-NEXT: PseudoRET implicit $v8m2
    %0:vrb(<vscale x 4 x s16>) = COPY $v8
    %1:vrb(<vscale x 4 x s32>) = G_ZEXT %0(<vscale x 4 x s16>)
    $v8m2 = COPY %1(<vscale x 4 x s32>)
    PseudoRET implicit $v8m2

...
---
name: zext_nxv4i64_nxv4i16
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0.entry:
    liveins: $v8

    ; RV32I-LABEL: name: zext_nxv4i64_nxv4i16
    ; RV32I: liveins: $v8
    ; RV32I-NEXT: {{  $}}
    ; RV32I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV32I-NEXT: [[DEF:%[0-9]+]]:vrm4 = IMPLICIT_DEF
    ; RV32I-NEXT: early-clobber %1:vrm4 = PseudoVZEXT_VF4_M4 [[DEF]], [[COPY]], -1, 6 /* e64 */, 3 /* ta, ma */
    ; RV32I-NEXT: $v8m4 = COPY %1
    ; RV32I-NEXT: PseudoRET implicit $v8m4
    ;
    ; RV64I-LABEL: name: zext_nxv4i64_nxv4i16
    ; RV64I: liveins: $v8
    ; RV64I-NEXT: {{  $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV64I-NEXT: [[DEF:%[0-9]+]]:vrm4 = IMPLICIT_DEF
    ; RV64I-NEXT: early-clobber %1:vrm4 = PseudoVZEXT_VF4_M4 [[DEF]], [[COPY]], -1, 6 /* e64 */, 3 /* ta, ma */
    ; RV64I-NEXT: $v8m4 = COPY %1
    ; RV64I-NEXT: PseudoRET implicit $v8m4
    %0:vrb(<vscale x 4 x s16>) = COPY $v8
    %1:vrb(<vscale x 4 x s64>) = G_ZEXT %0(<vscale x 4 x s16>)
    $v8m4 = COPY %1(<vscale x 4 x s64>)
    PseudoRET implicit $v8m4

...
---
name: zext_nxv8i32_nxv8i16
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0.entry:
    liveins: $v8m2

    ; RV32I-LABEL: name: zext_nxv8i32_nxv8i16
    ; RV32I: liveins: $v8m2
    ; RV32I-NEXT: {{  $}}
    ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrm2 = COPY $v8m2
    ; RV32I-NEXT: [[DEF:%[0-9]+]]:vrm4 = IMPLICIT_DEF
    ; RV32I-NEXT: early-clobber %1:vrm4 = PseudoVZEXT_VF2_M4 [[DEF]], [[COPY]], -1, 5 /* e32 */, 3 /* ta, ma */
    ; RV32I-NEXT: $v8m4 = COPY %1
    ; RV32I-NEXT: PseudoRET implicit $v8m4
    ;
    ; RV64I-LABEL: name: zext_nxv8i32_nxv8i16
    ; RV64I: liveins: $v8m2
    ; RV64I-NEXT: {{  $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrm2 = COPY $v8m2
    ; RV64I-NEXT: [[DEF:%[0-9]+]]:vrm4 = IMPLICIT_DEF
    ; RV64I-NEXT: early-clobber %1:vrm4 = PseudoVZEXT_VF2_M4 [[DEF]], [[COPY]], -1, 5 /* e32 */, 3 /* ta, ma */
    ; RV64I-NEXT: $v8m4 = COPY %1
    ; RV64I-NEXT: PseudoRET implicit $v8m4
    %0:vrb(<vscale x 8 x s16>) = COPY $v8m2
    %1:vrb(<vscale x 8 x s32>) = G_ZEXT %0(<vscale x 8 x s16>)
    $v8m4 = COPY %1(<vscale x 8 x s32>)
    PseudoRET implicit $v8m4

...
---
name: zext_nxv8i64_nxv8i16
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0.entry:
    liveins: $v8m2

    ; RV32I-LABEL: name: zext_nxv8i64_nxv8i16
    ; RV32I: liveins: $v8m2
    ; RV32I-NEXT: {{  $}}
    ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrm2 = COPY $v8m2
    ; RV32I-NEXT: [[DEF:%[0-9]+]]:vrm8 = IMPLICIT_DEF
    ; RV32I-NEXT: early-clobber %1:vrm8 = PseudoVZEXT_VF4_M8 [[DEF]], [[COPY]], -1, 6 /* e64 */, 3 /* ta, ma */
    ; RV32I-NEXT: $v8m8 = COPY %1
    ; RV32I-NEXT: PseudoRET implicit $v8m8
    ;
    ; RV64I-LABEL: name: zext_nxv8i64_nxv8i16
    ; RV64I: liveins: $v8m2
    ; RV64I-NEXT: {{  $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrm2 = COPY $v8m2
    ; RV64I-NEXT: [[DEF:%[0-9]+]]:vrm8 = IMPLICIT_DEF
    ; RV64I-NEXT: early-clobber %1:vrm8 = PseudoVZEXT_VF4_M8 [[DEF]], [[COPY]], -1, 6 /* e64 */, 3 /* ta, ma */
    ; RV64I-NEXT: $v8m8 = COPY %1
    ; RV64I-NEXT: PseudoRET implicit $v8m8
    %0:vrb(<vscale x 8 x s16>) = COPY $v8m2
    %1:vrb(<vscale x 8 x s64>) = G_ZEXT %0(<vscale x 8 x s16>)
    $v8m8 = COPY %1(<vscale x 8 x s64>)
    PseudoRET implicit $v8m8

...
---
name: zext_nxv16i32_nxv16i16
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0.entry:
    liveins: $v8m4

    ; RV32I-LABEL: name: zext_nxv16i32_nxv16i16
    ; RV32I: liveins: $v8m4
    ; RV32I-NEXT: {{  $}}
    ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrm4 = COPY $v8m4
    ; RV32I-NEXT: [[DEF:%[0-9]+]]:vrm8 = IMPLICIT_DEF
    ; RV32I-NEXT: early-clobber %1:vrm8 = PseudoVZEXT_VF2_M8 [[DEF]], [[COPY]], -1, 5 /* e32 */, 3 /* ta, ma */
    ; RV32I-NEXT: $v8m8 = COPY %1
    ; RV32I-NEXT: PseudoRET implicit $v8m8
    ;
    ; RV64I-LABEL: name: zext_nxv16i32_nxv16i16
    ; RV64I: liveins: $v8m4
    ; RV64I-NEXT: {{  $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrm4 = COPY $v8m4
    ; RV64I-NEXT: [[DEF:%[0-9]+]]:vrm8 = IMPLICIT_DEF
    ; RV64I-NEXT: early-clobber %1:vrm8 = PseudoVZEXT_VF2_M8 [[DEF]], [[COPY]], -1, 5 /* e32 */, 3 /* ta, ma */
    ; RV64I-NEXT: $v8m8 = COPY %1
    ; RV64I-NEXT: PseudoRET implicit $v8m8
    %0:vrb(<vscale x 16 x s16>) = COPY $v8m4
    %1:vrb(<vscale x 16 x s32>) = G_ZEXT %0(<vscale x 16 x s16>)
    $v8m8 = COPY %1(<vscale x 16 x s32>)
    PseudoRET implicit $v8m8

...
---
name: zext_nxv1i64_nxv1i32
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0.entry:
    liveins: $v8

    ; RV32I-LABEL: name: zext_nxv1i64_nxv1i32
    ; RV32I: liveins: $v8
    ; RV32I-NEXT: {{  $}}
    ; RV32I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV32I-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
    ; RV32I-NEXT: early-clobber %1:vr = PseudoVZEXT_VF2_M1 [[DEF]], [[COPY]], -1, 6 /* e64 */, 3 /* ta, ma */
    ; RV32I-NEXT: $v8 = COPY %1
    ; RV32I-NEXT: PseudoRET implicit $v8
    ;
    ; RV64I-LABEL: name: zext_nxv1i64_nxv1i32
    ; RV64I: liveins: $v8
    ; RV64I-NEXT: {{  $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV64I-NEXT: [[DEF:%[0-9]+]]:vr = IMPLICIT_DEF
    ; RV64I-NEXT: early-clobber %1:vr = PseudoVZEXT_VF2_M1 [[DEF]], [[COPY]], -1, 6 /* e64 */, 3 /* ta, ma */
    ; RV64I-NEXT: $v8 = COPY %1
    ; RV64I-NEXT: PseudoRET implicit $v8
    %0:vrb(<vscale x 1 x s32>) = COPY $v8
    %1:vrb(<vscale x 1 x s64>) = G_ZEXT %0(<vscale x 1 x s32>)
    $v8 = COPY %1(<vscale x 1 x s64>)
    PseudoRET implicit $v8

...
---
name: zext_nxv2i64_nxv2i32
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0.entry:
    liveins: $v8

    ; RV32I-LABEL: name: zext_nxv2i64_nxv2i32
    ; RV32I: liveins: $v8
    ; RV32I-NEXT: {{  $}}
    ; RV32I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV32I-NEXT: [[DEF:%[0-9]+]]:vrm2 = IMPLICIT_DEF
    ; RV32I-NEXT: early-clobber %1:vrm2 = PseudoVZEXT_VF2_M2 [[DEF]], [[COPY]], -1, 6 /* e64 */, 3 /* ta, ma */
    ; RV32I-NEXT: $v8m2 = COPY %1
    ; RV32I-NEXT: PseudoRET implicit $v8m2
    ;
    ; RV64I-LABEL: name: zext_nxv2i64_nxv2i32
    ; RV64I: liveins: $v8
    ; RV64I-NEXT: {{  $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:vr = COPY $v8
    ; RV64I-NEXT: [[DEF:%[0-9]+]]:vrm2 = IMPLICIT_DEF
    ; RV64I-NEXT: early-clobber %1:vrm2 = PseudoVZEXT_VF2_M2 [[DEF]], [[COPY]], -1, 6 /* e64 */, 3 /* ta, ma */
    ; RV64I-NEXT: $v8m2 = COPY %1
    ; RV64I-NEXT: PseudoRET implicit $v8m2
    %0:vrb(<vscale x 2 x s32>) = COPY $v8
    %1:vrb(<vscale x 2 x s64>) = G_ZEXT %0(<vscale x 2 x s32>)
    $v8m2 = COPY %1(<vscale x 2 x s64>)
    PseudoRET implicit $v8m2

...
---
name: zext_nxv4i64_nxv4i32
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0.entry:
    liveins: $v8m2

    ; RV32I-LABEL: name: zext_nxv4i64_nxv4i32
    ; RV32I: liveins: $v8m2
    ; RV32I-NEXT: {{  $}}
    ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrm2 = COPY $v8m2
    ; RV32I-NEXT: [[DEF:%[0-9]+]]:vrm4 = IMPLICIT_DEF
    ; RV32I-NEXT: early-clobber %1:vrm4 = PseudoVZEXT_VF2_M4 [[DEF]], [[COPY]], -1, 6 /* e64 */, 3 /* ta, ma */
    ; RV32I-NEXT: $v8m4 = COPY %1
    ; RV32I-NEXT: PseudoRET implicit $v8m4
    ;
    ; RV64I-LABEL: name: zext_nxv4i64_nxv4i32
    ; RV64I: liveins: $v8m2
    ; RV64I-NEXT: {{  $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrm2 = COPY $v8m2
    ; RV64I-NEXT: [[DEF:%[0-9]+]]:vrm4 = IMPLICIT_DEF
    ; RV64I-NEXT: early-clobber %1:vrm4 = PseudoVZEXT_VF2_M4 [[DEF]], [[COPY]], -1, 6 /* e64 */, 3 /* ta, ma */
    ; RV64I-NEXT: $v8m4 = COPY %1
    ; RV64I-NEXT: PseudoRET implicit $v8m4
    %0:vrb(<vscale x 4 x s32>) = COPY $v8m2
    %1:vrb(<vscale x 4 x s64>) = G_ZEXT %0(<vscale x 4 x s32>)
    $v8m4 = COPY %1(<vscale x 4 x s64>)
    PseudoRET implicit $v8m4

...
---
name: zext_nxv8i64_nxv8i32
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0.entry:
    liveins: $v8m4

    ; RV32I-LABEL: name: zext_nxv8i64_nxv8i32
    ; RV32I: liveins: $v8m4
    ; RV32I-NEXT: {{  $}}
    ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrm4 = COPY $v8m4
    ; RV32I-NEXT: [[DEF:%[0-9]+]]:vrm8 = IMPLICIT_DEF
    ; RV32I-NEXT: early-clobber %1:vrm8 = PseudoVZEXT_VF2_M8 [[DEF]], [[COPY]], -1, 6 /* e64 */, 3 /* ta, ma */
    ; RV32I-NEXT: $v8m8 = COPY %1
    ; RV32I-NEXT: PseudoRET implicit $v8m8
    ;
    ; RV64I-LABEL: name: zext_nxv8i64_nxv8i32
    ; RV64I: liveins: $v8m4
    ; RV64I-NEXT: {{  $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrm4 = COPY $v8m4
    ; RV64I-NEXT: [[DEF:%[0-9]+]]:vrm8 = IMPLICIT_DEF
    ; RV64I-NEXT: early-clobber %1:vrm8 = PseudoVZEXT_VF2_M8 [[DEF]], [[COPY]], -1, 6 /* e64 */, 3 /* ta, ma */
    ; RV64I-NEXT: $v8m8 = COPY %1
    ; RV64I-NEXT: PseudoRET implicit $v8m8
    %0:vrb(<vscale x 8 x s32>) = COPY $v8m4
    %1:vrb(<vscale x 8 x s64>) = G_ZEXT %0(<vscale x 8 x s32>)
    $v8m8 = COPY %1(<vscale x 8 x s64>)
    PseudoRET implicit $v8m8

...