1 # NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
2 # RUN: llc -mtriple=riscv32 -mattr=+m,+v -run-pass=regbankselect \
3 # RUN: -simplify-mir -verify-machineinstrs %s \
4 # RUN: -o - | FileCheck -check-prefix=RV32I %s
5 # RUN: llc -mtriple=riscv64 -mattr=+m,+v -run-pass=regbankselect \
6 # RUN: -simplify-mir -verify-machineinstrs %s \
7 # RUN: -o - | FileCheck -check-prefix=RV64I %s
# anyext nxv1i8 -> nxv1i16: regbankselect should put the COPY and G_ANYEXT on the vector bank (vrb); value stays in $v8 on both RV32I and RV64I.
10 name: anyext_nxv1i16_nxv1i8
12 tracksRegLiveness: true
17 ; RV32I-LABEL: name: anyext_nxv1i16_nxv1i8
20 ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s8>) = COPY $v8
21 ; RV32I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 1 x s16>) = G_ANYEXT [[COPY]](<vscale x 1 x s8>)
22 ; RV32I-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 1 x s16>)
23 ; RV32I-NEXT: PseudoRET implicit $v8
25 ; RV64I-LABEL: name: anyext_nxv1i16_nxv1i8
28 ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s8>) = COPY $v8
29 ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 1 x s16>) = G_ANYEXT [[COPY]](<vscale x 1 x s8>)
30 ; RV64I-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 1 x s16>)
31 ; RV64I-NEXT: PseudoRET implicit $v8
32 %0:_(<vscale x 1 x s8>) = COPY $v8
33 %1:_(<vscale x 1 x s16>) = G_ANYEXT %0(<vscale x 1 x s8>)
34 $v8 = COPY %1(<vscale x 1 x s16>)
35 PseudoRET implicit $v8
# anyext nxv1i8 -> nxv1i32: all operands expected on the vector bank (vrb); value stays in $v8 on both RV32I and RV64I.
39 name: anyext_nxv1i32_nxv1i8
41 tracksRegLiveness: true
46 ; RV32I-LABEL: name: anyext_nxv1i32_nxv1i8
49 ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s8>) = COPY $v8
50 ; RV32I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 1 x s32>) = G_ANYEXT [[COPY]](<vscale x 1 x s8>)
51 ; RV32I-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 1 x s32>)
52 ; RV32I-NEXT: PseudoRET implicit $v8
54 ; RV64I-LABEL: name: anyext_nxv1i32_nxv1i8
57 ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s8>) = COPY $v8
58 ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 1 x s32>) = G_ANYEXT [[COPY]](<vscale x 1 x s8>)
59 ; RV64I-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 1 x s32>)
60 ; RV64I-NEXT: PseudoRET implicit $v8
61 %0:_(<vscale x 1 x s8>) = COPY $v8
62 %1:_(<vscale x 1 x s32>) = G_ANYEXT %0(<vscale x 1 x s8>)
63 $v8 = COPY %1(<vscale x 1 x s32>)
64 PseudoRET implicit $v8
# anyext nxv1i8 -> nxv1i64: all operands expected on the vector bank (vrb); value stays in $v8 on both RV32I and RV64I.
68 name: anyext_nxv1i64_nxv1i8
70 tracksRegLiveness: true
75 ; RV32I-LABEL: name: anyext_nxv1i64_nxv1i8
78 ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s8>) = COPY $v8
79 ; RV32I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 1 x s64>) = G_ANYEXT [[COPY]](<vscale x 1 x s8>)
80 ; RV32I-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 1 x s64>)
81 ; RV32I-NEXT: PseudoRET implicit $v8
83 ; RV64I-LABEL: name: anyext_nxv1i64_nxv1i8
86 ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s8>) = COPY $v8
87 ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 1 x s64>) = G_ANYEXT [[COPY]](<vscale x 1 x s8>)
88 ; RV64I-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 1 x s64>)
89 ; RV64I-NEXT: PseudoRET implicit $v8
90 %0:_(<vscale x 1 x s8>) = COPY $v8
91 %1:_(<vscale x 1 x s64>) = G_ANYEXT %0(<vscale x 1 x s8>)
92 $v8 = COPY %1(<vscale x 1 x s64>)
93 PseudoRET implicit $v8
# anyext nxv2i8 -> nxv2i16: all operands expected on the vector bank (vrb); value stays in $v8 on both RV32I and RV64I.
97 name: anyext_nxv2i16_nxv2i8
99 tracksRegLiveness: true
104 ; RV32I-LABEL: name: anyext_nxv2i16_nxv2i8
105 ; RV32I: liveins: $v8
107 ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s8>) = COPY $v8
108 ; RV32I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 2 x s16>) = G_ANYEXT [[COPY]](<vscale x 2 x s8>)
109 ; RV32I-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 2 x s16>)
110 ; RV32I-NEXT: PseudoRET implicit $v8
112 ; RV64I-LABEL: name: anyext_nxv2i16_nxv2i8
113 ; RV64I: liveins: $v8
115 ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s8>) = COPY $v8
116 ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 2 x s16>) = G_ANYEXT [[COPY]](<vscale x 2 x s8>)
117 ; RV64I-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 2 x s16>)
118 ; RV64I-NEXT: PseudoRET implicit $v8
119 %0:_(<vscale x 2 x s8>) = COPY $v8
120 %1:_(<vscale x 2 x s16>) = G_ANYEXT %0(<vscale x 2 x s8>)
121 $v8 = COPY %1(<vscale x 2 x s16>)
122 PseudoRET implicit $v8
# anyext nxv2i8 -> nxv2i32: all operands expected on the vector bank (vrb); value stays in $v8 on both RV32I and RV64I.
126 name: anyext_nxv2i32_nxv2i8
128 tracksRegLiveness: true
133 ; RV32I-LABEL: name: anyext_nxv2i32_nxv2i8
134 ; RV32I: liveins: $v8
136 ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s8>) = COPY $v8
137 ; RV32I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 2 x s32>) = G_ANYEXT [[COPY]](<vscale x 2 x s8>)
138 ; RV32I-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 2 x s32>)
139 ; RV32I-NEXT: PseudoRET implicit $v8
141 ; RV64I-LABEL: name: anyext_nxv2i32_nxv2i8
142 ; RV64I: liveins: $v8
144 ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s8>) = COPY $v8
145 ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 2 x s32>) = G_ANYEXT [[COPY]](<vscale x 2 x s8>)
146 ; RV64I-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 2 x s32>)
147 ; RV64I-NEXT: PseudoRET implicit $v8
148 %0:_(<vscale x 2 x s8>) = COPY $v8
149 %1:_(<vscale x 2 x s32>) = G_ANYEXT %0(<vscale x 2 x s8>)
150 $v8 = COPY %1(<vscale x 2 x s32>)
151 PseudoRET implicit $v8
# anyext nxv2i8 -> nxv2i64: all operands expected on the vector bank (vrb); the wider result is returned in the register group $v8m2.
155 name: anyext_nxv2i64_nxv2i8
157 tracksRegLiveness: true
162 ; RV32I-LABEL: name: anyext_nxv2i64_nxv2i8
163 ; RV32I: liveins: $v8
165 ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s8>) = COPY $v8
166 ; RV32I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 2 x s64>) = G_ANYEXT [[COPY]](<vscale x 2 x s8>)
167 ; RV32I-NEXT: $v8m2 = COPY [[ANYEXT]](<vscale x 2 x s64>)
168 ; RV32I-NEXT: PseudoRET implicit $v8m2
170 ; RV64I-LABEL: name: anyext_nxv2i64_nxv2i8
171 ; RV64I: liveins: $v8
173 ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s8>) = COPY $v8
174 ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 2 x s64>) = G_ANYEXT [[COPY]](<vscale x 2 x s8>)
175 ; RV64I-NEXT: $v8m2 = COPY [[ANYEXT]](<vscale x 2 x s64>)
176 ; RV64I-NEXT: PseudoRET implicit $v8m2
177 %0:_(<vscale x 2 x s8>) = COPY $v8
178 %1:_(<vscale x 2 x s64>) = G_ANYEXT %0(<vscale x 2 x s8>)
179 $v8m2 = COPY %1(<vscale x 2 x s64>)
180 PseudoRET implicit $v8m2
# anyext nxv4i8 -> nxv4i16: all operands expected on the vector bank (vrb); value stays in $v8 on both RV32I and RV64I.
184 name: anyext_nxv4i16_nxv4i8
186 tracksRegLiveness: true
191 ; RV32I-LABEL: name: anyext_nxv4i16_nxv4i8
192 ; RV32I: liveins: $v8
194 ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s8>) = COPY $v8
195 ; RV32I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 4 x s16>) = G_ANYEXT [[COPY]](<vscale x 4 x s8>)
196 ; RV32I-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 4 x s16>)
197 ; RV32I-NEXT: PseudoRET implicit $v8
199 ; RV64I-LABEL: name: anyext_nxv4i16_nxv4i8
200 ; RV64I: liveins: $v8
202 ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s8>) = COPY $v8
203 ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 4 x s16>) = G_ANYEXT [[COPY]](<vscale x 4 x s8>)
204 ; RV64I-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 4 x s16>)
205 ; RV64I-NEXT: PseudoRET implicit $v8
206 %0:_(<vscale x 4 x s8>) = COPY $v8
207 %1:_(<vscale x 4 x s16>) = G_ANYEXT %0(<vscale x 4 x s8>)
208 $v8 = COPY %1(<vscale x 4 x s16>)
209 PseudoRET implicit $v8
# anyext nxv4i8 -> nxv4i32: all operands expected on the vector bank (vrb); the wider result is returned in the register group $v8m2.
213 name: anyext_nxv4i32_nxv4i8
215 tracksRegLiveness: true
220 ; RV32I-LABEL: name: anyext_nxv4i32_nxv4i8
221 ; RV32I: liveins: $v8
223 ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s8>) = COPY $v8
224 ; RV32I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 4 x s32>) = G_ANYEXT [[COPY]](<vscale x 4 x s8>)
225 ; RV32I-NEXT: $v8m2 = COPY [[ANYEXT]](<vscale x 4 x s32>)
226 ; RV32I-NEXT: PseudoRET implicit $v8m2
228 ; RV64I-LABEL: name: anyext_nxv4i32_nxv4i8
229 ; RV64I: liveins: $v8
231 ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s8>) = COPY $v8
232 ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 4 x s32>) = G_ANYEXT [[COPY]](<vscale x 4 x s8>)
233 ; RV64I-NEXT: $v8m2 = COPY [[ANYEXT]](<vscale x 4 x s32>)
234 ; RV64I-NEXT: PseudoRET implicit $v8m2
235 %0:_(<vscale x 4 x s8>) = COPY $v8
236 %1:_(<vscale x 4 x s32>) = G_ANYEXT %0(<vscale x 4 x s8>)
237 $v8m2 = COPY %1(<vscale x 4 x s32>)
238 PseudoRET implicit $v8m2
# anyext nxv4i8 -> nxv4i64: all operands expected on the vector bank (vrb); the wider result is returned in the register group $v8m4.
242 name: anyext_nxv4i64_nxv4i8
244 tracksRegLiveness: true
249 ; RV32I-LABEL: name: anyext_nxv4i64_nxv4i8
250 ; RV32I: liveins: $v8
252 ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s8>) = COPY $v8
253 ; RV32I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 4 x s64>) = G_ANYEXT [[COPY]](<vscale x 4 x s8>)
254 ; RV32I-NEXT: $v8m4 = COPY [[ANYEXT]](<vscale x 4 x s64>)
255 ; RV32I-NEXT: PseudoRET implicit $v8m4
257 ; RV64I-LABEL: name: anyext_nxv4i64_nxv4i8
258 ; RV64I: liveins: $v8
260 ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s8>) = COPY $v8
261 ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 4 x s64>) = G_ANYEXT [[COPY]](<vscale x 4 x s8>)
262 ; RV64I-NEXT: $v8m4 = COPY [[ANYEXT]](<vscale x 4 x s64>)
263 ; RV64I-NEXT: PseudoRET implicit $v8m4
264 %0:_(<vscale x 4 x s8>) = COPY $v8
265 %1:_(<vscale x 4 x s64>) = G_ANYEXT %0(<vscale x 4 x s8>)
266 $v8m4 = COPY %1(<vscale x 4 x s64>)
267 PseudoRET implicit $v8m4
# anyext nxv8i8 -> nxv8i16: all operands expected on the vector bank (vrb); the wider result is returned in the register group $v8m2.
271 name: anyext_nxv8i16_nxv8i8
273 tracksRegLiveness: true
278 ; RV32I-LABEL: name: anyext_nxv8i16_nxv8i8
279 ; RV32I: liveins: $v8
281 ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s8>) = COPY $v8
282 ; RV32I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 8 x s16>) = G_ANYEXT [[COPY]](<vscale x 8 x s8>)
283 ; RV32I-NEXT: $v8m2 = COPY [[ANYEXT]](<vscale x 8 x s16>)
284 ; RV32I-NEXT: PseudoRET implicit $v8m2
286 ; RV64I-LABEL: name: anyext_nxv8i16_nxv8i8
287 ; RV64I: liveins: $v8
289 ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s8>) = COPY $v8
290 ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 8 x s16>) = G_ANYEXT [[COPY]](<vscale x 8 x s8>)
291 ; RV64I-NEXT: $v8m2 = COPY [[ANYEXT]](<vscale x 8 x s16>)
292 ; RV64I-NEXT: PseudoRET implicit $v8m2
293 %0:_(<vscale x 8 x s8>) = COPY $v8
294 %1:_(<vscale x 8 x s16>) = G_ANYEXT %0(<vscale x 8 x s8>)
295 $v8m2 = COPY %1(<vscale x 8 x s16>)
296 PseudoRET implicit $v8m2
# anyext nxv8i8 -> nxv8i32: all operands expected on the vector bank (vrb); the wider result is returned in the register group $v8m4.
300 name: anyext_nxv8i32_nxv8i8
302 tracksRegLiveness: true
307 ; RV32I-LABEL: name: anyext_nxv8i32_nxv8i8
308 ; RV32I: liveins: $v8
310 ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s8>) = COPY $v8
311 ; RV32I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 8 x s32>) = G_ANYEXT [[COPY]](<vscale x 8 x s8>)
312 ; RV32I-NEXT: $v8m4 = COPY [[ANYEXT]](<vscale x 8 x s32>)
313 ; RV32I-NEXT: PseudoRET implicit $v8m4
315 ; RV64I-LABEL: name: anyext_nxv8i32_nxv8i8
316 ; RV64I: liveins: $v8
318 ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s8>) = COPY $v8
319 ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 8 x s32>) = G_ANYEXT [[COPY]](<vscale x 8 x s8>)
320 ; RV64I-NEXT: $v8m4 = COPY [[ANYEXT]](<vscale x 8 x s32>)
321 ; RV64I-NEXT: PseudoRET implicit $v8m4
322 %0:_(<vscale x 8 x s8>) = COPY $v8
323 %1:_(<vscale x 8 x s32>) = G_ANYEXT %0(<vscale x 8 x s8>)
324 $v8m4 = COPY %1(<vscale x 8 x s32>)
325 PseudoRET implicit $v8m4
# anyext nxv8i8 -> nxv8i64: all operands expected on the vector bank (vrb); the wider result is returned in the register group $v8m8.
329 name: anyext_nxv8i64_nxv8i8
331 tracksRegLiveness: true
336 ; RV32I-LABEL: name: anyext_nxv8i64_nxv8i8
337 ; RV32I: liveins: $v8
339 ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s8>) = COPY $v8
340 ; RV32I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 8 x s64>) = G_ANYEXT [[COPY]](<vscale x 8 x s8>)
341 ; RV32I-NEXT: $v8m8 = COPY [[ANYEXT]](<vscale x 8 x s64>)
342 ; RV32I-NEXT: PseudoRET implicit $v8m8
344 ; RV64I-LABEL: name: anyext_nxv8i64_nxv8i8
345 ; RV64I: liveins: $v8
347 ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s8>) = COPY $v8
348 ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 8 x s64>) = G_ANYEXT [[COPY]](<vscale x 8 x s8>)
349 ; RV64I-NEXT: $v8m8 = COPY [[ANYEXT]](<vscale x 8 x s64>)
350 ; RV64I-NEXT: PseudoRET implicit $v8m8
351 %0:_(<vscale x 8 x s8>) = COPY $v8
352 %1:_(<vscale x 8 x s64>) = G_ANYEXT %0(<vscale x 8 x s8>)
353 $v8m8 = COPY %1(<vscale x 8 x s64>)
354 PseudoRET implicit $v8m8
# anyext nxv16i8 -> nxv16i16: all operands expected on the vector bank (vrb); source arrives in group $v8m2, result returned in group $v8m4.
358 name: anyext_nxv16i16_nxv16i8
360 tracksRegLiveness: true
365 ; RV32I-LABEL: name: anyext_nxv16i16_nxv16i8
366 ; RV32I: liveins: $v8
368 ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 16 x s8>) = COPY $v8m2
369 ; RV32I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 16 x s16>) = G_ANYEXT [[COPY]](<vscale x 16 x s8>)
370 ; RV32I-NEXT: $v8m4 = COPY [[ANYEXT]](<vscale x 16 x s16>)
371 ; RV32I-NEXT: PseudoRET implicit $v8m4
373 ; RV64I-LABEL: name: anyext_nxv16i16_nxv16i8
374 ; RV64I: liveins: $v8
376 ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 16 x s8>) = COPY $v8m2
377 ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 16 x s16>) = G_ANYEXT [[COPY]](<vscale x 16 x s8>)
378 ; RV64I-NEXT: $v8m4 = COPY [[ANYEXT]](<vscale x 16 x s16>)
379 ; RV64I-NEXT: PseudoRET implicit $v8m4
380 %0:_(<vscale x 16 x s8>) = COPY $v8m2
381 %1:_(<vscale x 16 x s16>) = G_ANYEXT %0(<vscale x 16 x s8>)
382 $v8m4 = COPY %1(<vscale x 16 x s16>)
383 PseudoRET implicit $v8m4
# anyext nxv16i8 -> nxv16i32: all operands expected on the vector bank (vrb); source arrives in group $v8m4, result returned in group $v8m8.
387 name: anyext_nxv16i32_nxv16i8
389 tracksRegLiveness: true
394 ; RV32I-LABEL: name: anyext_nxv16i32_nxv16i8
395 ; RV32I: liveins: $v8
397 ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 16 x s8>) = COPY $v8m4
398 ; RV32I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 16 x s32>) = G_ANYEXT [[COPY]](<vscale x 16 x s8>)
399 ; RV32I-NEXT: $v8m8 = COPY [[ANYEXT]](<vscale x 16 x s32>)
400 ; RV32I-NEXT: PseudoRET implicit $v8m8
402 ; RV64I-LABEL: name: anyext_nxv16i32_nxv16i8
403 ; RV64I: liveins: $v8
405 ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 16 x s8>) = COPY $v8m4
406 ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 16 x s32>) = G_ANYEXT [[COPY]](<vscale x 16 x s8>)
407 ; RV64I-NEXT: $v8m8 = COPY [[ANYEXT]](<vscale x 16 x s32>)
408 ; RV64I-NEXT: PseudoRET implicit $v8m8
409 %0:_(<vscale x 16 x s8>) = COPY $v8m4
410 %1:_(<vscale x 16 x s32>) = G_ANYEXT %0(<vscale x 16 x s8>)
411 $v8m8 = COPY %1(<vscale x 16 x s32>)
412 PseudoRET implicit $v8m8
# anyext nxv32i8 -> nxv32i16: all operands expected on the vector bank (vrb); source arrives in group $v8m4, result returned in group $v8m8.
416 name: anyext_nxv32i16_nxv32i8
418 tracksRegLiveness: true
423 ; RV32I-LABEL: name: anyext_nxv32i16_nxv32i8
424 ; RV32I: liveins: $v8
426 ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 32 x s8>) = COPY $v8m4
427 ; RV32I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 32 x s16>) = G_ANYEXT [[COPY]](<vscale x 32 x s8>)
428 ; RV32I-NEXT: $v8m8 = COPY [[ANYEXT]](<vscale x 32 x s16>)
429 ; RV32I-NEXT: PseudoRET implicit $v8m8
431 ; RV64I-LABEL: name: anyext_nxv32i16_nxv32i8
432 ; RV64I: liveins: $v8
434 ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 32 x s8>) = COPY $v8m4
435 ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 32 x s16>) = G_ANYEXT [[COPY]](<vscale x 32 x s8>)
436 ; RV64I-NEXT: $v8m8 = COPY [[ANYEXT]](<vscale x 32 x s16>)
437 ; RV64I-NEXT: PseudoRET implicit $v8m8
438 %0:_(<vscale x 32 x s8>) = COPY $v8m4
439 %1:_(<vscale x 32 x s16>) = G_ANYEXT %0(<vscale x 32 x s8>)
440 $v8m8 = COPY %1(<vscale x 32 x s16>)
441 PseudoRET implicit $v8m8
# anyext nxv1i16 -> nxv1i32: all operands expected on the vector bank (vrb); value stays in $v8 on both RV32I and RV64I.
445 name: anyext_nxv1i32_nxv1i16
447 tracksRegLiveness: true
452 ; RV32I-LABEL: name: anyext_nxv1i32_nxv1i16
453 ; RV32I: liveins: $v8
455 ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s16>) = COPY $v8
456 ; RV32I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 1 x s32>) = G_ANYEXT [[COPY]](<vscale x 1 x s16>)
457 ; RV32I-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 1 x s32>)
458 ; RV32I-NEXT: PseudoRET implicit $v8
460 ; RV64I-LABEL: name: anyext_nxv1i32_nxv1i16
461 ; RV64I: liveins: $v8
463 ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s16>) = COPY $v8
464 ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 1 x s32>) = G_ANYEXT [[COPY]](<vscale x 1 x s16>)
465 ; RV64I-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 1 x s32>)
466 ; RV64I-NEXT: PseudoRET implicit $v8
467 %0:_(<vscale x 1 x s16>) = COPY $v8
468 %1:_(<vscale x 1 x s32>) = G_ANYEXT %0(<vscale x 1 x s16>)
469 $v8 = COPY %1(<vscale x 1 x s32>)
470 PseudoRET implicit $v8
# anyext nxv1i16 -> nxv1i64: all operands expected on the vector bank (vrb); value stays in $v8 on both RV32I and RV64I.
474 name: anyext_nxv1i64_nxv1i16
476 tracksRegLiveness: true
481 ; RV32I-LABEL: name: anyext_nxv1i64_nxv1i16
482 ; RV32I: liveins: $v8
484 ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s16>) = COPY $v8
485 ; RV32I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 1 x s64>) = G_ANYEXT [[COPY]](<vscale x 1 x s16>)
486 ; RV32I-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 1 x s64>)
487 ; RV32I-NEXT: PseudoRET implicit $v8
489 ; RV64I-LABEL: name: anyext_nxv1i64_nxv1i16
490 ; RV64I: liveins: $v8
492 ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s16>) = COPY $v8
493 ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 1 x s64>) = G_ANYEXT [[COPY]](<vscale x 1 x s16>)
494 ; RV64I-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 1 x s64>)
495 ; RV64I-NEXT: PseudoRET implicit $v8
496 %0:_(<vscale x 1 x s16>) = COPY $v8
497 %1:_(<vscale x 1 x s64>) = G_ANYEXT %0(<vscale x 1 x s16>)
498 $v8 = COPY %1(<vscale x 1 x s64>)
499 PseudoRET implicit $v8
# anyext nxv2i16 -> nxv2i32: all operands expected on the vector bank (vrb); value stays in $v8 on both RV32I and RV64I.
503 name: anyext_nxv2i32_nxv2i16
505 tracksRegLiveness: true
510 ; RV32I-LABEL: name: anyext_nxv2i32_nxv2i16
511 ; RV32I: liveins: $v8
513 ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s16>) = COPY $v8
514 ; RV32I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 2 x s32>) = G_ANYEXT [[COPY]](<vscale x 2 x s16>)
515 ; RV32I-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 2 x s32>)
516 ; RV32I-NEXT: PseudoRET implicit $v8
518 ; RV64I-LABEL: name: anyext_nxv2i32_nxv2i16
519 ; RV64I: liveins: $v8
521 ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s16>) = COPY $v8
522 ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 2 x s32>) = G_ANYEXT [[COPY]](<vscale x 2 x s16>)
523 ; RV64I-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 2 x s32>)
524 ; RV64I-NEXT: PseudoRET implicit $v8
525 %0:_(<vscale x 2 x s16>) = COPY $v8
526 %1:_(<vscale x 2 x s32>) = G_ANYEXT %0(<vscale x 2 x s16>)
527 $v8 = COPY %1(<vscale x 2 x s32>)
528 PseudoRET implicit $v8
# anyext nxv2i16 -> nxv2i64: all operands expected on the vector bank (vrb); the wider result is returned in the register group $v8m2.
532 name: anyext_nxv2i64_nxv2i16
534 tracksRegLiveness: true
539 ; RV32I-LABEL: name: anyext_nxv2i64_nxv2i16
540 ; RV32I: liveins: $v8
542 ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s16>) = COPY $v8
543 ; RV32I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 2 x s64>) = G_ANYEXT [[COPY]](<vscale x 2 x s16>)
544 ; RV32I-NEXT: $v8m2 = COPY [[ANYEXT]](<vscale x 2 x s64>)
545 ; RV32I-NEXT: PseudoRET implicit $v8m2
547 ; RV64I-LABEL: name: anyext_nxv2i64_nxv2i16
548 ; RV64I: liveins: $v8
550 ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s16>) = COPY $v8
551 ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 2 x s64>) = G_ANYEXT [[COPY]](<vscale x 2 x s16>)
552 ; RV64I-NEXT: $v8m2 = COPY [[ANYEXT]](<vscale x 2 x s64>)
553 ; RV64I-NEXT: PseudoRET implicit $v8m2
554 %0:_(<vscale x 2 x s16>) = COPY $v8
555 %1:_(<vscale x 2 x s64>) = G_ANYEXT %0(<vscale x 2 x s16>)
556 $v8m2 = COPY %1(<vscale x 2 x s64>)
557 PseudoRET implicit $v8m2
# anyext nxv4i16 -> nxv4i32: all operands expected on the vector bank (vrb); the wider result is returned in the register group $v8m2.
561 name: anyext_nxv4i32_nxv4i16
563 tracksRegLiveness: true
568 ; RV32I-LABEL: name: anyext_nxv4i32_nxv4i16
569 ; RV32I: liveins: $v8
571 ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s16>) = COPY $v8
572 ; RV32I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 4 x s32>) = G_ANYEXT [[COPY]](<vscale x 4 x s16>)
573 ; RV32I-NEXT: $v8m2 = COPY [[ANYEXT]](<vscale x 4 x s32>)
574 ; RV32I-NEXT: PseudoRET implicit $v8m2
576 ; RV64I-LABEL: name: anyext_nxv4i32_nxv4i16
577 ; RV64I: liveins: $v8
579 ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s16>) = COPY $v8
580 ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 4 x s32>) = G_ANYEXT [[COPY]](<vscale x 4 x s16>)
581 ; RV64I-NEXT: $v8m2 = COPY [[ANYEXT]](<vscale x 4 x s32>)
582 ; RV64I-NEXT: PseudoRET implicit $v8m2
583 %0:_(<vscale x 4 x s16>) = COPY $v8
584 %1:_(<vscale x 4 x s32>) = G_ANYEXT %0(<vscale x 4 x s16>)
585 $v8m2 = COPY %1(<vscale x 4 x s32>)
586 PseudoRET implicit $v8m2
# anyext nxv4i16 -> nxv4i64: all operands expected on the vector bank (vrb); the wider result is returned in the register group $v8m4.
590 name: anyext_nxv4i64_nxv4i16
592 tracksRegLiveness: true
597 ; RV32I-LABEL: name: anyext_nxv4i64_nxv4i16
598 ; RV32I: liveins: $v8
600 ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s16>) = COPY $v8
601 ; RV32I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 4 x s64>) = G_ANYEXT [[COPY]](<vscale x 4 x s16>)
602 ; RV32I-NEXT: $v8m4 = COPY [[ANYEXT]](<vscale x 4 x s64>)
603 ; RV32I-NEXT: PseudoRET implicit $v8m4
605 ; RV64I-LABEL: name: anyext_nxv4i64_nxv4i16
606 ; RV64I: liveins: $v8
608 ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s16>) = COPY $v8
609 ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 4 x s64>) = G_ANYEXT [[COPY]](<vscale x 4 x s16>)
610 ; RV64I-NEXT: $v8m4 = COPY [[ANYEXT]](<vscale x 4 x s64>)
611 ; RV64I-NEXT: PseudoRET implicit $v8m4
612 %0:_(<vscale x 4 x s16>) = COPY $v8
613 %1:_(<vscale x 4 x s64>) = G_ANYEXT %0(<vscale x 4 x s16>)
614 $v8m4 = COPY %1(<vscale x 4 x s64>)
615 PseudoRET implicit $v8m4
# anyext nxv8i16 -> nxv8i32: all operands expected on the vector bank (vrb); source arrives in group $v8m2, result returned in group $v8m4.
619 name: anyext_nxv8i32_nxv8i16
621 tracksRegLiveness: true
626 ; RV32I-LABEL: name: anyext_nxv8i32_nxv8i16
627 ; RV32I: liveins: $v8
629 ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s16>) = COPY $v8m2
630 ; RV32I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 8 x s32>) = G_ANYEXT [[COPY]](<vscale x 8 x s16>)
631 ; RV32I-NEXT: $v8m4 = COPY [[ANYEXT]](<vscale x 8 x s32>)
632 ; RV32I-NEXT: PseudoRET implicit $v8m4
634 ; RV64I-LABEL: name: anyext_nxv8i32_nxv8i16
635 ; RV64I: liveins: $v8
637 ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s16>) = COPY $v8m2
638 ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 8 x s32>) = G_ANYEXT [[COPY]](<vscale x 8 x s16>)
639 ; RV64I-NEXT: $v8m4 = COPY [[ANYEXT]](<vscale x 8 x s32>)
640 ; RV64I-NEXT: PseudoRET implicit $v8m4
641 %0:_(<vscale x 8 x s16>) = COPY $v8m2
642 %1:_(<vscale x 8 x s32>) = G_ANYEXT %0(<vscale x 8 x s16>)
643 $v8m4 = COPY %1(<vscale x 8 x s32>)
644 PseudoRET implicit $v8m4
# anyext nxv8i16 -> nxv8i64: all operands expected on the vector bank (vrb); source arrives in group $v8m2, result returned in group $v8m8.
648 name: anyext_nxv8i64_nxv8i16
650 tracksRegLiveness: true
655 ; RV32I-LABEL: name: anyext_nxv8i64_nxv8i16
656 ; RV32I: liveins: $v8
658 ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s16>) = COPY $v8m2
659 ; RV32I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 8 x s64>) = G_ANYEXT [[COPY]](<vscale x 8 x s16>)
660 ; RV32I-NEXT: $v8m8 = COPY [[ANYEXT]](<vscale x 8 x s64>)
661 ; RV32I-NEXT: PseudoRET implicit $v8m8
663 ; RV64I-LABEL: name: anyext_nxv8i64_nxv8i16
664 ; RV64I: liveins: $v8
666 ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s16>) = COPY $v8m2
667 ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 8 x s64>) = G_ANYEXT [[COPY]](<vscale x 8 x s16>)
668 ; RV64I-NEXT: $v8m8 = COPY [[ANYEXT]](<vscale x 8 x s64>)
669 ; RV64I-NEXT: PseudoRET implicit $v8m8
670 %0:_(<vscale x 8 x s16>) = COPY $v8m2
671 %1:_(<vscale x 8 x s64>) = G_ANYEXT %0(<vscale x 8 x s16>)
672 $v8m8 = COPY %1(<vscale x 8 x s64>)
673 PseudoRET implicit $v8m8
# anyext nxv16i16 -> nxv16i32: all operands expected on the vector bank (vrb); source arrives in group $v8m4, result returned in group $v8m8.
677 name: anyext_nxv16i32_nxv16i16
679 tracksRegLiveness: true
684 ; RV32I-LABEL: name: anyext_nxv16i32_nxv16i16
685 ; RV32I: liveins: $v8
687 ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 16 x s16>) = COPY $v8m4
688 ; RV32I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 16 x s32>) = G_ANYEXT [[COPY]](<vscale x 16 x s16>)
689 ; RV32I-NEXT: $v8m8 = COPY [[ANYEXT]](<vscale x 16 x s32>)
690 ; RV32I-NEXT: PseudoRET implicit $v8m8
692 ; RV64I-LABEL: name: anyext_nxv16i32_nxv16i16
693 ; RV64I: liveins: $v8
695 ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 16 x s16>) = COPY $v8m4
696 ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 16 x s32>) = G_ANYEXT [[COPY]](<vscale x 16 x s16>)
697 ; RV64I-NEXT: $v8m8 = COPY [[ANYEXT]](<vscale x 16 x s32>)
698 ; RV64I-NEXT: PseudoRET implicit $v8m8
699 %0:_(<vscale x 16 x s16>) = COPY $v8m4
700 %1:_(<vscale x 16 x s32>) = G_ANYEXT %0(<vscale x 16 x s16>)
701 $v8m8 = COPY %1(<vscale x 16 x s32>)
702 PseudoRET implicit $v8m8
# anyext nxv1i32 -> nxv1i64: all operands expected on the vector bank (vrb); value stays in $v8 on both RV32I and RV64I.
706 name: anyext_nxv1i64_nxv1i32
708 tracksRegLiveness: true
713 ; RV32I-LABEL: name: anyext_nxv1i64_nxv1i32
714 ; RV32I: liveins: $v8
716 ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s32>) = COPY $v8
717 ; RV32I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 1 x s64>) = G_ANYEXT [[COPY]](<vscale x 1 x s32>)
718 ; RV32I-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 1 x s64>)
719 ; RV32I-NEXT: PseudoRET implicit $v8
721 ; RV64I-LABEL: name: anyext_nxv1i64_nxv1i32
722 ; RV64I: liveins: $v8
724 ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 1 x s32>) = COPY $v8
725 ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 1 x s64>) = G_ANYEXT [[COPY]](<vscale x 1 x s32>)
726 ; RV64I-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 1 x s64>)
727 ; RV64I-NEXT: PseudoRET implicit $v8
728 %0:_(<vscale x 1 x s32>) = COPY $v8
729 %1:_(<vscale x 1 x s64>) = G_ANYEXT %0(<vscale x 1 x s32>)
730 $v8 = COPY %1(<vscale x 1 x s64>)
731 PseudoRET implicit $v8
# anyext nxv2i32 -> nxv2i64: all operands expected on the vector bank (vrb); the wider result is returned in the register group $v8m2.
735 name: anyext_nxv2i64_nxv2i32
737 tracksRegLiveness: true
742 ; RV32I-LABEL: name: anyext_nxv2i64_nxv2i32
743 ; RV32I: liveins: $v8
745 ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s32>) = COPY $v8
746 ; RV32I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 2 x s64>) = G_ANYEXT [[COPY]](<vscale x 2 x s32>)
747 ; RV32I-NEXT: $v8m2 = COPY [[ANYEXT]](<vscale x 2 x s64>)
748 ; RV32I-NEXT: PseudoRET implicit $v8m2
750 ; RV64I-LABEL: name: anyext_nxv2i64_nxv2i32
751 ; RV64I: liveins: $v8
753 ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 2 x s32>) = COPY $v8
754 ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 2 x s64>) = G_ANYEXT [[COPY]](<vscale x 2 x s32>)
755 ; RV64I-NEXT: $v8m2 = COPY [[ANYEXT]](<vscale x 2 x s64>)
756 ; RV64I-NEXT: PseudoRET implicit $v8m2
757 %0:_(<vscale x 2 x s32>) = COPY $v8
758 %1:_(<vscale x 2 x s64>) = G_ANYEXT %0(<vscale x 2 x s32>)
759 $v8m2 = COPY %1(<vscale x 2 x s64>)
760 PseudoRET implicit $v8m2
# anyext nxv4i32 -> nxv4i64: all operands expected on the vector bank (vrb); source arrives in group $v8m2, result returned in group $v8m4.
764 name: anyext_nxv4i64_nxv4i32
766 tracksRegLiveness: true
771 ; RV32I-LABEL: name: anyext_nxv4i64_nxv4i32
772 ; RV32I: liveins: $v8
774 ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s32>) = COPY $v8m2
775 ; RV32I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 4 x s64>) = G_ANYEXT [[COPY]](<vscale x 4 x s32>)
776 ; RV32I-NEXT: $v8m4 = COPY [[ANYEXT]](<vscale x 4 x s64>)
777 ; RV32I-NEXT: PseudoRET implicit $v8m4
779 ; RV64I-LABEL: name: anyext_nxv4i64_nxv4i32
780 ; RV64I: liveins: $v8
782 ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 4 x s32>) = COPY $v8m2
783 ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 4 x s64>) = G_ANYEXT [[COPY]](<vscale x 4 x s32>)
784 ; RV64I-NEXT: $v8m4 = COPY [[ANYEXT]](<vscale x 4 x s64>)
785 ; RV64I-NEXT: PseudoRET implicit $v8m4
786 %0:_(<vscale x 4 x s32>) = COPY $v8m2
787 %1:_(<vscale x 4 x s64>) = G_ANYEXT %0(<vscale x 4 x s32>)
788 $v8m4 = COPY %1(<vscale x 4 x s64>)
789 PseudoRET implicit $v8m4
# anyext nxv8i32 -> nxv8i64: all operands expected on the vector bank (vrb); source arrives in group $v8m4, result returned in group $v8m8.
793 name: anyext_nxv8i64_nxv8i32
795 tracksRegLiveness: true
800 ; RV32I-LABEL: name: anyext_nxv8i64_nxv8i32
801 ; RV32I: liveins: $v8
803 ; RV32I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s32>) = COPY $v8m4
804 ; RV32I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 8 x s64>) = G_ANYEXT [[COPY]](<vscale x 8 x s32>)
805 ; RV32I-NEXT: $v8m8 = COPY [[ANYEXT]](<vscale x 8 x s64>)
806 ; RV32I-NEXT: PseudoRET implicit $v8m8
808 ; RV64I-LABEL: name: anyext_nxv8i64_nxv8i32
809 ; RV64I: liveins: $v8
811 ; RV64I-NEXT: [[COPY:%[0-9]+]]:vrb(<vscale x 8 x s32>) = COPY $v8m4
812 ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:vrb(<vscale x 8 x s64>) = G_ANYEXT [[COPY]](<vscale x 8 x s32>)
813 ; RV64I-NEXT: $v8m8 = COPY [[ANYEXT]](<vscale x 8 x s64>)
814 ; RV64I-NEXT: PseudoRET implicit $v8m8
815 %0:_(<vscale x 8 x s32>) = COPY $v8m4
816 %1:_(<vscale x 8 x s64>) = G_ANYEXT %0(<vscale x 8 x s32>)
817 $v8m8 = COPY %1(<vscale x 8 x s64>)
818 PseudoRET implicit $v8m8