1 # NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py UTC_ARGS: --version 5
2 # RUN: llc -mtriple=riscv64 -mattr=+m,+v -run-pass=regbankselect \
3 # RUN: -disable-gisel-legality-check -simplify-mir -verify-machineinstrs %s \
4 # RUN: -o - | FileCheck %s
# Zero splat of <vscale x 1 x s8>: the i32 zero and its s64 anyext are assigned
# to gprb, the G_SPLAT_VECTOR result to vrb; returned in $v8.
7 name: splat_zero_nxv1i8
12 ; CHECK-LABEL: name: splat_zero_nxv1i8
13 ; CHECK: [[C:%[0-9]+]]:gprb(s32) = G_CONSTANT i32 0
14 ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:gprb(s64) = G_ANYEXT [[C]](s32)
15 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 1 x s8>) = G_SPLAT_VECTOR [[ANYEXT]](s64)
16 ; CHECK-NEXT: $v8 = COPY [[SPLAT_VECTOR]](<vscale x 1 x s8>)
17 ; CHECK-NEXT: PseudoRET implicit $v8
18 %3:_(s32) = G_CONSTANT i32 0
19 %2:_(s64) = G_ANYEXT %3(s32)
20 %0:_(<vscale x 1 x s8>) = G_SPLAT_VECTOR %2(s64)
21 $v8 = COPY %0(<vscale x 1 x s8>)
22 PseudoRET implicit $v8
# Zero splat of <vscale x 2 x s8>: i32 zero and its s64 anyext on gprb,
# splat result on vrb; returned in $v8.
26 name: splat_zero_nxv2i8
28 regBankSelected: false
31 ; CHECK-LABEL: name: splat_zero_nxv2i8
32 ; CHECK: [[C:%[0-9]+]]:gprb(s32) = G_CONSTANT i32 0
33 ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:gprb(s64) = G_ANYEXT [[C]](s32)
34 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 2 x s8>) = G_SPLAT_VECTOR [[ANYEXT]](s64)
35 ; CHECK-NEXT: $v8 = COPY [[SPLAT_VECTOR]](<vscale x 2 x s8>)
36 ; CHECK-NEXT: PseudoRET implicit $v8
37 %3:_(s32) = G_CONSTANT i32 0
38 %2:_(s64) = G_ANYEXT %3(s32)
39 %0:_(<vscale x 2 x s8>) = G_SPLAT_VECTOR %2(s64)
40 $v8 = COPY %0(<vscale x 2 x s8>)
41 PseudoRET implicit $v8
# Zero splat of <vscale x 4 x s8>: i32 zero and its s64 anyext on gprb,
# splat result on vrb; returned in $v8.
45 name: splat_zero_nxv4i8
47 regBankSelected: false
50 ; CHECK-LABEL: name: splat_zero_nxv4i8
51 ; CHECK: [[C:%[0-9]+]]:gprb(s32) = G_CONSTANT i32 0
52 ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:gprb(s64) = G_ANYEXT [[C]](s32)
53 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 4 x s8>) = G_SPLAT_VECTOR [[ANYEXT]](s64)
54 ; CHECK-NEXT: $v8 = COPY [[SPLAT_VECTOR]](<vscale x 4 x s8>)
55 ; CHECK-NEXT: PseudoRET implicit $v8
56 %3:_(s32) = G_CONSTANT i32 0
57 %2:_(s64) = G_ANYEXT %3(s32)
58 %0:_(<vscale x 4 x s8>) = G_SPLAT_VECTOR %2(s64)
59 $v8 = COPY %0(<vscale x 4 x s8>)
60 PseudoRET implicit $v8
# Zero splat of <vscale x 8 x s8>: i32 zero and its s64 anyext on gprb,
# splat result on vrb; returned in $v8.
64 name: splat_zero_nxv8i8
66 regBankSelected: false
69 ; CHECK-LABEL: name: splat_zero_nxv8i8
70 ; CHECK: [[C:%[0-9]+]]:gprb(s32) = G_CONSTANT i32 0
71 ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:gprb(s64) = G_ANYEXT [[C]](s32)
72 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 8 x s8>) = G_SPLAT_VECTOR [[ANYEXT]](s64)
73 ; CHECK-NEXT: $v8 = COPY [[SPLAT_VECTOR]](<vscale x 8 x s8>)
74 ; CHECK-NEXT: PseudoRET implicit $v8
75 %3:_(s32) = G_CONSTANT i32 0
76 %2:_(s64) = G_ANYEXT %3(s32)
77 %0:_(<vscale x 8 x s8>) = G_SPLAT_VECTOR %2(s64)
78 $v8 = COPY %0(<vscale x 8 x s8>)
79 PseudoRET implicit $v8
# Zero splat of <vscale x 16 x s8>: i32 zero and its s64 anyext on gprb,
# splat result on vrb; returned in the register group $v8m2.
83 name: splat_zero_nxv16i8
85 regBankSelected: false
88 ; CHECK-LABEL: name: splat_zero_nxv16i8
89 ; CHECK: [[C:%[0-9]+]]:gprb(s32) = G_CONSTANT i32 0
90 ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:gprb(s64) = G_ANYEXT [[C]](s32)
91 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 16 x s8>) = G_SPLAT_VECTOR [[ANYEXT]](s64)
92 ; CHECK-NEXT: $v8m2 = COPY [[SPLAT_VECTOR]](<vscale x 16 x s8>)
93 ; CHECK-NEXT: PseudoRET implicit $v8m2
94 %3:_(s32) = G_CONSTANT i32 0
95 %2:_(s64) = G_ANYEXT %3(s32)
96 %0:_(<vscale x 16 x s8>) = G_SPLAT_VECTOR %2(s64)
97 $v8m2 = COPY %0(<vscale x 16 x s8>)
98 PseudoRET implicit $v8m2
# Zero splat of <vscale x 32 x s8>: i32 zero and its s64 anyext on gprb,
# splat result on vrb; returned in the register group $v8m4.
102 name: splat_zero_nxv32i8
104 regBankSelected: false
107 ; CHECK-LABEL: name: splat_zero_nxv32i8
108 ; CHECK: [[C:%[0-9]+]]:gprb(s32) = G_CONSTANT i32 0
109 ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:gprb(s64) = G_ANYEXT [[C]](s32)
110 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 32 x s8>) = G_SPLAT_VECTOR [[ANYEXT]](s64)
111 ; CHECK-NEXT: $v8m4 = COPY [[SPLAT_VECTOR]](<vscale x 32 x s8>)
112 ; CHECK-NEXT: PseudoRET implicit $v8m4
113 %3:_(s32) = G_CONSTANT i32 0
114 %2:_(s64) = G_ANYEXT %3(s32)
115 %0:_(<vscale x 32 x s8>) = G_SPLAT_VECTOR %2(s64)
116 $v8m4 = COPY %0(<vscale x 32 x s8>)
117 PseudoRET implicit $v8m4
# Zero splat of <vscale x 64 x s8>: i32 zero and its s64 anyext on gprb,
# splat result on vrb; returned in the register group $v8m8.
121 name: splat_zero_nxv64i8
123 regBankSelected: false
126 ; CHECK-LABEL: name: splat_zero_nxv64i8
127 ; CHECK: [[C:%[0-9]+]]:gprb(s32) = G_CONSTANT i32 0
128 ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:gprb(s64) = G_ANYEXT [[C]](s32)
129 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 64 x s8>) = G_SPLAT_VECTOR [[ANYEXT]](s64)
130 ; CHECK-NEXT: $v8m8 = COPY [[SPLAT_VECTOR]](<vscale x 64 x s8>)
131 ; CHECK-NEXT: PseudoRET implicit $v8m8
132 %3:_(s32) = G_CONSTANT i32 0
133 %2:_(s64) = G_ANYEXT %3(s32)
134 %0:_(<vscale x 64 x s8>) = G_SPLAT_VECTOR %2(s64)
135 $v8m8 = COPY %0(<vscale x 64 x s8>)
136 PseudoRET implicit $v8m8
# Zero splat of <vscale x 1 x s16>: i32 zero and its s64 anyext on gprb,
# splat result on vrb; returned in $v8.
140 name: splat_zero_nxv1i16
142 regBankSelected: false
145 ; CHECK-LABEL: name: splat_zero_nxv1i16
146 ; CHECK: [[C:%[0-9]+]]:gprb(s32) = G_CONSTANT i32 0
147 ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:gprb(s64) = G_ANYEXT [[C]](s32)
148 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 1 x s16>) = G_SPLAT_VECTOR [[ANYEXT]](s64)
149 ; CHECK-NEXT: $v8 = COPY [[SPLAT_VECTOR]](<vscale x 1 x s16>)
150 ; CHECK-NEXT: PseudoRET implicit $v8
151 %3:_(s32) = G_CONSTANT i32 0
152 %2:_(s64) = G_ANYEXT %3(s32)
153 %0:_(<vscale x 1 x s16>) = G_SPLAT_VECTOR %2(s64)
154 $v8 = COPY %0(<vscale x 1 x s16>)
155 PseudoRET implicit $v8
# Zero splat of <vscale x 2 x s16>: i32 zero and its s64 anyext on gprb,
# splat result on vrb; returned in $v8.
159 name: splat_zero_nxv2i16
161 regBankSelected: false
164 ; CHECK-LABEL: name: splat_zero_nxv2i16
165 ; CHECK: [[C:%[0-9]+]]:gprb(s32) = G_CONSTANT i32 0
166 ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:gprb(s64) = G_ANYEXT [[C]](s32)
167 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 2 x s16>) = G_SPLAT_VECTOR [[ANYEXT]](s64)
168 ; CHECK-NEXT: $v8 = COPY [[SPLAT_VECTOR]](<vscale x 2 x s16>)
169 ; CHECK-NEXT: PseudoRET implicit $v8
170 %3:_(s32) = G_CONSTANT i32 0
171 %2:_(s64) = G_ANYEXT %3(s32)
172 %0:_(<vscale x 2 x s16>) = G_SPLAT_VECTOR %2(s64)
173 $v8 = COPY %0(<vscale x 2 x s16>)
174 PseudoRET implicit $v8
# Zero splat of <vscale x 4 x s16>: i32 zero and its s64 anyext on gprb,
# splat result on vrb; returned in $v8.
178 name: splat_zero_nxv4i16
180 regBankSelected: false
183 ; CHECK-LABEL: name: splat_zero_nxv4i16
184 ; CHECK: [[C:%[0-9]+]]:gprb(s32) = G_CONSTANT i32 0
185 ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:gprb(s64) = G_ANYEXT [[C]](s32)
186 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 4 x s16>) = G_SPLAT_VECTOR [[ANYEXT]](s64)
187 ; CHECK-NEXT: $v8 = COPY [[SPLAT_VECTOR]](<vscale x 4 x s16>)
188 ; CHECK-NEXT: PseudoRET implicit $v8
189 %3:_(s32) = G_CONSTANT i32 0
190 %2:_(s64) = G_ANYEXT %3(s32)
191 %0:_(<vscale x 4 x s16>) = G_SPLAT_VECTOR %2(s64)
192 $v8 = COPY %0(<vscale x 4 x s16>)
193 PseudoRET implicit $v8
# Zero splat of <vscale x 8 x s16>: i32 zero and its s64 anyext on gprb,
# splat result on vrb; returned in the register group $v8m2.
197 name: splat_zero_nxv8i16
199 regBankSelected: false
202 ; CHECK-LABEL: name: splat_zero_nxv8i16
203 ; CHECK: [[C:%[0-9]+]]:gprb(s32) = G_CONSTANT i32 0
204 ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:gprb(s64) = G_ANYEXT [[C]](s32)
205 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 8 x s16>) = G_SPLAT_VECTOR [[ANYEXT]](s64)
206 ; CHECK-NEXT: $v8m2 = COPY [[SPLAT_VECTOR]](<vscale x 8 x s16>)
207 ; CHECK-NEXT: PseudoRET implicit $v8m2
208 %3:_(s32) = G_CONSTANT i32 0
209 %2:_(s64) = G_ANYEXT %3(s32)
210 %0:_(<vscale x 8 x s16>) = G_SPLAT_VECTOR %2(s64)
211 $v8m2 = COPY %0(<vscale x 8 x s16>)
212 PseudoRET implicit $v8m2
# Zero splat of <vscale x 16 x s16>: i32 zero and its s64 anyext on gprb,
# splat result on vrb; returned in the register group $v8m4.
216 name: splat_zero_nxv16i16
218 regBankSelected: false
221 ; CHECK-LABEL: name: splat_zero_nxv16i16
222 ; CHECK: [[C:%[0-9]+]]:gprb(s32) = G_CONSTANT i32 0
223 ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:gprb(s64) = G_ANYEXT [[C]](s32)
224 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 16 x s16>) = G_SPLAT_VECTOR [[ANYEXT]](s64)
225 ; CHECK-NEXT: $v8m4 = COPY [[SPLAT_VECTOR]](<vscale x 16 x s16>)
226 ; CHECK-NEXT: PseudoRET implicit $v8m4
227 %3:_(s32) = G_CONSTANT i32 0
228 %2:_(s64) = G_ANYEXT %3(s32)
229 %0:_(<vscale x 16 x s16>) = G_SPLAT_VECTOR %2(s64)
230 $v8m4 = COPY %0(<vscale x 16 x s16>)
231 PseudoRET implicit $v8m4
# Zero splat of <vscale x 32 x s16>: i32 zero and its s64 anyext on gprb,
# splat result on vrb; returned in the register group $v8m8.
235 name: splat_zero_nxv32i16
237 regBankSelected: false
240 ; CHECK-LABEL: name: splat_zero_nxv32i16
241 ; CHECK: [[C:%[0-9]+]]:gprb(s32) = G_CONSTANT i32 0
242 ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:gprb(s64) = G_ANYEXT [[C]](s32)
243 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 32 x s16>) = G_SPLAT_VECTOR [[ANYEXT]](s64)
244 ; CHECK-NEXT: $v8m8 = COPY [[SPLAT_VECTOR]](<vscale x 32 x s16>)
245 ; CHECK-NEXT: PseudoRET implicit $v8m8
246 %3:_(s32) = G_CONSTANT i32 0
247 %2:_(s64) = G_ANYEXT %3(s32)
248 %0:_(<vscale x 32 x s16>) = G_SPLAT_VECTOR %2(s64)
249 $v8m8 = COPY %0(<vscale x 32 x s16>)
250 PseudoRET implicit $v8m8
# Zero splat of <vscale x 1 x s32>: i32 zero and its s64 anyext on gprb,
# splat result on vrb; returned in $v8.
254 name: splat_zero_nxv1i32
256 regBankSelected: false
259 ; CHECK-LABEL: name: splat_zero_nxv1i32
260 ; CHECK: [[C:%[0-9]+]]:gprb(s32) = G_CONSTANT i32 0
261 ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:gprb(s64) = G_ANYEXT [[C]](s32)
262 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 1 x s32>) = G_SPLAT_VECTOR [[ANYEXT]](s64)
263 ; CHECK-NEXT: $v8 = COPY [[SPLAT_VECTOR]](<vscale x 1 x s32>)
264 ; CHECK-NEXT: PseudoRET implicit $v8
265 %1:_(s32) = G_CONSTANT i32 0
266 %2:_(s64) = G_ANYEXT %1(s32)
267 %0:_(<vscale x 1 x s32>) = G_SPLAT_VECTOR %2(s64)
268 $v8 = COPY %0(<vscale x 1 x s32>)
269 PseudoRET implicit $v8
# Zero splat of <vscale x 2 x s32>: i32 zero and its s64 anyext on gprb,
# splat result on vrb; returned in $v8.
273 name: splat_zero_nxv2i32
275 regBankSelected: false
278 ; CHECK-LABEL: name: splat_zero_nxv2i32
279 ; CHECK: [[C:%[0-9]+]]:gprb(s32) = G_CONSTANT i32 0
280 ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:gprb(s64) = G_ANYEXT [[C]](s32)
281 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 2 x s32>) = G_SPLAT_VECTOR [[ANYEXT]](s64)
282 ; CHECK-NEXT: $v8 = COPY [[SPLAT_VECTOR]](<vscale x 2 x s32>)
283 ; CHECK-NEXT: PseudoRET implicit $v8
284 %1:_(s32) = G_CONSTANT i32 0
285 %2:_(s64) = G_ANYEXT %1(s32)
286 %0:_(<vscale x 2 x s32>) = G_SPLAT_VECTOR %2(s64)
287 $v8 = COPY %0(<vscale x 2 x s32>)
288 PseudoRET implicit $v8
# Zero splat of <vscale x 4 x s32>: i32 zero and its s64 anyext on gprb,
# splat result on vrb; returned in the register group $v8m2.
292 name: splat_zero_nxv4i32
294 regBankSelected: false
297 ; CHECK-LABEL: name: splat_zero_nxv4i32
298 ; CHECK: [[C:%[0-9]+]]:gprb(s32) = G_CONSTANT i32 0
299 ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:gprb(s64) = G_ANYEXT [[C]](s32)
300 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 4 x s32>) = G_SPLAT_VECTOR [[ANYEXT]](s64)
301 ; CHECK-NEXT: $v8m2 = COPY [[SPLAT_VECTOR]](<vscale x 4 x s32>)
302 ; CHECK-NEXT: PseudoRET implicit $v8m2
303 %1:_(s32) = G_CONSTANT i32 0
304 %2:_(s64) = G_ANYEXT %1(s32)
305 %0:_(<vscale x 4 x s32>) = G_SPLAT_VECTOR %2(s64)
306 $v8m2 = COPY %0(<vscale x 4 x s32>)
307 PseudoRET implicit $v8m2
# Zero splat of <vscale x 8 x s32>: i32 zero and its s64 anyext on gprb,
# splat result on vrb; returned in the register group $v8m4.
311 name: splat_zero_nxv8i32
313 regBankSelected: false
316 ; CHECK-LABEL: name: splat_zero_nxv8i32
317 ; CHECK: [[C:%[0-9]+]]:gprb(s32) = G_CONSTANT i32 0
318 ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:gprb(s64) = G_ANYEXT [[C]](s32)
319 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 8 x s32>) = G_SPLAT_VECTOR [[ANYEXT]](s64)
320 ; CHECK-NEXT: $v8m4 = COPY [[SPLAT_VECTOR]](<vscale x 8 x s32>)
321 ; CHECK-NEXT: PseudoRET implicit $v8m4
322 %1:_(s32) = G_CONSTANT i32 0
323 %2:_(s64) = G_ANYEXT %1(s32)
324 %0:_(<vscale x 8 x s32>) = G_SPLAT_VECTOR %2(s64)
325 $v8m4 = COPY %0(<vscale x 8 x s32>)
326 PseudoRET implicit $v8m4
# Zero splat of <vscale x 16 x s32>: i32 zero and its s64 anyext on gprb,
# splat result on vrb; returned in the register group $v8m8.
330 name: splat_zero_nxv16i32
332 regBankSelected: false
335 ; CHECK-LABEL: name: splat_zero_nxv16i32
336 ; CHECK: [[C:%[0-9]+]]:gprb(s32) = G_CONSTANT i32 0
337 ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:gprb(s64) = G_ANYEXT [[C]](s32)
338 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 16 x s32>) = G_SPLAT_VECTOR [[ANYEXT]](s64)
339 ; CHECK-NEXT: $v8m8 = COPY [[SPLAT_VECTOR]](<vscale x 16 x s32>)
340 ; CHECK-NEXT: PseudoRET implicit $v8m8
341 %1:_(s32) = G_CONSTANT i32 0
342 %2:_(s64) = G_ANYEXT %1(s32)
343 %0:_(<vscale x 16 x s32>) = G_SPLAT_VECTOR %2(s64)
344 $v8m8 = COPY %0(<vscale x 16 x s32>)
345 PseudoRET implicit $v8m8
# Zero splat of <vscale x 1 x s64>: the s64 zero is already GPR-sized, so no
# anyext is needed; gprb constant feeds the vrb splat directly; returned in $v8.
349 name: splat_zero_nxv1i64
351 regBankSelected: false
354 ; CHECK-LABEL: name: splat_zero_nxv1i64
355 ; CHECK: [[C:%[0-9]+]]:gprb(s64) = G_CONSTANT i64 0
356 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 1 x s64>) = G_SPLAT_VECTOR [[C]](s64)
357 ; CHECK-NEXT: $v8 = COPY [[SPLAT_VECTOR]](<vscale x 1 x s64>)
358 ; CHECK-NEXT: PseudoRET implicit $v8
359 %1:_(s64) = G_CONSTANT i64 0
360 %0:_(<vscale x 1 x s64>) = G_SPLAT_VECTOR %1(s64)
361 $v8 = COPY %0(<vscale x 1 x s64>)
362 PseudoRET implicit $v8
# Zero splat of <vscale x 2 x s64>: s64 zero on gprb feeds the vrb splat
# directly (no anyext); returned in the register group $v8m2.
366 name: splat_zero_nxv2i64
368 regBankSelected: false
371 ; CHECK-LABEL: name: splat_zero_nxv2i64
372 ; CHECK: [[C:%[0-9]+]]:gprb(s64) = G_CONSTANT i64 0
373 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 2 x s64>) = G_SPLAT_VECTOR [[C]](s64)
374 ; CHECK-NEXT: $v8m2 = COPY [[SPLAT_VECTOR]](<vscale x 2 x s64>)
375 ; CHECK-NEXT: PseudoRET implicit $v8m2
376 %1:_(s64) = G_CONSTANT i64 0
377 %0:_(<vscale x 2 x s64>) = G_SPLAT_VECTOR %1(s64)
378 $v8m2 = COPY %0(<vscale x 2 x s64>)
379 PseudoRET implicit $v8m2
# Zero splat of <vscale x 4 x s64>: s64 zero on gprb feeds the vrb splat
# directly (no anyext); returned in the register group $v8m4.
383 name: splat_zero_nxv4i64
385 regBankSelected: false
388 ; CHECK-LABEL: name: splat_zero_nxv4i64
389 ; CHECK: [[C:%[0-9]+]]:gprb(s64) = G_CONSTANT i64 0
390 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 4 x s64>) = G_SPLAT_VECTOR [[C]](s64)
391 ; CHECK-NEXT: $v8m4 = COPY [[SPLAT_VECTOR]](<vscale x 4 x s64>)
392 ; CHECK-NEXT: PseudoRET implicit $v8m4
393 %1:_(s64) = G_CONSTANT i64 0
394 %0:_(<vscale x 4 x s64>) = G_SPLAT_VECTOR %1(s64)
395 $v8m4 = COPY %0(<vscale x 4 x s64>)
396 PseudoRET implicit $v8m4
# Zero splat of <vscale x 8 x s64>: s64 zero on gprb feeds the vrb splat
# directly (no anyext); returned in the register group $v8m8.
400 name: splat_zero_nxv8i64
402 regBankSelected: false
405 ; CHECK-LABEL: name: splat_zero_nxv8i64
406 ; CHECK: [[C:%[0-9]+]]:gprb(s64) = G_CONSTANT i64 0
407 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 8 x s64>) = G_SPLAT_VECTOR [[C]](s64)
408 ; CHECK-NEXT: $v8m8 = COPY [[SPLAT_VECTOR]](<vscale x 8 x s64>)
409 ; CHECK-NEXT: PseudoRET implicit $v8m8
410 %1:_(s64) = G_CONSTANT i64 0
411 %0:_(<vscale x 8 x s64>) = G_SPLAT_VECTOR %1(s64)
412 $v8m8 = COPY %0(<vscale x 8 x s64>)
413 PseudoRET implicit $v8m8
# FP zero splat of <vscale x 1 x s32>: G_FCONSTANT lands on fprb, is copied to
# gprb before the s64 anyext feeding the vrb splat; returned in $v8.
417 name: splat_zero_nxv1f32
419 regBankSelected: false
422 ; CHECK-LABEL: name: splat_zero_nxv1f32
423 ; CHECK: [[C:%[0-9]+]]:fprb(s32) = G_FCONSTANT float 0.000000e+00
424 ; CHECK-NEXT: [[COPY:%[0-9]+]]:gprb(s32) = COPY [[C]](s32)
425 ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:gprb(s64) = G_ANYEXT [[COPY]](s32)
426 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 1 x s32>) = G_SPLAT_VECTOR [[ANYEXT]](s64)
427 ; CHECK-NEXT: $v8 = COPY [[SPLAT_VECTOR]](<vscale x 1 x s32>)
428 ; CHECK-NEXT: PseudoRET implicit $v8
429 %1:_(s32) = G_FCONSTANT float 0.000000e+00
430 %2:_(s64) = G_ANYEXT %1(s32)
431 %0:_(<vscale x 1 x s32>) = G_SPLAT_VECTOR %2(s64)
432 $v8 = COPY %0(<vscale x 1 x s32>)
433 PseudoRET implicit $v8
# FP zero splat of <vscale x 2 x s32>: fprb G_FCONSTANT copied to gprb before
# the s64 anyext feeding the vrb splat; returned in $v8.
437 name: splat_zero_nxv2f32
439 regBankSelected: false
442 ; CHECK-LABEL: name: splat_zero_nxv2f32
443 ; CHECK: [[C:%[0-9]+]]:fprb(s32) = G_FCONSTANT float 0.000000e+00
444 ; CHECK-NEXT: [[COPY:%[0-9]+]]:gprb(s32) = COPY [[C]](s32)
445 ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:gprb(s64) = G_ANYEXT [[COPY]](s32)
446 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 2 x s32>) = G_SPLAT_VECTOR [[ANYEXT]](s64)
447 ; CHECK-NEXT: $v8 = COPY [[SPLAT_VECTOR]](<vscale x 2 x s32>)
448 ; CHECK-NEXT: PseudoRET implicit $v8
449 %1:_(s32) = G_FCONSTANT float 0.000000e+00
450 %2:_(s64) = G_ANYEXT %1(s32)
451 %0:_(<vscale x 2 x s32>) = G_SPLAT_VECTOR %2(s64)
452 $v8 = COPY %0(<vscale x 2 x s32>)
453 PseudoRET implicit $v8
# FP zero splat of <vscale x 4 x s32>: fprb G_FCONSTANT copied to gprb before
# the s64 anyext feeding the vrb splat; returned in the register group $v8m2.
457 name: splat_zero_nxv4f32
459 regBankSelected: false
462 ; CHECK-LABEL: name: splat_zero_nxv4f32
463 ; CHECK: [[C:%[0-9]+]]:fprb(s32) = G_FCONSTANT float 0.000000e+00
464 ; CHECK-NEXT: [[COPY:%[0-9]+]]:gprb(s32) = COPY [[C]](s32)
465 ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:gprb(s64) = G_ANYEXT [[COPY]](s32)
466 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 4 x s32>) = G_SPLAT_VECTOR [[ANYEXT]](s64)
467 ; CHECK-NEXT: $v8m2 = COPY [[SPLAT_VECTOR]](<vscale x 4 x s32>)
468 ; CHECK-NEXT: PseudoRET implicit $v8m2
469 %1:_(s32) = G_FCONSTANT float 0.000000e+00
470 %2:_(s64) = G_ANYEXT %1(s32)
471 %0:_(<vscale x 4 x s32>) = G_SPLAT_VECTOR %2(s64)
472 $v8m2 = COPY %0(<vscale x 4 x s32>)
473 PseudoRET implicit $v8m2
# FP zero splat of <vscale x 8 x s32>: fprb G_FCONSTANT copied to gprb before
# the s64 anyext feeding the vrb splat; returned in the register group $v8m4.
477 name: splat_zero_nxv8f32
479 regBankSelected: false
482 ; CHECK-LABEL: name: splat_zero_nxv8f32
483 ; CHECK: [[C:%[0-9]+]]:fprb(s32) = G_FCONSTANT float 0.000000e+00
484 ; CHECK-NEXT: [[COPY:%[0-9]+]]:gprb(s32) = COPY [[C]](s32)
485 ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:gprb(s64) = G_ANYEXT [[COPY]](s32)
486 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 8 x s32>) = G_SPLAT_VECTOR [[ANYEXT]](s64)
487 ; CHECK-NEXT: $v8m4 = COPY [[SPLAT_VECTOR]](<vscale x 8 x s32>)
488 ; CHECK-NEXT: PseudoRET implicit $v8m4
489 %1:_(s32) = G_FCONSTANT float 0.000000e+00
490 %2:_(s64) = G_ANYEXT %1(s32)
491 %0:_(<vscale x 8 x s32>) = G_SPLAT_VECTOR %2(s64)
492 $v8m4 = COPY %0(<vscale x 8 x s32>)
493 PseudoRET implicit $v8m4
# FP zero splat of <vscale x 16 x s32>: fprb G_FCONSTANT copied to gprb before
# the s64 anyext feeding the vrb splat; returned in the register group $v8m8.
497 name: splat_zero_nxv16f32
499 regBankSelected: false
502 ; CHECK-LABEL: name: splat_zero_nxv16f32
503 ; CHECK: [[C:%[0-9]+]]:fprb(s32) = G_FCONSTANT float 0.000000e+00
504 ; CHECK-NEXT: [[COPY:%[0-9]+]]:gprb(s32) = COPY [[C]](s32)
505 ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:gprb(s64) = G_ANYEXT [[COPY]](s32)
506 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 16 x s32>) = G_SPLAT_VECTOR [[ANYEXT]](s64)
507 ; CHECK-NEXT: $v8m8 = COPY [[SPLAT_VECTOR]](<vscale x 16 x s32>)
508 ; CHECK-NEXT: PseudoRET implicit $v8m8
509 %1:_(s32) = G_FCONSTANT float 0.000000e+00
510 %2:_(s64) = G_ANYEXT %1(s32)
511 %0:_(<vscale x 16 x s32>) = G_SPLAT_VECTOR %2(s64)
512 $v8m8 = COPY %0(<vscale x 16 x s32>)
513 PseudoRET implicit $v8m8
# FP zero splat of <vscale x 1 x s64>: the s64 G_FCONSTANT stays on fprb and
# feeds the vrb splat directly (no copy/anyext); returned in $v8.
517 name: splat_zero_nxv1f64
519 regBankSelected: false
522 ; CHECK-LABEL: name: splat_zero_nxv1f64
523 ; CHECK: [[C:%[0-9]+]]:fprb(s64) = G_FCONSTANT double 0.000000e+00
524 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 1 x s64>) = G_SPLAT_VECTOR [[C]](s64)
525 ; CHECK-NEXT: $v8 = COPY [[SPLAT_VECTOR]](<vscale x 1 x s64>)
526 ; CHECK-NEXT: PseudoRET implicit $v8
527 %1:_(s64) = G_FCONSTANT double 0.000000e+00
528 %0:_(<vscale x 1 x s64>) = G_SPLAT_VECTOR %1(s64)
529 $v8 = COPY %0(<vscale x 1 x s64>)
530 PseudoRET implicit $v8
# FP zero splat of <vscale x 2 x s64>: s64 G_FCONSTANT on fprb feeds the vrb
# splat directly; returned in the register group $v8m2.
534 name: splat_zero_nxv2f64
536 regBankSelected: false
539 ; CHECK-LABEL: name: splat_zero_nxv2f64
540 ; CHECK: [[C:%[0-9]+]]:fprb(s64) = G_FCONSTANT double 0.000000e+00
541 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 2 x s64>) = G_SPLAT_VECTOR [[C]](s64)
542 ; CHECK-NEXT: $v8m2 = COPY [[SPLAT_VECTOR]](<vscale x 2 x s64>)
543 ; CHECK-NEXT: PseudoRET implicit $v8m2
544 %1:_(s64) = G_FCONSTANT double 0.000000e+00
545 %0:_(<vscale x 2 x s64>) = G_SPLAT_VECTOR %1(s64)
546 $v8m2 = COPY %0(<vscale x 2 x s64>)
547 PseudoRET implicit $v8m2
# FP zero splat of <vscale x 4 x s64>: s64 G_FCONSTANT on fprb feeds the vrb
# splat directly; returned in the register group $v8m4.
551 name: splat_zero_nxv4f64
553 regBankSelected: false
556 ; CHECK-LABEL: name: splat_zero_nxv4f64
557 ; CHECK: [[C:%[0-9]+]]:fprb(s64) = G_FCONSTANT double 0.000000e+00
558 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 4 x s64>) = G_SPLAT_VECTOR [[C]](s64)
559 ; CHECK-NEXT: $v8m4 = COPY [[SPLAT_VECTOR]](<vscale x 4 x s64>)
560 ; CHECK-NEXT: PseudoRET implicit $v8m4
561 %1:_(s64) = G_FCONSTANT double 0.000000e+00
562 %0:_(<vscale x 4 x s64>) = G_SPLAT_VECTOR %1(s64)
563 $v8m4 = COPY %0(<vscale x 4 x s64>)
564 PseudoRET implicit $v8m4
# FP zero splat of <vscale x 8 x s64>: s64 G_FCONSTANT on fprb feeds the vrb
# splat directly; returned in the register group $v8m8.
568 name: splat_zero_nxv8f64
570 regBankSelected: false
573 ; CHECK-LABEL: name: splat_zero_nxv8f64
574 ; CHECK: [[C:%[0-9]+]]:fprb(s64) = G_FCONSTANT double 0.000000e+00
575 ; CHECK-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:vrb(<vscale x 8 x s64>) = G_SPLAT_VECTOR [[C]](s64)
576 ; CHECK-NEXT: $v8m8 = COPY [[SPLAT_VECTOR]](<vscale x 8 x s64>)
577 ; CHECK-NEXT: PseudoRET implicit $v8m8
578 %1:_(s64) = G_FCONSTANT double 0.000000e+00
579 %0:_(<vscale x 8 x s64>) = G_SPLAT_VECTOR %1(s64)
580 $v8m8 = COPY %0(<vscale x 8 x s64>)
581 PseudoRET implicit $v8m8