1 # NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
2 # RUN: llc -mtriple=riscv32 -mattr=+v -run-pass=legalizer %s -o - | FileCheck --check-prefix=RV32 %s
3 # RUN: llc -mtriple=riscv64 -mattr=+v -run-pass=legalizer %s -o - | FileCheck --check-prefix=RV64 %s
5 # Extend from s1 element vectors
7 name: anyext_nxv1i8_nxv1i1
9 tracksRegLiveness: true
13 ; RV32-LABEL: name: anyext_nxv1i8_nxv1i1
16 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 1 x s1>) = COPY $v0
17 ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
18 ; RV32-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 1 x s8>) = G_SPLAT_VECTOR [[C]](s32)
19 ; RV32-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 1
20 ; RV32-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 1 x s8>) = G_SPLAT_VECTOR [[C1]](s32)
21 ; RV32-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 1 x s8>) = G_SELECT [[COPY]](<vscale x 1 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
22 ; RV32-NEXT: $v8 = COPY [[SELECT]](<vscale x 1 x s8>)
23 ; RV32-NEXT: PseudoRET implicit $v8
25 ; RV64-LABEL: name: anyext_nxv1i8_nxv1i1
28 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 1 x s1>) = COPY $v0
29 ; RV64-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
30 ; RV64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 1 x s8>) = G_SPLAT_VECTOR [[C]](s64)
31 ; RV64-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
32 ; RV64-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 1 x s8>) = G_SPLAT_VECTOR [[C1]](s64)
33 ; RV64-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 1 x s8>) = G_SELECT [[COPY]](<vscale x 1 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
34 ; RV64-NEXT: $v8 = COPY [[SELECT]](<vscale x 1 x s8>)
35 ; RV64-NEXT: PseudoRET implicit $v8
36 %1:_(<vscale x 1 x s1>) = COPY $v0
37 %0:_(<vscale x 1 x s8>) = G_ANYEXT %1(<vscale x 1 x s1>)
38 $v8 = COPY %0(<vscale x 1 x s8>)
39 PseudoRET implicit $v8
42 name: anyext_nxv1i16_nxv1i1
44 tracksRegLiveness: true
48 ; RV32-LABEL: name: anyext_nxv1i16_nxv1i1
51 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 1 x s1>) = COPY $v0
52 ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
53 ; RV32-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 1 x s16>) = G_SPLAT_VECTOR [[C]](s32)
54 ; RV32-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 1
55 ; RV32-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 1 x s16>) = G_SPLAT_VECTOR [[C1]](s32)
56 ; RV32-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 1 x s16>) = G_SELECT [[COPY]](<vscale x 1 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
57 ; RV32-NEXT: $v8 = COPY [[SELECT]](<vscale x 1 x s16>)
58 ; RV32-NEXT: PseudoRET implicit $v8
60 ; RV64-LABEL: name: anyext_nxv1i16_nxv1i1
63 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 1 x s1>) = COPY $v0
64 ; RV64-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
65 ; RV64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 1 x s16>) = G_SPLAT_VECTOR [[C]](s64)
66 ; RV64-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
67 ; RV64-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 1 x s16>) = G_SPLAT_VECTOR [[C1]](s64)
68 ; RV64-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 1 x s16>) = G_SELECT [[COPY]](<vscale x 1 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
69 ; RV64-NEXT: $v8 = COPY [[SELECT]](<vscale x 1 x s16>)
70 ; RV64-NEXT: PseudoRET implicit $v8
71 %1:_(<vscale x 1 x s1>) = COPY $v0
72 %0:_(<vscale x 1 x s16>) = G_ANYEXT %1(<vscale x 1 x s1>)
73 $v8 = COPY %0(<vscale x 1 x s16>)
74 PseudoRET implicit $v8
77 name: anyext_nxv1i32_nxv1i1
79 tracksRegLiveness: true
83 ; RV32-LABEL: name: anyext_nxv1i32_nxv1i1
86 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 1 x s1>) = COPY $v0
87 ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
88 ; RV32-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 1 x s32>) = G_SPLAT_VECTOR [[C]](s32)
89 ; RV32-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 1
90 ; RV32-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 1 x s32>) = G_SPLAT_VECTOR [[C1]](s32)
91 ; RV32-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 1 x s32>) = G_SELECT [[COPY]](<vscale x 1 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
92 ; RV32-NEXT: $v8 = COPY [[SELECT]](<vscale x 1 x s32>)
93 ; RV32-NEXT: PseudoRET implicit $v8
95 ; RV64-LABEL: name: anyext_nxv1i32_nxv1i1
98 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 1 x s1>) = COPY $v0
99 ; RV64-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
100 ; RV64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 1 x s32>) = G_SPLAT_VECTOR [[C]](s64)
101 ; RV64-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
102 ; RV64-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 1 x s32>) = G_SPLAT_VECTOR [[C1]](s64)
103 ; RV64-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 1 x s32>) = G_SELECT [[COPY]](<vscale x 1 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
104 ; RV64-NEXT: $v8 = COPY [[SELECT]](<vscale x 1 x s32>)
105 ; RV64-NEXT: PseudoRET implicit $v8
106 %1:_(<vscale x 1 x s1>) = COPY $v0
107 %0:_(<vscale x 1 x s32>) = G_ANYEXT %1(<vscale x 1 x s1>)
108 $v8 = COPY %0(<vscale x 1 x s32>)
109 PseudoRET implicit $v8
112 name: anyext_nxv1i64_nxv1i1
114 tracksRegLiveness: true
118 ; RV32-LABEL: name: anyext_nxv1i64_nxv1i1
121 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 1 x s1>) = COPY $v0
122 ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
123 ; RV32-NEXT: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[C]](s32), [[C]](s32)
124 ; RV32-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 1 x s64>) = G_SPLAT_VECTOR [[MV]](s64)
125 ; RV32-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 1
126 ; RV32-NEXT: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[C1]](s32), [[C]](s32)
127 ; RV32-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 1 x s64>) = G_SPLAT_VECTOR [[MV1]](s64)
128 ; RV32-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 1 x s64>) = G_SELECT [[COPY]](<vscale x 1 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
129 ; RV32-NEXT: $v8 = COPY [[SELECT]](<vscale x 1 x s64>)
130 ; RV32-NEXT: PseudoRET implicit $v8
132 ; RV64-LABEL: name: anyext_nxv1i64_nxv1i1
135 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 1 x s1>) = COPY $v0
136 ; RV64-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
137 ; RV64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 1 x s64>) = G_SPLAT_VECTOR [[C]](s64)
138 ; RV64-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
139 ; RV64-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 1 x s64>) = G_SPLAT_VECTOR [[C1]](s64)
140 ; RV64-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 1 x s64>) = G_SELECT [[COPY]](<vscale x 1 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
141 ; RV64-NEXT: $v8 = COPY [[SELECT]](<vscale x 1 x s64>)
142 ; RV64-NEXT: PseudoRET implicit $v8
143 %1:_(<vscale x 1 x s1>) = COPY $v0
144 %0:_(<vscale x 1 x s64>) = G_ANYEXT %1(<vscale x 1 x s1>)
145 $v8 = COPY %0(<vscale x 1 x s64>)
146 PseudoRET implicit $v8
149 name: anyext_nxv2i8_nxv2i1
151 tracksRegLiveness: true
155 ; RV32-LABEL: name: anyext_nxv2i8_nxv2i1
158 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 2 x s1>) = COPY $v0
159 ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
160 ; RV32-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 2 x s8>) = G_SPLAT_VECTOR [[C]](s32)
161 ; RV32-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 1
162 ; RV32-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 2 x s8>) = G_SPLAT_VECTOR [[C1]](s32)
163 ; RV32-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 2 x s8>) = G_SELECT [[COPY]](<vscale x 2 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
164 ; RV32-NEXT: $v8 = COPY [[SELECT]](<vscale x 2 x s8>)
165 ; RV32-NEXT: PseudoRET implicit $v8
167 ; RV64-LABEL: name: anyext_nxv2i8_nxv2i1
170 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 2 x s1>) = COPY $v0
171 ; RV64-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
172 ; RV64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 2 x s8>) = G_SPLAT_VECTOR [[C]](s64)
173 ; RV64-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
174 ; RV64-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 2 x s8>) = G_SPLAT_VECTOR [[C1]](s64)
175 ; RV64-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 2 x s8>) = G_SELECT [[COPY]](<vscale x 2 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
176 ; RV64-NEXT: $v8 = COPY [[SELECT]](<vscale x 2 x s8>)
177 ; RV64-NEXT: PseudoRET implicit $v8
178 %1:_(<vscale x 2 x s1>) = COPY $v0
179 %0:_(<vscale x 2 x s8>) = G_ANYEXT %1(<vscale x 2 x s1>)
180 $v8 = COPY %0(<vscale x 2 x s8>)
181 PseudoRET implicit $v8
184 name: anyext_nxv2i16_nxv2i1
186 tracksRegLiveness: true
190 ; RV32-LABEL: name: anyext_nxv2i16_nxv2i1
193 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 2 x s1>) = COPY $v0
194 ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
195 ; RV32-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 2 x s16>) = G_SPLAT_VECTOR [[C]](s32)
196 ; RV32-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 1
197 ; RV32-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 2 x s16>) = G_SPLAT_VECTOR [[C1]](s32)
198 ; RV32-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 2 x s16>) = G_SELECT [[COPY]](<vscale x 2 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
199 ; RV32-NEXT: $v8 = COPY [[SELECT]](<vscale x 2 x s16>)
200 ; RV32-NEXT: PseudoRET implicit $v8
202 ; RV64-LABEL: name: anyext_nxv2i16_nxv2i1
205 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 2 x s1>) = COPY $v0
206 ; RV64-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
207 ; RV64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 2 x s16>) = G_SPLAT_VECTOR [[C]](s64)
208 ; RV64-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
209 ; RV64-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 2 x s16>) = G_SPLAT_VECTOR [[C1]](s64)
210 ; RV64-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 2 x s16>) = G_SELECT [[COPY]](<vscale x 2 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
211 ; RV64-NEXT: $v8 = COPY [[SELECT]](<vscale x 2 x s16>)
212 ; RV64-NEXT: PseudoRET implicit $v8
213 %1:_(<vscale x 2 x s1>) = COPY $v0
214 %0:_(<vscale x 2 x s16>) = G_ANYEXT %1(<vscale x 2 x s1>)
215 $v8 = COPY %0(<vscale x 2 x s16>)
216 PseudoRET implicit $v8
219 name: anyext_nxv2i32_nxv2i1
221 tracksRegLiveness: true
225 ; RV32-LABEL: name: anyext_nxv2i32_nxv2i1
228 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 2 x s1>) = COPY $v0
229 ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
230 ; RV32-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 2 x s32>) = G_SPLAT_VECTOR [[C]](s32)
231 ; RV32-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 1
232 ; RV32-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 2 x s32>) = G_SPLAT_VECTOR [[C1]](s32)
233 ; RV32-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 2 x s32>) = G_SELECT [[COPY]](<vscale x 2 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
234 ; RV32-NEXT: $v8 = COPY [[SELECT]](<vscale x 2 x s32>)
235 ; RV32-NEXT: PseudoRET implicit $v8
237 ; RV64-LABEL: name: anyext_nxv2i32_nxv2i1
240 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 2 x s1>) = COPY $v0
241 ; RV64-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
242 ; RV64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 2 x s32>) = G_SPLAT_VECTOR [[C]](s64)
243 ; RV64-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
244 ; RV64-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 2 x s32>) = G_SPLAT_VECTOR [[C1]](s64)
245 ; RV64-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 2 x s32>) = G_SELECT [[COPY]](<vscale x 2 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
246 ; RV64-NEXT: $v8 = COPY [[SELECT]](<vscale x 2 x s32>)
247 ; RV64-NEXT: PseudoRET implicit $v8
248 %1:_(<vscale x 2 x s1>) = COPY $v0
249 %0:_(<vscale x 2 x s32>) = G_ANYEXT %1(<vscale x 2 x s1>)
250 $v8 = COPY %0(<vscale x 2 x s32>)
251 PseudoRET implicit $v8
254 name: anyext_nxv2i64_nxv2i1
256 tracksRegLiveness: true
260 ; RV32-LABEL: name: anyext_nxv2i64_nxv2i1
263 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 2 x s1>) = COPY $v0
264 ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
265 ; RV32-NEXT: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[C]](s32), [[C]](s32)
266 ; RV32-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 2 x s64>) = G_SPLAT_VECTOR [[MV]](s64)
267 ; RV32-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 1
268 ; RV32-NEXT: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[C1]](s32), [[C]](s32)
269 ; RV32-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 2 x s64>) = G_SPLAT_VECTOR [[MV1]](s64)
270 ; RV32-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 2 x s64>) = G_SELECT [[COPY]](<vscale x 2 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
271 ; RV32-NEXT: $v8m2 = COPY [[SELECT]](<vscale x 2 x s64>)
272 ; RV32-NEXT: PseudoRET implicit $v8m2
274 ; RV64-LABEL: name: anyext_nxv2i64_nxv2i1
277 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 2 x s1>) = COPY $v0
278 ; RV64-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
279 ; RV64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 2 x s64>) = G_SPLAT_VECTOR [[C]](s64)
280 ; RV64-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
281 ; RV64-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 2 x s64>) = G_SPLAT_VECTOR [[C1]](s64)
282 ; RV64-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 2 x s64>) = G_SELECT [[COPY]](<vscale x 2 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
283 ; RV64-NEXT: $v8m2 = COPY [[SELECT]](<vscale x 2 x s64>)
284 ; RV64-NEXT: PseudoRET implicit $v8m2
285 %1:_(<vscale x 2 x s1>) = COPY $v0
286 %0:_(<vscale x 2 x s64>) = G_ANYEXT %1(<vscale x 2 x s1>)
287 $v8m2 = COPY %0(<vscale x 2 x s64>)
288 PseudoRET implicit $v8m2
291 name: anyext_nxv4i8_nxv4i1
293 tracksRegLiveness: true
297 ; RV32-LABEL: name: anyext_nxv4i8_nxv4i1
300 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 4 x s1>) = COPY $v0
301 ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
302 ; RV32-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 4 x s8>) = G_SPLAT_VECTOR [[C]](s32)
303 ; RV32-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 1
304 ; RV32-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 4 x s8>) = G_SPLAT_VECTOR [[C1]](s32)
305 ; RV32-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 4 x s8>) = G_SELECT [[COPY]](<vscale x 4 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
306 ; RV32-NEXT: $v8 = COPY [[SELECT]](<vscale x 4 x s8>)
307 ; RV32-NEXT: PseudoRET implicit $v8
309 ; RV64-LABEL: name: anyext_nxv4i8_nxv4i1
312 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 4 x s1>) = COPY $v0
313 ; RV64-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
314 ; RV64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 4 x s8>) = G_SPLAT_VECTOR [[C]](s64)
315 ; RV64-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
316 ; RV64-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 4 x s8>) = G_SPLAT_VECTOR [[C1]](s64)
317 ; RV64-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 4 x s8>) = G_SELECT [[COPY]](<vscale x 4 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
318 ; RV64-NEXT: $v8 = COPY [[SELECT]](<vscale x 4 x s8>)
319 ; RV64-NEXT: PseudoRET implicit $v8
320 %1:_(<vscale x 4 x s1>) = COPY $v0
321 %0:_(<vscale x 4 x s8>) = G_ANYEXT %1(<vscale x 4 x s1>)
322 $v8 = COPY %0(<vscale x 4 x s8>)
323 PseudoRET implicit $v8
326 name: anyext_nxv4i16_nxv4i1
328 tracksRegLiveness: true
332 ; RV32-LABEL: name: anyext_nxv4i16_nxv4i1
335 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 4 x s1>) = COPY $v0
336 ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
337 ; RV32-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 4 x s16>) = G_SPLAT_VECTOR [[C]](s32)
338 ; RV32-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 1
339 ; RV32-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 4 x s16>) = G_SPLAT_VECTOR [[C1]](s32)
340 ; RV32-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 4 x s16>) = G_SELECT [[COPY]](<vscale x 4 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
341 ; RV32-NEXT: $v8 = COPY [[SELECT]](<vscale x 4 x s16>)
342 ; RV32-NEXT: PseudoRET implicit $v8
344 ; RV64-LABEL: name: anyext_nxv4i16_nxv4i1
347 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 4 x s1>) = COPY $v0
348 ; RV64-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
349 ; RV64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 4 x s16>) = G_SPLAT_VECTOR [[C]](s64)
350 ; RV64-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
351 ; RV64-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 4 x s16>) = G_SPLAT_VECTOR [[C1]](s64)
352 ; RV64-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 4 x s16>) = G_SELECT [[COPY]](<vscale x 4 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
353 ; RV64-NEXT: $v8 = COPY [[SELECT]](<vscale x 4 x s16>)
354 ; RV64-NEXT: PseudoRET implicit $v8
355 %1:_(<vscale x 4 x s1>) = COPY $v0
356 %0:_(<vscale x 4 x s16>) = G_ANYEXT %1(<vscale x 4 x s1>)
357 $v8 = COPY %0(<vscale x 4 x s16>)
358 PseudoRET implicit $v8
361 name: anyext_nxv4i32_nxv4i1
363 tracksRegLiveness: true
367 ; RV32-LABEL: name: anyext_nxv4i32_nxv4i1
370 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 4 x s1>) = COPY $v0
371 ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
372 ; RV32-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 4 x s32>) = G_SPLAT_VECTOR [[C]](s32)
373 ; RV32-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 1
374 ; RV32-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 4 x s32>) = G_SPLAT_VECTOR [[C1]](s32)
375 ; RV32-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 4 x s32>) = G_SELECT [[COPY]](<vscale x 4 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
376 ; RV32-NEXT: $v8m2 = COPY [[SELECT]](<vscale x 4 x s32>)
377 ; RV32-NEXT: PseudoRET implicit $v8m2
379 ; RV64-LABEL: name: anyext_nxv4i32_nxv4i1
382 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 4 x s1>) = COPY $v0
383 ; RV64-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
384 ; RV64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 4 x s32>) = G_SPLAT_VECTOR [[C]](s64)
385 ; RV64-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
386 ; RV64-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 4 x s32>) = G_SPLAT_VECTOR [[C1]](s64)
387 ; RV64-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 4 x s32>) = G_SELECT [[COPY]](<vscale x 4 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
388 ; RV64-NEXT: $v8m2 = COPY [[SELECT]](<vscale x 4 x s32>)
389 ; RV64-NEXT: PseudoRET implicit $v8m2
390 %1:_(<vscale x 4 x s1>) = COPY $v0
391 %0:_(<vscale x 4 x s32>) = G_ANYEXT %1(<vscale x 4 x s1>)
392 $v8m2 = COPY %0(<vscale x 4 x s32>)
393 PseudoRET implicit $v8m2
396 name: anyext_nxv4i64_nxv4i1
398 tracksRegLiveness: true
402 ; RV32-LABEL: name: anyext_nxv4i64_nxv4i1
405 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 4 x s1>) = COPY $v0
406 ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
407 ; RV32-NEXT: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[C]](s32), [[C]](s32)
408 ; RV32-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 4 x s64>) = G_SPLAT_VECTOR [[MV]](s64)
409 ; RV32-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 1
410 ; RV32-NEXT: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[C1]](s32), [[C]](s32)
411 ; RV32-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 4 x s64>) = G_SPLAT_VECTOR [[MV1]](s64)
412 ; RV32-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 4 x s64>) = G_SELECT [[COPY]](<vscale x 4 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
413 ; RV32-NEXT: $v8m4 = COPY [[SELECT]](<vscale x 4 x s64>)
414 ; RV32-NEXT: PseudoRET implicit $v8m4
416 ; RV64-LABEL: name: anyext_nxv4i64_nxv4i1
419 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 4 x s1>) = COPY $v0
420 ; RV64-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
421 ; RV64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 4 x s64>) = G_SPLAT_VECTOR [[C]](s64)
422 ; RV64-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
423 ; RV64-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 4 x s64>) = G_SPLAT_VECTOR [[C1]](s64)
424 ; RV64-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 4 x s64>) = G_SELECT [[COPY]](<vscale x 4 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
425 ; RV64-NEXT: $v8m4 = COPY [[SELECT]](<vscale x 4 x s64>)
426 ; RV64-NEXT: PseudoRET implicit $v8m4
427 %1:_(<vscale x 4 x s1>) = COPY $v0
428 %0:_(<vscale x 4 x s64>) = G_ANYEXT %1(<vscale x 4 x s1>)
429 $v8m4 = COPY %0(<vscale x 4 x s64>)
430 PseudoRET implicit $v8m4
433 name: anyext_nxv8i8_nxv8i1
435 tracksRegLiveness: true
439 ; RV32-LABEL: name: anyext_nxv8i8_nxv8i1
442 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 8 x s1>) = COPY $v0
443 ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
444 ; RV32-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 8 x s8>) = G_SPLAT_VECTOR [[C]](s32)
445 ; RV32-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 1
446 ; RV32-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 8 x s8>) = G_SPLAT_VECTOR [[C1]](s32)
447 ; RV32-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 8 x s8>) = G_SELECT [[COPY]](<vscale x 8 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
448 ; RV32-NEXT: $v8 = COPY [[SELECT]](<vscale x 8 x s8>)
449 ; RV32-NEXT: PseudoRET implicit $v8
451 ; RV64-LABEL: name: anyext_nxv8i8_nxv8i1
454 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 8 x s1>) = COPY $v0
455 ; RV64-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
456 ; RV64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 8 x s8>) = G_SPLAT_VECTOR [[C]](s64)
457 ; RV64-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
458 ; RV64-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 8 x s8>) = G_SPLAT_VECTOR [[C1]](s64)
459 ; RV64-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 8 x s8>) = G_SELECT [[COPY]](<vscale x 8 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
460 ; RV64-NEXT: $v8 = COPY [[SELECT]](<vscale x 8 x s8>)
461 ; RV64-NEXT: PseudoRET implicit $v8
462 %1:_(<vscale x 8 x s1>) = COPY $v0
463 %0:_(<vscale x 8 x s8>) = G_ANYEXT %1(<vscale x 8 x s1>)
464 $v8 = COPY %0(<vscale x 8 x s8>)
465 PseudoRET implicit $v8
468 name: anyext_nxv8i16_nxv8i1
470 tracksRegLiveness: true
474 ; RV32-LABEL: name: anyext_nxv8i16_nxv8i1
477 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 8 x s1>) = COPY $v0
478 ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
479 ; RV32-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 8 x s16>) = G_SPLAT_VECTOR [[C]](s32)
480 ; RV32-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 1
481 ; RV32-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 8 x s16>) = G_SPLAT_VECTOR [[C1]](s32)
482 ; RV32-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 8 x s16>) = G_SELECT [[COPY]](<vscale x 8 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
483 ; RV32-NEXT: $v8m2 = COPY [[SELECT]](<vscale x 8 x s16>)
484 ; RV32-NEXT: PseudoRET implicit $v8m2
486 ; RV64-LABEL: name: anyext_nxv8i16_nxv8i1
489 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 8 x s1>) = COPY $v0
490 ; RV64-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
491 ; RV64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 8 x s16>) = G_SPLAT_VECTOR [[C]](s64)
492 ; RV64-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
493 ; RV64-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 8 x s16>) = G_SPLAT_VECTOR [[C1]](s64)
494 ; RV64-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 8 x s16>) = G_SELECT [[COPY]](<vscale x 8 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
495 ; RV64-NEXT: $v8m2 = COPY [[SELECT]](<vscale x 8 x s16>)
496 ; RV64-NEXT: PseudoRET implicit $v8m2
497 %1:_(<vscale x 8 x s1>) = COPY $v0
498 %0:_(<vscale x 8 x s16>) = G_ANYEXT %1(<vscale x 8 x s1>)
499 $v8m2 = COPY %0(<vscale x 8 x s16>)
500 PseudoRET implicit $v8m2
503 name: anyext_nxv8i32_nxv8i1
505 tracksRegLiveness: true
509 ; RV32-LABEL: name: anyext_nxv8i32_nxv8i1
512 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 8 x s1>) = COPY $v0
513 ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
514 ; RV32-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 8 x s32>) = G_SPLAT_VECTOR [[C]](s32)
515 ; RV32-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 1
516 ; RV32-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 8 x s32>) = G_SPLAT_VECTOR [[C1]](s32)
517 ; RV32-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 8 x s32>) = G_SELECT [[COPY]](<vscale x 8 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
518 ; RV32-NEXT: $v8m4 = COPY [[SELECT]](<vscale x 8 x s32>)
519 ; RV32-NEXT: PseudoRET implicit $v8m4
521 ; RV64-LABEL: name: anyext_nxv8i32_nxv8i1
524 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 8 x s1>) = COPY $v0
525 ; RV64-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
526 ; RV64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 8 x s32>) = G_SPLAT_VECTOR [[C]](s64)
527 ; RV64-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
528 ; RV64-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 8 x s32>) = G_SPLAT_VECTOR [[C1]](s64)
529 ; RV64-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 8 x s32>) = G_SELECT [[COPY]](<vscale x 8 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
530 ; RV64-NEXT: $v8m4 = COPY [[SELECT]](<vscale x 8 x s32>)
531 ; RV64-NEXT: PseudoRET implicit $v8m4
532 %1:_(<vscale x 8 x s1>) = COPY $v0
533 %0:_(<vscale x 8 x s32>) = G_ANYEXT %1(<vscale x 8 x s1>)
534 $v8m4 = COPY %0(<vscale x 8 x s32>)
535 PseudoRET implicit $v8m4
538 name: anyext_nxv8i64_nxv8i1
540 tracksRegLiveness: true
544 ; RV32-LABEL: name: anyext_nxv8i64_nxv8i1
547 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 8 x s1>) = COPY $v0
548 ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
549 ; RV32-NEXT: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[C]](s32), [[C]](s32)
550 ; RV32-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 8 x s64>) = G_SPLAT_VECTOR [[MV]](s64)
551 ; RV32-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 1
552 ; RV32-NEXT: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[C1]](s32), [[C]](s32)
553 ; RV32-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 8 x s64>) = G_SPLAT_VECTOR [[MV1]](s64)
554 ; RV32-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 8 x s64>) = G_SELECT [[COPY]](<vscale x 8 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
555 ; RV32-NEXT: $v8m8 = COPY [[SELECT]](<vscale x 8 x s64>)
556 ; RV32-NEXT: PseudoRET implicit $v8m8
558 ; RV64-LABEL: name: anyext_nxv8i64_nxv8i1
561 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 8 x s1>) = COPY $v0
562 ; RV64-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
563 ; RV64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 8 x s64>) = G_SPLAT_VECTOR [[C]](s64)
564 ; RV64-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
565 ; RV64-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 8 x s64>) = G_SPLAT_VECTOR [[C1]](s64)
566 ; RV64-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 8 x s64>) = G_SELECT [[COPY]](<vscale x 8 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
567 ; RV64-NEXT: $v8m8 = COPY [[SELECT]](<vscale x 8 x s64>)
568 ; RV64-NEXT: PseudoRET implicit $v8m8
569 %1:_(<vscale x 8 x s1>) = COPY $v0
570 %0:_(<vscale x 8 x s64>) = G_ANYEXT %1(<vscale x 8 x s1>)
571 $v8m8 = COPY %0(<vscale x 8 x s64>)
572 PseudoRET implicit $v8m8
575 name: anyext_nxv16i8_nxv16i1
577 tracksRegLiveness: true
581 ; RV32-LABEL: name: anyext_nxv16i8_nxv16i1
584 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 16 x s1>) = COPY $v0
585 ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
586 ; RV32-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 16 x s8>) = G_SPLAT_VECTOR [[C]](s32)
587 ; RV32-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 1
588 ; RV32-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 16 x s8>) = G_SPLAT_VECTOR [[C1]](s32)
589 ; RV32-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 16 x s8>) = G_SELECT [[COPY]](<vscale x 16 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
590 ; RV32-NEXT: $v8m2 = COPY [[SELECT]](<vscale x 16 x s8>)
591 ; RV32-NEXT: PseudoRET implicit $v8m2
593 ; RV64-LABEL: name: anyext_nxv16i8_nxv16i1
596 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 16 x s1>) = COPY $v0
597 ; RV64-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
598 ; RV64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 16 x s8>) = G_SPLAT_VECTOR [[C]](s64)
599 ; RV64-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
600 ; RV64-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 16 x s8>) = G_SPLAT_VECTOR [[C1]](s64)
601 ; RV64-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 16 x s8>) = G_SELECT [[COPY]](<vscale x 16 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
602 ; RV64-NEXT: $v8m2 = COPY [[SELECT]](<vscale x 16 x s8>)
603 ; RV64-NEXT: PseudoRET implicit $v8m2
604 %1:_(<vscale x 16 x s1>) = COPY $v0
605 %0:_(<vscale x 16 x s8>) = G_ANYEXT %1(<vscale x 16 x s1>)
606 $v8m2 = COPY %0(<vscale x 16 x s8>)
607 PseudoRET implicit $v8m2
610 name: anyext_nxv16i16_nxv16i1
612 tracksRegLiveness: true
616 ; RV32-LABEL: name: anyext_nxv16i16_nxv16i1
619 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 16 x s1>) = COPY $v0
620 ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
621 ; RV32-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 16 x s16>) = G_SPLAT_VECTOR [[C]](s32)
622 ; RV32-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 1
623 ; RV32-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 16 x s16>) = G_SPLAT_VECTOR [[C1]](s32)
624 ; RV32-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 16 x s16>) = G_SELECT [[COPY]](<vscale x 16 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
625 ; RV32-NEXT: $v8m4 = COPY [[SELECT]](<vscale x 16 x s16>)
626 ; RV32-NEXT: PseudoRET implicit $v8m4
628 ; RV64-LABEL: name: anyext_nxv16i16_nxv16i1
631 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 16 x s1>) = COPY $v0
632 ; RV64-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
633 ; RV64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 16 x s16>) = G_SPLAT_VECTOR [[C]](s64)
634 ; RV64-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
635 ; RV64-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 16 x s16>) = G_SPLAT_VECTOR [[C1]](s64)
636 ; RV64-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 16 x s16>) = G_SELECT [[COPY]](<vscale x 16 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
637 ; RV64-NEXT: $v8m4 = COPY [[SELECT]](<vscale x 16 x s16>)
638 ; RV64-NEXT: PseudoRET implicit $v8m4
639 %1:_(<vscale x 16 x s1>) = COPY $v0
640 %0:_(<vscale x 16 x s16>) = G_ANYEXT %1(<vscale x 16 x s1>)
641 $v8m4 = COPY %0(<vscale x 16 x s16>)
642 PseudoRET implicit $v8m4
645 name: anyext_nxv16i32_nxv16i1
647 tracksRegLiveness: true
651 ; RV32-LABEL: name: anyext_nxv16i32_nxv16i1
654 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 16 x s1>) = COPY $v0
655 ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
656 ; RV32-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 16 x s32>) = G_SPLAT_VECTOR [[C]](s32)
657 ; RV32-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 1
658 ; RV32-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 16 x s32>) = G_SPLAT_VECTOR [[C1]](s32)
659 ; RV32-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 16 x s32>) = G_SELECT [[COPY]](<vscale x 16 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
660 ; RV32-NEXT: $v8m8 = COPY [[SELECT]](<vscale x 16 x s32>)
661 ; RV32-NEXT: PseudoRET implicit $v8m8
663 ; RV64-LABEL: name: anyext_nxv16i32_nxv16i1
666 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 16 x s1>) = COPY $v0
667 ; RV64-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
668 ; RV64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 16 x s32>) = G_SPLAT_VECTOR [[C]](s64)
669 ; RV64-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
670 ; RV64-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 16 x s32>) = G_SPLAT_VECTOR [[C1]](s64)
671 ; RV64-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 16 x s32>) = G_SELECT [[COPY]](<vscale x 16 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
672 ; RV64-NEXT: $v8m8 = COPY [[SELECT]](<vscale x 16 x s32>)
673 ; RV64-NEXT: PseudoRET implicit $v8m8
674 %1:_(<vscale x 16 x s1>) = COPY $v0
675 %0:_(<vscale x 16 x s32>) = G_ANYEXT %1(<vscale x 16 x s1>)
676 $v8m8 = COPY %0(<vscale x 16 x s32>)
677 PseudoRET implicit $v8m8
680 name: anyext_nxv32i8_nxv32i1
682 tracksRegLiveness: true
686 ; RV32-LABEL: name: anyext_nxv32i8_nxv32i1
689 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 32 x s1>) = COPY $v0
690 ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
691 ; RV32-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 32 x s8>) = G_SPLAT_VECTOR [[C]](s32)
692 ; RV32-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 1
693 ; RV32-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 32 x s8>) = G_SPLAT_VECTOR [[C1]](s32)
694 ; RV32-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 32 x s8>) = G_SELECT [[COPY]](<vscale x 32 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
695 ; RV32-NEXT: $v8m4 = COPY [[SELECT]](<vscale x 32 x s8>)
696 ; RV32-NEXT: PseudoRET implicit $v8m4
698 ; RV64-LABEL: name: anyext_nxv32i8_nxv32i1
701 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 32 x s1>) = COPY $v0
702 ; RV64-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
703 ; RV64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 32 x s8>) = G_SPLAT_VECTOR [[C]](s64)
704 ; RV64-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
705 ; RV64-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 32 x s8>) = G_SPLAT_VECTOR [[C1]](s64)
706 ; RV64-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 32 x s8>) = G_SELECT [[COPY]](<vscale x 32 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
707 ; RV64-NEXT: $v8m4 = COPY [[SELECT]](<vscale x 32 x s8>)
708 ; RV64-NEXT: PseudoRET implicit $v8m4
709 %1:_(<vscale x 32 x s1>) = COPY $v0
710 %0:_(<vscale x 32 x s8>) = G_ANYEXT %1(<vscale x 32 x s1>)
711 $v8m4 = COPY %0(<vscale x 32 x s8>)
712 PseudoRET implicit $v8m4
# anyext nxv32i1 -> nxv32i16: a mask (s1) source cannot be extended directly, so
# the legalizer lowers G_ANYEXT to G_SELECT between splats of 1 and 0 (splat
# scalar is s32 on RV32, s64 on RV64); the nxv32i16 result lives in $v8m8.
715 name: anyext_nxv32i16_nxv32i1
717 tracksRegLiveness: true
721 ; RV32-LABEL: name: anyext_nxv32i16_nxv32i1
724 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 32 x s1>) = COPY $v0
725 ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
726 ; RV32-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 32 x s16>) = G_SPLAT_VECTOR [[C]](s32)
727 ; RV32-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 1
728 ; RV32-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 32 x s16>) = G_SPLAT_VECTOR [[C1]](s32)
729 ; RV32-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 32 x s16>) = G_SELECT [[COPY]](<vscale x 32 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
730 ; RV32-NEXT: $v8m8 = COPY [[SELECT]](<vscale x 32 x s16>)
731 ; RV32-NEXT: PseudoRET implicit $v8m8
733 ; RV64-LABEL: name: anyext_nxv32i16_nxv32i1
736 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 32 x s1>) = COPY $v0
737 ; RV64-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
738 ; RV64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 32 x s16>) = G_SPLAT_VECTOR [[C]](s64)
739 ; RV64-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
740 ; RV64-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 32 x s16>) = G_SPLAT_VECTOR [[C1]](s64)
741 ; RV64-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 32 x s16>) = G_SELECT [[COPY]](<vscale x 32 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
742 ; RV64-NEXT: $v8m8 = COPY [[SELECT]](<vscale x 32 x s16>)
743 ; RV64-NEXT: PseudoRET implicit $v8m8
744 %1:_(<vscale x 32 x s1>) = COPY $v0
745 %0:_(<vscale x 32 x s16>) = G_ANYEXT %1(<vscale x 32 x s1>)
746 $v8m8 = COPY %0(<vscale x 32 x s16>)
747 PseudoRET implicit $v8m8
# anyext nxv64i1 -> nxv64i8: same mask lowering — G_SELECT between splats of
# 1 and 0; the nxv64i8 result lives in $v8m8.
750 name: anyext_nxv64i8_nxv64i1
752 tracksRegLiveness: true
756 ; RV32-LABEL: name: anyext_nxv64i8_nxv64i1
759 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 64 x s1>) = COPY $v0
760 ; RV32-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
761 ; RV32-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 64 x s8>) = G_SPLAT_VECTOR [[C]](s32)
762 ; RV32-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 1
763 ; RV32-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 64 x s8>) = G_SPLAT_VECTOR [[C1]](s32)
764 ; RV32-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 64 x s8>) = G_SELECT [[COPY]](<vscale x 64 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
765 ; RV32-NEXT: $v8m8 = COPY [[SELECT]](<vscale x 64 x s8>)
766 ; RV32-NEXT: PseudoRET implicit $v8m8
768 ; RV64-LABEL: name: anyext_nxv64i8_nxv64i1
771 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 64 x s1>) = COPY $v0
772 ; RV64-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
773 ; RV64-NEXT: [[SPLAT_VECTOR:%[0-9]+]]:_(<vscale x 64 x s8>) = G_SPLAT_VECTOR [[C]](s64)
774 ; RV64-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
775 ; RV64-NEXT: [[SPLAT_VECTOR1:%[0-9]+]]:_(<vscale x 64 x s8>) = G_SPLAT_VECTOR [[C1]](s64)
776 ; RV64-NEXT: [[SELECT:%[0-9]+]]:_(<vscale x 64 x s8>) = G_SELECT [[COPY]](<vscale x 64 x s1>), [[SPLAT_VECTOR1]], [[SPLAT_VECTOR]]
777 ; RV64-NEXT: $v8m8 = COPY [[SELECT]](<vscale x 64 x s8>)
778 ; RV64-NEXT: PseudoRET implicit $v8m8
779 %1:_(<vscale x 64 x s1>) = COPY $v0
780 %0:_(<vscale x 64 x s8>) = G_ANYEXT %1(<vscale x 64 x s1>)
781 $v8m8 = COPY %0(<vscale x 64 x s8>)
782 PseudoRET implicit $v8m8
785 # Extend from s8 element vectors
# anyext nxv1i8 -> nxv1i16: legal type pair — G_ANYEXT survives legalization
# unchanged; result stays in a single register ($v8).
787 name: anyext_nxv1i16_nxv1i8
789 tracksRegLiveness: true
793 ; RV32-LABEL: name: anyext_nxv1i16_nxv1i8
796 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 1 x s8>) = COPY $v8
797 ; RV32-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 1 x s16>) = G_ANYEXT [[COPY]](<vscale x 1 x s8>)
798 ; RV32-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 1 x s16>)
799 ; RV32-NEXT: PseudoRET implicit $v8
801 ; RV64-LABEL: name: anyext_nxv1i16_nxv1i8
804 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 1 x s8>) = COPY $v8
805 ; RV64-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 1 x s16>) = G_ANYEXT [[COPY]](<vscale x 1 x s8>)
806 ; RV64-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 1 x s16>)
807 ; RV64-NEXT: PseudoRET implicit $v8
808 %1:_(<vscale x 1 x s8>) = COPY $v8
809 %0:_(<vscale x 1 x s16>) = G_ANYEXT %1(<vscale x 1 x s8>)
810 $v8 = COPY %0(<vscale x 1 x s16>)
811 PseudoRET implicit $v8
# anyext nxv1i8 -> nxv1i32: legal; G_ANYEXT passes through unchanged ($v8).
814 name: anyext_nxv1i32_nxv1i8
816 tracksRegLiveness: true
820 ; RV32-LABEL: name: anyext_nxv1i32_nxv1i8
823 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 1 x s8>) = COPY $v8
824 ; RV32-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 1 x s32>) = G_ANYEXT [[COPY]](<vscale x 1 x s8>)
825 ; RV32-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 1 x s32>)
826 ; RV32-NEXT: PseudoRET implicit $v8
828 ; RV64-LABEL: name: anyext_nxv1i32_nxv1i8
831 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 1 x s8>) = COPY $v8
832 ; RV64-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 1 x s32>) = G_ANYEXT [[COPY]](<vscale x 1 x s8>)
833 ; RV64-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 1 x s32>)
834 ; RV64-NEXT: PseudoRET implicit $v8
835 %1:_(<vscale x 1 x s8>) = COPY $v8
836 %0:_(<vscale x 1 x s32>) = G_ANYEXT %1(<vscale x 1 x s8>)
837 $v8 = COPY %0(<vscale x 1 x s32>)
838 PseudoRET implicit $v8
# anyext nxv1i8 -> nxv1i64: legal; G_ANYEXT passes through unchanged ($v8).
841 name: anyext_nxv1i64_nxv1i8
843 tracksRegLiveness: true
847 ; RV32-LABEL: name: anyext_nxv1i64_nxv1i8
850 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 1 x s8>) = COPY $v8
851 ; RV32-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 1 x s64>) = G_ANYEXT [[COPY]](<vscale x 1 x s8>)
852 ; RV32-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 1 x s64>)
853 ; RV32-NEXT: PseudoRET implicit $v8
855 ; RV64-LABEL: name: anyext_nxv1i64_nxv1i8
858 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 1 x s8>) = COPY $v8
859 ; RV64-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 1 x s64>) = G_ANYEXT [[COPY]](<vscale x 1 x s8>)
860 ; RV64-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 1 x s64>)
861 ; RV64-NEXT: PseudoRET implicit $v8
862 %1:_(<vscale x 1 x s8>) = COPY $v8
863 %0:_(<vscale x 1 x s64>) = G_ANYEXT %1(<vscale x 1 x s8>)
864 $v8 = COPY %0(<vscale x 1 x s64>)
865 PseudoRET implicit $v8
# anyext nxv2i8 -> nxv2i16: legal; G_ANYEXT passes through unchanged ($v8).
868 name: anyext_nxv2i16_nxv2i8
870 tracksRegLiveness: true
874 ; RV32-LABEL: name: anyext_nxv2i16_nxv2i8
877 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 2 x s8>) = COPY $v8
878 ; RV32-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 2 x s16>) = G_ANYEXT [[COPY]](<vscale x 2 x s8>)
879 ; RV32-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 2 x s16>)
880 ; RV32-NEXT: PseudoRET implicit $v8
882 ; RV64-LABEL: name: anyext_nxv2i16_nxv2i8
885 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 2 x s8>) = COPY $v8
886 ; RV64-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 2 x s16>) = G_ANYEXT [[COPY]](<vscale x 2 x s8>)
887 ; RV64-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 2 x s16>)
888 ; RV64-NEXT: PseudoRET implicit $v8
889 %1:_(<vscale x 2 x s8>) = COPY $v8
890 %0:_(<vscale x 2 x s16>) = G_ANYEXT %1(<vscale x 2 x s8>)
891 $v8 = COPY %0(<vscale x 2 x s16>)
892 PseudoRET implicit $v8
# anyext nxv2i8 -> nxv2i32: legal; G_ANYEXT passes through unchanged ($v8).
895 name: anyext_nxv2i32_nxv2i8
897 tracksRegLiveness: true
901 ; RV32-LABEL: name: anyext_nxv2i32_nxv2i8
904 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 2 x s8>) = COPY $v8
905 ; RV32-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 2 x s32>) = G_ANYEXT [[COPY]](<vscale x 2 x s8>)
906 ; RV32-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 2 x s32>)
907 ; RV32-NEXT: PseudoRET implicit $v8
909 ; RV64-LABEL: name: anyext_nxv2i32_nxv2i8
912 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 2 x s8>) = COPY $v8
913 ; RV64-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 2 x s32>) = G_ANYEXT [[COPY]](<vscale x 2 x s8>)
914 ; RV64-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 2 x s32>)
915 ; RV64-NEXT: PseudoRET implicit $v8
916 %1:_(<vscale x 2 x s8>) = COPY $v8
917 %0:_(<vscale x 2 x s32>) = G_ANYEXT %1(<vscale x 2 x s8>)
918 $v8 = COPY %0(<vscale x 2 x s32>)
919 PseudoRET implicit $v8
# anyext nxv2i8 -> nxv2i64: legal; result needs an LMUL=2 register group ($v8m2).
922 name: anyext_nxv2i64_nxv2i8
924 tracksRegLiveness: true
928 ; RV32-LABEL: name: anyext_nxv2i64_nxv2i8
931 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 2 x s8>) = COPY $v8
932 ; RV32-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 2 x s64>) = G_ANYEXT [[COPY]](<vscale x 2 x s8>)
933 ; RV32-NEXT: $v8m2 = COPY [[ANYEXT]](<vscale x 2 x s64>)
934 ; RV32-NEXT: PseudoRET implicit $v8m2
936 ; RV64-LABEL: name: anyext_nxv2i64_nxv2i8
939 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 2 x s8>) = COPY $v8
940 ; RV64-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 2 x s64>) = G_ANYEXT [[COPY]](<vscale x 2 x s8>)
941 ; RV64-NEXT: $v8m2 = COPY [[ANYEXT]](<vscale x 2 x s64>)
942 ; RV64-NEXT: PseudoRET implicit $v8m2
943 %1:_(<vscale x 2 x s8>) = COPY $v8
944 %0:_(<vscale x 2 x s64>) = G_ANYEXT %1(<vscale x 2 x s8>)
945 $v8m2 = COPY %0(<vscale x 2 x s64>)
946 PseudoRET implicit $v8m2
# anyext nxv4i8 -> nxv4i16: legal; G_ANYEXT passes through unchanged ($v8).
949 name: anyext_nxv4i16_nxv4i8
951 tracksRegLiveness: true
955 ; RV32-LABEL: name: anyext_nxv4i16_nxv4i8
958 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 4 x s8>) = COPY $v8
959 ; RV32-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 4 x s16>) = G_ANYEXT [[COPY]](<vscale x 4 x s8>)
960 ; RV32-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 4 x s16>)
961 ; RV32-NEXT: PseudoRET implicit $v8
963 ; RV64-LABEL: name: anyext_nxv4i16_nxv4i8
966 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 4 x s8>) = COPY $v8
967 ; RV64-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 4 x s16>) = G_ANYEXT [[COPY]](<vscale x 4 x s8>)
968 ; RV64-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 4 x s16>)
969 ; RV64-NEXT: PseudoRET implicit $v8
970 %1:_(<vscale x 4 x s8>) = COPY $v8
971 %0:_(<vscale x 4 x s16>) = G_ANYEXT %1(<vscale x 4 x s8>)
972 $v8 = COPY %0(<vscale x 4 x s16>)
973 PseudoRET implicit $v8
# anyext nxv4i8 -> nxv4i32: legal; result needs an LMUL=2 register group ($v8m2).
976 name: anyext_nxv4i32_nxv4i8
978 tracksRegLiveness: true
982 ; RV32-LABEL: name: anyext_nxv4i32_nxv4i8
985 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 4 x s8>) = COPY $v8
986 ; RV32-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 4 x s32>) = G_ANYEXT [[COPY]](<vscale x 4 x s8>)
987 ; RV32-NEXT: $v8m2 = COPY [[ANYEXT]](<vscale x 4 x s32>)
988 ; RV32-NEXT: PseudoRET implicit $v8m2
990 ; RV64-LABEL: name: anyext_nxv4i32_nxv4i8
993 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 4 x s8>) = COPY $v8
994 ; RV64-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 4 x s32>) = G_ANYEXT [[COPY]](<vscale x 4 x s8>)
995 ; RV64-NEXT: $v8m2 = COPY [[ANYEXT]](<vscale x 4 x s32>)
996 ; RV64-NEXT: PseudoRET implicit $v8m2
997 %1:_(<vscale x 4 x s8>) = COPY $v8
998 %0:_(<vscale x 4 x s32>) = G_ANYEXT %1(<vscale x 4 x s8>)
999 $v8m2 = COPY %0(<vscale x 4 x s32>)
1000 PseudoRET implicit $v8m2
# anyext nxv4i8 -> nxv4i64: legal; result needs an LMUL=4 register group ($v8m4).
1003 name: anyext_nxv4i64_nxv4i8
1005 tracksRegLiveness: true
1009 ; RV32-LABEL: name: anyext_nxv4i64_nxv4i8
1010 ; RV32: liveins: $v8
1012 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 4 x s8>) = COPY $v8
1013 ; RV32-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 4 x s64>) = G_ANYEXT [[COPY]](<vscale x 4 x s8>)
1014 ; RV32-NEXT: $v8m4 = COPY [[ANYEXT]](<vscale x 4 x s64>)
1015 ; RV32-NEXT: PseudoRET implicit $v8m4
1017 ; RV64-LABEL: name: anyext_nxv4i64_nxv4i8
1018 ; RV64: liveins: $v8
1020 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 4 x s8>) = COPY $v8
1021 ; RV64-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 4 x s64>) = G_ANYEXT [[COPY]](<vscale x 4 x s8>)
1022 ; RV64-NEXT: $v8m4 = COPY [[ANYEXT]](<vscale x 4 x s64>)
1023 ; RV64-NEXT: PseudoRET implicit $v8m4
1024 %1:_(<vscale x 4 x s8>) = COPY $v8
1025 %0:_(<vscale x 4 x s64>) = G_ANYEXT %1(<vscale x 4 x s8>)
1026 $v8m4 = COPY %0(<vscale x 4 x s64>)
1027 PseudoRET implicit $v8m4
# anyext nxv8i8 -> nxv8i16: legal; result needs an LMUL=2 register group ($v8m2).
1030 name: anyext_nxv8i16_nxv8i8
1032 tracksRegLiveness: true
1036 ; RV32-LABEL: name: anyext_nxv8i16_nxv8i8
1037 ; RV32: liveins: $v8
1039 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 8 x s8>) = COPY $v8
1040 ; RV32-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 8 x s16>) = G_ANYEXT [[COPY]](<vscale x 8 x s8>)
1041 ; RV32-NEXT: $v8m2 = COPY [[ANYEXT]](<vscale x 8 x s16>)
1042 ; RV32-NEXT: PseudoRET implicit $v8m2
1044 ; RV64-LABEL: name: anyext_nxv8i16_nxv8i8
1045 ; RV64: liveins: $v8
1047 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 8 x s8>) = COPY $v8
1048 ; RV64-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 8 x s16>) = G_ANYEXT [[COPY]](<vscale x 8 x s8>)
1049 ; RV64-NEXT: $v8m2 = COPY [[ANYEXT]](<vscale x 8 x s16>)
1050 ; RV64-NEXT: PseudoRET implicit $v8m2
1051 %1:_(<vscale x 8 x s8>) = COPY $v8
1052 %0:_(<vscale x 8 x s16>) = G_ANYEXT %1(<vscale x 8 x s8>)
1053 $v8m2 = COPY %0(<vscale x 8 x s16>)
1054 PseudoRET implicit $v8m2
# anyext nxv8i8 -> nxv8i32: legal; result needs an LMUL=4 register group ($v8m4).
1057 name: anyext_nxv8i32_nxv8i8
1059 tracksRegLiveness: true
1063 ; RV32-LABEL: name: anyext_nxv8i32_nxv8i8
1064 ; RV32: liveins: $v8
1066 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 8 x s8>) = COPY $v8
1067 ; RV32-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 8 x s32>) = G_ANYEXT [[COPY]](<vscale x 8 x s8>)
1068 ; RV32-NEXT: $v8m4 = COPY [[ANYEXT]](<vscale x 8 x s32>)
1069 ; RV32-NEXT: PseudoRET implicit $v8m4
1071 ; RV64-LABEL: name: anyext_nxv8i32_nxv8i8
1072 ; RV64: liveins: $v8
1074 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 8 x s8>) = COPY $v8
1075 ; RV64-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 8 x s32>) = G_ANYEXT [[COPY]](<vscale x 8 x s8>)
1076 ; RV64-NEXT: $v8m4 = COPY [[ANYEXT]](<vscale x 8 x s32>)
1077 ; RV64-NEXT: PseudoRET implicit $v8m4
1078 %1:_(<vscale x 8 x s8>) = COPY $v8
1079 %0:_(<vscale x 8 x s32>) = G_ANYEXT %1(<vscale x 8 x s8>)
1080 $v8m4 = COPY %0(<vscale x 8 x s32>)
1081 PseudoRET implicit $v8m4
# anyext nxv8i8 -> nxv8i64: legal; result needs an LMUL=8 register group ($v8m8).
1084 name: anyext_nxv8i64_nxv8i8
1086 tracksRegLiveness: true
1090 ; RV32-LABEL: name: anyext_nxv8i64_nxv8i8
1091 ; RV32: liveins: $v8
1093 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 8 x s8>) = COPY $v8
1094 ; RV32-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 8 x s64>) = G_ANYEXT [[COPY]](<vscale x 8 x s8>)
1095 ; RV32-NEXT: $v8m8 = COPY [[ANYEXT]](<vscale x 8 x s64>)
1096 ; RV32-NEXT: PseudoRET implicit $v8m8
1098 ; RV64-LABEL: name: anyext_nxv8i64_nxv8i8
1099 ; RV64: liveins: $v8
1101 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 8 x s8>) = COPY $v8
1102 ; RV64-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 8 x s64>) = G_ANYEXT [[COPY]](<vscale x 8 x s8>)
1103 ; RV64-NEXT: $v8m8 = COPY [[ANYEXT]](<vscale x 8 x s64>)
1104 ; RV64-NEXT: PseudoRET implicit $v8m8
1105 %1:_(<vscale x 8 x s8>) = COPY $v8
1106 %0:_(<vscale x 8 x s64>) = G_ANYEXT %1(<vscale x 8 x s8>)
1107 $v8m8 = COPY %0(<vscale x 8 x s64>)
1108 PseudoRET implicit $v8m8
# anyext nxv16i8 -> nxv16i16: legal; source in $v8m2, result in $v8m4.
1111 name: anyext_nxv16i16_nxv16i8
1113 tracksRegLiveness: true
1117 ; RV32-LABEL: name: anyext_nxv16i16_nxv16i8
1118 ; RV32: liveins: $v8
1120 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 16 x s8>) = COPY $v8m2
1121 ; RV32-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 16 x s16>) = G_ANYEXT [[COPY]](<vscale x 16 x s8>)
1122 ; RV32-NEXT: $v8m4 = COPY [[ANYEXT]](<vscale x 16 x s16>)
1123 ; RV32-NEXT: PseudoRET implicit $v8m4
1125 ; RV64-LABEL: name: anyext_nxv16i16_nxv16i8
1126 ; RV64: liveins: $v8
1128 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 16 x s8>) = COPY $v8m2
1129 ; RV64-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 16 x s16>) = G_ANYEXT [[COPY]](<vscale x 16 x s8>)
1130 ; RV64-NEXT: $v8m4 = COPY [[ANYEXT]](<vscale x 16 x s16>)
1131 ; RV64-NEXT: PseudoRET implicit $v8m4
1132 %1:_(<vscale x 16 x s8>) = COPY $v8m2
1133 %0:_(<vscale x 16 x s16>) = G_ANYEXT %1(<vscale x 16 x s8>)
1134 $v8m4 = COPY %0(<vscale x 16 x s16>)
1135 PseudoRET implicit $v8m4
# anyext nxv16i8 -> nxv16i32: legal; source in $v8m4, result in $v8m8.
1138 name: anyext_nxv16i32_nxv16i8
1140 tracksRegLiveness: true
1144 ; RV32-LABEL: name: anyext_nxv16i32_nxv16i8
1145 ; RV32: liveins: $v8
1147 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 16 x s8>) = COPY $v8m4
1148 ; RV32-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 16 x s32>) = G_ANYEXT [[COPY]](<vscale x 16 x s8>)
1149 ; RV32-NEXT: $v8m8 = COPY [[ANYEXT]](<vscale x 16 x s32>)
1150 ; RV32-NEXT: PseudoRET implicit $v8m8
1152 ; RV64-LABEL: name: anyext_nxv16i32_nxv16i8
1153 ; RV64: liveins: $v8
1155 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 16 x s8>) = COPY $v8m4
1156 ; RV64-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 16 x s32>) = G_ANYEXT [[COPY]](<vscale x 16 x s8>)
1157 ; RV64-NEXT: $v8m8 = COPY [[ANYEXT]](<vscale x 16 x s32>)
1158 ; RV64-NEXT: PseudoRET implicit $v8m8
1159 %1:_(<vscale x 16 x s8>) = COPY $v8m4
1160 %0:_(<vscale x 16 x s32>) = G_ANYEXT %1(<vscale x 16 x s8>)
1161 $v8m8 = COPY %0(<vscale x 16 x s32>)
1162 PseudoRET implicit $v8m8
# anyext nxv32i8 -> nxv32i16: legal; source in $v8m4, result in $v8m8.
1165 name: anyext_nxv32i16_nxv32i8
1167 tracksRegLiveness: true
1171 ; RV32-LABEL: name: anyext_nxv32i16_nxv32i8
1172 ; RV32: liveins: $v8
1174 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 32 x s8>) = COPY $v8m4
1175 ; RV32-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 32 x s16>) = G_ANYEXT [[COPY]](<vscale x 32 x s8>)
1176 ; RV32-NEXT: $v8m8 = COPY [[ANYEXT]](<vscale x 32 x s16>)
1177 ; RV32-NEXT: PseudoRET implicit $v8m8
1179 ; RV64-LABEL: name: anyext_nxv32i16_nxv32i8
1180 ; RV64: liveins: $v8
1182 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 32 x s8>) = COPY $v8m4
1183 ; RV64-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 32 x s16>) = G_ANYEXT [[COPY]](<vscale x 32 x s8>)
1184 ; RV64-NEXT: $v8m8 = COPY [[ANYEXT]](<vscale x 32 x s16>)
1185 ; RV64-NEXT: PseudoRET implicit $v8m8
1186 %1:_(<vscale x 32 x s8>) = COPY $v8m4
1187 %0:_(<vscale x 32 x s16>) = G_ANYEXT %1(<vscale x 32 x s8>)
1188 $v8m8 = COPY %0(<vscale x 32 x s16>)
1189 PseudoRET implicit $v8m8
1192 # Extend from s16 element vectors
# anyext nxv1i16 -> nxv1i32: legal; G_ANYEXT passes through unchanged ($v8).
1194 name: anyext_nxv1i32_nxv1i16
1196 tracksRegLiveness: true
1200 ; RV32-LABEL: name: anyext_nxv1i32_nxv1i16
1201 ; RV32: liveins: $v8
1203 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 1 x s16>) = COPY $v8
1204 ; RV32-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 1 x s32>) = G_ANYEXT [[COPY]](<vscale x 1 x s16>)
1205 ; RV32-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 1 x s32>)
1206 ; RV32-NEXT: PseudoRET implicit $v8
1208 ; RV64-LABEL: name: anyext_nxv1i32_nxv1i16
1209 ; RV64: liveins: $v8
1211 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 1 x s16>) = COPY $v8
1212 ; RV64-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 1 x s32>) = G_ANYEXT [[COPY]](<vscale x 1 x s16>)
1213 ; RV64-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 1 x s32>)
1214 ; RV64-NEXT: PseudoRET implicit $v8
1215 %1:_(<vscale x 1 x s16>) = COPY $v8
1216 %0:_(<vscale x 1 x s32>) = G_ANYEXT %1(<vscale x 1 x s16>)
1217 $v8 = COPY %0(<vscale x 1 x s32>)
1218 PseudoRET implicit $v8
# anyext nxv1i16 -> nxv1i64: legal; G_ANYEXT passes through unchanged ($v8).
1221 name: anyext_nxv1i64_nxv1i16
1223 tracksRegLiveness: true
1227 ; RV32-LABEL: name: anyext_nxv1i64_nxv1i16
1228 ; RV32: liveins: $v8
1230 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 1 x s16>) = COPY $v8
1231 ; RV32-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 1 x s64>) = G_ANYEXT [[COPY]](<vscale x 1 x s16>)
1232 ; RV32-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 1 x s64>)
1233 ; RV32-NEXT: PseudoRET implicit $v8
1235 ; RV64-LABEL: name: anyext_nxv1i64_nxv1i16
1236 ; RV64: liveins: $v8
1238 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 1 x s16>) = COPY $v8
1239 ; RV64-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 1 x s64>) = G_ANYEXT [[COPY]](<vscale x 1 x s16>)
1240 ; RV64-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 1 x s64>)
1241 ; RV64-NEXT: PseudoRET implicit $v8
1242 %1:_(<vscale x 1 x s16>) = COPY $v8
1243 %0:_(<vscale x 1 x s64>) = G_ANYEXT %1(<vscale x 1 x s16>)
1244 $v8 = COPY %0(<vscale x 1 x s64>)
1245 PseudoRET implicit $v8
# anyext nxv2i16 -> nxv2i32: legal; G_ANYEXT passes through unchanged ($v8).
1248 name: anyext_nxv2i32_nxv2i16
1250 tracksRegLiveness: true
1254 ; RV32-LABEL: name: anyext_nxv2i32_nxv2i16
1255 ; RV32: liveins: $v8
1257 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 2 x s16>) = COPY $v8
1258 ; RV32-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 2 x s32>) = G_ANYEXT [[COPY]](<vscale x 2 x s16>)
1259 ; RV32-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 2 x s32>)
1260 ; RV32-NEXT: PseudoRET implicit $v8
1262 ; RV64-LABEL: name: anyext_nxv2i32_nxv2i16
1263 ; RV64: liveins: $v8
1265 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 2 x s16>) = COPY $v8
1266 ; RV64-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 2 x s32>) = G_ANYEXT [[COPY]](<vscale x 2 x s16>)
1267 ; RV64-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 2 x s32>)
1268 ; RV64-NEXT: PseudoRET implicit $v8
1269 %1:_(<vscale x 2 x s16>) = COPY $v8
1270 %0:_(<vscale x 2 x s32>) = G_ANYEXT %1(<vscale x 2 x s16>)
1271 $v8 = COPY %0(<vscale x 2 x s32>)
1272 PseudoRET implicit $v8
# anyext nxv2i16 -> nxv2i64: legal; result needs an LMUL=2 register group ($v8m2).
1275 name: anyext_nxv2i64_nxv2i16
1277 tracksRegLiveness: true
1281 ; RV32-LABEL: name: anyext_nxv2i64_nxv2i16
1282 ; RV32: liveins: $v8
1284 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 2 x s16>) = COPY $v8
1285 ; RV32-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 2 x s64>) = G_ANYEXT [[COPY]](<vscale x 2 x s16>)
1286 ; RV32-NEXT: $v8m2 = COPY [[ANYEXT]](<vscale x 2 x s64>)
1287 ; RV32-NEXT: PseudoRET implicit $v8m2
1289 ; RV64-LABEL: name: anyext_nxv2i64_nxv2i16
1290 ; RV64: liveins: $v8
1292 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 2 x s16>) = COPY $v8
1293 ; RV64-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 2 x s64>) = G_ANYEXT [[COPY]](<vscale x 2 x s16>)
1294 ; RV64-NEXT: $v8m2 = COPY [[ANYEXT]](<vscale x 2 x s64>)
1295 ; RV64-NEXT: PseudoRET implicit $v8m2
1296 %1:_(<vscale x 2 x s16>) = COPY $v8
1297 %0:_(<vscale x 2 x s64>) = G_ANYEXT %1(<vscale x 2 x s16>)
1298 $v8m2 = COPY %0(<vscale x 2 x s64>)
1299 PseudoRET implicit $v8m2
# anyext nxv4i16 -> nxv4i32: legal; result needs an LMUL=2 register group ($v8m2).
1302 name: anyext_nxv4i32_nxv4i16
1304 tracksRegLiveness: true
1308 ; RV32-LABEL: name: anyext_nxv4i32_nxv4i16
1309 ; RV32: liveins: $v8
1311 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 4 x s16>) = COPY $v8
1312 ; RV32-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 4 x s32>) = G_ANYEXT [[COPY]](<vscale x 4 x s16>)
1313 ; RV32-NEXT: $v8m2 = COPY [[ANYEXT]](<vscale x 4 x s32>)
1314 ; RV32-NEXT: PseudoRET implicit $v8m2
1316 ; RV64-LABEL: name: anyext_nxv4i32_nxv4i16
1317 ; RV64: liveins: $v8
1319 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 4 x s16>) = COPY $v8
1320 ; RV64-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 4 x s32>) = G_ANYEXT [[COPY]](<vscale x 4 x s16>)
1321 ; RV64-NEXT: $v8m2 = COPY [[ANYEXT]](<vscale x 4 x s32>)
1322 ; RV64-NEXT: PseudoRET implicit $v8m2
1323 %1:_(<vscale x 4 x s16>) = COPY $v8
1324 %0:_(<vscale x 4 x s32>) = G_ANYEXT %1(<vscale x 4 x s16>)
1325 $v8m2 = COPY %0(<vscale x 4 x s32>)
1326 PseudoRET implicit $v8m2
# anyext nxv4i16 -> nxv4i64: legal; result needs an LMUL=4 register group ($v8m4).
1329 name: anyext_nxv4i64_nxv4i16
1331 tracksRegLiveness: true
1335 ; RV32-LABEL: name: anyext_nxv4i64_nxv4i16
1336 ; RV32: liveins: $v8
1338 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 4 x s16>) = COPY $v8
1339 ; RV32-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 4 x s64>) = G_ANYEXT [[COPY]](<vscale x 4 x s16>)
1340 ; RV32-NEXT: $v8m4 = COPY [[ANYEXT]](<vscale x 4 x s64>)
1341 ; RV32-NEXT: PseudoRET implicit $v8m4
1343 ; RV64-LABEL: name: anyext_nxv4i64_nxv4i16
1344 ; RV64: liveins: $v8
1346 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 4 x s16>) = COPY $v8
1347 ; RV64-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 4 x s64>) = G_ANYEXT [[COPY]](<vscale x 4 x s16>)
1348 ; RV64-NEXT: $v8m4 = COPY [[ANYEXT]](<vscale x 4 x s64>)
1349 ; RV64-NEXT: PseudoRET implicit $v8m4
1350 %1:_(<vscale x 4 x s16>) = COPY $v8
1351 %0:_(<vscale x 4 x s64>) = G_ANYEXT %1(<vscale x 4 x s16>)
1352 $v8m4 = COPY %0(<vscale x 4 x s64>)
1353 PseudoRET implicit $v8m4
# anyext nxv8i16 -> nxv8i32: legal; source in $v8m2, result in $v8m4.
1356 name: anyext_nxv8i32_nxv8i16
1358 tracksRegLiveness: true
1362 ; RV32-LABEL: name: anyext_nxv8i32_nxv8i16
1363 ; RV32: liveins: $v8
1365 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 8 x s16>) = COPY $v8m2
1366 ; RV32-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 8 x s32>) = G_ANYEXT [[COPY]](<vscale x 8 x s16>)
1367 ; RV32-NEXT: $v8m4 = COPY [[ANYEXT]](<vscale x 8 x s32>)
1368 ; RV32-NEXT: PseudoRET implicit $v8m4
1370 ; RV64-LABEL: name: anyext_nxv8i32_nxv8i16
1371 ; RV64: liveins: $v8
1373 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 8 x s16>) = COPY $v8m2
1374 ; RV64-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 8 x s32>) = G_ANYEXT [[COPY]](<vscale x 8 x s16>)
1375 ; RV64-NEXT: $v8m4 = COPY [[ANYEXT]](<vscale x 8 x s32>)
1376 ; RV64-NEXT: PseudoRET implicit $v8m4
1377 %1:_(<vscale x 8 x s16>) = COPY $v8m2
1378 %0:_(<vscale x 8 x s32>) = G_ANYEXT %1(<vscale x 8 x s16>)
1379 $v8m4 = COPY %0(<vscale x 8 x s32>)
1380 PseudoRET implicit $v8m4
# anyext nxv8i16 -> nxv8i64: legal; source in $v8m2, result in $v8m8.
1383 name: anyext_nxv8i64_nxv8i16
1385 tracksRegLiveness: true
1389 ; RV32-LABEL: name: anyext_nxv8i64_nxv8i16
1390 ; RV32: liveins: $v8
1392 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 8 x s16>) = COPY $v8m2
1393 ; RV32-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 8 x s64>) = G_ANYEXT [[COPY]](<vscale x 8 x s16>)
1394 ; RV32-NEXT: $v8m8 = COPY [[ANYEXT]](<vscale x 8 x s64>)
1395 ; RV32-NEXT: PseudoRET implicit $v8m8
1397 ; RV64-LABEL: name: anyext_nxv8i64_nxv8i16
1398 ; RV64: liveins: $v8
1400 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 8 x s16>) = COPY $v8m2
1401 ; RV64-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 8 x s64>) = G_ANYEXT [[COPY]](<vscale x 8 x s16>)
1402 ; RV64-NEXT: $v8m8 = COPY [[ANYEXT]](<vscale x 8 x s64>)
1403 ; RV64-NEXT: PseudoRET implicit $v8m8
1404 %1:_(<vscale x 8 x s16>) = COPY $v8m2
1405 %0:_(<vscale x 8 x s64>) = G_ANYEXT %1(<vscale x 8 x s16>)
1406 $v8m8 = COPY %0(<vscale x 8 x s64>)
1407 PseudoRET implicit $v8m8
# anyext nxv16i16 -> nxv16i32: legal; source in $v8m4, result in $v8m8.
1410 name: anyext_nxv16i32_nxv16i16
1412 tracksRegLiveness: true
1416 ; RV32-LABEL: name: anyext_nxv16i32_nxv16i16
1417 ; RV32: liveins: $v8
1419 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 16 x s16>) = COPY $v8m4
1420 ; RV32-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 16 x s32>) = G_ANYEXT [[COPY]](<vscale x 16 x s16>)
1421 ; RV32-NEXT: $v8m8 = COPY [[ANYEXT]](<vscale x 16 x s32>)
1422 ; RV32-NEXT: PseudoRET implicit $v8m8
1424 ; RV64-LABEL: name: anyext_nxv16i32_nxv16i16
1425 ; RV64: liveins: $v8
1427 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 16 x s16>) = COPY $v8m4
1428 ; RV64-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 16 x s32>) = G_ANYEXT [[COPY]](<vscale x 16 x s16>)
1429 ; RV64-NEXT: $v8m8 = COPY [[ANYEXT]](<vscale x 16 x s32>)
1430 ; RV64-NEXT: PseudoRET implicit $v8m8
1431 %1:_(<vscale x 16 x s16>) = COPY $v8m4
1432 %0:_(<vscale x 16 x s32>) = G_ANYEXT %1(<vscale x 16 x s16>)
1433 $v8m8 = COPY %0(<vscale x 16 x s32>)
1434 PseudoRET implicit $v8m8
1437 # Extend from s32 element vectors
# anyext nxv1i32 -> nxv1i64: legal; G_ANYEXT passes through unchanged ($v8).
1439 name: anyext_nxv1i64_nxv1i32
1441 tracksRegLiveness: true
1445 ; RV32-LABEL: name: anyext_nxv1i64_nxv1i32
1446 ; RV32: liveins: $v8
1448 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 1 x s32>) = COPY $v8
1449 ; RV32-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 1 x s64>) = G_ANYEXT [[COPY]](<vscale x 1 x s32>)
1450 ; RV32-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 1 x s64>)
1451 ; RV32-NEXT: PseudoRET implicit $v8
1453 ; RV64-LABEL: name: anyext_nxv1i64_nxv1i32
1454 ; RV64: liveins: $v8
1456 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 1 x s32>) = COPY $v8
1457 ; RV64-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 1 x s64>) = G_ANYEXT [[COPY]](<vscale x 1 x s32>)
1458 ; RV64-NEXT: $v8 = COPY [[ANYEXT]](<vscale x 1 x s64>)
1459 ; RV64-NEXT: PseudoRET implicit $v8
1460 %1:_(<vscale x 1 x s32>) = COPY $v8
1461 %0:_(<vscale x 1 x s64>) = G_ANYEXT %1(<vscale x 1 x s32>)
1462 $v8 = COPY %0(<vscale x 1 x s64>)
1463 PseudoRET implicit $v8
# anyext nxv2i32 -> nxv2i64: legal; result needs an LMUL=2 register group ($v8m2).
1466 name: anyext_nxv2i64_nxv2i32
1468 tracksRegLiveness: true
1472 ; RV32-LABEL: name: anyext_nxv2i64_nxv2i32
1473 ; RV32: liveins: $v8
1475 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 2 x s32>) = COPY $v8
1476 ; RV32-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 2 x s64>) = G_ANYEXT [[COPY]](<vscale x 2 x s32>)
1477 ; RV32-NEXT: $v8m2 = COPY [[ANYEXT]](<vscale x 2 x s64>)
1478 ; RV32-NEXT: PseudoRET implicit $v8m2
1480 ; RV64-LABEL: name: anyext_nxv2i64_nxv2i32
1481 ; RV64: liveins: $v8
1483 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 2 x s32>) = COPY $v8
1484 ; RV64-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 2 x s64>) = G_ANYEXT [[COPY]](<vscale x 2 x s32>)
1485 ; RV64-NEXT: $v8m2 = COPY [[ANYEXT]](<vscale x 2 x s64>)
1486 ; RV64-NEXT: PseudoRET implicit $v8m2
1487 %1:_(<vscale x 2 x s32>) = COPY $v8
1488 %0:_(<vscale x 2 x s64>) = G_ANYEXT %1(<vscale x 2 x s32>)
1489 $v8m2 = COPY %0(<vscale x 2 x s64>)
1490 PseudoRET implicit $v8m2
# anyext nxv4i32 -> nxv4i64: legal; source in $v8m2, result in $v8m4.
1493 name: anyext_nxv4i64_nxv4i32
1495 tracksRegLiveness: true
1499 ; RV32-LABEL: name: anyext_nxv4i64_nxv4i32
1500 ; RV32: liveins: $v8
1502 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 4 x s32>) = COPY $v8m2
1503 ; RV32-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 4 x s64>) = G_ANYEXT [[COPY]](<vscale x 4 x s32>)
1504 ; RV32-NEXT: $v8m4 = COPY [[ANYEXT]](<vscale x 4 x s64>)
1505 ; RV32-NEXT: PseudoRET implicit $v8m4
1507 ; RV64-LABEL: name: anyext_nxv4i64_nxv4i32
1508 ; RV64: liveins: $v8
1510 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 4 x s32>) = COPY $v8m2
1511 ; RV64-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 4 x s64>) = G_ANYEXT [[COPY]](<vscale x 4 x s32>)
1512 ; RV64-NEXT: $v8m4 = COPY [[ANYEXT]](<vscale x 4 x s64>)
1513 ; RV64-NEXT: PseudoRET implicit $v8m4
1514 %1:_(<vscale x 4 x s32>) = COPY $v8m2
1515 %0:_(<vscale x 4 x s64>) = G_ANYEXT %1(<vscale x 4 x s32>)
1516 $v8m4 = COPY %0(<vscale x 4 x s64>)
1517 PseudoRET implicit $v8m4
# anyext nxv8i32 -> nxv8i64: legal; source in $v8m4, result in $v8m8.
1520 name: anyext_nxv8i64_nxv8i32
1522 tracksRegLiveness: true
1526 ; RV32-LABEL: name: anyext_nxv8i64_nxv8i32
1527 ; RV32: liveins: $v8
1529 ; RV32-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 8 x s32>) = COPY $v8m4
1530 ; RV32-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 8 x s64>) = G_ANYEXT [[COPY]](<vscale x 8 x s32>)
1531 ; RV32-NEXT: $v8m8 = COPY [[ANYEXT]](<vscale x 8 x s64>)
1532 ; RV32-NEXT: PseudoRET implicit $v8m8
1534 ; RV64-LABEL: name: anyext_nxv8i64_nxv8i32
1535 ; RV64: liveins: $v8
1537 ; RV64-NEXT: [[COPY:%[0-9]+]]:_(<vscale x 8 x s32>) = COPY $v8m4
1538 ; RV64-NEXT: [[ANYEXT:%[0-9]+]]:_(<vscale x 8 x s64>) = G_ANYEXT [[COPY]](<vscale x 8 x s32>)
1539 ; RV64-NEXT: $v8m8 = COPY [[ANYEXT]](<vscale x 8 x s64>)
1540 ; RV64-NEXT: PseudoRET implicit $v8m8
1541 %1:_(<vscale x 8 x s32>) = COPY $v8m4
1542 %0:_(<vscale x 8 x s64>) = G_ANYEXT %1(<vscale x 8 x s32>)
1543 $v8m8 = COPY %0(<vscale x 8 x s64>)
1544 PseudoRET implicit $v8m8