1 ; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
2 ; RUN: opt < %s -passes=instcombine -S | FileCheck %s
; RV32 (i32) and RV64 (i64) declarations of the vsetvli intrinsic.
; The operands appear to be (AVL, SEW code, LMUL code) — the test names in
; this file (e8m1, e16mf2, e64m8, ...) pair the 2nd constant with element
; width (0=e8, 1=e16, 2=e32, 3=e64) and the 3rd with LMUL (0..3 = m1..m8,
; 5..7 = mf2..mf8); the return value is the selected vector length (vl).
; NOTE(review): operand semantics inferred from the test names below —
; confirm against IntrinsicsRISCV.td if this file is regenerated.
declare i32 @llvm.riscv.vsetvli.i32(i32, i32, i32)
declare i64 @llvm.riscv.vsetvli.i64(i64, i64, i64)
7 define i32 @vsetvli_i32() nounwind #0 {
8 ; CHECK-LABEL: @vsetvli_i32(
10 ; CHECK-NEXT: [[TMP0:%.*]] = call i32 @llvm.riscv.vsetvli.i32(i32 1, i32 1, i32 1)
11 ; CHECK-NEXT: ret i32 [[TMP0]]
14 %0 = call i32 @llvm.riscv.vsetvli.i32(i32 1, i32 1, i32 1)
15 %1 = and i32 %0, 2147483647
19 define i64 @vsetvli_sext_i64() nounwind #0 {
20 ; CHECK-LABEL: @vsetvli_sext_i64(
22 ; CHECK-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 1, i64 1, i64 1)
23 ; CHECK-NEXT: ret i64 [[TMP0]]
26 %0 = call i64 @llvm.riscv.vsetvli.i64(i64 1, i64 1, i64 1)
27 %1 = trunc i64 %0 to i32
28 %2 = sext i32 %1 to i64
32 define i64 @vsetvli_zext_i64() nounwind #0 {
33 ; CHECK-LABEL: @vsetvli_zext_i64(
35 ; CHECK-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 1, i64 1, i64 1)
36 ; CHECK-NEXT: ret i64 [[TMP0]]
39 %0 = call i64 @llvm.riscv.vsetvli.i64(i64 1, i64 1, i64 1)
40 %1 = trunc i64 %0 to i32
41 %2 = zext i32 %1 to i64
45 define signext i32 @vsetvl_sext() nounwind #0 {
46 ; CHECK-LABEL: @vsetvl_sext(
47 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 1, i64 1, i64 1)
48 ; CHECK-NEXT: [[B:%.*]] = trunc nuw nsw i64 [[A]] to i32
49 ; CHECK-NEXT: ret i32 [[B]]
51 %a = call i64 @llvm.riscv.vsetvli(i64 1, i64 1, i64 1)
52 %b = trunc i64 %a to i32
56 define zeroext i32 @vsetvl_zext() nounwind #0 {
57 ; CHECK-LABEL: @vsetvl_zext(
58 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 1, i64 1, i64 1)
59 ; CHECK-NEXT: [[B:%.*]] = trunc nuw nsw i64 [[A]] to i32
60 ; CHECK-NEXT: ret i32 [[B]]
62 %a = call i64 @llvm.riscv.vsetvli(i64 1, i64 1, i64 1)
63 %b = trunc i64 %a to i32
67 define i32 @vsetvli_and17_i32() nounwind #0 {
68 ; CHECK-LABEL: @vsetvli_and17_i32(
70 ; CHECK-NEXT: [[TMP0:%.*]] = call i32 @llvm.riscv.vsetvli.i32(i32 1, i32 1, i32 1)
71 ; CHECK-NEXT: ret i32 [[TMP0]]
74 %0 = call i32 @llvm.riscv.vsetvli.i32(i32 1, i32 1, i32 1)
75 %1 = and i32 %0, 131071
79 define i64 @vsetvli_and17_i64() nounwind #0 {
80 ; CHECK-LABEL: @vsetvli_and17_i64(
82 ; CHECK-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 1, i64 1, i64 1)
83 ; CHECK-NEXT: ret i64 [[TMP0]]
86 %0 = call i64 @llvm.riscv.vsetvli.i64(i64 1, i64 1, i64 1)
87 %1 = and i64 %0, 131071
91 define i64 @vsetvl_e8m1_and14bits(i64 %avl) nounwind #0 {
92 ; CHECK-LABEL: @vsetvl_e8m1_and14bits(
93 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 0)
94 ; CHECK-NEXT: ret i64 [[A]]
96 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 0)
97 %b = and i64 %a, 16383
101 define i64 @vsetvl_e8m1_and13bits(i64 %avl) nounwind #0 {
102 ; CHECK-LABEL: @vsetvl_e8m1_and13bits(
103 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 0)
104 ; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
105 ; CHECK-NEXT: ret i64 [[B]]
107 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 0)
108 %b = and i64 %a, 8191
112 define i64 @vsetvl_e8m1_constant_avl() nounwind #0 {
113 ; CHECK-LABEL: @vsetvl_e8m1_constant_avl(
114 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 1, i64 0, i64 0)
115 ; CHECK-NEXT: ret i64 [[A]]
117 %a = call i64 @llvm.riscv.vsetvli(i64 1, i64 0, i64 0)
122 define i64 @vsetvl_e8m2_and15bits(i64 %avl) nounwind #0 {
123 ; CHECK-LABEL: @vsetvl_e8m2_and15bits(
124 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 1)
125 ; CHECK-NEXT: ret i64 [[A]]
127 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 1)
128 %b = and i64 %a, 32767
132 define i64 @vsetvl_e8m2_and14bits(i64 %avl) nounwind #0 {
133 ; CHECK-LABEL: @vsetvl_e8m2_and14bits(
134 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 1)
135 ; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
136 ; CHECK-NEXT: ret i64 [[B]]
138 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 1)
139 %b = and i64 %a, 16383
143 define i64 @vsetvl_e8m4_and16bits(i64 %avl) nounwind #0 {
144 ; CHECK-LABEL: @vsetvl_e8m4_and16bits(
145 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 2)
146 ; CHECK-NEXT: ret i64 [[A]]
148 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 2)
149 %b = and i64 %a, 65535
153 define i64 @vsetvl_e8m4_and15bits(i64 %avl) nounwind #0 {
154 ; CHECK-LABEL: @vsetvl_e8m4_and15bits(
155 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 2)
156 ; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 32767
157 ; CHECK-NEXT: ret i64 [[B]]
159 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 2)
160 %b = and i64 %a, 32767
164 define i64 @vsetvl_e8m8_and17bits(i64 %avl) nounwind #0 {
165 ; CHECK-LABEL: @vsetvl_e8m8_and17bits(
166 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 3)
167 ; CHECK-NEXT: ret i64 [[A]]
169 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 3)
170 %b = and i64 %a, 131071
174 define i64 @vsetvl_e8m8_and16bits(i64 %avl) nounwind #0 {
175 ; CHECK-LABEL: @vsetvl_e8m8_and16bits(
176 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 3)
177 ; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 65535
178 ; CHECK-NEXT: ret i64 [[B]]
180 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 3)
181 %b = and i64 %a, 65535
185 define i64 @vsetvl_e8mf2_and11bits(i64 %avl) nounwind #0 {
186 ; CHECK-LABEL: @vsetvl_e8mf2_and11bits(
187 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 5)
188 ; CHECK-NEXT: ret i64 [[A]]
190 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 5)
191 %b = and i64 %a, 2047
195 define i64 @vsetvl_e8mf2_and10bits(i64 %avl) nounwind #0 {
196 ; CHECK-LABEL: @vsetvl_e8mf2_and10bits(
197 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 5)
198 ; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
199 ; CHECK-NEXT: ret i64 [[B]]
201 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 5)
202 %b = and i64 %a, 1023
206 define i64 @vsetvl_e8mf4_and12bits(i64 %avl) nounwind #0 {
207 ; CHECK-LABEL: @vsetvl_e8mf4_and12bits(
208 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 6)
209 ; CHECK-NEXT: ret i64 [[A]]
211 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 6)
212 %b = and i64 %a, 4095
216 define i64 @vsetvl_e8mf4_and11bits(i64 %avl) nounwind #0 {
217 ; CHECK-LABEL: @vsetvl_e8mf4_and11bits(
218 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 6)
219 ; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
220 ; CHECK-NEXT: ret i64 [[B]]
222 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 6)
223 %b = and i64 %a, 2047
227 define i64 @vsetvl_e8mf8_and13bits(i64 %avl) nounwind #0 {
228 ; CHECK-LABEL: @vsetvl_e8mf8_and13bits(
229 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 7)
230 ; CHECK-NEXT: ret i64 [[A]]
232 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 7)
233 %b = and i64 %a, 8191
237 define i64 @vsetvl_e8mf8_and12bits(i64 %avl) nounwind #0 {
238 ; CHECK-LABEL: @vsetvl_e8mf8_and12bits(
239 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 7)
240 ; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
241 ; CHECK-NEXT: ret i64 [[B]]
243 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 7)
244 %b = and i64 %a, 4095
248 define i64 @vsetvl_e16m1_and13bits(i64 %avl) nounwind #0 {
249 ; CHECK-LABEL: @vsetvl_e16m1_and13bits(
250 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 0)
251 ; CHECK-NEXT: ret i64 [[A]]
253 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 0)
254 %b = and i64 %a, 8191
258 define i64 @vsetvl_e16m1_and12bits(i64 %avl) nounwind #0 {
259 ; CHECK-LABEL: @vsetvl_e16m1_and12bits(
260 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 0)
261 ; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
262 ; CHECK-NEXT: ret i64 [[B]]
264 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 0)
265 %b = and i64 %a, 4095
269 define i64 @vsetvl_e16m2_and14bits(i64 %avl) nounwind #0 {
270 ; CHECK-LABEL: @vsetvl_e16m2_and14bits(
271 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 1)
272 ; CHECK-NEXT: ret i64 [[A]]
274 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 1)
275 %b = and i64 %a, 16383
279 define i64 @vsetvl_e16m2_and13bits(i64 %avl) nounwind #0 {
280 ; CHECK-LABEL: @vsetvl_e16m2_and13bits(
281 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 1)
282 ; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
283 ; CHECK-NEXT: ret i64 [[B]]
285 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 1)
286 %b = and i64 %a, 8191
290 define i64 @vsetvl_e16m4_and15bits(i64 %avl) nounwind #0 {
291 ; CHECK-LABEL: @vsetvl_e16m4_and15bits(
292 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 2)
293 ; CHECK-NEXT: ret i64 [[A]]
295 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 2)
296 %b = and i64 %a, 32767
300 define i64 @vsetvl_e16m4_and14bits(i64 %avl) nounwind #0 {
301 ; CHECK-LABEL: @vsetvl_e16m4_and14bits(
302 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 2)
303 ; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
304 ; CHECK-NEXT: ret i64 [[B]]
306 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 2)
307 %b = and i64 %a, 16383
311 define i64 @vsetvl_e16m8_and16bits(i64 %avl) nounwind #0 {
312 ; CHECK-LABEL: @vsetvl_e16m8_and16bits(
313 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 3)
314 ; CHECK-NEXT: ret i64 [[A]]
316 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 3)
317 %b = and i64 %a, 65535
321 define i64 @vsetvl_e16m8_and15bits(i64 %avl) nounwind #0 {
322 ; CHECK-LABEL: @vsetvl_e16m8_and15bits(
323 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 3)
324 ; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 32767
325 ; CHECK-NEXT: ret i64 [[B]]
327 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 3)
328 %b = and i64 %a, 32767
332 define i64 @vsetvl_e16mf2_and10bits(i64 %avl) nounwind #0 {
333 ; CHECK-LABEL: @vsetvl_e16mf2_and10bits(
334 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 5)
335 ; CHECK-NEXT: ret i64 [[A]]
337 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 5)
338 %b = and i64 %a, 1023
342 define i64 @vsetvl_e16mf2_and9bits(i64 %avl) nounwind #0 {
343 ; CHECK-LABEL: @vsetvl_e16mf2_and9bits(
344 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 5)
345 ; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 511
346 ; CHECK-NEXT: ret i64 [[B]]
348 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 5)
353 define i64 @vsetvl_e16mf4_and11bits(i64 %avl) nounwind #0 {
354 ; CHECK-LABEL: @vsetvl_e16mf4_and11bits(
355 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 6)
356 ; CHECK-NEXT: ret i64 [[A]]
358 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 6)
359 %b = and i64 %a, 2047
363 define i64 @vsetvl_e16mf4_and10bits(i64 %avl) nounwind #0 {
364 ; CHECK-LABEL: @vsetvl_e16mf4_and10bits(
365 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 6)
366 ; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
367 ; CHECK-NEXT: ret i64 [[B]]
369 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 6)
370 %b = and i64 %a, 1023
374 define i64 @vsetvl_e16mf8_and12bits(i64 %avl) nounwind #0 {
375 ; CHECK-LABEL: @vsetvl_e16mf8_and12bits(
376 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 7)
377 ; CHECK-NEXT: ret i64 [[A]]
379 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 7)
380 %b = and i64 %a, 4095
384 define i64 @vsetvl_e16mf8_and11bits(i64 %avl) nounwind #0 {
385 ; CHECK-LABEL: @vsetvl_e16mf8_and11bits(
386 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 7)
387 ; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
388 ; CHECK-NEXT: ret i64 [[B]]
390 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 7)
391 %b = and i64 %a, 2047
395 define i64 @vsetvl_e32m1_and12bits(i64 %avl) nounwind #0 {
396 ; CHECK-LABEL: @vsetvl_e32m1_and12bits(
397 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 0)
398 ; CHECK-NEXT: ret i64 [[A]]
400 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 0)
401 %b = and i64 %a, 4095
405 define i64 @vsetvl_e32m1_and11bits(i64 %avl) nounwind #0 {
406 ; CHECK-LABEL: @vsetvl_e32m1_and11bits(
407 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 0)
408 ; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
409 ; CHECK-NEXT: ret i64 [[B]]
411 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 0)
412 %b = and i64 %a, 2047
416 define i64 @vsetvl_e32m2_and13bits(i64 %avl) nounwind #0 {
417 ; CHECK-LABEL: @vsetvl_e32m2_and13bits(
418 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 1)
419 ; CHECK-NEXT: ret i64 [[A]]
421 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 1)
422 %b = and i64 %a, 8191
426 define i64 @vsetvl_e32m2_and12bits(i64 %avl) nounwind #0 {
427 ; CHECK-LABEL: @vsetvl_e32m2_and12bits(
428 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 1)
429 ; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
430 ; CHECK-NEXT: ret i64 [[B]]
432 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 1)
433 %b = and i64 %a, 4095
437 define i64 @vsetvl_e32m4_and14bits(i64 %avl) nounwind #0 {
438 ; CHECK-LABEL: @vsetvl_e32m4_and14bits(
439 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 2)
440 ; CHECK-NEXT: ret i64 [[A]]
442 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 2)
443 %b = and i64 %a, 16383
447 define i64 @vsetvl_e32m4_and13bits(i64 %avl) nounwind #0 {
448 ; CHECK-LABEL: @vsetvl_e32m4_and13bits(
449 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 2)
450 ; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
451 ; CHECK-NEXT: ret i64 [[B]]
453 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 2)
454 %b = and i64 %a, 8191
458 define i64 @vsetvl_e32m8_and15bits(i64 %avl) nounwind #0 {
459 ; CHECK-LABEL: @vsetvl_e32m8_and15bits(
460 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 3)
461 ; CHECK-NEXT: ret i64 [[A]]
463 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 3)
464 %b = and i64 %a, 32767
468 define i64 @vsetvl_e32m8_and14bits(i64 %avl) nounwind #0 {
469 ; CHECK-LABEL: @vsetvl_e32m8_and14bits(
470 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 3)
471 ; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 16383
472 ; CHECK-NEXT: ret i64 [[B]]
474 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 3)
475 %b = and i64 %a, 16383
479 define i64 @vsetvl_e32mf2_and9bits(i64 %avl) nounwind #0 {
480 ; CHECK-LABEL: @vsetvl_e32mf2_and9bits(
481 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 5)
482 ; CHECK-NEXT: ret i64 [[A]]
484 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 5)
489 define i64 @vsetvl_e32mf2_and8bits(i64 %avl) nounwind #0 {
490 ; CHECK-LABEL: @vsetvl_e32mf2_and8bits(
491 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 5)
492 ; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 255
493 ; CHECK-NEXT: ret i64 [[B]]
495 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 5)
500 define i64 @vsetvl_e32mf4_and10bits(i64 %avl) nounwind #0 {
501 ; CHECK-LABEL: @vsetvl_e32mf4_and10bits(
502 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 6)
503 ; CHECK-NEXT: ret i64 [[A]]
505 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 6)
506 %b = and i64 %a, 1023
510 define i64 @vsetvl_e32mf4_and9bits(i64 %avl) nounwind #0 {
511 ; CHECK-LABEL: @vsetvl_e32mf4_and9bits(
512 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 6)
513 ; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 511
514 ; CHECK-NEXT: ret i64 [[B]]
516 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 6)
521 define i64 @vsetvl_e32mf8_and11bits(i64 %avl) nounwind #0 {
522 ; CHECK-LABEL: @vsetvl_e32mf8_and11bits(
523 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 7)
524 ; CHECK-NEXT: ret i64 [[A]]
526 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 7)
527 %b = and i64 %a, 2047
531 define i64 @vsetvl_e32mf8_and10bits(i64 %avl) nounwind #0 {
532 ; CHECK-LABEL: @vsetvl_e32mf8_and10bits(
533 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 7)
534 ; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
535 ; CHECK-NEXT: ret i64 [[B]]
537 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 7)
538 %b = and i64 %a, 1023
542 define i64 @vsetvl_e64m1_and11bits(i64 %avl) nounwind #0 {
543 ; CHECK-LABEL: @vsetvl_e64m1_and11bits(
544 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 0)
545 ; CHECK-NEXT: ret i64 [[A]]
547 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 0)
548 %b = and i64 %a, 2047
552 define i64 @vsetvl_e64m1_and10bits(i64 %avl) nounwind #0 {
553 ; CHECK-LABEL: @vsetvl_e64m1_and10bits(
554 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 0)
555 ; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 1023
556 ; CHECK-NEXT: ret i64 [[B]]
558 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 0)
559 %b = and i64 %a, 1023
563 define i64 @vsetvl_e64m2_and12bits(i64 %avl) nounwind #0 {
564 ; CHECK-LABEL: @vsetvl_e64m2_and12bits(
565 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 1)
566 ; CHECK-NEXT: ret i64 [[A]]
568 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 1)
569 %b = and i64 %a, 4095
573 define i64 @vsetvl_e64m2_and11bits(i64 %avl) nounwind #0 {
574 ; CHECK-LABEL: @vsetvl_e64m2_and11bits(
575 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 1)
576 ; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 2047
577 ; CHECK-NEXT: ret i64 [[B]]
579 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 1)
580 %b = and i64 %a, 2047
584 define i64 @vsetvl_e64m4_and13bits(i64 %avl) nounwind #0 {
585 ; CHECK-LABEL: @vsetvl_e64m4_and13bits(
586 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 2)
587 ; CHECK-NEXT: ret i64 [[A]]
589 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 2)
590 %b = and i64 %a, 8191
594 define i64 @vsetvl_e64m4_and12bits(i64 %avl) nounwind #0 {
595 ; CHECK-LABEL: @vsetvl_e64m4_and12bits(
596 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 2)
597 ; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 4095
598 ; CHECK-NEXT: ret i64 [[B]]
600 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 2)
601 %b = and i64 %a, 4095
605 define i64 @vsetvl_e64m8_and14bits(i64 %avl) nounwind #0 {
606 ; CHECK-LABEL: @vsetvl_e64m8_and14bits(
607 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 3)
608 ; CHECK-NEXT: ret i64 [[A]]
610 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 3)
611 %b = and i64 %a, 16383
615 define i64 @vsetvl_e64m8_and13bits(i64 %avl) nounwind #0 {
616 ; CHECK-LABEL: @vsetvl_e64m8_and13bits(
617 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 3)
618 ; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 8191
619 ; CHECK-NEXT: ret i64 [[B]]
621 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 3)
622 %b = and i64 %a, 8191
626 define i64 @vsetvl_e64mf2_and8bits(i64 %avl) nounwind #0 {
627 ; CHECK-LABEL: @vsetvl_e64mf2_and8bits(
628 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 5)
629 ; CHECK-NEXT: ret i64 [[A]]
631 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 5)
636 define i64 @vsetvl_e64mf2_and7bits(i64 %avl) nounwind #0 {
637 ; CHECK-LABEL: @vsetvl_e64mf2_and7bits(
638 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 5)
639 ; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 127
640 ; CHECK-NEXT: ret i64 [[B]]
642 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 5)
647 define i64 @vsetvl_e64mf4_and9bits(i64 %avl) nounwind #0 {
648 ; CHECK-LABEL: @vsetvl_e64mf4_and9bits(
649 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 6)
650 ; CHECK-NEXT: ret i64 [[A]]
652 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 6)
657 define i64 @vsetvl_e64mf4_and8bits(i64 %avl) nounwind #0 {
658 ; CHECK-LABEL: @vsetvl_e64mf4_and8bits(
659 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 6)
660 ; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 255
661 ; CHECK-NEXT: ret i64 [[B]]
663 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 6)
668 define i64 @vsetvl_e64mf8_and10bits(i64 %avl) nounwind #0 {
669 ; CHECK-LABEL: @vsetvl_e64mf8_and10bits(
670 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 7)
671 ; CHECK-NEXT: ret i64 [[A]]
673 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 7)
674 %b = and i64 %a, 1023
678 define i64 @vsetvl_e64mf8_and9bits(i64 %avl) nounwind #0 {
679 ; CHECK-LABEL: @vsetvl_e64mf8_and9bits(
680 ; CHECK-NEXT: [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 7)
681 ; CHECK-NEXT: [[B:%.*]] = and i64 [[A]], 511
682 ; CHECK-NEXT: ret i64 [[B]]
684 %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 7)
; Every test function carries #0: vscale_range(2,1024) gives instcombine an
; upper bound on vscale, and therefore on the vl value vsetvli can return.
; That bound is what lets the wide 'and' masks (and the trunc/sext and
; trunc/zext round-trips) in the tests above be proven redundant and folded
; away, while the masks one bit narrower are kept.
attributes #0 = { vscale_range(2,1024) }