; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv64 -mattr=+d,+zfh,+zvfh,+v -target-abi=lp64d \
; RUN:   -verify-machineinstrs < %s | FileCheck %s -check-prefixes=CHECK,RV64NOM
; RUN: llc -mtriple=riscv64 -mattr=+d,+zfh,+zvfh,+v,+m -target-abi=lp64d \
; RUN:   -verify-machineinstrs < %s | FileCheck %s -check-prefixes=CHECK,RV64M
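
; Extractelement lowering for scalable integer vectors on RV64: element zero,
; a constant index, and a variable index, across element widths i8 through i64
; and vector register group sizes.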
define signext i8 @extractelt_nxv1i8_0(<vscale x 1 x i8> %v) {
; CHECK-LABEL: extractelt_nxv1i8_0:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 1 x i8> %v, i32 0
  ret i8 %r
}

define signext i8 @extractelt_nxv1i8_imm(<vscale x 1 x i8> %v) {
; CHECK-LABEL: extractelt_nxv1i8_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
; CHECK-NEXT:    vslidedown.vi v8, v8, 2
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 1 x i8> %v, i32 2
  ret i8 %r
}

define signext i8 @extractelt_nxv1i8_idx(<vscale x 1 x i8> %v, i32 zeroext %idx) {
; CHECK-LABEL: extractelt_nxv1i8_idx:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e8, mf8, ta, ma
; CHECK-NEXT:    vslidedown.vx v8, v8, a0
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 1 x i8> %v, i32 %idx
  ret i8 %r
}

define signext i8 @extractelt_nxv2i8_0(<vscale x 2 x i8> %v) {
; CHECK-LABEL: extractelt_nxv2i8_0:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e8, mf4, ta, ma
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 2 x i8> %v, i32 0
  ret i8 %r
}

define signext i8 @extractelt_nxv2i8_imm(<vscale x 2 x i8> %v) {
; CHECK-LABEL: extractelt_nxv2i8_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e8, mf4, ta, ma
; CHECK-NEXT:    vslidedown.vi v8, v8, 2
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 2 x i8> %v, i32 2
  ret i8 %r
}

define signext i8 @extractelt_nxv2i8_idx(<vscale x 2 x i8> %v, i32 zeroext %idx) {
; CHECK-LABEL: extractelt_nxv2i8_idx:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e8, mf4, ta, ma
; CHECK-NEXT:    vslidedown.vx v8, v8, a0
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 2 x i8> %v, i32 %idx
  ret i8 %r
}

define signext i8 @extractelt_nxv4i8_0(<vscale x 4 x i8> %v) {
; CHECK-LABEL: extractelt_nxv4i8_0:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e8, mf2, ta, ma
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 4 x i8> %v, i32 0
  ret i8 %r
}

define signext i8 @extractelt_nxv4i8_imm(<vscale x 4 x i8> %v) {
; CHECK-LABEL: extractelt_nxv4i8_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e8, mf2, ta, ma
; CHECK-NEXT:    vslidedown.vi v8, v8, 2
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 4 x i8> %v, i32 2
  ret i8 %r
}

define signext i8 @extractelt_nxv4i8_idx(<vscale x 4 x i8> %v, i32 zeroext %idx) {
; CHECK-LABEL: extractelt_nxv4i8_idx:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e8, mf2, ta, ma
; CHECK-NEXT:    vslidedown.vx v8, v8, a0
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 4 x i8> %v, i32 %idx
  ret i8 %r
}

define signext i8 @extractelt_nxv8i8_0(<vscale x 8 x i8> %v) {
; CHECK-LABEL: extractelt_nxv8i8_0:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 8 x i8> %v, i32 0
  ret i8 %r
}

define signext i8 @extractelt_nxv8i8_imm(<vscale x 8 x i8> %v) {
; CHECK-LABEL: extractelt_nxv8i8_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
; CHECK-NEXT:    vslidedown.vi v8, v8, 2
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 8 x i8> %v, i32 2
  ret i8 %r
}

define signext i8 @extractelt_nxv8i8_idx(<vscale x 8 x i8> %v, i32 zeroext %idx) {
; CHECK-LABEL: extractelt_nxv8i8_idx:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
; CHECK-NEXT:    vslidedown.vx v8, v8, a0
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 8 x i8> %v, i32 %idx
  ret i8 %r
}

define signext i8 @extractelt_nxv16i8_0(<vscale x 16 x i8> %v) {
; CHECK-LABEL: extractelt_nxv16i8_0:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 16 x i8> %v, i32 0
  ret i8 %r
}

define signext i8 @extractelt_nxv16i8_imm(<vscale x 16 x i8> %v) {
; CHECK-LABEL: extractelt_nxv16i8_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
; CHECK-NEXT:    vslidedown.vi v8, v8, 2
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 16 x i8> %v, i32 2
  ret i8 %r
}

define signext i8 @extractelt_nxv16i8_idx(<vscale x 16 x i8> %v, i32 zeroext %idx) {
; CHECK-LABEL: extractelt_nxv16i8_idx:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e8, m2, ta, ma
; CHECK-NEXT:    vslidedown.vx v8, v8, a0
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 16 x i8> %v, i32 %idx
  ret i8 %r
}

define signext i8 @extractelt_nxv32i8_0(<vscale x 32 x i8> %v) {
; CHECK-LABEL: extractelt_nxv32i8_0:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 32 x i8> %v, i32 0
  ret i8 %r
}

define signext i8 @extractelt_nxv32i8_imm(<vscale x 32 x i8> %v) {
; CHECK-LABEL: extractelt_nxv32i8_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
; CHECK-NEXT:    vslidedown.vi v8, v8, 2
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 32 x i8> %v, i32 2
  ret i8 %r
}

define signext i8 @extractelt_nxv32i8_idx(<vscale x 32 x i8> %v, i32 zeroext %idx) {
; CHECK-LABEL: extractelt_nxv32i8_idx:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e8, m4, ta, ma
; CHECK-NEXT:    vslidedown.vx v8, v8, a0
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 32 x i8> %v, i32 %idx
  ret i8 %r
}

define signext i8 @extractelt_nxv64i8_0(<vscale x 64 x i8> %v) {
; CHECK-LABEL: extractelt_nxv64i8_0:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 64 x i8> %v, i32 0
  ret i8 %r
}

define signext i8 @extractelt_nxv64i8_imm(<vscale x 64 x i8> %v) {
; CHECK-LABEL: extractelt_nxv64i8_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
; CHECK-NEXT:    vslidedown.vi v8, v8, 2
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 64 x i8> %v, i32 2
  ret i8 %r
}

define signext i8 @extractelt_nxv64i8_idx(<vscale x 64 x i8> %v, i32 zeroext %idx) {
; CHECK-LABEL: extractelt_nxv64i8_idx:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e8, m8, ta, ma
; CHECK-NEXT:    vslidedown.vx v8, v8, a0
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 64 x i8> %v, i32 %idx
  ret i8 %r
}

define signext i16 @extractelt_nxv1i16_0(<vscale x 1 x i16> %v) {
; CHECK-LABEL: extractelt_nxv1i16_0:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e16, mf4, ta, ma
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 1 x i16> %v, i32 0
  ret i16 %r
}

define signext i16 @extractelt_nxv1i16_imm(<vscale x 1 x i16> %v) {
; CHECK-LABEL: extractelt_nxv1i16_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e16, mf4, ta, ma
; CHECK-NEXT:    vslidedown.vi v8, v8, 2
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 1 x i16> %v, i32 2
  ret i16 %r
}

define signext i16 @extractelt_nxv1i16_idx(<vscale x 1 x i16> %v, i32 zeroext %idx) {
; CHECK-LABEL: extractelt_nxv1i16_idx:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e16, mf4, ta, ma
; CHECK-NEXT:    vslidedown.vx v8, v8, a0
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 1 x i16> %v, i32 %idx
  ret i16 %r
}

define signext i16 @extractelt_nxv2i16_0(<vscale x 2 x i16> %v) {
; CHECK-LABEL: extractelt_nxv2i16_0:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e16, mf2, ta, ma
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 2 x i16> %v, i32 0
  ret i16 %r
}

define signext i16 @extractelt_nxv2i16_imm(<vscale x 2 x i16> %v) {
; CHECK-LABEL: extractelt_nxv2i16_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e16, mf2, ta, ma
; CHECK-NEXT:    vslidedown.vi v8, v8, 2
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 2 x i16> %v, i32 2
  ret i16 %r
}

define signext i16 @extractelt_nxv2i16_idx(<vscale x 2 x i16> %v, i32 zeroext %idx) {
; CHECK-LABEL: extractelt_nxv2i16_idx:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e16, mf2, ta, ma
; CHECK-NEXT:    vslidedown.vx v8, v8, a0
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 2 x i16> %v, i32 %idx
  ret i16 %r
}

define signext i16 @extractelt_nxv4i16_0(<vscale x 4 x i16> %v) {
; CHECK-LABEL: extractelt_nxv4i16_0:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e16, m1, ta, ma
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 4 x i16> %v, i32 0
  ret i16 %r
}

define signext i16 @extractelt_nxv4i16_imm(<vscale x 4 x i16> %v) {
; CHECK-LABEL: extractelt_nxv4i16_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e16, m1, ta, ma
; CHECK-NEXT:    vslidedown.vi v8, v8, 2
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 4 x i16> %v, i32 2
  ret i16 %r
}

define signext i16 @extractelt_nxv4i16_idx(<vscale x 4 x i16> %v, i32 zeroext %idx) {
; CHECK-LABEL: extractelt_nxv4i16_idx:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e16, m1, ta, ma
; CHECK-NEXT:    vslidedown.vx v8, v8, a0
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 4 x i16> %v, i32 %idx
  ret i16 %r
}

define signext i16 @extractelt_nxv8i16_0(<vscale x 8 x i16> %v) {
; CHECK-LABEL: extractelt_nxv8i16_0:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e16, m1, ta, ma
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 8 x i16> %v, i32 0
  ret i16 %r
}

define signext i16 @extractelt_nxv8i16_imm(<vscale x 8 x i16> %v) {
; CHECK-LABEL: extractelt_nxv8i16_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e16, m1, ta, ma
; CHECK-NEXT:    vslidedown.vi v8, v8, 2
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 8 x i16> %v, i32 2
  ret i16 %r
}

define signext i16 @extractelt_nxv8i16_idx(<vscale x 8 x i16> %v, i32 zeroext %idx) {
; CHECK-LABEL: extractelt_nxv8i16_idx:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e16, m2, ta, ma
; CHECK-NEXT:    vslidedown.vx v8, v8, a0
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 8 x i16> %v, i32 %idx
  ret i16 %r
}

define signext i16 @extractelt_nxv16i16_0(<vscale x 16 x i16> %v) {
; CHECK-LABEL: extractelt_nxv16i16_0:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e16, m1, ta, ma
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 16 x i16> %v, i32 0
  ret i16 %r
}

define signext i16 @extractelt_nxv16i16_imm(<vscale x 16 x i16> %v) {
; CHECK-LABEL: extractelt_nxv16i16_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e16, m1, ta, ma
; CHECK-NEXT:    vslidedown.vi v8, v8, 2
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 16 x i16> %v, i32 2
  ret i16 %r
}

define signext i16 @extractelt_nxv16i16_idx(<vscale x 16 x i16> %v, i32 zeroext %idx) {
; CHECK-LABEL: extractelt_nxv16i16_idx:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e16, m4, ta, ma
; CHECK-NEXT:    vslidedown.vx v8, v8, a0
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 16 x i16> %v, i32 %idx
  ret i16 %r
}

define signext i16 @extractelt_nxv32i16_0(<vscale x 32 x i16> %v) {
; CHECK-LABEL: extractelt_nxv32i16_0:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e16, m1, ta, ma
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 32 x i16> %v, i32 0
  ret i16 %r
}

define signext i16 @extractelt_nxv32i16_imm(<vscale x 32 x i16> %v) {
; CHECK-LABEL: extractelt_nxv32i16_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e16, m1, ta, ma
; CHECK-NEXT:    vslidedown.vi v8, v8, 2
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 32 x i16> %v, i32 2
  ret i16 %r
}

define signext i16 @extractelt_nxv32i16_idx(<vscale x 32 x i16> %v, i32 zeroext %idx) {
; CHECK-LABEL: extractelt_nxv32i16_idx:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e16, m8, ta, ma
; CHECK-NEXT:    vslidedown.vx v8, v8, a0
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 32 x i16> %v, i32 %idx
  ret i16 %r
}

define signext i32 @extractelt_nxv1i32_0(<vscale x 1 x i32> %v) {
; CHECK-LABEL: extractelt_nxv1i32_0:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 1 x i32> %v, i32 0
  ret i32 %r
}

define signext i32 @extractelt_nxv1i32_imm(<vscale x 1 x i32> %v) {
; CHECK-LABEL: extractelt_nxv1i32_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
; CHECK-NEXT:    vslidedown.vi v8, v8, 2
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 1 x i32> %v, i32 2
  ret i32 %r
}

define signext i32 @extractelt_nxv1i32_idx(<vscale x 1 x i32> %v, i32 zeroext %idx) {
; CHECK-LABEL: extractelt_nxv1i32_idx:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e32, mf2, ta, ma
; CHECK-NEXT:    vslidedown.vx v8, v8, a0
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 1 x i32> %v, i32 %idx
  ret i32 %r
}

define signext i32 @extractelt_nxv2i32_0(<vscale x 2 x i32> %v) {
; CHECK-LABEL: extractelt_nxv2i32_0:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 2 x i32> %v, i32 0
  ret i32 %r
}

define signext i32 @extractelt_nxv2i32_imm(<vscale x 2 x i32> %v) {
; CHECK-LABEL: extractelt_nxv2i32_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; CHECK-NEXT:    vslidedown.vi v8, v8, 2
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 2 x i32> %v, i32 2
  ret i32 %r
}

define signext i32 @extractelt_nxv2i32_idx(<vscale x 2 x i32> %v, i32 zeroext %idx) {
; CHECK-LABEL: extractelt_nxv2i32_idx:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; CHECK-NEXT:    vslidedown.vx v8, v8, a0
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 2 x i32> %v, i32 %idx
  ret i32 %r
}

define signext i32 @extractelt_nxv4i32_0(<vscale x 4 x i32> %v) {
; CHECK-LABEL: extractelt_nxv4i32_0:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 4 x i32> %v, i32 0
  ret i32 %r
}

define signext i32 @extractelt_nxv4i32_imm(<vscale x 4 x i32> %v) {
; CHECK-LABEL: extractelt_nxv4i32_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; CHECK-NEXT:    vslidedown.vi v8, v8, 2
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 4 x i32> %v, i32 2
  ret i32 %r
}

define signext i32 @extractelt_nxv4i32_idx(<vscale x 4 x i32> %v, i32 zeroext %idx) {
; CHECK-LABEL: extractelt_nxv4i32_idx:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e32, m2, ta, ma
; CHECK-NEXT:    vslidedown.vx v8, v8, a0
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 4 x i32> %v, i32 %idx
  ret i32 %r
}

define signext i32 @extractelt_nxv8i32_0(<vscale x 8 x i32> %v) {
; CHECK-LABEL: extractelt_nxv8i32_0:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 8 x i32> %v, i32 0
  ret i32 %r
}

define signext i32 @extractelt_nxv8i32_imm(<vscale x 8 x i32> %v) {
; CHECK-LABEL: extractelt_nxv8i32_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; CHECK-NEXT:    vslidedown.vi v8, v8, 2
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 8 x i32> %v, i32 2
  ret i32 %r
}

define signext i32 @extractelt_nxv8i32_idx(<vscale x 8 x i32> %v, i32 zeroext %idx) {
; CHECK-LABEL: extractelt_nxv8i32_idx:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e32, m4, ta, ma
; CHECK-NEXT:    vslidedown.vx v8, v8, a0
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 8 x i32> %v, i32 %idx
  ret i32 %r
}

define signext i32 @extractelt_nxv16i32_0(<vscale x 16 x i32> %v) {
; CHECK-LABEL: extractelt_nxv16i32_0:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 16 x i32> %v, i32 0
  ret i32 %r
}

define signext i32 @extractelt_nxv16i32_imm(<vscale x 16 x i32> %v) {
; CHECK-LABEL: extractelt_nxv16i32_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; CHECK-NEXT:    vslidedown.vi v8, v8, 2
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 16 x i32> %v, i32 2
  ret i32 %r
}

define signext i32 @extractelt_nxv16i32_idx(<vscale x 16 x i32> %v, i32 zeroext %idx) {
; CHECK-LABEL: extractelt_nxv16i32_idx:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e32, m8, ta, ma
; CHECK-NEXT:    vslidedown.vx v8, v8, a0
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 16 x i32> %v, i32 %idx
  ret i32 %r
}

define i64 @extractelt_nxv1i64_0(<vscale x 1 x i64> %v) {
; CHECK-LABEL: extractelt_nxv1i64_0:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 1 x i64> %v, i32 0
  ret i64 %r
}

define i64 @extractelt_nxv1i64_imm(<vscale x 1 x i64> %v) {
; CHECK-LABEL: extractelt_nxv1i64_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; CHECK-NEXT:    vslidedown.vi v8, v8, 2
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 1 x i64> %v, i32 2
  ret i64 %r
}

define i64 @extractelt_nxv1i64_idx(<vscale x 1 x i64> %v, i32 zeroext %idx) {
; CHECK-LABEL: extractelt_nxv1i64_idx:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; CHECK-NEXT:    vslidedown.vx v8, v8, a0
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 1 x i64> %v, i32 %idx
  ret i64 %r
}

define i64 @extractelt_nxv2i64_0(<vscale x 2 x i64> %v) {
; CHECK-LABEL: extractelt_nxv2i64_0:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 2 x i64> %v, i32 0
  ret i64 %r
}

define i64 @extractelt_nxv2i64_imm(<vscale x 2 x i64> %v) {
; CHECK-LABEL: extractelt_nxv2i64_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e64, m2, ta, ma
; CHECK-NEXT:    vslidedown.vi v8, v8, 2
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 2 x i64> %v, i32 2
  ret i64 %r
}

define i64 @extractelt_nxv2i64_idx(<vscale x 2 x i64> %v, i32 zeroext %idx) {
; CHECK-LABEL: extractelt_nxv2i64_idx:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e64, m2, ta, ma
; CHECK-NEXT:    vslidedown.vx v8, v8, a0
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 2 x i64> %v, i32 %idx
  ret i64 %r
}

define i64 @extractelt_nxv4i64_0(<vscale x 4 x i64> %v) {
; CHECK-LABEL: extractelt_nxv4i64_0:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 4 x i64> %v, i32 0
  ret i64 %r
}

define i64 @extractelt_nxv4i64_imm(<vscale x 4 x i64> %v) {
; CHECK-LABEL: extractelt_nxv4i64_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e64, m2, ta, ma
; CHECK-NEXT:    vslidedown.vi v8, v8, 2
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 4 x i64> %v, i32 2
  ret i64 %r
}

define i64 @extractelt_nxv4i64_idx(<vscale x 4 x i64> %v, i32 zeroext %idx) {
; CHECK-LABEL: extractelt_nxv4i64_idx:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e64, m4, ta, ma
; CHECK-NEXT:    vslidedown.vx v8, v8, a0
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 4 x i64> %v, i32 %idx
  ret i64 %r
}

define i64 @extractelt_nxv8i64_0(<vscale x 8 x i64> %v) {
; CHECK-LABEL: extractelt_nxv8i64_0:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 8 x i64> %v, i32 0
  ret i64 %r
}

define i64 @extractelt_nxv8i64_imm(<vscale x 8 x i64> %v) {
; CHECK-LABEL: extractelt_nxv8i64_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e64, m2, ta, ma
; CHECK-NEXT:    vslidedown.vi v8, v8, 2
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 8 x i64> %v, i32 2
  ret i64 %r
}

define i64 @extractelt_nxv8i64_idx(<vscale x 8 x i64> %v, i32 zeroext %idx) {
; CHECK-LABEL: extractelt_nxv8i64_idx:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e64, m8, ta, ma
; CHECK-NEXT:    vslidedown.vx v8, v8, a0
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 8 x i64> %v, i32 %idx
  ret i64 %r
}
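
; Extracts of a binary op with a splat operand: add/sub are folded into a
; scalar op on the extracted element under both run lines, while mul and
; division are only scalarized when the scalar M instructions are available
; (RV64M prefix).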
define i32 @extractelt_add_nxv4i32_splat(<vscale x 4 x i32> %x) {
; CHECK-LABEL: extractelt_add_nxv4i32_splat:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; CHECK-NEXT:    vslidedown.vi v8, v8, 2
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    addiw a0, a0, 3
; CHECK-NEXT:    ret
  %head = insertelement <vscale x 4 x i32> poison, i32 3, i32 0
  %splat = shufflevector <vscale x 4 x i32> %head, <vscale x 4 x i32> poison, <vscale x 4 x i32> zeroinitializer
  %bo = add <vscale x 4 x i32> %x, %splat
  %ext = extractelement <vscale x 4 x i32> %bo, i32 2
  ret i32 %ext
}

define i32 @extractelt_sub_nxv4i32_splat(<vscale x 4 x i32> %x) {
; CHECK-LABEL: extractelt_sub_nxv4i32_splat:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; CHECK-NEXT:    vslidedown.vi v8, v8, 1
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    li a1, 3
; CHECK-NEXT:    subw a0, a1, a0
; CHECK-NEXT:    ret
  %head = insertelement <vscale x 4 x i32> poison, i32 3, i32 0
  %splat = shufflevector <vscale x 4 x i32> %head, <vscale x 4 x i32> poison, <vscale x 4 x i32> zeroinitializer
  %bo = sub <vscale x 4 x i32> %splat, %x
  %ext = extractelement <vscale x 4 x i32> %bo, i32 1
  ret i32 %ext
}

define i32 @extractelt_mul_nxv4i32_splat(<vscale x 4 x i32> %x) {
; RV64NOM-LABEL: extractelt_mul_nxv4i32_splat:
; RV64NOM:       # %bb.0:
; RV64NOM-NEXT:    li a0, 3
; RV64NOM-NEXT:    vsetvli a1, zero, e32, m2, ta, ma
; RV64NOM-NEXT:    vmul.vx v8, v8, a0
; RV64NOM-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV64NOM-NEXT:    vslidedown.vi v8, v8, 3
; RV64NOM-NEXT:    vmv.x.s a0, v8
; RV64NOM-NEXT:    ret
;
; RV64M-LABEL: extractelt_mul_nxv4i32_splat:
; RV64M:       # %bb.0:
; RV64M-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV64M-NEXT:    vslidedown.vi v8, v8, 3
; RV64M-NEXT:    vmv.x.s a0, v8
; RV64M-NEXT:    slli a1, a0, 1
; RV64M-NEXT:    addw a0, a1, a0
; RV64M-NEXT:    ret
  %head = insertelement <vscale x 4 x i32> poison, i32 3, i32 0
  %splat = shufflevector <vscale x 4 x i32> %head, <vscale x 4 x i32> poison, <vscale x 4 x i32> zeroinitializer
  %bo = mul <vscale x 4 x i32> %x, %splat
  %ext = extractelement <vscale x 4 x i32> %bo, i32 3
  ret i32 %ext
}

define i32 @extractelt_sdiv_nxv4i32_splat(<vscale x 4 x i32> %x) {
; RV64NOM-LABEL: extractelt_sdiv_nxv4i32_splat:
; RV64NOM:       # %bb.0:
; RV64NOM-NEXT:    lui a0, 349525
; RV64NOM-NEXT:    addi a0, a0, 1366
; RV64NOM-NEXT:    vsetvli a1, zero, e32, m2, ta, ma
; RV64NOM-NEXT:    vmulh.vx v8, v8, a0
; RV64NOM-NEXT:    vsrl.vi v10, v8, 31
; RV64NOM-NEXT:    vadd.vv v8, v8, v10
; RV64NOM-NEXT:    vmv.x.s a0, v8
; RV64NOM-NEXT:    ret
;
; RV64M-LABEL: extractelt_sdiv_nxv4i32_splat:
; RV64M:       # %bb.0:
; RV64M-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV64M-NEXT:    vmv.x.s a0, v8
; RV64M-NEXT:    lui a1, 349525
; RV64M-NEXT:    addiw a1, a1, 1366
; RV64M-NEXT:    mul a0, a0, a1
; RV64M-NEXT:    srli a1, a0, 63
; RV64M-NEXT:    srli a0, a0, 32
; RV64M-NEXT:    addw a0, a0, a1
; RV64M-NEXT:    ret
  %head = insertelement <vscale x 4 x i32> poison, i32 3, i32 0
  %splat = shufflevector <vscale x 4 x i32> %head, <vscale x 4 x i32> poison, <vscale x 4 x i32> zeroinitializer
  %bo = sdiv <vscale x 4 x i32> %x, %splat
  %ext = extractelement <vscale x 4 x i32> %bo, i32 0
  ret i32 %ext
}

define i32 @extractelt_udiv_nxv4i32_splat(<vscale x 4 x i32> %x) {
; RV64NOM-LABEL: extractelt_udiv_nxv4i32_splat:
; RV64NOM:       # %bb.0:
; RV64NOM-NEXT:    lui a0, 349525
; RV64NOM-NEXT:    addi a0, a0, 1366
; RV64NOM-NEXT:    vsetvli a1, zero, e32, m2, ta, ma
; RV64NOM-NEXT:    vmulh.vx v8, v8, a0
; RV64NOM-NEXT:    vsrl.vi v10, v8, 31
; RV64NOM-NEXT:    vadd.vv v8, v8, v10
; RV64NOM-NEXT:    vmv.x.s a0, v8
; RV64NOM-NEXT:    ret
;
; RV64M-LABEL: extractelt_udiv_nxv4i32_splat:
; RV64M:       # %bb.0:
; RV64M-NEXT:    vsetivli zero, 1, e32, m1, ta, ma
; RV64M-NEXT:    vmv.x.s a0, v8
; RV64M-NEXT:    lui a1, 349525
; RV64M-NEXT:    addiw a1, a1, 1366
; RV64M-NEXT:    mul a0, a0, a1
; RV64M-NEXT:    srli a1, a0, 63
; RV64M-NEXT:    srli a0, a0, 32
; RV64M-NEXT:    addw a0, a0, a1
; RV64M-NEXT:    ret
  %head = insertelement <vscale x 4 x i32> poison, i32 3, i32 0
  %splat = shufflevector <vscale x 4 x i32> %head, <vscale x 4 x i32> poison, <vscale x 4 x i32> zeroinitializer
  %bo = sdiv <vscale x 4 x i32> %x, %splat
  %ext = extractelement <vscale x 4 x i32> %bo, i32 0
  ret i32 %ext
}
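
; <vscale x 16 x i64> is split across two m8 register groups (v8 and v16), so
; an extract whose index is not known to lie in the first group is done by
; spilling both groups to the stack and doing a scalar load.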
define i64 @extractelt_nxv16i64_0(<vscale x 16 x i64> %v) {
; CHECK-LABEL: extractelt_nxv16i64_0:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e64, m1, ta, ma
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 16 x i64> %v, i32 0
  ret i64 %r
}

define i64 @extractelt_nxv16i64_neg1(<vscale x 16 x i64> %v) {
; CHECK-LABEL: extractelt_nxv16i64_neg1:
; CHECK:       # %bb.0:
; CHECK-NEXT:    addi sp, sp, -80
; CHECK-NEXT:    .cfi_def_cfa_offset 80
; CHECK-NEXT:    sd ra, 72(sp) # 8-byte Folded Spill
; CHECK-NEXT:    sd s0, 64(sp) # 8-byte Folded Spill
; CHECK-NEXT:    .cfi_offset ra, -8
; CHECK-NEXT:    .cfi_offset s0, -16
; CHECK-NEXT:    addi s0, sp, 80
; CHECK-NEXT:    .cfi_def_cfa s0, 0
; CHECK-NEXT:    csrr a0, vlenb
; CHECK-NEXT:    slli a0, a0, 4
; CHECK-NEXT:    sub sp, sp, a0
; CHECK-NEXT:    andi sp, sp, -64
; CHECK-NEXT:    addi a0, sp, 64
; CHECK-NEXT:    vs8r.v v8, (a0)
; CHECK-NEXT:    csrr a2, vlenb
; CHECK-NEXT:    slli a1, a2, 3
; CHECK-NEXT:    add a3, a0, a1
; CHECK-NEXT:    li a1, -1
; CHECK-NEXT:    srli a1, a1, 32
; CHECK-NEXT:    slli a2, a2, 1
; CHECK-NEXT:    addi a2, a2, -1
; CHECK-NEXT:    vs8r.v v16, (a3)
; CHECK-NEXT:    bltu a2, a1, .LBB72_2
; CHECK-NEXT:  # %bb.1:
; CHECK-NEXT:    mv a2, a1
; CHECK-NEXT:  .LBB72_2:
; CHECK-NEXT:    slli a2, a2, 3
; CHECK-NEXT:    add a0, a0, a2
; CHECK-NEXT:    ld a0, 0(a0)
; CHECK-NEXT:    addi sp, s0, -80
; CHECK-NEXT:    ld ra, 72(sp) # 8-byte Folded Reload
; CHECK-NEXT:    ld s0, 64(sp) # 8-byte Folded Reload
; CHECK-NEXT:    addi sp, sp, 80
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 16 x i64> %v, i32 -1
  ret i64 %r
}

define i64 @extractelt_nxv16i64_imm(<vscale x 16 x i64> %v) {
; CHECK-LABEL: extractelt_nxv16i64_imm:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 1, e64, m2, ta, ma
; CHECK-NEXT:    vslidedown.vi v8, v8, 2
; CHECK-NEXT:    vmv.x.s a0, v8
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 16 x i64> %v, i32 2
  ret i64 %r
}

define i64 @extractelt_nxv16i64_idx(<vscale x 16 x i64> %v, i32 zeroext %idx) {
; CHECK-LABEL: extractelt_nxv16i64_idx:
; CHECK:       # %bb.0:
; CHECK-NEXT:    csrr a1, vlenb
; CHECK-NEXT:    slli a2, a1, 1
; CHECK-NEXT:    addi a2, a2, -1
; CHECK-NEXT:    bltu a0, a2, .LBB74_2
; CHECK-NEXT:  # %bb.1:
; CHECK-NEXT:    mv a0, a2
; CHECK-NEXT:  .LBB74_2:
; CHECK-NEXT:    addi sp, sp, -80
; CHECK-NEXT:    .cfi_def_cfa_offset 80
; CHECK-NEXT:    sd ra, 72(sp) # 8-byte Folded Spill
; CHECK-NEXT:    sd s0, 64(sp) # 8-byte Folded Spill
; CHECK-NEXT:    .cfi_offset ra, -8
; CHECK-NEXT:    .cfi_offset s0, -16
; CHECK-NEXT:    addi s0, sp, 80
; CHECK-NEXT:    .cfi_def_cfa s0, 0
; CHECK-NEXT:    csrr a2, vlenb
; CHECK-NEXT:    slli a2, a2, 4
; CHECK-NEXT:    sub sp, sp, a2
; CHECK-NEXT:    andi sp, sp, -64
; CHECK-NEXT:    slli a0, a0, 3
; CHECK-NEXT:    addi a2, sp, 64
; CHECK-NEXT:    add a0, a2, a0
; CHECK-NEXT:    vs8r.v v8, (a2)
; CHECK-NEXT:    slli a1, a1, 3
; CHECK-NEXT:    add a1, a2, a1
; CHECK-NEXT:    vs8r.v v16, (a1)
; CHECK-NEXT:    ld a0, 0(a0)
; CHECK-NEXT:    addi sp, s0, -80
; CHECK-NEXT:    ld ra, 72(sp) # 8-byte Folded Reload
; CHECK-NEXT:    ld s0, 64(sp) # 8-byte Folded Reload
; CHECK-NEXT:    addi sp, sp, 80
; CHECK-NEXT:    ret
  %r = extractelement <vscale x 16 x i64> %v, i32 %idx
  ret i64 %r
}