; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv32 -mattr=+d,+zfh,+zvfh,+v -verify-machineinstrs < %s | FileCheck %s
; RUN: llc -mtriple=riscv64 -mattr=+d,+zfh,+zvfh,+v -verify-machineinstrs < %s | FileCheck %s
; Extending loads of 2-element vectors: i1/i8 sources widened to i16/i32/i64.
define <2 x i16> @sextload_v2i1_v2i16(ptr %x) {
; CHECK-LABEL: sextload_v2i1_v2i16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
; CHECK-NEXT:    vlm.v v0, (a0)
; CHECK-NEXT:    vmv.v.i v8, 0
; CHECK-NEXT:    vmerge.vim v8, v8, -1, v0
; CHECK-NEXT:    ret
  %y = load <2 x i1>, ptr %x
  %z = sext <2 x i1> %y to <2 x i16>
  ret <2 x i16> %z
}

define <2 x i16> @sextload_v2i8_v2i16(ptr %x) {
; CHECK-LABEL: sextload_v2i8_v2i16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
; CHECK-NEXT:    vle8.v v9, (a0)
; CHECK-NEXT:    vsext.vf2 v8, v9
; CHECK-NEXT:    ret
  %y = load <2 x i8>, ptr %x
  %z = sext <2 x i8> %y to <2 x i16>
  ret <2 x i16> %z
}

define <2 x i16> @zextload_v2i8_v2i16(ptr %x) {
; CHECK-LABEL: zextload_v2i8_v2i16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
; CHECK-NEXT:    vle8.v v9, (a0)
; CHECK-NEXT:    vzext.vf2 v8, v9
; CHECK-NEXT:    ret
  %y = load <2 x i8>, ptr %x
  %z = zext <2 x i8> %y to <2 x i16>
  ret <2 x i16> %z
}

define <2 x i32> @sextload_v2i8_v2i32(ptr %x) {
; CHECK-LABEL: sextload_v2i8_v2i32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
; CHECK-NEXT:    vle8.v v9, (a0)
; CHECK-NEXT:    vsext.vf4 v8, v9
; CHECK-NEXT:    ret
  %y = load <2 x i8>, ptr %x
  %z = sext <2 x i8> %y to <2 x i32>
  ret <2 x i32> %z
}

define <2 x i32> @zextload_v2i8_v2i32(ptr %x) {
; CHECK-LABEL: zextload_v2i8_v2i32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
; CHECK-NEXT:    vle8.v v9, (a0)
; CHECK-NEXT:    vzext.vf4 v8, v9
; CHECK-NEXT:    ret
  %y = load <2 x i8>, ptr %x
  %z = zext <2 x i8> %y to <2 x i32>
  ret <2 x i32> %z
}

define <2 x i64> @sextload_v2i8_v2i64(ptr %x) {
; CHECK-LABEL: sextload_v2i8_v2i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
; CHECK-NEXT:    vle8.v v9, (a0)
; CHECK-NEXT:    vsext.vf8 v8, v9
; CHECK-NEXT:    ret
  %y = load <2 x i8>, ptr %x
  %z = sext <2 x i8> %y to <2 x i64>
  ret <2 x i64> %z
}

define <2 x i64> @zextload_v2i8_v2i64(ptr %x) {
; CHECK-LABEL: zextload_v2i8_v2i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
; CHECK-NEXT:    vle8.v v9, (a0)
; CHECK-NEXT:    vzext.vf8 v8, v9
; CHECK-NEXT:    ret
  %y = load <2 x i8>, ptr %x
  %z = zext <2 x i8> %y to <2 x i64>
  ret <2 x i64> %z
}
; Extending loads of 4 x i8 widened to i16/i32/i64.
define <4 x i16> @sextload_v4i8_v4i16(ptr %x) {
; CHECK-LABEL: sextload_v4i8_v4i16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
; CHECK-NEXT:    vle8.v v9, (a0)
; CHECK-NEXT:    vsext.vf2 v8, v9
; CHECK-NEXT:    ret
  %y = load <4 x i8>, ptr %x
  %z = sext <4 x i8> %y to <4 x i16>
  ret <4 x i16> %z
}

define <4 x i16> @zextload_v4i8_v4i16(ptr %x) {
; CHECK-LABEL: zextload_v4i8_v4i16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
; CHECK-NEXT:    vle8.v v9, (a0)
; CHECK-NEXT:    vzext.vf2 v8, v9
; CHECK-NEXT:    ret
  %y = load <4 x i8>, ptr %x
  %z = zext <4 x i8> %y to <4 x i16>
  ret <4 x i16> %z
}

define <4 x i32> @sextload_v4i8_v4i32(ptr %x) {
; CHECK-LABEL: sextload_v4i8_v4i32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
; CHECK-NEXT:    vle8.v v9, (a0)
; CHECK-NEXT:    vsext.vf4 v8, v9
; CHECK-NEXT:    ret
  %y = load <4 x i8>, ptr %x
  %z = sext <4 x i8> %y to <4 x i32>
  ret <4 x i32> %z
}

define <4 x i32> @zextload_v4i8_v4i32(ptr %x) {
; CHECK-LABEL: zextload_v4i8_v4i32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
; CHECK-NEXT:    vle8.v v9, (a0)
; CHECK-NEXT:    vzext.vf4 v8, v9
; CHECK-NEXT:    ret
  %y = load <4 x i8>, ptr %x
  %z = zext <4 x i8> %y to <4 x i32>
  ret <4 x i32> %z
}

define <4 x i64> @sextload_v4i8_v4i64(ptr %x) {
; CHECK-LABEL: sextload_v4i8_v4i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
; CHECK-NEXT:    vle8.v v10, (a0)
; CHECK-NEXT:    vsext.vf8 v8, v10
; CHECK-NEXT:    ret
  %y = load <4 x i8>, ptr %x
  %z = sext <4 x i8> %y to <4 x i64>
  ret <4 x i64> %z
}

define <4 x i64> @zextload_v4i8_v4i64(ptr %x) {
; CHECK-LABEL: zextload_v4i8_v4i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
; CHECK-NEXT:    vle8.v v10, (a0)
; CHECK-NEXT:    vzext.vf8 v8, v10
; CHECK-NEXT:    ret
  %y = load <4 x i8>, ptr %x
  %z = zext <4 x i8> %y to <4 x i64>
  ret <4 x i64> %z
}
; Extending loads of 8 x i8 widened to i16/i32/i64.
define <8 x i16> @sextload_v8i8_v8i16(ptr %x) {
; CHECK-LABEL: sextload_v8i8_v8i16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
; CHECK-NEXT:    vle8.v v9, (a0)
; CHECK-NEXT:    vsext.vf2 v8, v9
; CHECK-NEXT:    ret
  %y = load <8 x i8>, ptr %x
  %z = sext <8 x i8> %y to <8 x i16>
  ret <8 x i16> %z
}

define <8 x i16> @zextload_v8i8_v8i16(ptr %x) {
; CHECK-LABEL: zextload_v8i8_v8i16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
; CHECK-NEXT:    vle8.v v9, (a0)
; CHECK-NEXT:    vzext.vf2 v8, v9
; CHECK-NEXT:    ret
  %y = load <8 x i8>, ptr %x
  %z = zext <8 x i8> %y to <8 x i16>
  ret <8 x i16> %z
}

define <8 x i32> @sextload_v8i8_v8i32(ptr %x) {
; CHECK-LABEL: sextload_v8i8_v8i32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
; CHECK-NEXT:    vle8.v v10, (a0)
; CHECK-NEXT:    vsext.vf4 v8, v10
; CHECK-NEXT:    ret
  %y = load <8 x i8>, ptr %x
  %z = sext <8 x i8> %y to <8 x i32>
  ret <8 x i32> %z
}

define <8 x i32> @zextload_v8i8_v8i32(ptr %x) {
; CHECK-LABEL: zextload_v8i8_v8i32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
; CHECK-NEXT:    vle8.v v10, (a0)
; CHECK-NEXT:    vzext.vf4 v8, v10
; CHECK-NEXT:    ret
  %y = load <8 x i8>, ptr %x
  %z = zext <8 x i8> %y to <8 x i32>
  ret <8 x i32> %z
}

define <8 x i64> @sextload_v8i8_v8i64(ptr %x) {
; CHECK-LABEL: sextload_v8i8_v8i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
; CHECK-NEXT:    vle8.v v12, (a0)
; CHECK-NEXT:    vsext.vf8 v8, v12
; CHECK-NEXT:    ret
  %y = load <8 x i8>, ptr %x
  %z = sext <8 x i8> %y to <8 x i64>
  ret <8 x i64> %z
}

define <8 x i64> @zextload_v8i8_v8i64(ptr %x) {
; CHECK-LABEL: zextload_v8i8_v8i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
; CHECK-NEXT:    vle8.v v12, (a0)
; CHECK-NEXT:    vzext.vf8 v8, v12
; CHECK-NEXT:    ret
  %y = load <8 x i8>, ptr %x
  %z = zext <8 x i8> %y to <8 x i64>
  ret <8 x i64> %z
}
; Extending loads of 16 x i8 widened to i16/i32/i64.
define <16 x i16> @sextload_v16i8_v16i16(ptr %x) {
; CHECK-LABEL: sextload_v16i8_v16i16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 16, e16, m2, ta, ma
; CHECK-NEXT:    vle8.v v10, (a0)
; CHECK-NEXT:    vsext.vf2 v8, v10
; CHECK-NEXT:    ret
  %y = load <16 x i8>, ptr %x
  %z = sext <16 x i8> %y to <16 x i16>
  ret <16 x i16> %z
}

define <16 x i16> @zextload_v16i8_v16i16(ptr %x) {
; CHECK-LABEL: zextload_v16i8_v16i16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 16, e16, m2, ta, ma
; CHECK-NEXT:    vle8.v v10, (a0)
; CHECK-NEXT:    vzext.vf2 v8, v10
; CHECK-NEXT:    ret
  %y = load <16 x i8>, ptr %x
  %z = zext <16 x i8> %y to <16 x i16>
  ret <16 x i16> %z
}

define <16 x i32> @sextload_v16i8_v16i32(ptr %x) {
; CHECK-LABEL: sextload_v16i8_v16i32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
; CHECK-NEXT:    vle8.v v12, (a0)
; CHECK-NEXT:    vsext.vf4 v8, v12
; CHECK-NEXT:    ret
  %y = load <16 x i8>, ptr %x
  %z = sext <16 x i8> %y to <16 x i32>
  ret <16 x i32> %z
}

define <16 x i32> @zextload_v16i8_v16i32(ptr %x) {
; CHECK-LABEL: zextload_v16i8_v16i32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
; CHECK-NEXT:    vle8.v v12, (a0)
; CHECK-NEXT:    vzext.vf4 v8, v12
; CHECK-NEXT:    ret
  %y = load <16 x i8>, ptr %x
  %z = zext <16 x i8> %y to <16 x i32>
  ret <16 x i32> %z
}

define <16 x i64> @sextload_v16i8_v16i64(ptr %x) {
; CHECK-LABEL: sextload_v16i8_v16i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
; CHECK-NEXT:    vle8.v v16, (a0)
; CHECK-NEXT:    vsext.vf8 v8, v16
; CHECK-NEXT:    ret
  %y = load <16 x i8>, ptr %x
  %z = sext <16 x i8> %y to <16 x i64>
  ret <16 x i64> %z
}

define <16 x i64> @zextload_v16i8_v16i64(ptr %x) {
; CHECK-LABEL: zextload_v16i8_v16i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
; CHECK-NEXT:    vle8.v v16, (a0)
; CHECK-NEXT:    vzext.vf8 v8, v16
; CHECK-NEXT:    ret
  %y = load <16 x i8>, ptr %x
  %z = zext <16 x i8> %y to <16 x i64>
  ret <16 x i64> %z
}
; Truncating stores: narrow the element type, then store the narrow vector.
define void @truncstore_v2i8_v2i1(<2 x i8> %x, ptr %z) {
; CHECK-LABEL: truncstore_v2i8_v2i1:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
; CHECK-NEXT:    vand.vi v8, v8, 1
; CHECK-NEXT:    vmsne.vi v0, v8, 0
; CHECK-NEXT:    vmv.v.i v8, 0
; CHECK-NEXT:    vmerge.vim v8, v8, 1, v0
; CHECK-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
; CHECK-NEXT:    vmv.v.i v9, 0
; CHECK-NEXT:    vsetivli zero, 2, e8, mf2, tu, ma
; CHECK-NEXT:    vmv.v.v v9, v8
; CHECK-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
; CHECK-NEXT:    vmsne.vi v8, v9, 0
; CHECK-NEXT:    vsm.v v8, (a0)
; CHECK-NEXT:    ret
  %y = trunc <2 x i8> %x to <2 x i1>
  store <2 x i1> %y, ptr %z
  ret void
}

define void @truncstore_v2i16_v2i8(<2 x i16> %x, ptr %z) {
; CHECK-LABEL: truncstore_v2i16_v2i8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
; CHECK-NEXT:    vnsrl.wi v8, v8, 0
; CHECK-NEXT:    vse8.v v8, (a0)
; CHECK-NEXT:    ret
  %y = trunc <2 x i16> %x to <2 x i8>
  store <2 x i8> %y, ptr %z
  ret void
}
define <2 x i32> @sextload_v2i16_v2i32(ptr %x) {
; CHECK-LABEL: sextload_v2i16_v2i32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
; CHECK-NEXT:    vle16.v v9, (a0)
; CHECK-NEXT:    vsext.vf2 v8, v9
; CHECK-NEXT:    ret
  %y = load <2 x i16>, ptr %x
  %z = sext <2 x i16> %y to <2 x i32>
  ret <2 x i32> %z
}

define <2 x i32> @zextload_v2i16_v2i32(ptr %x) {
; CHECK-LABEL: zextload_v2i16_v2i32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
; CHECK-NEXT:    vle16.v v9, (a0)
; CHECK-NEXT:    vzext.vf2 v8, v9
; CHECK-NEXT:    ret
  %y = load <2 x i16>, ptr %x
  %z = zext <2 x i16> %y to <2 x i32>
  ret <2 x i32> %z
}

define <2 x i64> @sextload_v2i16_v2i64(ptr %x) {
; CHECK-LABEL: sextload_v2i16_v2i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
; CHECK-NEXT:    vle16.v v9, (a0)
; CHECK-NEXT:    vsext.vf4 v8, v9
; CHECK-NEXT:    ret
  %y = load <2 x i16>, ptr %x
  %z = sext <2 x i16> %y to <2 x i64>
  ret <2 x i64> %z
}

define <2 x i64> @zextload_v2i16_v2i64(ptr %x) {
; CHECK-LABEL: zextload_v2i16_v2i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
; CHECK-NEXT:    vle16.v v9, (a0)
; CHECK-NEXT:    vzext.vf4 v8, v9
; CHECK-NEXT:    ret
  %y = load <2 x i16>, ptr %x
  %z = zext <2 x i16> %y to <2 x i64>
  ret <2 x i64> %z
}

define void @truncstore_v4i16_v4i8(<4 x i16> %x, ptr %z) {
; CHECK-LABEL: truncstore_v4i16_v4i8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 4, e8, mf4, ta, ma
; CHECK-NEXT:    vnsrl.wi v8, v8, 0
; CHECK-NEXT:    vse8.v v8, (a0)
; CHECK-NEXT:    ret
  %y = trunc <4 x i16> %x to <4 x i8>
  store <4 x i8> %y, ptr %z
  ret void
}
define <4 x i32> @sextload_v4i16_v4i32(ptr %x) {
; CHECK-LABEL: sextload_v4i16_v4i32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
; CHECK-NEXT:    vle16.v v9, (a0)
; CHECK-NEXT:    vsext.vf2 v8, v9
; CHECK-NEXT:    ret
  %y = load <4 x i16>, ptr %x
  %z = sext <4 x i16> %y to <4 x i32>
  ret <4 x i32> %z
}

define <4 x i32> @zextload_v4i16_v4i32(ptr %x) {
; CHECK-LABEL: zextload_v4i16_v4i32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
; CHECK-NEXT:    vle16.v v9, (a0)
; CHECK-NEXT:    vzext.vf2 v8, v9
; CHECK-NEXT:    ret
  %y = load <4 x i16>, ptr %x
  %z = zext <4 x i16> %y to <4 x i32>
  ret <4 x i32> %z
}

define <4 x i64> @sextload_v4i16_v4i64(ptr %x) {
; CHECK-LABEL: sextload_v4i16_v4i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
; CHECK-NEXT:    vle16.v v10, (a0)
; CHECK-NEXT:    vsext.vf4 v8, v10
; CHECK-NEXT:    ret
  %y = load <4 x i16>, ptr %x
  %z = sext <4 x i16> %y to <4 x i64>
  ret <4 x i64> %z
}

define <4 x i64> @zextload_v4i16_v4i64(ptr %x) {
; CHECK-LABEL: zextload_v4i16_v4i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
; CHECK-NEXT:    vle16.v v10, (a0)
; CHECK-NEXT:    vzext.vf4 v8, v10
; CHECK-NEXT:    ret
  %y = load <4 x i16>, ptr %x
  %z = zext <4 x i16> %y to <4 x i64>
  ret <4 x i64> %z
}

define void @truncstore_v8i16_v8i8(<8 x i16> %x, ptr %z) {
; CHECK-LABEL: truncstore_v8i16_v8i8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
; CHECK-NEXT:    vnsrl.wi v8, v8, 0
; CHECK-NEXT:    vse8.v v8, (a0)
; CHECK-NEXT:    ret
  %y = trunc <8 x i16> %x to <8 x i8>
  store <8 x i8> %y, ptr %z
  ret void
}
define <8 x i32> @sextload_v8i16_v8i32(ptr %x) {
; CHECK-LABEL: sextload_v8i16_v8i32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
; CHECK-NEXT:    vle16.v v10, (a0)
; CHECK-NEXT:    vsext.vf2 v8, v10
; CHECK-NEXT:    ret
  %y = load <8 x i16>, ptr %x
  %z = sext <8 x i16> %y to <8 x i32>
  ret <8 x i32> %z
}

define <8 x i32> @zextload_v8i16_v8i32(ptr %x) {
; CHECK-LABEL: zextload_v8i16_v8i32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
; CHECK-NEXT:    vle16.v v10, (a0)
; CHECK-NEXT:    vzext.vf2 v8, v10
; CHECK-NEXT:    ret
  %y = load <8 x i16>, ptr %x
  %z = zext <8 x i16> %y to <8 x i32>
  ret <8 x i32> %z
}

define <8 x i64> @sextload_v8i16_v8i64(ptr %x) {
; CHECK-LABEL: sextload_v8i16_v8i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
; CHECK-NEXT:    vle16.v v12, (a0)
; CHECK-NEXT:    vsext.vf4 v8, v12
; CHECK-NEXT:    ret
  %y = load <8 x i16>, ptr %x
  %z = sext <8 x i16> %y to <8 x i64>
  ret <8 x i64> %z
}

define <8 x i64> @zextload_v8i16_v8i64(ptr %x) {
; CHECK-LABEL: zextload_v8i16_v8i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
; CHECK-NEXT:    vle16.v v12, (a0)
; CHECK-NEXT:    vzext.vf4 v8, v12
; CHECK-NEXT:    ret
  %y = load <8 x i16>, ptr %x
  %z = zext <8 x i16> %y to <8 x i64>
  ret <8 x i64> %z
}

define void @truncstore_v16i16_v16i8(<16 x i16> %x, ptr %z) {
; CHECK-LABEL: truncstore_v16i16_v16i8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 16, e8, m1, ta, ma
; CHECK-NEXT:    vnsrl.wi v10, v8, 0
; CHECK-NEXT:    vse8.v v10, (a0)
; CHECK-NEXT:    ret
  %y = trunc <16 x i16> %x to <16 x i8>
  store <16 x i8> %y, ptr %z
  ret void
}
define <16 x i32> @sextload_v16i16_v16i32(ptr %x) {
; CHECK-LABEL: sextload_v16i16_v16i32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
; CHECK-NEXT:    vle16.v v12, (a0)
; CHECK-NEXT:    vsext.vf2 v8, v12
; CHECK-NEXT:    ret
  %y = load <16 x i16>, ptr %x
  %z = sext <16 x i16> %y to <16 x i32>
  ret <16 x i32> %z
}

define <16 x i32> @zextload_v16i16_v16i32(ptr %x) {
; CHECK-LABEL: zextload_v16i16_v16i32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
; CHECK-NEXT:    vle16.v v12, (a0)
; CHECK-NEXT:    vzext.vf2 v8, v12
; CHECK-NEXT:    ret
  %y = load <16 x i16>, ptr %x
  %z = zext <16 x i16> %y to <16 x i32>
  ret <16 x i32> %z
}

define <16 x i64> @sextload_v16i16_v16i64(ptr %x) {
; CHECK-LABEL: sextload_v16i16_v16i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
; CHECK-NEXT:    vle16.v v16, (a0)
; CHECK-NEXT:    vsext.vf4 v8, v16
; CHECK-NEXT:    ret
  %y = load <16 x i16>, ptr %x
  %z = sext <16 x i16> %y to <16 x i64>
  ret <16 x i64> %z
}

define <16 x i64> @zextload_v16i16_v16i64(ptr %x) {
; CHECK-LABEL: zextload_v16i16_v16i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
; CHECK-NEXT:    vle16.v v16, (a0)
; CHECK-NEXT:    vzext.vf4 v8, v16
; CHECK-NEXT:    ret
  %y = load <16 x i16>, ptr %x
  %z = zext <16 x i16> %y to <16 x i64>
  ret <16 x i64> %z
}
define void @truncstore_v2i32_v2i8(<2 x i32> %x, ptr %z) {
; CHECK-LABEL: truncstore_v2i32_v2i8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
; CHECK-NEXT:    vnsrl.wi v8, v8, 0
; CHECK-NEXT:    vsetvli zero, zero, e8, mf8, ta, ma
; CHECK-NEXT:    vnsrl.wi v8, v8, 0
; CHECK-NEXT:    vse8.v v8, (a0)
; CHECK-NEXT:    ret
  %y = trunc <2 x i32> %x to <2 x i8>
  store <2 x i8> %y, ptr %z
  ret void
}

define void @truncstore_v2i32_v2i16(<2 x i32> %x, ptr %z) {
; CHECK-LABEL: truncstore_v2i32_v2i16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e16, mf4, ta, ma
; CHECK-NEXT:    vnsrl.wi v8, v8, 0
; CHECK-NEXT:    vse16.v v8, (a0)
; CHECK-NEXT:    ret
  %y = trunc <2 x i32> %x to <2 x i16>
  store <2 x i16> %y, ptr %z
  ret void
}

define <2 x i64> @sextload_v2i32_v2i64(ptr %x) {
; CHECK-LABEL: sextload_v2i32_v2i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
; CHECK-NEXT:    vle32.v v9, (a0)
; CHECK-NEXT:    vsext.vf2 v8, v9
; CHECK-NEXT:    ret
  %y = load <2 x i32>, ptr %x
  %z = sext <2 x i32> %y to <2 x i64>
  ret <2 x i64> %z
}

define <2 x i64> @zextload_v2i32_v2i64(ptr %x) {
; CHECK-LABEL: zextload_v2i32_v2i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
; CHECK-NEXT:    vle32.v v9, (a0)
; CHECK-NEXT:    vzext.vf2 v8, v9
; CHECK-NEXT:    ret
  %y = load <2 x i32>, ptr %x
  %z = zext <2 x i32> %y to <2 x i64>
  ret <2 x i64> %z
}
define void @truncstore_v4i32_v4i8(<4 x i32> %x, ptr %z) {
; CHECK-LABEL: truncstore_v4i32_v4i8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
; CHECK-NEXT:    vnsrl.wi v8, v8, 0
; CHECK-NEXT:    vsetvli zero, zero, e8, mf4, ta, ma
; CHECK-NEXT:    vnsrl.wi v8, v8, 0
; CHECK-NEXT:    vse8.v v8, (a0)
; CHECK-NEXT:    ret
  %y = trunc <4 x i32> %x to <4 x i8>
  store <4 x i8> %y, ptr %z
  ret void
}

define void @truncstore_v4i32_v4i16(<4 x i32> %x, ptr %z) {
; CHECK-LABEL: truncstore_v4i32_v4i16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
; CHECK-NEXT:    vnsrl.wi v8, v8, 0
; CHECK-NEXT:    vse16.v v8, (a0)
; CHECK-NEXT:    ret
  %y = trunc <4 x i32> %x to <4 x i16>
  store <4 x i16> %y, ptr %z
  ret void
}

define <4 x i64> @sextload_v4i32_v4i64(ptr %x) {
; CHECK-LABEL: sextload_v4i32_v4i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
; CHECK-NEXT:    vle32.v v10, (a0)
; CHECK-NEXT:    vsext.vf2 v8, v10
; CHECK-NEXT:    ret
  %y = load <4 x i32>, ptr %x
  %z = sext <4 x i32> %y to <4 x i64>
  ret <4 x i64> %z
}

define <4 x i64> @zextload_v4i32_v4i64(ptr %x) {
; CHECK-LABEL: zextload_v4i32_v4i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
; CHECK-NEXT:    vle32.v v10, (a0)
; CHECK-NEXT:    vzext.vf2 v8, v10
; CHECK-NEXT:    ret
  %y = load <4 x i32>, ptr %x
  %z = zext <4 x i32> %y to <4 x i64>
  ret <4 x i64> %z
}
define void @truncstore_v8i32_v8i8(<8 x i32> %x, ptr %z) {
; CHECK-LABEL: truncstore_v8i32_v8i8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
; CHECK-NEXT:    vnsrl.wi v10, v8, 0
; CHECK-NEXT:    vsetvli zero, zero, e8, mf2, ta, ma
; CHECK-NEXT:    vnsrl.wi v8, v10, 0
; CHECK-NEXT:    vse8.v v8, (a0)
; CHECK-NEXT:    ret
  %y = trunc <8 x i32> %x to <8 x i8>
  store <8 x i8> %y, ptr %z
  ret void
}

define void @truncstore_v8i32_v8i16(<8 x i32> %x, ptr %z) {
; CHECK-LABEL: truncstore_v8i32_v8i16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
; CHECK-NEXT:    vnsrl.wi v10, v8, 0
; CHECK-NEXT:    vse16.v v10, (a0)
; CHECK-NEXT:    ret
  %y = trunc <8 x i32> %x to <8 x i16>
  store <8 x i16> %y, ptr %z
  ret void
}

define <8 x i64> @sextload_v8i32_v8i64(ptr %x) {
; CHECK-LABEL: sextload_v8i32_v8i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
; CHECK-NEXT:    vle32.v v12, (a0)
; CHECK-NEXT:    vsext.vf2 v8, v12
; CHECK-NEXT:    ret
  %y = load <8 x i32>, ptr %x
  %z = sext <8 x i32> %y to <8 x i64>
  ret <8 x i64> %z
}

define <8 x i64> @zextload_v8i32_v8i64(ptr %x) {
; CHECK-LABEL: zextload_v8i32_v8i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
; CHECK-NEXT:    vle32.v v12, (a0)
; CHECK-NEXT:    vzext.vf2 v8, v12
; CHECK-NEXT:    ret
  %y = load <8 x i32>, ptr %x
  %z = zext <8 x i32> %y to <8 x i64>
  ret <8 x i64> %z
}
define void @truncstore_v16i32_v16i8(<16 x i32> %x, ptr %z) {
; CHECK-LABEL: truncstore_v16i32_v16i8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 16, e16, m2, ta, ma
; CHECK-NEXT:    vnsrl.wi v12, v8, 0
; CHECK-NEXT:    vsetvli zero, zero, e8, m1, ta, ma
; CHECK-NEXT:    vnsrl.wi v8, v12, 0
; CHECK-NEXT:    vse8.v v8, (a0)
; CHECK-NEXT:    ret
  %y = trunc <16 x i32> %x to <16 x i8>
  store <16 x i8> %y, ptr %z
  ret void
}

define void @truncstore_v16i32_v16i16(<16 x i32> %x, ptr %z) {
; CHECK-LABEL: truncstore_v16i32_v16i16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 16, e16, m2, ta, ma
; CHECK-NEXT:    vnsrl.wi v12, v8, 0
; CHECK-NEXT:    vse16.v v12, (a0)
; CHECK-NEXT:    ret
  %y = trunc <16 x i32> %x to <16 x i16>
  store <16 x i16> %y, ptr %z
  ret void
}

define <16 x i64> @sextload_v16i32_v16i64(ptr %x) {
; CHECK-LABEL: sextload_v16i32_v16i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
; CHECK-NEXT:    vle32.v v16, (a0)
; CHECK-NEXT:    vsext.vf2 v8, v16
; CHECK-NEXT:    ret
  %y = load <16 x i32>, ptr %x
  %z = sext <16 x i32> %y to <16 x i64>
  ret <16 x i64> %z
}

define <16 x i64> @zextload_v16i32_v16i64(ptr %x) {
; CHECK-LABEL: zextload_v16i32_v16i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 16, e64, m8, ta, ma
; CHECK-NEXT:    vle32.v v16, (a0)
; CHECK-NEXT:    vzext.vf2 v8, v16
; CHECK-NEXT:    ret
  %y = load <16 x i32>, ptr %x
  %z = zext <16 x i32> %y to <16 x i64>
  ret <16 x i64> %z
}
; i64-source truncating stores narrow in halves: e64 -> e32 -> e16 -> e8.
define void @truncstore_v2i64_v2i8(<2 x i64> %x, ptr %z) {
; CHECK-LABEL: truncstore_v2i64_v2i8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
; CHECK-NEXT:    vnsrl.wi v8, v8, 0
; CHECK-NEXT:    vsetvli zero, zero, e16, mf4, ta, ma
; CHECK-NEXT:    vnsrl.wi v8, v8, 0
; CHECK-NEXT:    vsetvli zero, zero, e8, mf8, ta, ma
; CHECK-NEXT:    vnsrl.wi v8, v8, 0
; CHECK-NEXT:    vse8.v v8, (a0)
; CHECK-NEXT:    ret
  %y = trunc <2 x i64> %x to <2 x i8>
  store <2 x i8> %y, ptr %z
  ret void
}

define void @truncstore_v2i64_v2i16(<2 x i64> %x, ptr %z) {
; CHECK-LABEL: truncstore_v2i64_v2i16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
; CHECK-NEXT:    vnsrl.wi v8, v8, 0
; CHECK-NEXT:    vsetvli zero, zero, e16, mf4, ta, ma
; CHECK-NEXT:    vnsrl.wi v8, v8, 0
; CHECK-NEXT:    vse16.v v8, (a0)
; CHECK-NEXT:    ret
  %y = trunc <2 x i64> %x to <2 x i16>
  store <2 x i16> %y, ptr %z
  ret void
}

define void @truncstore_v2i64_v2i32(<2 x i64> %x, ptr %z) {
; CHECK-LABEL: truncstore_v2i64_v2i32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
; CHECK-NEXT:    vnsrl.wi v8, v8, 0
; CHECK-NEXT:    vse32.v v8, (a0)
; CHECK-NEXT:    ret
  %y = trunc <2 x i64> %x to <2 x i32>
  store <2 x i32> %y, ptr %z
  ret void
}
define void @truncstore_v4i64_v4i8(<4 x i64> %x, ptr %z) {
; CHECK-LABEL: truncstore_v4i64_v4i8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
; CHECK-NEXT:    vnsrl.wi v10, v8, 0
; CHECK-NEXT:    vsetvli zero, zero, e16, mf2, ta, ma
; CHECK-NEXT:    vnsrl.wi v8, v10, 0
; CHECK-NEXT:    vsetvli zero, zero, e8, mf4, ta, ma
; CHECK-NEXT:    vnsrl.wi v8, v8, 0
; CHECK-NEXT:    vse8.v v8, (a0)
; CHECK-NEXT:    ret
  %y = trunc <4 x i64> %x to <4 x i8>
  store <4 x i8> %y, ptr %z
  ret void
}

define void @truncstore_v4i64_v4i16(<4 x i64> %x, ptr %z) {
; CHECK-LABEL: truncstore_v4i64_v4i16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
; CHECK-NEXT:    vnsrl.wi v10, v8, 0
; CHECK-NEXT:    vsetvli zero, zero, e16, mf2, ta, ma
; CHECK-NEXT:    vnsrl.wi v8, v10, 0
; CHECK-NEXT:    vse16.v v8, (a0)
; CHECK-NEXT:    ret
  %y = trunc <4 x i64> %x to <4 x i16>
  store <4 x i16> %y, ptr %z
  ret void
}

define void @truncstore_v4i64_v4i32(<4 x i64> %x, ptr %z) {
; CHECK-LABEL: truncstore_v4i64_v4i32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
; CHECK-NEXT:    vnsrl.wi v10, v8, 0
; CHECK-NEXT:    vse32.v v10, (a0)
; CHECK-NEXT:    ret
  %y = trunc <4 x i64> %x to <4 x i32>
  store <4 x i32> %y, ptr %z
  ret void
}
define void @truncstore_v8i64_v8i8(<8 x i64> %x, ptr %z) {
; CHECK-LABEL: truncstore_v8i64_v8i8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
; CHECK-NEXT:    vnsrl.wi v12, v8, 0
; CHECK-NEXT:    vsetvli zero, zero, e16, m1, ta, ma
; CHECK-NEXT:    vnsrl.wi v8, v12, 0
; CHECK-NEXT:    vsetvli zero, zero, e8, mf2, ta, ma
; CHECK-NEXT:    vnsrl.wi v8, v8, 0
; CHECK-NEXT:    vse8.v v8, (a0)
; CHECK-NEXT:    ret
  %y = trunc <8 x i64> %x to <8 x i8>
  store <8 x i8> %y, ptr %z
  ret void
}

define void @truncstore_v8i64_v8i16(<8 x i64> %x, ptr %z) {
; CHECK-LABEL: truncstore_v8i64_v8i16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
; CHECK-NEXT:    vnsrl.wi v12, v8, 0
; CHECK-NEXT:    vsetvli zero, zero, e16, m1, ta, ma
; CHECK-NEXT:    vnsrl.wi v8, v12, 0
; CHECK-NEXT:    vse16.v v8, (a0)
; CHECK-NEXT:    ret
  %y = trunc <8 x i64> %x to <8 x i16>
  store <8 x i16> %y, ptr %z
  ret void
}

define void @truncstore_v8i64_v8i32(<8 x i64> %x, ptr %z) {
; CHECK-LABEL: truncstore_v8i64_v8i32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
; CHECK-NEXT:    vnsrl.wi v12, v8, 0
; CHECK-NEXT:    vse32.v v12, (a0)
; CHECK-NEXT:    ret
  %y = trunc <8 x i64> %x to <8 x i32>
  store <8 x i32> %y, ptr %z
  ret void
}
define void @truncstore_v16i64_v16i8(<16 x i64> %x, ptr %z) {
; CHECK-LABEL: truncstore_v16i64_v16i8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
; CHECK-NEXT:    vnsrl.wi v16, v8, 0
; CHECK-NEXT:    vsetvli zero, zero, e16, m2, ta, ma
; CHECK-NEXT:    vnsrl.wi v8, v16, 0
; CHECK-NEXT:    vsetvli zero, zero, e8, m1, ta, ma
; CHECK-NEXT:    vnsrl.wi v10, v8, 0
; CHECK-NEXT:    vse8.v v10, (a0)
; CHECK-NEXT:    ret
  %y = trunc <16 x i64> %x to <16 x i8>
  store <16 x i8> %y, ptr %z
  ret void
}

define void @truncstore_v16i64_v16i16(<16 x i64> %x, ptr %z) {
; CHECK-LABEL: truncstore_v16i64_v16i16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
; CHECK-NEXT:    vnsrl.wi v16, v8, 0
; CHECK-NEXT:    vsetvli zero, zero, e16, m2, ta, ma
; CHECK-NEXT:    vnsrl.wi v8, v16, 0
; CHECK-NEXT:    vse16.v v8, (a0)
; CHECK-NEXT:    ret
  %y = trunc <16 x i64> %x to <16 x i16>
  store <16 x i16> %y, ptr %z
  ret void
}

define void @truncstore_v16i64_v16i32(<16 x i64> %x, ptr %z) {
; CHECK-LABEL: truncstore_v16i64_v16i32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 16, e32, m4, ta, ma
; CHECK-NEXT:    vnsrl.wi v16, v8, 0
; CHECK-NEXT:    vse32.v v16, (a0)
; CHECK-NEXT:    ret
  %y = trunc <16 x i64> %x to <16 x i32>
  store <16 x i32> %y, ptr %z
  ret void
}