1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc -mtriple=riscv64 -mattr=+zve64d,+f,+d,+zfh,+zvfh \
3 ; RUN: -verify-machineinstrs < %s | FileCheck %s
5 declare {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} @llvm.riscv.vlseg2ff.nxv16i16(<vscale x 16 x i16>,<vscale x 16 x i16>, ptr , i64)
6 declare {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} @llvm.riscv.vlseg2ff.mask.nxv16i16(<vscale x 16 x i16>,<vscale x 16 x i16>, ptr, <vscale x 16 x i1>, i64, i64)
; Fault-only-first segment load whose two vector results are both dead: only
; the returned vl (struct field 2) is extracted and stored to %outvl. The
; generated code still performs the load (it can trap / update vl) and reads
; the new vl back via `csrr a0, vl`.
8 define void @test_vlseg2ff_dead_value(ptr %base, i64 %vl, ptr %outvl) {
9 ; CHECK-LABEL: test_vlseg2ff_dead_value:
10 ; CHECK: # %bb.0: # %entry
11 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, ma
12 ; CHECK-NEXT: vlseg2e16ff.v v8, (a0)
13 ; CHECK-NEXT: csrr a0, vl
14 ; CHECK-NEXT: sd a0, 0(a2)
17 %0 = tail call {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} @llvm.riscv.vlseg2ff.nxv16i16(<vscale x 16 x i16> undef, <vscale x 16 x i16> undef, ptr %base, i64 %vl)
18 %1 = extractvalue {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} %0, 2
19 store i64 %1, ptr %outvl
; Masked variant of the dead-value test: both vector results are dead and only
; the returned vl (struct field 2) is stored. %val is passed as the merge
; operand for both segment fields (note the vmv4r.v copy in the CHECK lines);
; the trailing `i64 1` is the intrinsic's policy operand.
23 define void @test_vlseg2ff_mask_dead_value(<vscale x 16 x i16> %val, ptr %base, i64 %vl, <vscale x 16 x i1> %mask, ptr %outvl) {
24 ; CHECK-LABEL: test_vlseg2ff_mask_dead_value:
25 ; CHECK: # %bb.0: # %entry
26 ; CHECK-NEXT: vmv4r.v v12, v8
27 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
28 ; CHECK-NEXT: vlseg2e16ff.v v8, (a0), v0.t
29 ; CHECK-NEXT: csrr a0, vl
30 ; CHECK-NEXT: sd a0, 0(a2)
33 %0 = tail call {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} @llvm.riscv.vlseg2ff.mask.nxv16i16(<vscale x 16 x i16> %val,<vscale x 16 x i16> %val, ptr %base, <vscale x 16 x i1> %mask, i64 %vl, i64 1)
34 %1 = extractvalue {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} %0, 2
35 store i64 %1, ptr %outvl
; Opposite case: the returned vl (struct field 2) is dead and only the second
; segment value (struct field 1) is used, so no `csrr a0, vl` appears in the
; CHECK lines.
39 define <vscale x 16 x i16> @test_vlseg2ff_dead_vl(ptr %base, i64 %vl) {
40 ; CHECK-LABEL: test_vlseg2ff_dead_vl:
41 ; CHECK: # %bb.0: # %entry
42 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, ma
43 ; CHECK-NEXT: vlseg2e16ff.v v4, (a0)
46 %0 = tail call {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} @llvm.riscv.vlseg2ff.nxv16i16(<vscale x 16 x i16> undef, <vscale x 16 x i16> undef, ptr %base, i64 %vl)
47 %1 = extractvalue {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} %0, 1
48 ret <vscale x 16 x i16> %1
; Masked variant with a dead vl result: only the second segment value
; (struct field 1) is returned; no vl read-back is emitted.
51 define <vscale x 16 x i16> @test_vlseg2ff_mask_dead_vl(<vscale x 16 x i16> %val, ptr %base, i64 %vl, <vscale x 16 x i1> %mask) {
52 ; CHECK-LABEL: test_vlseg2ff_mask_dead_vl:
53 ; CHECK: # %bb.0: # %entry
54 ; CHECK-NEXT: vmv4r.v v4, v8
55 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
56 ; CHECK-NEXT: vlseg2e16ff.v v4, (a0), v0.t
59 %0 = tail call {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} @llvm.riscv.vlseg2ff.mask.nxv16i16(<vscale x 16 x i16> %val,<vscale x 16 x i16> %val, ptr %base, <vscale x 16 x i1> %mask, i64 %vl, i64 1)
60 %1 = extractvalue {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} %0, 1
61 ret <vscale x 16 x i16> %1
; All results dead: the call's return value is never used, yet the CHECK lines
; show the vlseg2e16ff.v is still emitted (the load itself is not removed).
64 define void @test_vlseg2ff_dead_all(ptr %base, i64 %vl) {
65 ; CHECK-LABEL: test_vlseg2ff_dead_all:
66 ; CHECK: # %bb.0: # %entry
67 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, ma
68 ; CHECK-NEXT: vlseg2e16ff.v v8, (a0)
71 tail call {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} @llvm.riscv.vlseg2ff.nxv16i16(<vscale x 16 x i16> undef, <vscale x 16 x i16> undef, ptr %base, i64 %vl)
; Masked variant with all results dead: the masked fault-only-first load is
; still emitted even though nothing uses the returned struct.
75 define void @test_vlseg2ff_mask_dead_all(<vscale x 16 x i16> %val, ptr %base, i64 %vl, <vscale x 16 x i1> %mask) {
76 ; CHECK-LABEL: test_vlseg2ff_mask_dead_all:
77 ; CHECK: # %bb.0: # %entry
78 ; CHECK-NEXT: vmv4r.v v12, v8
79 ; CHECK-NEXT: vsetvli zero, a1, e16, m4, ta, mu
80 ; CHECK-NEXT: vlseg2e16ff.v v8, (a0), v0.t
83 tail call {<vscale x 16 x i16>,<vscale x 16 x i16>, i64} @llvm.riscv.vlseg2ff.mask.nxv16i16(<vscale x 16 x i16> %val,<vscale x 16 x i16> %val, ptr %base, <vscale x 16 x i1> %mask, i64 %vl, i64 1)