// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
// REQUIRES: riscv-registered-target
// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zfh \
// RUN:   -target-feature +zvfh -disable-O0-optnone \
// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
// RUN:   FileCheck --check-prefix=CHECK-RV64 %s

#include <riscv_vector.h>

// CHECK-RV64-LABEL: define dso_local { <vscale x 1 x double>, <vscale x 1 x double> } @test_vlseg2e64_v_f64m1x2_tu
// CHECK-RV64-SAME: (<vscale x 1 x double> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 1 x double> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 1 x double>, <vscale x 1 x double> } poison, <vscale x 1 x double> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 1 x double>, <vscale x 1 x double> } [[TMP0]], <vscale x 1 x double> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 1 x double>, <vscale x 1 x double> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 1 x double>, <vscale x 1 x double> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 1 x double>, <vscale x 1 x double> } @llvm.riscv.vlseg2.nxv1f64.i64(<vscale x 1 x double> [[TMP2]], <vscale x 1 x double> [[TMP3]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret { <vscale x 1 x double>, <vscale x 1 x double> } [[TMP4]]
//
vfloat64m1x2_t test_vlseg2e64_v_f64m1x2_tu(vfloat64m1x2_t maskedoff_tuple, const double *base, size_t vl) {
  return __riscv_vlseg2e64_tu(maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 2 x double>, <vscale x 2 x double> } @test_vlseg2e64_v_f64m2x2_tu
// CHECK-RV64-SAME: (<vscale x 2 x double> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 2 x double> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 2 x double>, <vscale x 2 x double> } poison, <vscale x 2 x double> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 2 x double>, <vscale x 2 x double> } [[TMP0]], <vscale x 2 x double> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 2 x double>, <vscale x 2 x double> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 2 x double>, <vscale x 2 x double> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 2 x double>, <vscale x 2 x double> } @llvm.riscv.vlseg2.nxv2f64.i64(<vscale x 2 x double> [[TMP2]], <vscale x 2 x double> [[TMP3]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret { <vscale x 2 x double>, <vscale x 2 x double> } [[TMP4]]
//
vfloat64m2x2_t test_vlseg2e64_v_f64m2x2_tu(vfloat64m2x2_t maskedoff_tuple, const double *base, size_t vl) {
  return __riscv_vlseg2e64_tu(maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 4 x double>, <vscale x 4 x double> } @test_vlseg2e64_v_f64m4x2_tu
// CHECK-RV64-SAME: (<vscale x 4 x double> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 4 x double> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 4 x double>, <vscale x 4 x double> } poison, <vscale x 4 x double> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 4 x double>, <vscale x 4 x double> } [[TMP0]], <vscale x 4 x double> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 4 x double>, <vscale x 4 x double> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 4 x double>, <vscale x 4 x double> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 4 x double>, <vscale x 4 x double> } @llvm.riscv.vlseg2.nxv4f64.i64(<vscale x 4 x double> [[TMP2]], <vscale x 4 x double> [[TMP3]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret { <vscale x 4 x double>, <vscale x 4 x double> } [[TMP4]]
//
vfloat64m4x2_t test_vlseg2e64_v_f64m4x2_tu(vfloat64m4x2_t maskedoff_tuple, const double *base, size_t vl) {
  return __riscv_vlseg2e64_tu(maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 1 x i64>, <vscale x 1 x i64> } @test_vlseg2e64_v_i64m1x2_tu
// CHECK-RV64-SAME: (<vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } poison, <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP0]], <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 1 x i64>, <vscale x 1 x i64> } @llvm.riscv.vlseg2.nxv1i64.i64(<vscale x 1 x i64> [[TMP2]], <vscale x 1 x i64> [[TMP3]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP4]]
//
vint64m1x2_t test_vlseg2e64_v_i64m1x2_tu(vint64m1x2_t maskedoff_tuple, const int64_t *base, size_t vl) {
  return __riscv_vlseg2e64_tu(maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 2 x i64>, <vscale x 2 x i64> } @test_vlseg2e64_v_i64m2x2_tu
// CHECK-RV64-SAME: (<vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } poison, <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP0]], <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 2 x i64>, <vscale x 2 x i64> } @llvm.riscv.vlseg2.nxv2i64.i64(<vscale x 2 x i64> [[TMP2]], <vscale x 2 x i64> [[TMP3]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP4]]
//
vint64m2x2_t test_vlseg2e64_v_i64m2x2_tu(vint64m2x2_t maskedoff_tuple, const int64_t *base, size_t vl) {
  return __riscv_vlseg2e64_tu(maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 4 x i64>, <vscale x 4 x i64> } @test_vlseg2e64_v_i64m4x2_tu
// CHECK-RV64-SAME: (<vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } poison, <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP0]], <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 4 x i64>, <vscale x 4 x i64> } @llvm.riscv.vlseg2.nxv4i64.i64(<vscale x 4 x i64> [[TMP2]], <vscale x 4 x i64> [[TMP3]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP4]]
//
vint64m4x2_t test_vlseg2e64_v_i64m4x2_tu(vint64m4x2_t maskedoff_tuple, const int64_t *base, size_t vl) {
  return __riscv_vlseg2e64_tu(maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 1 x i64>, <vscale x 1 x i64> } @test_vlseg2e64_v_u64m1x2_tu
// CHECK-RV64-SAME: (<vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } poison, <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP0]], <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 1 x i64>, <vscale x 1 x i64> } @llvm.riscv.vlseg2.nxv1i64.i64(<vscale x 1 x i64> [[TMP2]], <vscale x 1 x i64> [[TMP3]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP4]]
//
vuint64m1x2_t test_vlseg2e64_v_u64m1x2_tu(vuint64m1x2_t maskedoff_tuple, const uint64_t *base, size_t vl) {
  return __riscv_vlseg2e64_tu(maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 2 x i64>, <vscale x 2 x i64> } @test_vlseg2e64_v_u64m2x2_tu
// CHECK-RV64-SAME: (<vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } poison, <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP0]], <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 2 x i64>, <vscale x 2 x i64> } @llvm.riscv.vlseg2.nxv2i64.i64(<vscale x 2 x i64> [[TMP2]], <vscale x 2 x i64> [[TMP3]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP4]]
//
vuint64m2x2_t test_vlseg2e64_v_u64m2x2_tu(vuint64m2x2_t maskedoff_tuple, const uint64_t *base, size_t vl) {
  return __riscv_vlseg2e64_tu(maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 4 x i64>, <vscale x 4 x i64> } @test_vlseg2e64_v_u64m4x2_tu
// CHECK-RV64-SAME: (<vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } poison, <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP0]], <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 4 x i64>, <vscale x 4 x i64> } @llvm.riscv.vlseg2.nxv4i64.i64(<vscale x 4 x i64> [[TMP2]], <vscale x 4 x i64> [[TMP3]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP4]]
//
vuint64m4x2_t test_vlseg2e64_v_u64m4x2_tu(vuint64m4x2_t maskedoff_tuple, const uint64_t *base, size_t vl) {
  return __riscv_vlseg2e64_tu(maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 1 x double>, <vscale x 1 x double> } @test_vlseg2e64_v_f64m1x2_tum
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x double> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 1 x double> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 1 x double>, <vscale x 1 x double> } poison, <vscale x 1 x double> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 1 x double>, <vscale x 1 x double> } [[TMP0]], <vscale x 1 x double> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 1 x double>, <vscale x 1 x double> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 1 x double>, <vscale x 1 x double> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 1 x double>, <vscale x 1 x double> } @llvm.riscv.vlseg2.mask.nxv1f64.i64(<vscale x 1 x double> [[TMP2]], <vscale x 1 x double> [[TMP3]], ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret { <vscale x 1 x double>, <vscale x 1 x double> } [[TMP4]]
//
vfloat64m1x2_t test_vlseg2e64_v_f64m1x2_tum(vbool64_t mask, vfloat64m1x2_t maskedoff_tuple, const double *base, size_t vl) {
  return __riscv_vlseg2e64_tum(mask, maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 2 x double>, <vscale x 2 x double> } @test_vlseg2e64_v_f64m2x2_tum
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x double> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 2 x double> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 2 x double>, <vscale x 2 x double> } poison, <vscale x 2 x double> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 2 x double>, <vscale x 2 x double> } [[TMP0]], <vscale x 2 x double> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 2 x double>, <vscale x 2 x double> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 2 x double>, <vscale x 2 x double> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 2 x double>, <vscale x 2 x double> } @llvm.riscv.vlseg2.mask.nxv2f64.i64(<vscale x 2 x double> [[TMP2]], <vscale x 2 x double> [[TMP3]], ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret { <vscale x 2 x double>, <vscale x 2 x double> } [[TMP4]]
//
vfloat64m2x2_t test_vlseg2e64_v_f64m2x2_tum(vbool32_t mask, vfloat64m2x2_t maskedoff_tuple, const double *base, size_t vl) {
  return __riscv_vlseg2e64_tum(mask, maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 4 x double>, <vscale x 4 x double> } @test_vlseg2e64_v_f64m4x2_tum
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x double> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 4 x double> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 4 x double>, <vscale x 4 x double> } poison, <vscale x 4 x double> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 4 x double>, <vscale x 4 x double> } [[TMP0]], <vscale x 4 x double> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 4 x double>, <vscale x 4 x double> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 4 x double>, <vscale x 4 x double> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 4 x double>, <vscale x 4 x double> } @llvm.riscv.vlseg2.mask.nxv4f64.i64(<vscale x 4 x double> [[TMP2]], <vscale x 4 x double> [[TMP3]], ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret { <vscale x 4 x double>, <vscale x 4 x double> } [[TMP4]]
//
vfloat64m4x2_t test_vlseg2e64_v_f64m4x2_tum(vbool16_t mask, vfloat64m4x2_t maskedoff_tuple, const double *base, size_t vl) {
  return __riscv_vlseg2e64_tum(mask, maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 1 x i64>, <vscale x 1 x i64> } @test_vlseg2e64_v_i64m1x2_tum
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } poison, <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP0]], <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 1 x i64>, <vscale x 1 x i64> } @llvm.riscv.vlseg2.mask.nxv1i64.i64(<vscale x 1 x i64> [[TMP2]], <vscale x 1 x i64> [[TMP3]], ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP4]]
//
vint64m1x2_t test_vlseg2e64_v_i64m1x2_tum(vbool64_t mask, vint64m1x2_t maskedoff_tuple, const int64_t *base, size_t vl) {
  return __riscv_vlseg2e64_tum(mask, maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 2 x i64>, <vscale x 2 x i64> } @test_vlseg2e64_v_i64m2x2_tum
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } poison, <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP0]], <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 2 x i64>, <vscale x 2 x i64> } @llvm.riscv.vlseg2.mask.nxv2i64.i64(<vscale x 2 x i64> [[TMP2]], <vscale x 2 x i64> [[TMP3]], ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP4]]
//
vint64m2x2_t test_vlseg2e64_v_i64m2x2_tum(vbool32_t mask, vint64m2x2_t maskedoff_tuple, const int64_t *base, size_t vl) {
  return __riscv_vlseg2e64_tum(mask, maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 4 x i64>, <vscale x 4 x i64> } @test_vlseg2e64_v_i64m4x2_tum
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } poison, <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP0]], <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 4 x i64>, <vscale x 4 x i64> } @llvm.riscv.vlseg2.mask.nxv4i64.i64(<vscale x 4 x i64> [[TMP2]], <vscale x 4 x i64> [[TMP3]], ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP4]]
//
vint64m4x2_t test_vlseg2e64_v_i64m4x2_tum(vbool16_t mask, vint64m4x2_t maskedoff_tuple, const int64_t *base, size_t vl) {
  return __riscv_vlseg2e64_tum(mask, maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 1 x i64>, <vscale x 1 x i64> } @test_vlseg2e64_v_u64m1x2_tum
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } poison, <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP0]], <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 1 x i64>, <vscale x 1 x i64> } @llvm.riscv.vlseg2.mask.nxv1i64.i64(<vscale x 1 x i64> [[TMP2]], <vscale x 1 x i64> [[TMP3]], ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP4]]
//
vuint64m1x2_t test_vlseg2e64_v_u64m1x2_tum(vbool64_t mask, vuint64m1x2_t maskedoff_tuple, const uint64_t *base, size_t vl) {
  return __riscv_vlseg2e64_tum(mask, maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 2 x i64>, <vscale x 2 x i64> } @test_vlseg2e64_v_u64m2x2_tum
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } poison, <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP0]], <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 2 x i64>, <vscale x 2 x i64> } @llvm.riscv.vlseg2.mask.nxv2i64.i64(<vscale x 2 x i64> [[TMP2]], <vscale x 2 x i64> [[TMP3]], ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP4]]
//
vuint64m2x2_t test_vlseg2e64_v_u64m2x2_tum(vbool32_t mask, vuint64m2x2_t maskedoff_tuple, const uint64_t *base, size_t vl) {
  return __riscv_vlseg2e64_tum(mask, maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 4 x i64>, <vscale x 4 x i64> } @test_vlseg2e64_v_u64m4x2_tum
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } poison, <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP0]], <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 4 x i64>, <vscale x 4 x i64> } @llvm.riscv.vlseg2.mask.nxv4i64.i64(<vscale x 4 x i64> [[TMP2]], <vscale x 4 x i64> [[TMP3]], ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP4]]
//
vuint64m4x2_t test_vlseg2e64_v_u64m4x2_tum(vbool16_t mask, vuint64m4x2_t maskedoff_tuple, const uint64_t *base, size_t vl) {
  return __riscv_vlseg2e64_tum(mask, maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 1 x double>, <vscale x 1 x double> } @test_vlseg2e64_v_f64m1x2_tumu
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x double> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 1 x double> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 1 x double>, <vscale x 1 x double> } poison, <vscale x 1 x double> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 1 x double>, <vscale x 1 x double> } [[TMP0]], <vscale x 1 x double> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 1 x double>, <vscale x 1 x double> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 1 x double>, <vscale x 1 x double> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 1 x double>, <vscale x 1 x double> } @llvm.riscv.vlseg2.mask.nxv1f64.i64(<vscale x 1 x double> [[TMP2]], <vscale x 1 x double> [[TMP3]], ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT: ret { <vscale x 1 x double>, <vscale x 1 x double> } [[TMP4]]
//
vfloat64m1x2_t test_vlseg2e64_v_f64m1x2_tumu(vbool64_t mask, vfloat64m1x2_t maskedoff_tuple, const double *base, size_t vl) {
  return __riscv_vlseg2e64_tumu(mask, maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 2 x double>, <vscale x 2 x double> } @test_vlseg2e64_v_f64m2x2_tumu
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x double> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 2 x double> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 2 x double>, <vscale x 2 x double> } poison, <vscale x 2 x double> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 2 x double>, <vscale x 2 x double> } [[TMP0]], <vscale x 2 x double> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 2 x double>, <vscale x 2 x double> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 2 x double>, <vscale x 2 x double> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 2 x double>, <vscale x 2 x double> } @llvm.riscv.vlseg2.mask.nxv2f64.i64(<vscale x 2 x double> [[TMP2]], <vscale x 2 x double> [[TMP3]], ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT: ret { <vscale x 2 x double>, <vscale x 2 x double> } [[TMP4]]
//
vfloat64m2x2_t test_vlseg2e64_v_f64m2x2_tumu(vbool32_t mask, vfloat64m2x2_t maskedoff_tuple, const double *base, size_t vl) {
  return __riscv_vlseg2e64_tumu(mask, maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 4 x double>, <vscale x 4 x double> } @test_vlseg2e64_v_f64m4x2_tumu
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x double> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 4 x double> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 4 x double>, <vscale x 4 x double> } poison, <vscale x 4 x double> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 4 x double>, <vscale x 4 x double> } [[TMP0]], <vscale x 4 x double> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 4 x double>, <vscale x 4 x double> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 4 x double>, <vscale x 4 x double> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 4 x double>, <vscale x 4 x double> } @llvm.riscv.vlseg2.mask.nxv4f64.i64(<vscale x 4 x double> [[TMP2]], <vscale x 4 x double> [[TMP3]], ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT: ret { <vscale x 4 x double>, <vscale x 4 x double> } [[TMP4]]
//
vfloat64m4x2_t test_vlseg2e64_v_f64m4x2_tumu(vbool16_t mask, vfloat64m4x2_t maskedoff_tuple, const double *base, size_t vl) {
  return __riscv_vlseg2e64_tumu(mask, maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 1 x i64>, <vscale x 1 x i64> } @test_vlseg2e64_v_i64m1x2_tumu
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } poison, <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP0]], <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 1 x i64>, <vscale x 1 x i64> } @llvm.riscv.vlseg2.mask.nxv1i64.i64(<vscale x 1 x i64> [[TMP2]], <vscale x 1 x i64> [[TMP3]], ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT: ret { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP4]]
//
vint64m1x2_t test_vlseg2e64_v_i64m1x2_tumu(vbool64_t mask, vint64m1x2_t maskedoff_tuple, const int64_t *base, size_t vl) {
  return __riscv_vlseg2e64_tumu(mask, maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 2 x i64>, <vscale x 2 x i64> } @test_vlseg2e64_v_i64m2x2_tumu
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } poison, <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP0]], <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 2 x i64>, <vscale x 2 x i64> } @llvm.riscv.vlseg2.mask.nxv2i64.i64(<vscale x 2 x i64> [[TMP2]], <vscale x 2 x i64> [[TMP3]], ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT: ret { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP4]]
//
vint64m2x2_t test_vlseg2e64_v_i64m2x2_tumu(vbool32_t mask, vint64m2x2_t maskedoff_tuple, const int64_t *base, size_t vl) {
  return __riscv_vlseg2e64_tumu(mask, maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 4 x i64>, <vscale x 4 x i64> } @test_vlseg2e64_v_i64m4x2_tumu
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } poison, <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP0]], <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 4 x i64>, <vscale x 4 x i64> } @llvm.riscv.vlseg2.mask.nxv4i64.i64(<vscale x 4 x i64> [[TMP2]], <vscale x 4 x i64> [[TMP3]], ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT: ret { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP4]]
//
vint64m4x2_t test_vlseg2e64_v_i64m4x2_tumu(vbool16_t mask, vint64m4x2_t maskedoff_tuple, const int64_t *base, size_t vl) {
  return __riscv_vlseg2e64_tumu(mask, maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 1 x i64>, <vscale x 1 x i64> } @test_vlseg2e64_v_u64m1x2_tumu
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } poison, <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP0]], <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 1 x i64>, <vscale x 1 x i64> } @llvm.riscv.vlseg2.mask.nxv1i64.i64(<vscale x 1 x i64> [[TMP2]], <vscale x 1 x i64> [[TMP3]], ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT: ret { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP4]]
//
vuint64m1x2_t test_vlseg2e64_v_u64m1x2_tumu(vbool64_t mask, vuint64m1x2_t maskedoff_tuple, const uint64_t *base, size_t vl) {
  return __riscv_vlseg2e64_tumu(mask, maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 2 x i64>, <vscale x 2 x i64> } @test_vlseg2e64_v_u64m2x2_tumu
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } poison, <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP0]], <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 2 x i64>, <vscale x 2 x i64> } @llvm.riscv.vlseg2.mask.nxv2i64.i64(<vscale x 2 x i64> [[TMP2]], <vscale x 2 x i64> [[TMP3]], ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT: ret { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP4]]
//
vuint64m2x2_t test_vlseg2e64_v_u64m2x2_tumu(vbool32_t mask, vuint64m2x2_t maskedoff_tuple, const uint64_t *base, size_t vl) {
  return __riscv_vlseg2e64_tumu(mask, maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 4 x i64>, <vscale x 4 x i64> } @test_vlseg2e64_v_u64m4x2_tumu
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } poison, <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP0]], <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 4 x i64>, <vscale x 4 x i64> } @llvm.riscv.vlseg2.mask.nxv4i64.i64(<vscale x 4 x i64> [[TMP2]], <vscale x 4 x i64> [[TMP3]], ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT: ret { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP4]]
//
vuint64m4x2_t test_vlseg2e64_v_u64m4x2_tumu(vbool16_t mask, vuint64m4x2_t maskedoff_tuple, const uint64_t *base, size_t vl) {
  return __riscv_vlseg2e64_tumu(mask, maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 1 x double>, <vscale x 1 x double> } @test_vlseg2e64_v_f64m1x2_mu
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x double> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 1 x double> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 1 x double>, <vscale x 1 x double> } poison, <vscale x 1 x double> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 1 x double>, <vscale x 1 x double> } [[TMP0]], <vscale x 1 x double> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 1 x double>, <vscale x 1 x double> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 1 x double>, <vscale x 1 x double> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 1 x double>, <vscale x 1 x double> } @llvm.riscv.vlseg2.mask.nxv1f64.i64(<vscale x 1 x double> [[TMP2]], <vscale x 1 x double> [[TMP3]], ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT: ret { <vscale x 1 x double>, <vscale x 1 x double> } [[TMP4]]
//
vfloat64m1x2_t test_vlseg2e64_v_f64m1x2_mu(vbool64_t mask, vfloat64m1x2_t maskedoff_tuple, const double *base, size_t vl) {
  return __riscv_vlseg2e64_mu(mask, maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 2 x double>, <vscale x 2 x double> } @test_vlseg2e64_v_f64m2x2_mu
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x double> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 2 x double> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 2 x double>, <vscale x 2 x double> } poison, <vscale x 2 x double> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 2 x double>, <vscale x 2 x double> } [[TMP0]], <vscale x 2 x double> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 2 x double>, <vscale x 2 x double> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 2 x double>, <vscale x 2 x double> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 2 x double>, <vscale x 2 x double> } @llvm.riscv.vlseg2.mask.nxv2f64.i64(<vscale x 2 x double> [[TMP2]], <vscale x 2 x double> [[TMP3]], ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT: ret { <vscale x 2 x double>, <vscale x 2 x double> } [[TMP4]]
//
vfloat64m2x2_t test_vlseg2e64_v_f64m2x2_mu(vbool32_t mask, vfloat64m2x2_t maskedoff_tuple, const double *base, size_t vl) {
  return __riscv_vlseg2e64_mu(mask, maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 4 x double>, <vscale x 4 x double> } @test_vlseg2e64_v_f64m4x2_mu
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x double> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 4 x double> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 4 x double>, <vscale x 4 x double> } poison, <vscale x 4 x double> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 4 x double>, <vscale x 4 x double> } [[TMP0]], <vscale x 4 x double> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 4 x double>, <vscale x 4 x double> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 4 x double>, <vscale x 4 x double> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 4 x double>, <vscale x 4 x double> } @llvm.riscv.vlseg2.mask.nxv4f64.i64(<vscale x 4 x double> [[TMP2]], <vscale x 4 x double> [[TMP3]], ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT: ret { <vscale x 4 x double>, <vscale x 4 x double> } [[TMP4]]
//
vfloat64m4x2_t test_vlseg2e64_v_f64m4x2_mu(vbool16_t mask, vfloat64m4x2_t maskedoff_tuple, const double *base, size_t vl) {
  return __riscv_vlseg2e64_mu(mask, maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 1 x i64>, <vscale x 1 x i64> } @test_vlseg2e64_v_i64m1x2_mu
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } poison, <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP0]], <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 1 x i64>, <vscale x 1 x i64> } @llvm.riscv.vlseg2.mask.nxv1i64.i64(<vscale x 1 x i64> [[TMP2]], <vscale x 1 x i64> [[TMP3]], ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT: ret { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP4]]
//
vint64m1x2_t test_vlseg2e64_v_i64m1x2_mu(vbool64_t mask, vint64m1x2_t maskedoff_tuple, const int64_t *base, size_t vl) {
  return __riscv_vlseg2e64_mu(mask, maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 2 x i64>, <vscale x 2 x i64> } @test_vlseg2e64_v_i64m2x2_mu
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } poison, <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP0]], <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 2 x i64>, <vscale x 2 x i64> } @llvm.riscv.vlseg2.mask.nxv2i64.i64(<vscale x 2 x i64> [[TMP2]], <vscale x 2 x i64> [[TMP3]], ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT: ret { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP4]]
//
vint64m2x2_t test_vlseg2e64_v_i64m2x2_mu(vbool32_t mask, vint64m2x2_t maskedoff_tuple, const int64_t *base, size_t vl) {
  return __riscv_vlseg2e64_mu(mask, maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 4 x i64>, <vscale x 4 x i64> } @test_vlseg2e64_v_i64m4x2_mu
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } poison, <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP0]], <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 4 x i64>, <vscale x 4 x i64> } @llvm.riscv.vlseg2.mask.nxv4i64.i64(<vscale x 4 x i64> [[TMP2]], <vscale x 4 x i64> [[TMP3]], ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT: ret { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP4]]
//
vint64m4x2_t test_vlseg2e64_v_i64m4x2_mu(vbool16_t mask, vint64m4x2_t maskedoff_tuple, const int64_t *base, size_t vl) {
  return __riscv_vlseg2e64_mu(mask, maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 1 x i64>, <vscale x 1 x i64> } @test_vlseg2e64_v_u64m1x2_mu
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } poison, <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP0]], <vscale x 1 x i64> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 1 x i64>, <vscale x 1 x i64> } @llvm.riscv.vlseg2.mask.nxv1i64.i64(<vscale x 1 x i64> [[TMP2]], <vscale x 1 x i64> [[TMP3]], ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT: ret { <vscale x 1 x i64>, <vscale x 1 x i64> } [[TMP4]]
//
vuint64m1x2_t test_vlseg2e64_v_u64m1x2_mu(vbool64_t mask, vuint64m1x2_t maskedoff_tuple, const uint64_t *base, size_t vl) {
  return __riscv_vlseg2e64_mu(mask, maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 2 x i64>, <vscale x 2 x i64> } @test_vlseg2e64_v_u64m2x2_mu
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } poison, <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP0]], <vscale x 2 x i64> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 2 x i64>, <vscale x 2 x i64> } @llvm.riscv.vlseg2.mask.nxv2i64.i64(<vscale x 2 x i64> [[TMP2]], <vscale x 2 x i64> [[TMP3]], ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT: ret { <vscale x 2 x i64>, <vscale x 2 x i64> } [[TMP4]]
//
vuint64m2x2_t test_vlseg2e64_v_u64m2x2_mu(vbool32_t mask, vuint64m2x2_t maskedoff_tuple, const uint64_t *base, size_t vl) {
  return __riscv_vlseg2e64_mu(mask, maskedoff_tuple, base, vl);
}

// CHECK-RV64-LABEL: define dso_local { <vscale x 4 x i64>, <vscale x 4 x i64> } @test_vlseg2e64_v_u64m4x2_mu
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE0:%.*]], <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE1:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } poison, <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP0]], <vscale x 4 x i64> [[MASKEDOFF_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP1]], 1
// CHECK-RV64-NEXT: [[TMP4:%.*]] = call { <vscale x 4 x i64>, <vscale x 4 x i64> } @llvm.riscv.vlseg2.mask.nxv4i64.i64(<vscale x 4 x i64> [[TMP2]], <vscale x 4 x i64> [[TMP3]], ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT: ret { <vscale x 4 x i64>, <vscale x 4 x i64> } [[TMP4]]
//
vuint64m4x2_t test_vlseg2e64_v_u64m4x2_mu(vbool16_t mask, vuint64m4x2_t maskedoff_tuple, const uint64_t *base, size_t vl) {
  return __riscv_vlseg2e64_mu(mask, maskedoff_tuple, base, vl);
}