// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
// REQUIRES: riscv-registered-target
// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zfh \
// RUN:   -target-feature +zvfh -disable-O0-optnone \
// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
// RUN:   FileCheck --check-prefix=CHECK-RV64 %s

#include <riscv_vector.h>
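
// vsseg2e8 is the unit-stride segment store with two fields and 8-bit
// elements: the two vectors of the tuple argument are interleaved in memory
// as consecutive bytes. The tests below cover every LMUL from mf8 to m4 for
// both int8_t and uint8_t, in unmasked and masked forms.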
// CHECK-RV64-LABEL: define dso_local void @test_vsseg2e8_v_i8mf8x2
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 1 x i8> [[V_TUPLE_COERCE0:%.*]], <vscale x 1 x i8> [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 1 x i8>, <vscale x 1 x i8> } poison, <vscale x 1 x i8> [[V_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 1 x i8>, <vscale x 1 x i8> } [[TMP0]], <vscale x 1 x i8> [[V_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 1 x i8>, <vscale x 1 x i8> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 1 x i8>, <vscale x 1 x i8> } [[TMP1]], 1
// CHECK-RV64-NEXT: call void @llvm.riscv.vsseg2.nxv1i8.i64(<vscale x 1 x i8> [[TMP2]], <vscale x 1 x i8> [[TMP3]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
//
void test_vsseg2e8_v_i8mf8x2(int8_t *base, vint8mf8x2_t v_tuple, size_t vl) {
  return __riscv_vsseg2e8_v_i8mf8x2(base, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsseg2e8_v_i8mf4x2
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 2 x i8> [[V_TUPLE_COERCE0:%.*]], <vscale x 2 x i8> [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 2 x i8>, <vscale x 2 x i8> } poison, <vscale x 2 x i8> [[V_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 2 x i8>, <vscale x 2 x i8> } [[TMP0]], <vscale x 2 x i8> [[V_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 2 x i8>, <vscale x 2 x i8> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 2 x i8>, <vscale x 2 x i8> } [[TMP1]], 1
// CHECK-RV64-NEXT: call void @llvm.riscv.vsseg2.nxv2i8.i64(<vscale x 2 x i8> [[TMP2]], <vscale x 2 x i8> [[TMP3]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
//
void test_vsseg2e8_v_i8mf4x2(int8_t *base, vint8mf4x2_t v_tuple, size_t vl) {
  return __riscv_vsseg2e8_v_i8mf4x2(base, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsseg2e8_v_i8mf2x2
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 4 x i8> [[V_TUPLE_COERCE0:%.*]], <vscale x 4 x i8> [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 4 x i8>, <vscale x 4 x i8> } poison, <vscale x 4 x i8> [[V_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 4 x i8>, <vscale x 4 x i8> } [[TMP0]], <vscale x 4 x i8> [[V_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 4 x i8>, <vscale x 4 x i8> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 4 x i8>, <vscale x 4 x i8> } [[TMP1]], 1
// CHECK-RV64-NEXT: call void @llvm.riscv.vsseg2.nxv4i8.i64(<vscale x 4 x i8> [[TMP2]], <vscale x 4 x i8> [[TMP3]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
//
void test_vsseg2e8_v_i8mf2x2(int8_t *base, vint8mf2x2_t v_tuple, size_t vl) {
  return __riscv_vsseg2e8_v_i8mf2x2(base, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsseg2e8_v_i8m1x2
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 8 x i8> [[V_TUPLE_COERCE0:%.*]], <vscale x 8 x i8> [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 8 x i8>, <vscale x 8 x i8> } poison, <vscale x 8 x i8> [[V_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 8 x i8>, <vscale x 8 x i8> } [[TMP0]], <vscale x 8 x i8> [[V_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 8 x i8>, <vscale x 8 x i8> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 8 x i8>, <vscale x 8 x i8> } [[TMP1]], 1
// CHECK-RV64-NEXT: call void @llvm.riscv.vsseg2.nxv8i8.i64(<vscale x 8 x i8> [[TMP2]], <vscale x 8 x i8> [[TMP3]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
//
void test_vsseg2e8_v_i8m1x2(int8_t *base, vint8m1x2_t v_tuple, size_t vl) {
  return __riscv_vsseg2e8_v_i8m1x2(base, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsseg2e8_v_i8m2x2
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 16 x i8> [[V_TUPLE_COERCE0:%.*]], <vscale x 16 x i8> [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 16 x i8>, <vscale x 16 x i8> } poison, <vscale x 16 x i8> [[V_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 16 x i8>, <vscale x 16 x i8> } [[TMP0]], <vscale x 16 x i8> [[V_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 16 x i8>, <vscale x 16 x i8> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 16 x i8>, <vscale x 16 x i8> } [[TMP1]], 1
// CHECK-RV64-NEXT: call void @llvm.riscv.vsseg2.nxv16i8.i64(<vscale x 16 x i8> [[TMP2]], <vscale x 16 x i8> [[TMP3]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
//
void test_vsseg2e8_v_i8m2x2(int8_t *base, vint8m2x2_t v_tuple, size_t vl) {
  return __riscv_vsseg2e8_v_i8m2x2(base, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsseg2e8_v_i8m4x2
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 32 x i8> [[V_TUPLE_COERCE0:%.*]], <vscale x 32 x i8> [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 32 x i8>, <vscale x 32 x i8> } poison, <vscale x 32 x i8> [[V_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 32 x i8>, <vscale x 32 x i8> } [[TMP0]], <vscale x 32 x i8> [[V_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 32 x i8>, <vscale x 32 x i8> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 32 x i8>, <vscale x 32 x i8> } [[TMP1]], 1
// CHECK-RV64-NEXT: call void @llvm.riscv.vsseg2.nxv32i8.i64(<vscale x 32 x i8> [[TMP2]], <vscale x 32 x i8> [[TMP3]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
//
void test_vsseg2e8_v_i8m4x2(int8_t *base, vint8m4x2_t v_tuple, size_t vl) {
  return __riscv_vsseg2e8_v_i8m4x2(base, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsseg2e8_v_u8mf8x2
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 1 x i8> [[V_TUPLE_COERCE0:%.*]], <vscale x 1 x i8> [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 1 x i8>, <vscale x 1 x i8> } poison, <vscale x 1 x i8> [[V_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 1 x i8>, <vscale x 1 x i8> } [[TMP0]], <vscale x 1 x i8> [[V_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 1 x i8>, <vscale x 1 x i8> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 1 x i8>, <vscale x 1 x i8> } [[TMP1]], 1
// CHECK-RV64-NEXT: call void @llvm.riscv.vsseg2.nxv1i8.i64(<vscale x 1 x i8> [[TMP2]], <vscale x 1 x i8> [[TMP3]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
//
void test_vsseg2e8_v_u8mf8x2(uint8_t *base, vuint8mf8x2_t v_tuple, size_t vl) {
  return __riscv_vsseg2e8_v_u8mf8x2(base, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsseg2e8_v_u8mf4x2
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 2 x i8> [[V_TUPLE_COERCE0:%.*]], <vscale x 2 x i8> [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 2 x i8>, <vscale x 2 x i8> } poison, <vscale x 2 x i8> [[V_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 2 x i8>, <vscale x 2 x i8> } [[TMP0]], <vscale x 2 x i8> [[V_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 2 x i8>, <vscale x 2 x i8> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 2 x i8>, <vscale x 2 x i8> } [[TMP1]], 1
// CHECK-RV64-NEXT: call void @llvm.riscv.vsseg2.nxv2i8.i64(<vscale x 2 x i8> [[TMP2]], <vscale x 2 x i8> [[TMP3]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
//
void test_vsseg2e8_v_u8mf4x2(uint8_t *base, vuint8mf4x2_t v_tuple, size_t vl) {
  return __riscv_vsseg2e8_v_u8mf4x2(base, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsseg2e8_v_u8mf2x2
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 4 x i8> [[V_TUPLE_COERCE0:%.*]], <vscale x 4 x i8> [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 4 x i8>, <vscale x 4 x i8> } poison, <vscale x 4 x i8> [[V_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 4 x i8>, <vscale x 4 x i8> } [[TMP0]], <vscale x 4 x i8> [[V_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 4 x i8>, <vscale x 4 x i8> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 4 x i8>, <vscale x 4 x i8> } [[TMP1]], 1
// CHECK-RV64-NEXT: call void @llvm.riscv.vsseg2.nxv4i8.i64(<vscale x 4 x i8> [[TMP2]], <vscale x 4 x i8> [[TMP3]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
//
void test_vsseg2e8_v_u8mf2x2(uint8_t *base, vuint8mf2x2_t v_tuple, size_t vl) {
  return __riscv_vsseg2e8_v_u8mf2x2(base, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsseg2e8_v_u8m1x2
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 8 x i8> [[V_TUPLE_COERCE0:%.*]], <vscale x 8 x i8> [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 8 x i8>, <vscale x 8 x i8> } poison, <vscale x 8 x i8> [[V_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 8 x i8>, <vscale x 8 x i8> } [[TMP0]], <vscale x 8 x i8> [[V_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 8 x i8>, <vscale x 8 x i8> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 8 x i8>, <vscale x 8 x i8> } [[TMP1]], 1
// CHECK-RV64-NEXT: call void @llvm.riscv.vsseg2.nxv8i8.i64(<vscale x 8 x i8> [[TMP2]], <vscale x 8 x i8> [[TMP3]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
//
void test_vsseg2e8_v_u8m1x2(uint8_t *base, vuint8m1x2_t v_tuple, size_t vl) {
  return __riscv_vsseg2e8_v_u8m1x2(base, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsseg2e8_v_u8m2x2
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 16 x i8> [[V_TUPLE_COERCE0:%.*]], <vscale x 16 x i8> [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 16 x i8>, <vscale x 16 x i8> } poison, <vscale x 16 x i8> [[V_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 16 x i8>, <vscale x 16 x i8> } [[TMP0]], <vscale x 16 x i8> [[V_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 16 x i8>, <vscale x 16 x i8> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 16 x i8>, <vscale x 16 x i8> } [[TMP1]], 1
// CHECK-RV64-NEXT: call void @llvm.riscv.vsseg2.nxv16i8.i64(<vscale x 16 x i8> [[TMP2]], <vscale x 16 x i8> [[TMP3]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
//
void test_vsseg2e8_v_u8m2x2(uint8_t *base, vuint8m2x2_t v_tuple, size_t vl) {
  return __riscv_vsseg2e8_v_u8m2x2(base, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsseg2e8_v_u8m4x2
// CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 32 x i8> [[V_TUPLE_COERCE0:%.*]], <vscale x 32 x i8> [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 32 x i8>, <vscale x 32 x i8> } poison, <vscale x 32 x i8> [[V_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 32 x i8>, <vscale x 32 x i8> } [[TMP0]], <vscale x 32 x i8> [[V_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 32 x i8>, <vscale x 32 x i8> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 32 x i8>, <vscale x 32 x i8> } [[TMP1]], 1
// CHECK-RV64-NEXT: call void @llvm.riscv.vsseg2.nxv32i8.i64(<vscale x 32 x i8> [[TMP2]], <vscale x 32 x i8> [[TMP3]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
//
void test_vsseg2e8_v_u8m4x2(uint8_t *base, vuint8m4x2_t v_tuple, size_t vl) {
  return __riscv_vsseg2e8_v_u8m4x2(base, v_tuple, vl);
}

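// Masked variants follow: the vbool*_t mask operand selects which segments
// are stored; memory for masked-off elements is left unchanged.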
// CHECK-RV64-LABEL: define dso_local void @test_vsseg2e8_v_i8mf8x2_m
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 1 x i8> [[V_TUPLE_COERCE0:%.*]], <vscale x 1 x i8> [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 1 x i8>, <vscale x 1 x i8> } poison, <vscale x 1 x i8> [[V_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 1 x i8>, <vscale x 1 x i8> } [[TMP0]], <vscale x 1 x i8> [[V_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 1 x i8>, <vscale x 1 x i8> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 1 x i8>, <vscale x 1 x i8> } [[TMP1]], 1
// CHECK-RV64-NEXT: call void @llvm.riscv.vsseg2.mask.nxv1i8.i64(<vscale x 1 x i8> [[TMP2]], <vscale x 1 x i8> [[TMP3]], ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
//
void test_vsseg2e8_v_i8mf8x2_m(vbool64_t mask, int8_t *base, vint8mf8x2_t v_tuple, size_t vl) {
  return __riscv_vsseg2e8_v_i8mf8x2_m(mask, base, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsseg2e8_v_i8mf4x2_m
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 2 x i8> [[V_TUPLE_COERCE0:%.*]], <vscale x 2 x i8> [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 2 x i8>, <vscale x 2 x i8> } poison, <vscale x 2 x i8> [[V_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 2 x i8>, <vscale x 2 x i8> } [[TMP0]], <vscale x 2 x i8> [[V_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 2 x i8>, <vscale x 2 x i8> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 2 x i8>, <vscale x 2 x i8> } [[TMP1]], 1
// CHECK-RV64-NEXT: call void @llvm.riscv.vsseg2.mask.nxv2i8.i64(<vscale x 2 x i8> [[TMP2]], <vscale x 2 x i8> [[TMP3]], ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
//
void test_vsseg2e8_v_i8mf4x2_m(vbool32_t mask, int8_t *base, vint8mf4x2_t v_tuple, size_t vl) {
  return __riscv_vsseg2e8_v_i8mf4x2_m(mask, base, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsseg2e8_v_i8mf2x2_m
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 4 x i8> [[V_TUPLE_COERCE0:%.*]], <vscale x 4 x i8> [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 4 x i8>, <vscale x 4 x i8> } poison, <vscale x 4 x i8> [[V_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 4 x i8>, <vscale x 4 x i8> } [[TMP0]], <vscale x 4 x i8> [[V_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 4 x i8>, <vscale x 4 x i8> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 4 x i8>, <vscale x 4 x i8> } [[TMP1]], 1
// CHECK-RV64-NEXT: call void @llvm.riscv.vsseg2.mask.nxv4i8.i64(<vscale x 4 x i8> [[TMP2]], <vscale x 4 x i8> [[TMP3]], ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
//
void test_vsseg2e8_v_i8mf2x2_m(vbool16_t mask, int8_t *base, vint8mf2x2_t v_tuple, size_t vl) {
  return __riscv_vsseg2e8_v_i8mf2x2_m(mask, base, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsseg2e8_v_i8m1x2_m
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 8 x i8> [[V_TUPLE_COERCE0:%.*]], <vscale x 8 x i8> [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 8 x i8>, <vscale x 8 x i8> } poison, <vscale x 8 x i8> [[V_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 8 x i8>, <vscale x 8 x i8> } [[TMP0]], <vscale x 8 x i8> [[V_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 8 x i8>, <vscale x 8 x i8> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 8 x i8>, <vscale x 8 x i8> } [[TMP1]], 1
// CHECK-RV64-NEXT: call void @llvm.riscv.vsseg2.mask.nxv8i8.i64(<vscale x 8 x i8> [[TMP2]], <vscale x 8 x i8> [[TMP3]], ptr [[BASE]], <vscale x 8 x i1> [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
//
void test_vsseg2e8_v_i8m1x2_m(vbool8_t mask, int8_t *base, vint8m1x2_t v_tuple, size_t vl) {
  return __riscv_vsseg2e8_v_i8m1x2_m(mask, base, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsseg2e8_v_i8m2x2_m
// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 16 x i8> [[V_TUPLE_COERCE0:%.*]], <vscale x 16 x i8> [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 16 x i8>, <vscale x 16 x i8> } poison, <vscale x 16 x i8> [[V_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 16 x i8>, <vscale x 16 x i8> } [[TMP0]], <vscale x 16 x i8> [[V_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 16 x i8>, <vscale x 16 x i8> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 16 x i8>, <vscale x 16 x i8> } [[TMP1]], 1
// CHECK-RV64-NEXT: call void @llvm.riscv.vsseg2.mask.nxv16i8.i64(<vscale x 16 x i8> [[TMP2]], <vscale x 16 x i8> [[TMP3]], ptr [[BASE]], <vscale x 16 x i1> [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
//
void test_vsseg2e8_v_i8m2x2_m(vbool4_t mask, int8_t *base, vint8m2x2_t v_tuple, size_t vl) {
  return __riscv_vsseg2e8_v_i8m2x2_m(mask, base, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsseg2e8_v_i8m4x2_m
// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 32 x i8> [[V_TUPLE_COERCE0:%.*]], <vscale x 32 x i8> [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 32 x i8>, <vscale x 32 x i8> } poison, <vscale x 32 x i8> [[V_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 32 x i8>, <vscale x 32 x i8> } [[TMP0]], <vscale x 32 x i8> [[V_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 32 x i8>, <vscale x 32 x i8> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 32 x i8>, <vscale x 32 x i8> } [[TMP1]], 1
// CHECK-RV64-NEXT: call void @llvm.riscv.vsseg2.mask.nxv32i8.i64(<vscale x 32 x i8> [[TMP2]], <vscale x 32 x i8> [[TMP3]], ptr [[BASE]], <vscale x 32 x i1> [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
//
void test_vsseg2e8_v_i8m4x2_m(vbool2_t mask, int8_t *base, vint8m4x2_t v_tuple, size_t vl) {
  return __riscv_vsseg2e8_v_i8m4x2_m(mask, base, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsseg2e8_v_u8mf8x2_m
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 1 x i8> [[V_TUPLE_COERCE0:%.*]], <vscale x 1 x i8> [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 1 x i8>, <vscale x 1 x i8> } poison, <vscale x 1 x i8> [[V_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 1 x i8>, <vscale x 1 x i8> } [[TMP0]], <vscale x 1 x i8> [[V_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 1 x i8>, <vscale x 1 x i8> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 1 x i8>, <vscale x 1 x i8> } [[TMP1]], 1
// CHECK-RV64-NEXT: call void @llvm.riscv.vsseg2.mask.nxv1i8.i64(<vscale x 1 x i8> [[TMP2]], <vscale x 1 x i8> [[TMP3]], ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
//
void test_vsseg2e8_v_u8mf8x2_m(vbool64_t mask, uint8_t *base, vuint8mf8x2_t v_tuple, size_t vl) {
  return __riscv_vsseg2e8_v_u8mf8x2_m(mask, base, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsseg2e8_v_u8mf4x2_m
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 2 x i8> [[V_TUPLE_COERCE0:%.*]], <vscale x 2 x i8> [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 2 x i8>, <vscale x 2 x i8> } poison, <vscale x 2 x i8> [[V_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 2 x i8>, <vscale x 2 x i8> } [[TMP0]], <vscale x 2 x i8> [[V_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 2 x i8>, <vscale x 2 x i8> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 2 x i8>, <vscale x 2 x i8> } [[TMP1]], 1
// CHECK-RV64-NEXT: call void @llvm.riscv.vsseg2.mask.nxv2i8.i64(<vscale x 2 x i8> [[TMP2]], <vscale x 2 x i8> [[TMP3]], ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
//
void test_vsseg2e8_v_u8mf4x2_m(vbool32_t mask, uint8_t *base, vuint8mf4x2_t v_tuple, size_t vl) {
  return __riscv_vsseg2e8_v_u8mf4x2_m(mask, base, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsseg2e8_v_u8mf2x2_m
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 4 x i8> [[V_TUPLE_COERCE0:%.*]], <vscale x 4 x i8> [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 4 x i8>, <vscale x 4 x i8> } poison, <vscale x 4 x i8> [[V_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 4 x i8>, <vscale x 4 x i8> } [[TMP0]], <vscale x 4 x i8> [[V_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 4 x i8>, <vscale x 4 x i8> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 4 x i8>, <vscale x 4 x i8> } [[TMP1]], 1
// CHECK-RV64-NEXT: call void @llvm.riscv.vsseg2.mask.nxv4i8.i64(<vscale x 4 x i8> [[TMP2]], <vscale x 4 x i8> [[TMP3]], ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
//
void test_vsseg2e8_v_u8mf2x2_m(vbool16_t mask, uint8_t *base, vuint8mf2x2_t v_tuple, size_t vl) {
  return __riscv_vsseg2e8_v_u8mf2x2_m(mask, base, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsseg2e8_v_u8m1x2_m
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 8 x i8> [[V_TUPLE_COERCE0:%.*]], <vscale x 8 x i8> [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 8 x i8>, <vscale x 8 x i8> } poison, <vscale x 8 x i8> [[V_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 8 x i8>, <vscale x 8 x i8> } [[TMP0]], <vscale x 8 x i8> [[V_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 8 x i8>, <vscale x 8 x i8> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 8 x i8>, <vscale x 8 x i8> } [[TMP1]], 1
// CHECK-RV64-NEXT: call void @llvm.riscv.vsseg2.mask.nxv8i8.i64(<vscale x 8 x i8> [[TMP2]], <vscale x 8 x i8> [[TMP3]], ptr [[BASE]], <vscale x 8 x i1> [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
//
void test_vsseg2e8_v_u8m1x2_m(vbool8_t mask, uint8_t *base, vuint8m1x2_t v_tuple, size_t vl) {
  return __riscv_vsseg2e8_v_u8m1x2_m(mask, base, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsseg2e8_v_u8m2x2_m
// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 16 x i8> [[V_TUPLE_COERCE0:%.*]], <vscale x 16 x i8> [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 16 x i8>, <vscale x 16 x i8> } poison, <vscale x 16 x i8> [[V_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 16 x i8>, <vscale x 16 x i8> } [[TMP0]], <vscale x 16 x i8> [[V_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 16 x i8>, <vscale x 16 x i8> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 16 x i8>, <vscale x 16 x i8> } [[TMP1]], 1
// CHECK-RV64-NEXT: call void @llvm.riscv.vsseg2.mask.nxv16i8.i64(<vscale x 16 x i8> [[TMP2]], <vscale x 16 x i8> [[TMP3]], ptr [[BASE]], <vscale x 16 x i1> [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
//
void test_vsseg2e8_v_u8m2x2_m(vbool4_t mask, uint8_t *base, vuint8m2x2_t v_tuple, size_t vl) {
  return __riscv_vsseg2e8_v_u8m2x2_m(mask, base, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsseg2e8_v_u8m4x2_m
// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 32 x i8> [[V_TUPLE_COERCE0:%.*]], <vscale x 32 x i8> [[V_TUPLE_COERCE1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = insertvalue { <vscale x 32 x i8>, <vscale x 32 x i8> } poison, <vscale x 32 x i8> [[V_TUPLE_COERCE0]], 0
// CHECK-RV64-NEXT: [[TMP1:%.*]] = insertvalue { <vscale x 32 x i8>, <vscale x 32 x i8> } [[TMP0]], <vscale x 32 x i8> [[V_TUPLE_COERCE1]], 1
// CHECK-RV64-NEXT: [[TMP2:%.*]] = extractvalue { <vscale x 32 x i8>, <vscale x 32 x i8> } [[TMP1]], 0
// CHECK-RV64-NEXT: [[TMP3:%.*]] = extractvalue { <vscale x 32 x i8>, <vscale x 32 x i8> } [[TMP1]], 1
// CHECK-RV64-NEXT: call void @llvm.riscv.vsseg2.mask.nxv32i8.i64(<vscale x 32 x i8> [[TMP2]], <vscale x 32 x i8> [[TMP3]], ptr [[BASE]], <vscale x 32 x i1> [[MASK]], i64 [[VL]])
// CHECK-RV64-NEXT: ret void
//
void test_vsseg2e8_v_u8m4x2_m(vbool2_t mask, uint8_t *base, vuint8m4x2_t v_tuple, size_t vl) {
  return __riscv_vsseg2e8_v_u8m4x2_m(mask, base, v_tuple, vl);
}