1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc -mtriple=riscv32 -mattr=+v -verify-machineinstrs < %s | FileCheck %s
3 ; RUN: llc -mtriple=riscv64 -mattr=+v -verify-machineinstrs < %s | FileCheck %s
; nxv1i8 (e8/mf8) ashr tests: vector-vector, sext/zext-widened, scalar splat (vx), immediate (vi).
; NOTE(review): the _sext_zext test zexts %va (not %vb) for the shift amount — matches the
; vmin.vx v9, v8 CHECK line (v8 = %va); %vb is unused. Presumably intentional — confirm upstream.
5 define <vscale x 1 x i8> @vsra_vv_nxv1i8(<vscale x 1 x i8> %va, <vscale x 1 x i8> %vb) {
6 ; CHECK-LABEL: vsra_vv_nxv1i8:
8 ; CHECK-NEXT: vsetvli a0, zero, e8, mf8, ta, ma
9 ; CHECK-NEXT: vsra.vv v8, v8, v9
11 %vc = ashr <vscale x 1 x i8> %va, %vb
12 ret <vscale x 1 x i8> %vc
15 define <vscale x 1 x i8> @vsra_vv_nxv1i8_sext_zext(<vscale x 1 x i8> %va, <vscale x 1 x i8> %vb) {
16 ; CHECK-LABEL: vsra_vv_nxv1i8_sext_zext:
18 ; CHECK-NEXT: li a0, 7
19 ; CHECK-NEXT: vsetvli a1, zero, e8, mf8, ta, ma
20 ; CHECK-NEXT: vmin.vx v9, v8, a0
21 ; CHECK-NEXT: vsra.vv v8, v8, v9
23 %sexted_va = sext <vscale x 1 x i8> %va to <vscale x 1 x i32>
24 %zexted_vb = zext <vscale x 1 x i8> %va to <vscale x 1 x i32>
25 %expand = ashr <vscale x 1 x i32> %sexted_va, %zexted_vb
26 %vc = trunc <vscale x 1 x i32> %expand to <vscale x 1 x i8>
27 ret <vscale x 1 x i8> %vc
30 define <vscale x 1 x i8> @vsra_vx_nxv1i8(<vscale x 1 x i8> %va, i8 signext %b) {
31 ; CHECK-LABEL: vsra_vx_nxv1i8:
33 ; CHECK-NEXT: vsetvli a1, zero, e8, mf8, ta, ma
34 ; CHECK-NEXT: vsra.vx v8, v8, a0
36 %head = insertelement <vscale x 1 x i8> poison, i8 %b, i32 0
37 %splat = shufflevector <vscale x 1 x i8> %head, <vscale x 1 x i8> poison, <vscale x 1 x i32> zeroinitializer
38 %vc = ashr <vscale x 1 x i8> %va, %splat
39 ret <vscale x 1 x i8> %vc
42 define <vscale x 1 x i8> @vsra_vi_nxv1i8_0(<vscale x 1 x i8> %va) {
43 ; CHECK-LABEL: vsra_vi_nxv1i8_0:
45 ; CHECK-NEXT: vsetvli a0, zero, e8, mf8, ta, ma
46 ; CHECK-NEXT: vsra.vi v8, v8, 6
48 %head = insertelement <vscale x 1 x i8> poison, i8 6, i32 0
49 %splat = shufflevector <vscale x 1 x i8> %head, <vscale x 1 x i8> poison, <vscale x 1 x i32> zeroinitializer
50 %vc = ashr <vscale x 1 x i8> %va, %splat
51 ret <vscale x 1 x i8> %vc
; nxv2i8 (e8/mf4) ashr tests: vv, sext/zext-widened (shift amount is zext of %va per the
; vmin.vx v9, v8 CHECK; %vb unused — confirm intentional), vx splat, vi immediate.
54 define <vscale x 2 x i8> @vsra_vv_nxv2i8(<vscale x 2 x i8> %va, <vscale x 2 x i8> %vb) {
55 ; CHECK-LABEL: vsra_vv_nxv2i8:
57 ; CHECK-NEXT: vsetvli a0, zero, e8, mf4, ta, ma
58 ; CHECK-NEXT: vsra.vv v8, v8, v9
60 %vc = ashr <vscale x 2 x i8> %va, %vb
61 ret <vscale x 2 x i8> %vc
64 define <vscale x 2 x i8> @vsra_vv_nxv2i8_sext_zext(<vscale x 2 x i8> %va, <vscale x 2 x i8> %vb) {
65 ; CHECK-LABEL: vsra_vv_nxv2i8_sext_zext:
67 ; CHECK-NEXT: li a0, 7
68 ; CHECK-NEXT: vsetvli a1, zero, e8, mf4, ta, ma
69 ; CHECK-NEXT: vmin.vx v9, v8, a0
70 ; CHECK-NEXT: vsra.vv v8, v8, v9
72 %sexted_va = sext <vscale x 2 x i8> %va to <vscale x 2 x i32>
73 %zexted_vb = zext <vscale x 2 x i8> %va to <vscale x 2 x i32>
74 %expand = ashr <vscale x 2 x i32> %sexted_va, %zexted_vb
75 %vc = trunc <vscale x 2 x i32> %expand to <vscale x 2 x i8>
76 ret <vscale x 2 x i8> %vc
79 define <vscale x 2 x i8> @vsra_vx_nxv2i8(<vscale x 2 x i8> %va, i8 signext %b) {
80 ; CHECK-LABEL: vsra_vx_nxv2i8:
82 ; CHECK-NEXT: vsetvli a1, zero, e8, mf4, ta, ma
83 ; CHECK-NEXT: vsra.vx v8, v8, a0
85 %head = insertelement <vscale x 2 x i8> poison, i8 %b, i32 0
86 %splat = shufflevector <vscale x 2 x i8> %head, <vscale x 2 x i8> poison, <vscale x 2 x i32> zeroinitializer
87 %vc = ashr <vscale x 2 x i8> %va, %splat
88 ret <vscale x 2 x i8> %vc
91 define <vscale x 2 x i8> @vsra_vi_nxv2i8_0(<vscale x 2 x i8> %va) {
92 ; CHECK-LABEL: vsra_vi_nxv2i8_0:
94 ; CHECK-NEXT: vsetvli a0, zero, e8, mf4, ta, ma
95 ; CHECK-NEXT: vsra.vi v8, v8, 6
97 %head = insertelement <vscale x 2 x i8> poison, i8 6, i32 0
98 %splat = shufflevector <vscale x 2 x i8> %head, <vscale x 2 x i8> poison, <vscale x 2 x i32> zeroinitializer
99 %vc = ashr <vscale x 2 x i8> %va, %splat
100 ret <vscale x 2 x i8> %vc
; nxv4i8 (e8/mf2) ashr tests: vv, sext/zext-widened (shift amount is zext of %va per the
; vmin.vx v9, v8 CHECK; %vb unused — confirm intentional), vx splat, vi immediate.
103 define <vscale x 4 x i8> @vsra_vv_nxv4i8(<vscale x 4 x i8> %va, <vscale x 4 x i8> %vb) {
104 ; CHECK-LABEL: vsra_vv_nxv4i8:
106 ; CHECK-NEXT: vsetvli a0, zero, e8, mf2, ta, ma
107 ; CHECK-NEXT: vsra.vv v8, v8, v9
109 %vc = ashr <vscale x 4 x i8> %va, %vb
110 ret <vscale x 4 x i8> %vc
113 define <vscale x 4 x i8> @vsra_vv_nxv4i8_sext_zext(<vscale x 4 x i8> %va, <vscale x 4 x i8> %vb) {
114 ; CHECK-LABEL: vsra_vv_nxv4i8_sext_zext:
116 ; CHECK-NEXT: li a0, 7
117 ; CHECK-NEXT: vsetvli a1, zero, e8, mf2, ta, ma
118 ; CHECK-NEXT: vmin.vx v9, v8, a0
119 ; CHECK-NEXT: vsra.vv v8, v8, v9
121 %sexted_va = sext <vscale x 4 x i8> %va to <vscale x 4 x i32>
122 %zexted_vb = zext <vscale x 4 x i8> %va to <vscale x 4 x i32>
123 %expand = ashr <vscale x 4 x i32> %sexted_va, %zexted_vb
124 %vc = trunc <vscale x 4 x i32> %expand to <vscale x 4 x i8>
125 ret <vscale x 4 x i8> %vc
128 define <vscale x 4 x i8> @vsra_vx_nxv4i8(<vscale x 4 x i8> %va, i8 signext %b) {
129 ; CHECK-LABEL: vsra_vx_nxv4i8:
131 ; CHECK-NEXT: vsetvli a1, zero, e8, mf2, ta, ma
132 ; CHECK-NEXT: vsra.vx v8, v8, a0
134 %head = insertelement <vscale x 4 x i8> poison, i8 %b, i32 0
135 %splat = shufflevector <vscale x 4 x i8> %head, <vscale x 4 x i8> poison, <vscale x 4 x i32> zeroinitializer
136 %vc = ashr <vscale x 4 x i8> %va, %splat
137 ret <vscale x 4 x i8> %vc
140 define <vscale x 4 x i8> @vsra_vi_nxv4i8_0(<vscale x 4 x i8> %va) {
141 ; CHECK-LABEL: vsra_vi_nxv4i8_0:
143 ; CHECK-NEXT: vsetvli a0, zero, e8, mf2, ta, ma
144 ; CHECK-NEXT: vsra.vi v8, v8, 6
146 %head = insertelement <vscale x 4 x i8> poison, i8 6, i32 0
147 %splat = shufflevector <vscale x 4 x i8> %head, <vscale x 4 x i8> poison, <vscale x 4 x i32> zeroinitializer
148 %vc = ashr <vscale x 4 x i8> %va, %splat
149 ret <vscale x 4 x i8> %vc
; nxv8i8 (e8/m1) ashr tests: vv, sext/zext-widened (shift amount is zext of %va per the
; vmin.vx v9, v8 CHECK; %vb unused — confirm intentional), vx splat, vi immediate.
152 define <vscale x 8 x i8> @vsra_vv_nxv8i8(<vscale x 8 x i8> %va, <vscale x 8 x i8> %vb) {
153 ; CHECK-LABEL: vsra_vv_nxv8i8:
155 ; CHECK-NEXT: vsetvli a0, zero, e8, m1, ta, ma
156 ; CHECK-NEXT: vsra.vv v8, v8, v9
158 %vc = ashr <vscale x 8 x i8> %va, %vb
159 ret <vscale x 8 x i8> %vc
162 define <vscale x 8 x i8> @vsra_vv_nxv8i8_sext_zext(<vscale x 8 x i8> %va, <vscale x 8 x i8> %vb) {
163 ; CHECK-LABEL: vsra_vv_nxv8i8_sext_zext:
165 ; CHECK-NEXT: li a0, 7
166 ; CHECK-NEXT: vsetvli a1, zero, e8, m1, ta, ma
167 ; CHECK-NEXT: vmin.vx v9, v8, a0
168 ; CHECK-NEXT: vsra.vv v8, v8, v9
170 %sexted_va = sext <vscale x 8 x i8> %va to <vscale x 8 x i32>
171 %zexted_vb = zext <vscale x 8 x i8> %va to <vscale x 8 x i32>
172 %expand = ashr <vscale x 8 x i32> %sexted_va, %zexted_vb
173 %vc = trunc <vscale x 8 x i32> %expand to <vscale x 8 x i8>
174 ret <vscale x 8 x i8> %vc
177 define <vscale x 8 x i8> @vsra_vx_nxv8i8(<vscale x 8 x i8> %va, i8 signext %b) {
178 ; CHECK-LABEL: vsra_vx_nxv8i8:
180 ; CHECK-NEXT: vsetvli a1, zero, e8, m1, ta, ma
181 ; CHECK-NEXT: vsra.vx v8, v8, a0
183 %head = insertelement <vscale x 8 x i8> poison, i8 %b, i32 0
184 %splat = shufflevector <vscale x 8 x i8> %head, <vscale x 8 x i8> poison, <vscale x 8 x i32> zeroinitializer
185 %vc = ashr <vscale x 8 x i8> %va, %splat
186 ret <vscale x 8 x i8> %vc
189 define <vscale x 8 x i8> @vsra_vi_nxv8i8_0(<vscale x 8 x i8> %va) {
190 ; CHECK-LABEL: vsra_vi_nxv8i8_0:
192 ; CHECK-NEXT: vsetvli a0, zero, e8, m1, ta, ma
193 ; CHECK-NEXT: vsra.vi v8, v8, 6
195 %head = insertelement <vscale x 8 x i8> poison, i8 6, i32 0
196 %splat = shufflevector <vscale x 8 x i8> %head, <vscale x 8 x i8> poison, <vscale x 8 x i32> zeroinitializer
197 %vc = ashr <vscale x 8 x i8> %va, %splat
198 ret <vscale x 8 x i8> %vc
; nxv16i8 (e8/m2) ashr tests: vv, sext/zext-widened (shift amount is zext of %va per the
; vmin.vx v10, v8 CHECK; %vb unused — confirm intentional), vx splat, vi immediate.
201 define <vscale x 16 x i8> @vsra_vv_nxv16i8(<vscale x 16 x i8> %va, <vscale x 16 x i8> %vb) {
202 ; CHECK-LABEL: vsra_vv_nxv16i8:
204 ; CHECK-NEXT: vsetvli a0, zero, e8, m2, ta, ma
205 ; CHECK-NEXT: vsra.vv v8, v8, v10
207 %vc = ashr <vscale x 16 x i8> %va, %vb
208 ret <vscale x 16 x i8> %vc
211 define <vscale x 16 x i8> @vsra_vv_nxv16i8_sext_zext(<vscale x 16 x i8> %va, <vscale x 16 x i8> %vb) {
212 ; CHECK-LABEL: vsra_vv_nxv16i8_sext_zext:
214 ; CHECK-NEXT: li a0, 7
215 ; CHECK-NEXT: vsetvli a1, zero, e8, m2, ta, ma
216 ; CHECK-NEXT: vmin.vx v10, v8, a0
217 ; CHECK-NEXT: vsra.vv v8, v8, v10
219 %sexted_va = sext <vscale x 16 x i8> %va to <vscale x 16 x i32>
220 %zexted_vb = zext <vscale x 16 x i8> %va to <vscale x 16 x i32>
221 %expand = ashr <vscale x 16 x i32> %sexted_va, %zexted_vb
222 %vc = trunc <vscale x 16 x i32> %expand to <vscale x 16 x i8>
223 ret <vscale x 16 x i8> %vc
226 define <vscale x 16 x i8> @vsra_vx_nxv16i8(<vscale x 16 x i8> %va, i8 signext %b) {
227 ; CHECK-LABEL: vsra_vx_nxv16i8:
229 ; CHECK-NEXT: vsetvli a1, zero, e8, m2, ta, ma
230 ; CHECK-NEXT: vsra.vx v8, v8, a0
232 %head = insertelement <vscale x 16 x i8> poison, i8 %b, i32 0
233 %splat = shufflevector <vscale x 16 x i8> %head, <vscale x 16 x i8> poison, <vscale x 16 x i32> zeroinitializer
234 %vc = ashr <vscale x 16 x i8> %va, %splat
235 ret <vscale x 16 x i8> %vc
238 define <vscale x 16 x i8> @vsra_vi_nxv16i8_0(<vscale x 16 x i8> %va) {
239 ; CHECK-LABEL: vsra_vi_nxv16i8_0:
241 ; CHECK-NEXT: vsetvli a0, zero, e8, m2, ta, ma
242 ; CHECK-NEXT: vsra.vi v8, v8, 6
244 %head = insertelement <vscale x 16 x i8> poison, i8 6, i32 0
245 %splat = shufflevector <vscale x 16 x i8> %head, <vscale x 16 x i8> poison, <vscale x 16 x i32> zeroinitializer
246 %vc = ashr <vscale x 16 x i8> %va, %splat
247 ret <vscale x 16 x i8> %vc
; nxv32i8 (e8/m4) ashr tests: vv, vx splat, vi immediate. No sext_zext variant here —
; widening to i32 would exceed the largest legal LMUL for this element count.
250 define <vscale x 32 x i8> @vsra_vv_nxv32i8(<vscale x 32 x i8> %va, <vscale x 32 x i8> %vb) {
251 ; CHECK-LABEL: vsra_vv_nxv32i8:
253 ; CHECK-NEXT: vsetvli a0, zero, e8, m4, ta, ma
254 ; CHECK-NEXT: vsra.vv v8, v8, v12
256 %vc = ashr <vscale x 32 x i8> %va, %vb
257 ret <vscale x 32 x i8> %vc
260 define <vscale x 32 x i8> @vsra_vx_nxv32i8(<vscale x 32 x i8> %va, i8 signext %b) {
261 ; CHECK-LABEL: vsra_vx_nxv32i8:
263 ; CHECK-NEXT: vsetvli a1, zero, e8, m4, ta, ma
264 ; CHECK-NEXT: vsra.vx v8, v8, a0
266 %head = insertelement <vscale x 32 x i8> poison, i8 %b, i32 0
267 %splat = shufflevector <vscale x 32 x i8> %head, <vscale x 32 x i8> poison, <vscale x 32 x i32> zeroinitializer
268 %vc = ashr <vscale x 32 x i8> %va, %splat
269 ret <vscale x 32 x i8> %vc
272 define <vscale x 32 x i8> @vsra_vi_nxv32i8_0(<vscale x 32 x i8> %va) {
273 ; CHECK-LABEL: vsra_vi_nxv32i8_0:
275 ; CHECK-NEXT: vsetvli a0, zero, e8, m4, ta, ma
276 ; CHECK-NEXT: vsra.vi v8, v8, 6
278 %head = insertelement <vscale x 32 x i8> poison, i8 6, i32 0
279 %splat = shufflevector <vscale x 32 x i8> %head, <vscale x 32 x i8> poison, <vscale x 32 x i32> zeroinitializer
280 %vc = ashr <vscale x 32 x i8> %va, %splat
281 ret <vscale x 32 x i8> %vc
; nxv64i8 (e8/m8) ashr tests: vv, vx splat, vi immediate.
284 define <vscale x 64 x i8> @vsra_vv_nxv64i8(<vscale x 64 x i8> %va, <vscale x 64 x i8> %vb) {
285 ; CHECK-LABEL: vsra_vv_nxv64i8:
287 ; CHECK-NEXT: vsetvli a0, zero, e8, m8, ta, ma
288 ; CHECK-NEXT: vsra.vv v8, v8, v16
290 %vc = ashr <vscale x 64 x i8> %va, %vb
291 ret <vscale x 64 x i8> %vc
294 define <vscale x 64 x i8> @vsra_vx_nxv64i8(<vscale x 64 x i8> %va, i8 signext %b) {
295 ; CHECK-LABEL: vsra_vx_nxv64i8:
297 ; CHECK-NEXT: vsetvli a1, zero, e8, m8, ta, ma
298 ; CHECK-NEXT: vsra.vx v8, v8, a0
300 %head = insertelement <vscale x 64 x i8> poison, i8 %b, i32 0
301 %splat = shufflevector <vscale x 64 x i8> %head, <vscale x 64 x i8> poison, <vscale x 64 x i32> zeroinitializer
302 %vc = ashr <vscale x 64 x i8> %va, %splat
303 ret <vscale x 64 x i8> %vc
306 define <vscale x 64 x i8> @vsra_vi_nxv64i8_0(<vscale x 64 x i8> %va) {
307 ; CHECK-LABEL: vsra_vi_nxv64i8_0:
309 ; CHECK-NEXT: vsetvli a0, zero, e8, m8, ta, ma
310 ; CHECK-NEXT: vsra.vi v8, v8, 6
312 %head = insertelement <vscale x 64 x i8> poison, i8 6, i32 0
313 %splat = shufflevector <vscale x 64 x i8> %head, <vscale x 64 x i8> poison, <vscale x 64 x i32> zeroinitializer
314 %vc = ashr <vscale x 64 x i8> %va, %splat
315 ret <vscale x 64 x i8> %vc
; nxv1i16 (e16/mf4) ashr tests: vv, sext/zext-widened (amount clamped via li a0, 15;
; shift amount is zext of %va per the vmin.vx v9, v8 CHECK; %vb unused — confirm), vx, vi.
318 define <vscale x 1 x i16> @vsra_vv_nxv1i16(<vscale x 1 x i16> %va, <vscale x 1 x i16> %vb) {
319 ; CHECK-LABEL: vsra_vv_nxv1i16:
321 ; CHECK-NEXT: vsetvli a0, zero, e16, mf4, ta, ma
322 ; CHECK-NEXT: vsra.vv v8, v8, v9
324 %vc = ashr <vscale x 1 x i16> %va, %vb
325 ret <vscale x 1 x i16> %vc
328 define <vscale x 1 x i16> @vsra_vv_nxv1i16_sext_zext(<vscale x 1 x i16> %va, <vscale x 1 x i16> %vb) {
329 ; CHECK-LABEL: vsra_vv_nxv1i16_sext_zext:
331 ; CHECK-NEXT: li a0, 15
332 ; CHECK-NEXT: vsetvli a1, zero, e16, mf4, ta, ma
333 ; CHECK-NEXT: vmin.vx v9, v8, a0
334 ; CHECK-NEXT: vsra.vv v8, v8, v9
336 %sexted_va = sext <vscale x 1 x i16> %va to <vscale x 1 x i32>
337 %zexted_vb = zext <vscale x 1 x i16> %va to <vscale x 1 x i32>
338 %expand = ashr <vscale x 1 x i32> %sexted_va, %zexted_vb
339 %vc = trunc <vscale x 1 x i32> %expand to <vscale x 1 x i16>
340 ret <vscale x 1 x i16> %vc
343 define <vscale x 1 x i16> @vsra_vx_nxv1i16(<vscale x 1 x i16> %va, i16 signext %b) {
344 ; CHECK-LABEL: vsra_vx_nxv1i16:
346 ; CHECK-NEXT: vsetvli a1, zero, e16, mf4, ta, ma
347 ; CHECK-NEXT: vsra.vx v8, v8, a0
349 %head = insertelement <vscale x 1 x i16> poison, i16 %b, i32 0
350 %splat = shufflevector <vscale x 1 x i16> %head, <vscale x 1 x i16> poison, <vscale x 1 x i32> zeroinitializer
351 %vc = ashr <vscale x 1 x i16> %va, %splat
352 ret <vscale x 1 x i16> %vc
355 define <vscale x 1 x i16> @vsra_vi_nxv1i16_0(<vscale x 1 x i16> %va) {
356 ; CHECK-LABEL: vsra_vi_nxv1i16_0:
358 ; CHECK-NEXT: vsetvli a0, zero, e16, mf4, ta, ma
359 ; CHECK-NEXT: vsra.vi v8, v8, 6
361 %head = insertelement <vscale x 1 x i16> poison, i16 6, i32 0
362 %splat = shufflevector <vscale x 1 x i16> %head, <vscale x 1 x i16> poison, <vscale x 1 x i32> zeroinitializer
363 %vc = ashr <vscale x 1 x i16> %va, %splat
364 ret <vscale x 1 x i16> %vc
; nxv2i16 (e16/mf2) ashr tests: vv, sext/zext-widened (shift amount is zext of %va per the
; vmin.vx v9, v8 CHECK; %vb unused — confirm intentional), vx splat, vi immediate.
367 define <vscale x 2 x i16> @vsra_vv_nxv2i16(<vscale x 2 x i16> %va, <vscale x 2 x i16> %vb) {
368 ; CHECK-LABEL: vsra_vv_nxv2i16:
370 ; CHECK-NEXT: vsetvli a0, zero, e16, mf2, ta, ma
371 ; CHECK-NEXT: vsra.vv v8, v8, v9
373 %vc = ashr <vscale x 2 x i16> %va, %vb
374 ret <vscale x 2 x i16> %vc
377 define <vscale x 2 x i16> @vsra_vv_nxv2i16_sext_zext(<vscale x 2 x i16> %va, <vscale x 2 x i16> %vb) {
378 ; CHECK-LABEL: vsra_vv_nxv2i16_sext_zext:
380 ; CHECK-NEXT: li a0, 15
381 ; CHECK-NEXT: vsetvli a1, zero, e16, mf2, ta, ma
382 ; CHECK-NEXT: vmin.vx v9, v8, a0
383 ; CHECK-NEXT: vsra.vv v8, v8, v9
385 %sexted_va = sext <vscale x 2 x i16> %va to <vscale x 2 x i32>
386 %zexted_vb = zext <vscale x 2 x i16> %va to <vscale x 2 x i32>
387 %expand = ashr <vscale x 2 x i32> %sexted_va, %zexted_vb
388 %vc = trunc <vscale x 2 x i32> %expand to <vscale x 2 x i16>
389 ret <vscale x 2 x i16> %vc
392 define <vscale x 2 x i16> @vsra_vx_nxv2i16(<vscale x 2 x i16> %va, i16 signext %b) {
393 ; CHECK-LABEL: vsra_vx_nxv2i16:
395 ; CHECK-NEXT: vsetvli a1, zero, e16, mf2, ta, ma
396 ; CHECK-NEXT: vsra.vx v8, v8, a0
398 %head = insertelement <vscale x 2 x i16> poison, i16 %b, i32 0
399 %splat = shufflevector <vscale x 2 x i16> %head, <vscale x 2 x i16> poison, <vscale x 2 x i32> zeroinitializer
400 %vc = ashr <vscale x 2 x i16> %va, %splat
401 ret <vscale x 2 x i16> %vc
404 define <vscale x 2 x i16> @vsra_vi_nxv2i16_0(<vscale x 2 x i16> %va) {
405 ; CHECK-LABEL: vsra_vi_nxv2i16_0:
407 ; CHECK-NEXT: vsetvli a0, zero, e16, mf2, ta, ma
408 ; CHECK-NEXT: vsra.vi v8, v8, 6
410 %head = insertelement <vscale x 2 x i16> poison, i16 6, i32 0
411 %splat = shufflevector <vscale x 2 x i16> %head, <vscale x 2 x i16> poison, <vscale x 2 x i32> zeroinitializer
412 %vc = ashr <vscale x 2 x i16> %va, %splat
413 ret <vscale x 2 x i16> %vc
; nxv4i16 (e16/m1) ashr tests: vv, sext/zext-widened (shift amount is zext of %va per the
; vmin.vx v9, v8 CHECK; %vb unused — confirm intentional), vx splat, vi immediate.
416 define <vscale x 4 x i16> @vsra_vv_nxv4i16(<vscale x 4 x i16> %va, <vscale x 4 x i16> %vb) {
417 ; CHECK-LABEL: vsra_vv_nxv4i16:
419 ; CHECK-NEXT: vsetvli a0, zero, e16, m1, ta, ma
420 ; CHECK-NEXT: vsra.vv v8, v8, v9
422 %vc = ashr <vscale x 4 x i16> %va, %vb
423 ret <vscale x 4 x i16> %vc
426 define <vscale x 4 x i16> @vsra_vv_nxv4i16_sext_zext(<vscale x 4 x i16> %va, <vscale x 4 x i16> %vb) {
427 ; CHECK-LABEL: vsra_vv_nxv4i16_sext_zext:
429 ; CHECK-NEXT: li a0, 15
430 ; CHECK-NEXT: vsetvli a1, zero, e16, m1, ta, ma
431 ; CHECK-NEXT: vmin.vx v9, v8, a0
432 ; CHECK-NEXT: vsra.vv v8, v8, v9
434 %sexted_va = sext <vscale x 4 x i16> %va to <vscale x 4 x i32>
435 %zexted_vb = zext <vscale x 4 x i16> %va to <vscale x 4 x i32>
436 %expand = ashr <vscale x 4 x i32> %sexted_va, %zexted_vb
437 %vc = trunc <vscale x 4 x i32> %expand to <vscale x 4 x i16>
438 ret <vscale x 4 x i16> %vc
441 define <vscale x 4 x i16> @vsra_vx_nxv4i16(<vscale x 4 x i16> %va, i16 signext %b) {
442 ; CHECK-LABEL: vsra_vx_nxv4i16:
444 ; CHECK-NEXT: vsetvli a1, zero, e16, m1, ta, ma
445 ; CHECK-NEXT: vsra.vx v8, v8, a0
447 %head = insertelement <vscale x 4 x i16> poison, i16 %b, i32 0
448 %splat = shufflevector <vscale x 4 x i16> %head, <vscale x 4 x i16> poison, <vscale x 4 x i32> zeroinitializer
449 %vc = ashr <vscale x 4 x i16> %va, %splat
450 ret <vscale x 4 x i16> %vc
453 define <vscale x 4 x i16> @vsra_vi_nxv4i16_0(<vscale x 4 x i16> %va) {
454 ; CHECK-LABEL: vsra_vi_nxv4i16_0:
456 ; CHECK-NEXT: vsetvli a0, zero, e16, m1, ta, ma
457 ; CHECK-NEXT: vsra.vi v8, v8, 6
459 %head = insertelement <vscale x 4 x i16> poison, i16 6, i32 0
460 %splat = shufflevector <vscale x 4 x i16> %head, <vscale x 4 x i16> poison, <vscale x 4 x i32> zeroinitializer
461 %vc = ashr <vscale x 4 x i16> %va, %splat
462 ret <vscale x 4 x i16> %vc
; nxv8i16 (e16/m2) ashr tests: vv, sext/zext-widened (shift amount is zext of %va per the
; vmin.vx v10, v8 CHECK; %vb unused — confirm intentional), vx splat, vi immediate.
465 define <vscale x 8 x i16> @vsra_vv_nxv8i16(<vscale x 8 x i16> %va, <vscale x 8 x i16> %vb) {
466 ; CHECK-LABEL: vsra_vv_nxv8i16:
468 ; CHECK-NEXT: vsetvli a0, zero, e16, m2, ta, ma
469 ; CHECK-NEXT: vsra.vv v8, v8, v10
471 %vc = ashr <vscale x 8 x i16> %va, %vb
472 ret <vscale x 8 x i16> %vc
475 define <vscale x 8 x i16> @vsra_vv_nxv8i16_sext_zext(<vscale x 8 x i16> %va, <vscale x 8 x i16> %vb) {
476 ; CHECK-LABEL: vsra_vv_nxv8i16_sext_zext:
478 ; CHECK-NEXT: li a0, 15
479 ; CHECK-NEXT: vsetvli a1, zero, e16, m2, ta, ma
480 ; CHECK-NEXT: vmin.vx v10, v8, a0
481 ; CHECK-NEXT: vsra.vv v8, v8, v10
483 %sexted_va = sext <vscale x 8 x i16> %va to <vscale x 8 x i32>
484 %zexted_vb = zext <vscale x 8 x i16> %va to <vscale x 8 x i32>
485 %expand = ashr <vscale x 8 x i32> %sexted_va, %zexted_vb
486 %vc = trunc <vscale x 8 x i32> %expand to <vscale x 8 x i16>
487 ret <vscale x 8 x i16> %vc
490 define <vscale x 8 x i16> @vsra_vx_nxv8i16(<vscale x 8 x i16> %va, i16 signext %b) {
491 ; CHECK-LABEL: vsra_vx_nxv8i16:
493 ; CHECK-NEXT: vsetvli a1, zero, e16, m2, ta, ma
494 ; CHECK-NEXT: vsra.vx v8, v8, a0
496 %head = insertelement <vscale x 8 x i16> poison, i16 %b, i32 0
497 %splat = shufflevector <vscale x 8 x i16> %head, <vscale x 8 x i16> poison, <vscale x 8 x i32> zeroinitializer
498 %vc = ashr <vscale x 8 x i16> %va, %splat
499 ret <vscale x 8 x i16> %vc
502 define <vscale x 8 x i16> @vsra_vi_nxv8i16_0(<vscale x 8 x i16> %va) {
503 ; CHECK-LABEL: vsra_vi_nxv8i16_0:
505 ; CHECK-NEXT: vsetvli a0, zero, e16, m2, ta, ma
506 ; CHECK-NEXT: vsra.vi v8, v8, 6
508 %head = insertelement <vscale x 8 x i16> poison, i16 6, i32 0
509 %splat = shufflevector <vscale x 8 x i16> %head, <vscale x 8 x i16> poison, <vscale x 8 x i32> zeroinitializer
510 %vc = ashr <vscale x 8 x i16> %va, %splat
511 ret <vscale x 8 x i16> %vc
; nxv16i16 (e16/m4) ashr tests: vv, sext/zext-widened (shift amount is zext of %va per the
; vmin.vx v12, v8 CHECK; %vb unused — confirm intentional), vx splat, vi immediate.
514 define <vscale x 16 x i16> @vsra_vv_nxv16i16(<vscale x 16 x i16> %va, <vscale x 16 x i16> %vb) {
515 ; CHECK-LABEL: vsra_vv_nxv16i16:
517 ; CHECK-NEXT: vsetvli a0, zero, e16, m4, ta, ma
518 ; CHECK-NEXT: vsra.vv v8, v8, v12
520 %vc = ashr <vscale x 16 x i16> %va, %vb
521 ret <vscale x 16 x i16> %vc
524 define <vscale x 16 x i16> @vsra_vv_nxv16i16_sext_zext(<vscale x 16 x i16> %va, <vscale x 16 x i16> %vb) {
525 ; CHECK-LABEL: vsra_vv_nxv16i16_sext_zext:
527 ; CHECK-NEXT: li a0, 15
528 ; CHECK-NEXT: vsetvli a1, zero, e16, m4, ta, ma
529 ; CHECK-NEXT: vmin.vx v12, v8, a0
530 ; CHECK-NEXT: vsra.vv v8, v8, v12
532 %sexted_va = sext <vscale x 16 x i16> %va to <vscale x 16 x i32>
533 %zexted_vb = zext <vscale x 16 x i16> %va to <vscale x 16 x i32>
534 %expand = ashr <vscale x 16 x i32> %sexted_va, %zexted_vb
535 %vc = trunc <vscale x 16 x i32> %expand to <vscale x 16 x i16>
536 ret <vscale x 16 x i16> %vc
539 define <vscale x 16 x i16> @vsra_vx_nxv16i16(<vscale x 16 x i16> %va, i16 signext %b) {
540 ; CHECK-LABEL: vsra_vx_nxv16i16:
542 ; CHECK-NEXT: vsetvli a1, zero, e16, m4, ta, ma
543 ; CHECK-NEXT: vsra.vx v8, v8, a0
545 %head = insertelement <vscale x 16 x i16> poison, i16 %b, i32 0
546 %splat = shufflevector <vscale x 16 x i16> %head, <vscale x 16 x i16> poison, <vscale x 16 x i32> zeroinitializer
547 %vc = ashr <vscale x 16 x i16> %va, %splat
548 ret <vscale x 16 x i16> %vc
551 define <vscale x 16 x i16> @vsra_vi_nxv16i16_0(<vscale x 16 x i16> %va) {
552 ; CHECK-LABEL: vsra_vi_nxv16i16_0:
554 ; CHECK-NEXT: vsetvli a0, zero, e16, m4, ta, ma
555 ; CHECK-NEXT: vsra.vi v8, v8, 6
557 %head = insertelement <vscale x 16 x i16> poison, i16 6, i32 0
558 %splat = shufflevector <vscale x 16 x i16> %head, <vscale x 16 x i16> poison, <vscale x 16 x i32> zeroinitializer
559 %vc = ashr <vscale x 16 x i16> %va, %splat
560 ret <vscale x 16 x i16> %vc
; nxv32i16 (e16/m8) ashr tests: vv, vx splat, vi immediate (no sext_zext variant at m8).
563 define <vscale x 32 x i16> @vsra_vv_nxv32i16(<vscale x 32 x i16> %va, <vscale x 32 x i16> %vb) {
564 ; CHECK-LABEL: vsra_vv_nxv32i16:
566 ; CHECK-NEXT: vsetvli a0, zero, e16, m8, ta, ma
567 ; CHECK-NEXT: vsra.vv v8, v8, v16
569 %vc = ashr <vscale x 32 x i16> %va, %vb
570 ret <vscale x 32 x i16> %vc
573 define <vscale x 32 x i16> @vsra_vx_nxv32i16(<vscale x 32 x i16> %va, i16 signext %b) {
574 ; CHECK-LABEL: vsra_vx_nxv32i16:
576 ; CHECK-NEXT: vsetvli a1, zero, e16, m8, ta, ma
577 ; CHECK-NEXT: vsra.vx v8, v8, a0
579 %head = insertelement <vscale x 32 x i16> poison, i16 %b, i32 0
580 %splat = shufflevector <vscale x 32 x i16> %head, <vscale x 32 x i16> poison, <vscale x 32 x i32> zeroinitializer
581 %vc = ashr <vscale x 32 x i16> %va, %splat
582 ret <vscale x 32 x i16> %vc
585 define <vscale x 32 x i16> @vsra_vi_nxv32i16_0(<vscale x 32 x i16> %va) {
586 ; CHECK-LABEL: vsra_vi_nxv32i16_0:
588 ; CHECK-NEXT: vsetvli a0, zero, e16, m8, ta, ma
589 ; CHECK-NEXT: vsra.vi v8, v8, 6
591 %head = insertelement <vscale x 32 x i16> poison, i16 6, i32 0
592 %splat = shufflevector <vscale x 32 x i16> %head, <vscale x 32 x i16> poison, <vscale x 32 x i32> zeroinitializer
593 %vc = ashr <vscale x 32 x i16> %va, %splat
594 ret <vscale x 32 x i16> %vc
; nxv1i32 (e32/mf2) ashr tests: vv, vx splat, vi with 31 (max 5-bit vsra.vi immediate).
597 define <vscale x 1 x i32> @vsra_vv_nxv1i32(<vscale x 1 x i32> %va, <vscale x 1 x i32> %vb) {
598 ; CHECK-LABEL: vsra_vv_nxv1i32:
600 ; CHECK-NEXT: vsetvli a0, zero, e32, mf2, ta, ma
601 ; CHECK-NEXT: vsra.vv v8, v8, v9
603 %vc = ashr <vscale x 1 x i32> %va, %vb
604 ret <vscale x 1 x i32> %vc
607 define <vscale x 1 x i32> @vsra_vx_nxv1i32(<vscale x 1 x i32> %va, i32 signext %b) {
608 ; CHECK-LABEL: vsra_vx_nxv1i32:
610 ; CHECK-NEXT: vsetvli a1, zero, e32, mf2, ta, ma
611 ; CHECK-NEXT: vsra.vx v8, v8, a0
613 %head = insertelement <vscale x 1 x i32> poison, i32 %b, i32 0
614 %splat = shufflevector <vscale x 1 x i32> %head, <vscale x 1 x i32> poison, <vscale x 1 x i32> zeroinitializer
615 %vc = ashr <vscale x 1 x i32> %va, %splat
616 ret <vscale x 1 x i32> %vc
619 define <vscale x 1 x i32> @vsra_vi_nxv1i32_0(<vscale x 1 x i32> %va) {
620 ; CHECK-LABEL: vsra_vi_nxv1i32_0:
622 ; CHECK-NEXT: vsetvli a0, zero, e32, mf2, ta, ma
623 ; CHECK-NEXT: vsra.vi v8, v8, 31
625 %head = insertelement <vscale x 1 x i32> poison, i32 31, i32 0
626 %splat = shufflevector <vscale x 1 x i32> %head, <vscale x 1 x i32> poison, <vscale x 1 x i32> zeroinitializer
627 %vc = ashr <vscale x 1 x i32> %va, %splat
628 ret <vscale x 1 x i32> %vc
; nxv2i32 (e32/m1) ashr tests: vv, vx splat, vi with 31 (max 5-bit vsra.vi immediate).
631 define <vscale x 2 x i32> @vsra_vv_nxv2i32(<vscale x 2 x i32> %va, <vscale x 2 x i32> %vb) {
632 ; CHECK-LABEL: vsra_vv_nxv2i32:
634 ; CHECK-NEXT: vsetvli a0, zero, e32, m1, ta, ma
635 ; CHECK-NEXT: vsra.vv v8, v8, v9
637 %vc = ashr <vscale x 2 x i32> %va, %vb
638 ret <vscale x 2 x i32> %vc
641 define <vscale x 2 x i32> @vsra_vx_nxv2i32(<vscale x 2 x i32> %va, i32 signext %b) {
642 ; CHECK-LABEL: vsra_vx_nxv2i32:
644 ; CHECK-NEXT: vsetvli a1, zero, e32, m1, ta, ma
645 ; CHECK-NEXT: vsra.vx v8, v8, a0
647 %head = insertelement <vscale x 2 x i32> poison, i32 %b, i32 0
648 %splat = shufflevector <vscale x 2 x i32> %head, <vscale x 2 x i32> poison, <vscale x 2 x i32> zeroinitializer
649 %vc = ashr <vscale x 2 x i32> %va, %splat
650 ret <vscale x 2 x i32> %vc
653 define <vscale x 2 x i32> @vsra_vi_nxv2i32_0(<vscale x 2 x i32> %va) {
654 ; CHECK-LABEL: vsra_vi_nxv2i32_0:
656 ; CHECK-NEXT: vsetvli a0, zero, e32, m1, ta, ma
657 ; CHECK-NEXT: vsra.vi v8, v8, 31
659 %head = insertelement <vscale x 2 x i32> poison, i32 31, i32 0
660 %splat = shufflevector <vscale x 2 x i32> %head, <vscale x 2 x i32> poison, <vscale x 2 x i32> zeroinitializer
661 %vc = ashr <vscale x 2 x i32> %va, %splat
662 ret <vscale x 2 x i32> %vc
; nxv4i32 (e32/m2) ashr tests: vv, vx splat, vi with 31 (max 5-bit vsra.vi immediate).
665 define <vscale x 4 x i32> @vsra_vv_nxv4i32(<vscale x 4 x i32> %va, <vscale x 4 x i32> %vb) {
666 ; CHECK-LABEL: vsra_vv_nxv4i32:
668 ; CHECK-NEXT: vsetvli a0, zero, e32, m2, ta, ma
669 ; CHECK-NEXT: vsra.vv v8, v8, v10
671 %vc = ashr <vscale x 4 x i32> %va, %vb
672 ret <vscale x 4 x i32> %vc
675 define <vscale x 4 x i32> @vsra_vx_nxv4i32(<vscale x 4 x i32> %va, i32 signext %b) {
676 ; CHECK-LABEL: vsra_vx_nxv4i32:
678 ; CHECK-NEXT: vsetvli a1, zero, e32, m2, ta, ma
679 ; CHECK-NEXT: vsra.vx v8, v8, a0
681 %head = insertelement <vscale x 4 x i32> poison, i32 %b, i32 0
682 %splat = shufflevector <vscale x 4 x i32> %head, <vscale x 4 x i32> poison, <vscale x 4 x i32> zeroinitializer
683 %vc = ashr <vscale x 4 x i32> %va, %splat
684 ret <vscale x 4 x i32> %vc
687 define <vscale x 4 x i32> @vsra_vi_nxv4i32_0(<vscale x 4 x i32> %va) {
688 ; CHECK-LABEL: vsra_vi_nxv4i32_0:
690 ; CHECK-NEXT: vsetvli a0, zero, e32, m2, ta, ma
691 ; CHECK-NEXT: vsra.vi v8, v8, 31
693 %head = insertelement <vscale x 4 x i32> poison, i32 31, i32 0
694 %splat = shufflevector <vscale x 4 x i32> %head, <vscale x 4 x i32> poison, <vscale x 4 x i32> zeroinitializer
695 %vc = ashr <vscale x 4 x i32> %va, %splat
696 ret <vscale x 4 x i32> %vc
; nxv8i32 (e32/m4) ashr tests: vv, vx splat, vi with 31 (max 5-bit vsra.vi immediate).
699 define <vscale x 8 x i32> @vsra_vv_nxv8i32(<vscale x 8 x i32> %va, <vscale x 8 x i32> %vb) {
700 ; CHECK-LABEL: vsra_vv_nxv8i32:
702 ; CHECK-NEXT: vsetvli a0, zero, e32, m4, ta, ma
703 ; CHECK-NEXT: vsra.vv v8, v8, v12
705 %vc = ashr <vscale x 8 x i32> %va, %vb
706 ret <vscale x 8 x i32> %vc
709 define <vscale x 8 x i32> @vsra_vx_nxv8i32(<vscale x 8 x i32> %va, i32 signext %b) {
710 ; CHECK-LABEL: vsra_vx_nxv8i32:
712 ; CHECK-NEXT: vsetvli a1, zero, e32, m4, ta, ma
713 ; CHECK-NEXT: vsra.vx v8, v8, a0
715 %head = insertelement <vscale x 8 x i32> poison, i32 %b, i32 0
716 %splat = shufflevector <vscale x 8 x i32> %head, <vscale x 8 x i32> poison, <vscale x 8 x i32> zeroinitializer
717 %vc = ashr <vscale x 8 x i32> %va, %splat
718 ret <vscale x 8 x i32> %vc
721 define <vscale x 8 x i32> @vsra_vi_nxv8i32_0(<vscale x 8 x i32> %va) {
722 ; CHECK-LABEL: vsra_vi_nxv8i32_0:
724 ; CHECK-NEXT: vsetvli a0, zero, e32, m4, ta, ma
725 ; CHECK-NEXT: vsra.vi v8, v8, 31
727 %head = insertelement <vscale x 8 x i32> poison, i32 31, i32 0
728 %splat = shufflevector <vscale x 8 x i32> %head, <vscale x 8 x i32> poison, <vscale x 8 x i32> zeroinitializer
729 %vc = ashr <vscale x 8 x i32> %va, %splat
730 ret <vscale x 8 x i32> %vc
; nxv16i32 (e32/m8) ashr tests: vv, vx splat, vi with 31 (max 5-bit vsra.vi immediate).
733 define <vscale x 16 x i32> @vsra_vv_nxv16i32(<vscale x 16 x i32> %va, <vscale x 16 x i32> %vb) {
734 ; CHECK-LABEL: vsra_vv_nxv16i32:
736 ; CHECK-NEXT: vsetvli a0, zero, e32, m8, ta, ma
737 ; CHECK-NEXT: vsra.vv v8, v8, v16
739 %vc = ashr <vscale x 16 x i32> %va, %vb
740 ret <vscale x 16 x i32> %vc
743 define <vscale x 16 x i32> @vsra_vx_nxv16i32(<vscale x 16 x i32> %va, i32 signext %b) {
744 ; CHECK-LABEL: vsra_vx_nxv16i32:
746 ; CHECK-NEXT: vsetvli a1, zero, e32, m8, ta, ma
747 ; CHECK-NEXT: vsra.vx v8, v8, a0
749 %head = insertelement <vscale x 16 x i32> poison, i32 %b, i32 0
750 %splat = shufflevector <vscale x 16 x i32> %head, <vscale x 16 x i32> poison, <vscale x 16 x i32> zeroinitializer
751 %vc = ashr <vscale x 16 x i32> %va, %splat
752 ret <vscale x 16 x i32> %vc
755 define <vscale x 16 x i32> @vsra_vi_nxv16i32_0(<vscale x 16 x i32> %va) {
756 ; CHECK-LABEL: vsra_vi_nxv16i32_0:
758 ; CHECK-NEXT: vsetvli a0, zero, e32, m8, ta, ma
759 ; CHECK-NEXT: vsra.vi v8, v8, 31
761 %head = insertelement <vscale x 16 x i32> poison, i32 31, i32 0
762 %splat = shufflevector <vscale x 16 x i32> %head, <vscale x 16 x i32> poison, <vscale x 16 x i32> zeroinitializer
763 %vc = ashr <vscale x 16 x i32> %va, %splat
764 ret <vscale x 16 x i32> %vc
; nxv1i64 (e64/m1) ashr tests: vv, vx splat (i64 %b — no signext needed at e64), vi with 31
; (fits the 5-bit immediate), and _1 with shift 32 (exceeds 5 bits, so lowered as li + vsra.vx).
767 define <vscale x 1 x i64> @vsra_vv_nxv1i64(<vscale x 1 x i64> %va, <vscale x 1 x i64> %vb) {
768 ; CHECK-LABEL: vsra_vv_nxv1i64:
770 ; CHECK-NEXT: vsetvli a0, zero, e64, m1, ta, ma
771 ; CHECK-NEXT: vsra.vv v8, v8, v9
773 %vc = ashr <vscale x 1 x i64> %va, %vb
774 ret <vscale x 1 x i64> %vc
777 define <vscale x 1 x i64> @vsra_vx_nxv1i64(<vscale x 1 x i64> %va, i64 %b) {
778 ; CHECK-LABEL: vsra_vx_nxv1i64:
780 ; CHECK-NEXT: vsetvli a1, zero, e64, m1, ta, ma
781 ; CHECK-NEXT: vsra.vx v8, v8, a0
783 %head = insertelement <vscale x 1 x i64> poison, i64 %b, i32 0
784 %splat = shufflevector <vscale x 1 x i64> %head, <vscale x 1 x i64> poison, <vscale x 1 x i32> zeroinitializer
785 %vc = ashr <vscale x 1 x i64> %va, %splat
786 ret <vscale x 1 x i64> %vc
789 define <vscale x 1 x i64> @vsra_vi_nxv1i64_0(<vscale x 1 x i64> %va) {
790 ; CHECK-LABEL: vsra_vi_nxv1i64_0:
792 ; CHECK-NEXT: vsetvli a0, zero, e64, m1, ta, ma
793 ; CHECK-NEXT: vsra.vi v8, v8, 31
795 %head = insertelement <vscale x 1 x i64> poison, i64 31, i32 0
796 %splat = shufflevector <vscale x 1 x i64> %head, <vscale x 1 x i64> poison, <vscale x 1 x i32> zeroinitializer
797 %vc = ashr <vscale x 1 x i64> %va, %splat
798 ret <vscale x 1 x i64> %vc
801 define <vscale x 1 x i64> @vsra_vi_nxv1i64_1(<vscale x 1 x i64> %va) {
802 ; CHECK-LABEL: vsra_vi_nxv1i64_1:
804 ; CHECK-NEXT: li a0, 32
805 ; CHECK-NEXT: vsetvli a1, zero, e64, m1, ta, ma
806 ; CHECK-NEXT: vsra.vx v8, v8, a0
808 %head = insertelement <vscale x 1 x i64> poison, i64 32, i32 0
809 %splat = shufflevector <vscale x 1 x i64> %head, <vscale x 1 x i64> poison, <vscale x 1 x i32> zeroinitializer
810 %vc = ashr <vscale x 1 x i64> %va, %splat
811 ret <vscale x 1 x i64> %vc
; nxv2i64 (e64/m2) ashr tests: vv, vx splat, vi with 31, and _1 with shift 32
; (exceeds the 5-bit vsra.vi immediate, so lowered as li + vsra.vx).
814 define <vscale x 2 x i64> @vsra_vv_nxv2i64(<vscale x 2 x i64> %va, <vscale x 2 x i64> %vb) {
815 ; CHECK-LABEL: vsra_vv_nxv2i64:
817 ; CHECK-NEXT: vsetvli a0, zero, e64, m2, ta, ma
818 ; CHECK-NEXT: vsra.vv v8, v8, v10
820 %vc = ashr <vscale x 2 x i64> %va, %vb
821 ret <vscale x 2 x i64> %vc
824 define <vscale x 2 x i64> @vsra_vx_nxv2i64(<vscale x 2 x i64> %va, i64 %b) {
825 ; CHECK-LABEL: vsra_vx_nxv2i64:
827 ; CHECK-NEXT: vsetvli a1, zero, e64, m2, ta, ma
828 ; CHECK-NEXT: vsra.vx v8, v8, a0
830 %head = insertelement <vscale x 2 x i64> poison, i64 %b, i32 0
831 %splat = shufflevector <vscale x 2 x i64> %head, <vscale x 2 x i64> poison, <vscale x 2 x i32> zeroinitializer
832 %vc = ashr <vscale x 2 x i64> %va, %splat
833 ret <vscale x 2 x i64> %vc
836 define <vscale x 2 x i64> @vsra_vi_nxv2i64_0(<vscale x 2 x i64> %va) {
837 ; CHECK-LABEL: vsra_vi_nxv2i64_0:
839 ; CHECK-NEXT: vsetvli a0, zero, e64, m2, ta, ma
840 ; CHECK-NEXT: vsra.vi v8, v8, 31
842 %head = insertelement <vscale x 2 x i64> poison, i64 31, i32 0
843 %splat = shufflevector <vscale x 2 x i64> %head, <vscale x 2 x i64> poison, <vscale x 2 x i32> zeroinitializer
844 %vc = ashr <vscale x 2 x i64> %va, %splat
845 ret <vscale x 2 x i64> %vc
848 define <vscale x 2 x i64> @vsra_vi_nxv2i64_1(<vscale x 2 x i64> %va) {
849 ; CHECK-LABEL: vsra_vi_nxv2i64_1:
851 ; CHECK-NEXT: li a0, 32
852 ; CHECK-NEXT: vsetvli a1, zero, e64, m2, ta, ma
853 ; CHECK-NEXT: vsra.vx v8, v8, a0
855 %head = insertelement <vscale x 2 x i64> poison, i64 32, i32 0
856 %splat = shufflevector <vscale x 2 x i64> %head, <vscale x 2 x i64> poison, <vscale x 2 x i32> zeroinitializer
857 %vc = ashr <vscale x 2 x i64> %va, %splat
858 ret <vscale x 2 x i64> %vc
861 define <vscale x 4 x i64> @vsra_vv_nxv4i64(<vscale x 4 x i64> %va, <vscale x 4 x i64> %vb) {
862 ; CHECK-LABEL: vsra_vv_nxv4i64:
864 ; CHECK-NEXT: vsetvli a0, zero, e64, m4, ta, ma
865 ; CHECK-NEXT: vsra.vv v8, v8, v12
867 %vc = ashr <vscale x 4 x i64> %va, %vb
868 ret <vscale x 4 x i64> %vc
; ashr by a splatted scalar selects the scalar-register (.vx) form.
define <vscale x 4 x i64> @vsra_vx_nxv4i64(<vscale x 4 x i64> %va, i64 %b) {
; CHECK-LABEL: vsra_vx_nxv4i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli a1, zero, e64, m4, ta, ma
; CHECK-NEXT:    vsra.vx v8, v8, a0
; CHECK-NEXT:    ret
  %head = insertelement <vscale x 4 x i64> poison, i64 %b, i32 0
  %splat = shufflevector <vscale x 4 x i64> %head, <vscale x 4 x i64> poison, <vscale x 4 x i32> zeroinitializer
  %vc = ashr <vscale x 4 x i64> %va, %splat
  ret <vscale x 4 x i64> %vc
}
; Shift amount 31 fits the 5-bit immediate, so the .vi form is used.
define <vscale x 4 x i64> @vsra_vi_nxv4i64_0(<vscale x 4 x i64> %va) {
; CHECK-LABEL: vsra_vi_nxv4i64_0:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli a0, zero, e64, m4, ta, ma
; CHECK-NEXT:    vsra.vi v8, v8, 31
; CHECK-NEXT:    ret
  %head = insertelement <vscale x 4 x i64> poison, i64 31, i32 0
  %splat = shufflevector <vscale x 4 x i64> %head, <vscale x 4 x i64> poison, <vscale x 4 x i32> zeroinitializer
  %vc = ashr <vscale x 4 x i64> %va, %splat
  ret <vscale x 4 x i64> %vc
}
; Shift amount 32 exceeds the 5-bit immediate range, so it is materialized
; in a GPR and the .vx form is used instead of .vi.
define <vscale x 4 x i64> @vsra_vi_nxv4i64_1(<vscale x 4 x i64> %va) {
; CHECK-LABEL: vsra_vi_nxv4i64_1:
; CHECK:       # %bb.0:
; CHECK-NEXT:    li a0, 32
; CHECK-NEXT:    vsetvli a1, zero, e64, m4, ta, ma
; CHECK-NEXT:    vsra.vx v8, v8, a0
; CHECK-NEXT:    ret
  %head = insertelement <vscale x 4 x i64> poison, i64 32, i32 0
  %splat = shufflevector <vscale x 4 x i64> %head, <vscale x 4 x i64> poison, <vscale x 4 x i32> zeroinitializer
  %vc = ashr <vscale x 4 x i64> %va, %splat
  ret <vscale x 4 x i64> %vc
}
; ashr with a full vector shift amount selects the vector-vector (.vv) form.
define <vscale x 8 x i64> @vsra_vv_nxv8i64(<vscale x 8 x i64> %va, <vscale x 8 x i64> %vb) {
; CHECK-LABEL: vsra_vv_nxv8i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli a0, zero, e64, m8, ta, ma
; CHECK-NEXT:    vsra.vv v8, v8, v16
; CHECK-NEXT:    ret
  %vc = ashr <vscale x 8 x i64> %va, %vb
  ret <vscale x 8 x i64> %vc
}
; ashr by a splatted scalar selects the scalar-register (.vx) form.
define <vscale x 8 x i64> @vsra_vx_nxv8i64(<vscale x 8 x i64> %va, i64 %b) {
; CHECK-LABEL: vsra_vx_nxv8i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli a1, zero, e64, m8, ta, ma
; CHECK-NEXT:    vsra.vx v8, v8, a0
; CHECK-NEXT:    ret
  %head = insertelement <vscale x 8 x i64> poison, i64 %b, i32 0
  %splat = shufflevector <vscale x 8 x i64> %head, <vscale x 8 x i64> poison, <vscale x 8 x i32> zeroinitializer
  %vc = ashr <vscale x 8 x i64> %va, %splat
  ret <vscale x 8 x i64> %vc
}
; Shift amount 31 fits the 5-bit immediate, so the .vi form is used.
define <vscale x 8 x i64> @vsra_vi_nxv8i64_0(<vscale x 8 x i64> %va) {
; CHECK-LABEL: vsra_vi_nxv8i64_0:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli a0, zero, e64, m8, ta, ma
; CHECK-NEXT:    vsra.vi v8, v8, 31
; CHECK-NEXT:    ret
  %head = insertelement <vscale x 8 x i64> poison, i64 31, i32 0
  %splat = shufflevector <vscale x 8 x i64> %head, <vscale x 8 x i64> poison, <vscale x 8 x i32> zeroinitializer
  %vc = ashr <vscale x 8 x i64> %va, %splat
  ret <vscale x 8 x i64> %vc
}
; Shift amount 32 exceeds the 5-bit immediate range, so it is materialized
; in a GPR and the .vx form is used instead of .vi.
define <vscale x 8 x i64> @vsra_vi_nxv8i64_1(<vscale x 8 x i64> %va) {
; CHECK-LABEL: vsra_vi_nxv8i64_1:
; CHECK:       # %bb.0:
; CHECK-NEXT:    li a0, 32
; CHECK-NEXT:    vsetvli a1, zero, e64, m8, ta, ma
; CHECK-NEXT:    vsra.vx v8, v8, a0
; CHECK-NEXT:    ret
  %head = insertelement <vscale x 8 x i64> poison, i64 32, i32 0
  %splat = shufflevector <vscale x 8 x i64> %head, <vscale x 8 x i64> poison, <vscale x 8 x i32> zeroinitializer
  %vc = ashr <vscale x 8 x i64> %va, %splat
  ret <vscale x 8 x i64> %vc
}
; Masked .vv form: selecting the shift amount against zero folds into a
; masked vsra (ta, mu with dest == src, since shifting by zero is identity
; on the inactive elements).
; NOTE(review): renamed from @vsra_vv_mask_nxv4i32 — every type in the body
; is <vscale x 8 x i32>, matching the sibling vx/vi mask tests below.
define <vscale x 8 x i32> @vsra_vv_mask_nxv8i32(<vscale x 8 x i32> %va, <vscale x 8 x i32> %vb, <vscale x 8 x i1> %mask) {
; CHECK-LABEL: vsra_vv_mask_nxv8i32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli a0, zero, e32, m4, ta, mu
; CHECK-NEXT:    vsra.vv v8, v8, v12, v0.t
; CHECK-NEXT:    ret
  %vs = select <vscale x 8 x i1> %mask, <vscale x 8 x i32> %vb, <vscale x 8 x i32> zeroinitializer
  %vc = ashr <vscale x 8 x i32> %va, %vs
  ret <vscale x 8 x i32> %vc
}
; Masked .vx form: the select-with-zero of the splatted shift amount folds
; into a masked vsra.vx.
define <vscale x 8 x i32> @vsra_vx_mask_nxv8i32(<vscale x 8 x i32> %va, i32 signext %b, <vscale x 8 x i1> %mask) {
; CHECK-LABEL: vsra_vx_mask_nxv8i32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli a1, zero, e32, m4, ta, mu
; CHECK-NEXT:    vsra.vx v8, v8, a0, v0.t
; CHECK-NEXT:    ret
  %head = insertelement <vscale x 8 x i32> poison, i32 %b, i32 0
  %splat = shufflevector <vscale x 8 x i32> %head, <vscale x 8 x i32> poison, <vscale x 8 x i32> zeroinitializer
  %vs = select <vscale x 8 x i1> %mask, <vscale x 8 x i32> %splat, <vscale x 8 x i32> zeroinitializer
  %vc = ashr <vscale x 8 x i32> %va, %vs
  ret <vscale x 8 x i32> %vc
}
; Masked .vi form: immediate 31 fits the 5-bit field, and the select-with-zero
; folds into a masked vsra.vi.
define <vscale x 8 x i32> @vsra_vi_mask_nxv8i32(<vscale x 8 x i32> %va, <vscale x 8 x i1> %mask) {
; CHECK-LABEL: vsra_vi_mask_nxv8i32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli a0, zero, e32, m4, ta, mu
; CHECK-NEXT:    vsra.vi v8, v8, 31, v0.t
; CHECK-NEXT:    ret
  %head = insertelement <vscale x 8 x i32> poison, i32 31, i32 0
  %splat = shufflevector <vscale x 8 x i32> %head, <vscale x 8 x i32> poison, <vscale x 8 x i32> zeroinitializer
  %vs = select <vscale x 8 x i1> %mask, <vscale x 8 x i32> %splat, <vscale x 8 x i32> zeroinitializer
  %vc = ashr <vscale x 8 x i32> %va, %vs
  ret <vscale x 8 x i32> %vc
}