1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc < %s -mtriple=i686-unknown -mattr=+sse2 | FileCheck %s --check-prefixes=X86-SSE2
3 ; RUN: llc < %s -mtriple=i686-unknown -mattr=+sse2,+avx | FileCheck %s --check-prefixes=X86-AVX,X86-AVX1
4 ; RUN: llc < %s -mtriple=i686-unknown -mattr=+sse2,+avx,+avx2 | FileCheck %s --check-prefixes=X86-AVX,X86-AVX2
5 ; RUN: llc < %s -mtriple=x86_64-unknown -mattr=+sse2 | FileCheck %s --check-prefixes=X64-SSE2
6 ; RUN: llc < %s -mtriple=x86_64-unknown -mattr=+sse2,+avx | FileCheck %s --check-prefixes=X64-AVX,X64-AVX1
7 ; RUN: llc < %s -mtriple=x86_64-unknown -mattr=+sse2,+avx,+avx2 | FileCheck %s --check-prefixes=X64-AVX,X64-AVX2
9 ; The mask is all-ones, potentially shifted.
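; Every test below has the same shape: mask the input with a constant whose set
; bits form one contiguous run, then shift the masked value, and the generated
; assertions record how each target currently lowers that combination. As a
; minimal illustrative sketch (not one of the checked functions; the @sketch
; name and the particular constants are only an example), the pattern is:
;
;   define <8 x i16> @sketch(<8 x i16> %x) {
;     %m = and <8 x i16> %x, <i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032>
;     %s = lshr <8 x i16> %m, <i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3>
;     ret <8 x i16> %s
;   }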
11 ;------------------------------------------------------------------------------;
12 ; 128-bit vector; 8-bit elements = 16 elements
13 ;------------------------------------------------------------------------------;
17 define <16 x i8> @test_128_i8_x_16_7_mask_lshr_1(<16 x i8> %a0) {
18 ; X86-SSE2-LABEL: test_128_i8_x_16_7_mask_lshr_1:
20 ; X86-SSE2-NEXT: psrlw $1, %xmm0
21 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
22 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
25 ; X86-AVX-LABEL: test_128_i8_x_16_7_mask_lshr_1:
27 ; X86-AVX-NEXT: vpsrlw $1, %xmm0, %xmm0
28 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
29 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
32 ; X64-SSE2-LABEL: test_128_i8_x_16_7_mask_lshr_1:
34 ; X64-SSE2-NEXT: psrlw $1, %xmm0
35 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
36 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
39 ; X64-AVX-LABEL: test_128_i8_x_16_7_mask_lshr_1:
41 ; X64-AVX-NEXT: vpsrlw $1, %xmm0, %xmm0
42 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
43 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
45 %t0 = and <16 x i8> %a0, <i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7>
46 %t1 = lshr <16 x i8> %t0, <i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1>
ret <16 x i8> %t1
}
50 define <16 x i8> @test_128_i8_x_16_28_mask_lshr_1(<16 x i8> %a0) {
51 ; X86-SSE2-LABEL: test_128_i8_x_16_28_mask_lshr_1:
53 ; X86-SSE2-NEXT: psrlw $1, %xmm0
54 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
57 ; X86-AVX-LABEL: test_128_i8_x_16_28_mask_lshr_1:
59 ; X86-AVX-NEXT: vpsrlw $1, %xmm0, %xmm0
60 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
63 ; X64-SSE2-LABEL: test_128_i8_x_16_28_mask_lshr_1:
65 ; X64-SSE2-NEXT: psrlw $1, %xmm0
66 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
69 ; X64-AVX-LABEL: test_128_i8_x_16_28_mask_lshr_1:
71 ; X64-AVX-NEXT: vpsrlw $1, %xmm0, %xmm0
72 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
74 %t0 = and <16 x i8> %a0, <i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28>
75 %t1 = lshr <16 x i8> %t0, <i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1>
ret <16 x i8> %t1
}
78 define <16 x i8> @test_128_i8_x_16_28_mask_lshr_2(<16 x i8> %a0) {
79 ; X86-SSE2-LABEL: test_128_i8_x_16_28_mask_lshr_2:
81 ; X86-SSE2-NEXT: psrlw $2, %xmm0
82 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
85 ; X86-AVX-LABEL: test_128_i8_x_16_28_mask_lshr_2:
87 ; X86-AVX-NEXT: vpsrlw $2, %xmm0, %xmm0
88 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
91 ; X64-SSE2-LABEL: test_128_i8_x_16_28_mask_lshr_2:
93 ; X64-SSE2-NEXT: psrlw $2, %xmm0
94 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
97 ; X64-AVX-LABEL: test_128_i8_x_16_28_mask_lshr_2:
99 ; X64-AVX-NEXT: vpsrlw $2, %xmm0, %xmm0
100 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
102 %t0 = and <16 x i8> %a0, <i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28>
103 %t1 = lshr <16 x i8> %t0, <i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2>
ret <16 x i8> %t1
}
106 define <16 x i8> @test_128_i8_x_16_28_mask_lshr_3(<16 x i8> %a0) {
107 ; X86-SSE2-LABEL: test_128_i8_x_16_28_mask_lshr_3:
109 ; X86-SSE2-NEXT: psrlw $3, %xmm0
110 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
111 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
112 ; X86-SSE2-NEXT: retl
114 ; X86-AVX-LABEL: test_128_i8_x_16_28_mask_lshr_3:
116 ; X86-AVX-NEXT: vpsrlw $3, %xmm0, %xmm0
117 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
118 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
121 ; X64-SSE2-LABEL: test_128_i8_x_16_28_mask_lshr_3:
123 ; X64-SSE2-NEXT: psrlw $3, %xmm0
124 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
125 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
126 ; X64-SSE2-NEXT: retq
128 ; X64-AVX-LABEL: test_128_i8_x_16_28_mask_lshr_3:
130 ; X64-AVX-NEXT: vpsrlw $3, %xmm0, %xmm0
131 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
132 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
134 %t0 = and <16 x i8> %a0, <i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28>
135 %t1 = lshr <16 x i8> %t0, <i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3>
ret <16 x i8> %t1
}
138 define <16 x i8> @test_128_i8_x_16_28_mask_lshr_4(<16 x i8> %a0) {
139 ; X86-SSE2-LABEL: test_128_i8_x_16_28_mask_lshr_4:
141 ; X86-SSE2-NEXT: psrlw $4, %xmm0
142 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
143 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
144 ; X86-SSE2-NEXT: retl
146 ; X86-AVX-LABEL: test_128_i8_x_16_28_mask_lshr_4:
148 ; X86-AVX-NEXT: vpsrlw $4, %xmm0, %xmm0
149 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
150 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
153 ; X64-SSE2-LABEL: test_128_i8_x_16_28_mask_lshr_4:
155 ; X64-SSE2-NEXT: psrlw $4, %xmm0
156 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
157 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
158 ; X64-SSE2-NEXT: retq
160 ; X64-AVX-LABEL: test_128_i8_x_16_28_mask_lshr_4:
162 ; X64-AVX-NEXT: vpsrlw $4, %xmm0, %xmm0
163 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
164 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
166 %t0 = and <16 x i8> %a0, <i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28>
167 %t1 = lshr <16 x i8> %t0, <i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4>
ret <16 x i8> %t1
}
171 define <16 x i8> @test_128_i8_x_16_224_mask_lshr_1(<16 x i8> %a0) {
172 ; X86-SSE2-LABEL: test_128_i8_x_16_224_mask_lshr_1:
174 ; X86-SSE2-NEXT: psrlw $1, %xmm0
175 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
176 ; X86-SSE2-NEXT: retl
178 ; X86-AVX-LABEL: test_128_i8_x_16_224_mask_lshr_1:
180 ; X86-AVX-NEXT: vpsrlw $1, %xmm0, %xmm0
181 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
184 ; X64-SSE2-LABEL: test_128_i8_x_16_224_mask_lshr_1:
186 ; X64-SSE2-NEXT: psrlw $1, %xmm0
187 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
188 ; X64-SSE2-NEXT: retq
190 ; X64-AVX-LABEL: test_128_i8_x_16_224_mask_lshr_1:
192 ; X64-AVX-NEXT: vpsrlw $1, %xmm0, %xmm0
193 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
195 %t0 = and <16 x i8> %a0, <i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224>
196 %t1 = lshr <16 x i8> %t0, <i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1>
ret <16 x i8> %t1
}
199 define <16 x i8> @test_128_i8_x_16_224_mask_lshr_4(<16 x i8> %a0) {
200 ; X86-SSE2-LABEL: test_128_i8_x_16_224_mask_lshr_4:
202 ; X86-SSE2-NEXT: psrlw $4, %xmm0
203 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
204 ; X86-SSE2-NEXT: retl
206 ; X86-AVX-LABEL: test_128_i8_x_16_224_mask_lshr_4:
208 ; X86-AVX-NEXT: vpsrlw $4, %xmm0, %xmm0
209 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
212 ; X64-SSE2-LABEL: test_128_i8_x_16_224_mask_lshr_4:
214 ; X64-SSE2-NEXT: psrlw $4, %xmm0
215 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
216 ; X64-SSE2-NEXT: retq
218 ; X64-AVX-LABEL: test_128_i8_x_16_224_mask_lshr_4:
220 ; X64-AVX-NEXT: vpsrlw $4, %xmm0, %xmm0
221 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
223 %t0 = and <16 x i8> %a0, <i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224>
224 %t1 = lshr <16 x i8> %t0, <i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4>
ret <16 x i8> %t1
}
227 define <16 x i8> @test_128_i8_x_16_224_mask_lshr_5(<16 x i8> %a0) {
228 ; X86-SSE2-LABEL: test_128_i8_x_16_224_mask_lshr_5:
230 ; X86-SSE2-NEXT: psrlw $5, %xmm0
231 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
232 ; X86-SSE2-NEXT: retl
234 ; X86-AVX-LABEL: test_128_i8_x_16_224_mask_lshr_5:
236 ; X86-AVX-NEXT: vpsrlw $5, %xmm0, %xmm0
237 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
240 ; X64-SSE2-LABEL: test_128_i8_x_16_224_mask_lshr_5:
242 ; X64-SSE2-NEXT: psrlw $5, %xmm0
243 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
244 ; X64-SSE2-NEXT: retq
246 ; X64-AVX-LABEL: test_128_i8_x_16_224_mask_lshr_5:
248 ; X64-AVX-NEXT: vpsrlw $5, %xmm0, %xmm0
249 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
251 %t0 = and <16 x i8> %a0, <i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224>
252 %t1 = lshr <16 x i8> %t0, <i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5>
ret <16 x i8> %t1
}
255 define <16 x i8> @test_128_i8_x_16_224_mask_lshr_6(<16 x i8> %a0) {
256 ; X86-SSE2-LABEL: test_128_i8_x_16_224_mask_lshr_6:
258 ; X86-SSE2-NEXT: psrlw $6, %xmm0
259 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
260 ; X86-SSE2-NEXT: retl
262 ; X86-AVX-LABEL: test_128_i8_x_16_224_mask_lshr_6:
264 ; X86-AVX-NEXT: vpsrlw $6, %xmm0, %xmm0
265 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
268 ; X64-SSE2-LABEL: test_128_i8_x_16_224_mask_lshr_6:
270 ; X64-SSE2-NEXT: psrlw $6, %xmm0
271 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
272 ; X64-SSE2-NEXT: retq
274 ; X64-AVX-LABEL: test_128_i8_x_16_224_mask_lshr_6:
276 ; X64-AVX-NEXT: vpsrlw $6, %xmm0, %xmm0
277 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
279 %t0 = and <16 x i8> %a0, <i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224>
280 %t1 = lshr <16 x i8> %t0, <i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6>
ret <16 x i8> %t1
}
286 define <16 x i8> @test_128_i8_x_16_7_mask_ashr_1(<16 x i8> %a0) {
287 ; X86-SSE2-LABEL: test_128_i8_x_16_7_mask_ashr_1:
289 ; X86-SSE2-NEXT: psrlw $1, %xmm0
290 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
291 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
292 ; X86-SSE2-NEXT: retl
294 ; X86-AVX-LABEL: test_128_i8_x_16_7_mask_ashr_1:
296 ; X86-AVX-NEXT: vpsrlw $1, %xmm0, %xmm0
297 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
298 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
301 ; X64-SSE2-LABEL: test_128_i8_x_16_7_mask_ashr_1:
303 ; X64-SSE2-NEXT: psrlw $1, %xmm0
304 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
305 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
306 ; X64-SSE2-NEXT: retq
308 ; X64-AVX-LABEL: test_128_i8_x_16_7_mask_ashr_1:
310 ; X64-AVX-NEXT: vpsrlw $1, %xmm0, %xmm0
311 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
312 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
314 %t0 = and <16 x i8> %a0, <i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7>
315 %t1 = ashr <16 x i8> %t0, <i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1>
ret <16 x i8> %t1
}
319 define <16 x i8> @test_128_i8_x_16_28_mask_ashr_1(<16 x i8> %a0) {
320 ; X86-SSE2-LABEL: test_128_i8_x_16_28_mask_ashr_1:
322 ; X86-SSE2-NEXT: psrlw $1, %xmm0
323 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
324 ; X86-SSE2-NEXT: retl
326 ; X86-AVX-LABEL: test_128_i8_x_16_28_mask_ashr_1:
328 ; X86-AVX-NEXT: vpsrlw $1, %xmm0, %xmm0
329 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
332 ; X64-SSE2-LABEL: test_128_i8_x_16_28_mask_ashr_1:
334 ; X64-SSE2-NEXT: psrlw $1, %xmm0
335 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
336 ; X64-SSE2-NEXT: retq
338 ; X64-AVX-LABEL: test_128_i8_x_16_28_mask_ashr_1:
340 ; X64-AVX-NEXT: vpsrlw $1, %xmm0, %xmm0
341 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
343 %t0 = and <16 x i8> %a0, <i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28>
344 %t1 = ashr <16 x i8> %t0, <i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1>
ret <16 x i8> %t1
}
347 define <16 x i8> @test_128_i8_x_16_28_mask_ashr_2(<16 x i8> %a0) {
348 ; X86-SSE2-LABEL: test_128_i8_x_16_28_mask_ashr_2:
350 ; X86-SSE2-NEXT: psrlw $2, %xmm0
351 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
352 ; X86-SSE2-NEXT: retl
354 ; X86-AVX-LABEL: test_128_i8_x_16_28_mask_ashr_2:
356 ; X86-AVX-NEXT: vpsrlw $2, %xmm0, %xmm0
357 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
360 ; X64-SSE2-LABEL: test_128_i8_x_16_28_mask_ashr_2:
362 ; X64-SSE2-NEXT: psrlw $2, %xmm0
363 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
364 ; X64-SSE2-NEXT: retq
366 ; X64-AVX-LABEL: test_128_i8_x_16_28_mask_ashr_2:
368 ; X64-AVX-NEXT: vpsrlw $2, %xmm0, %xmm0
369 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
371 %t0 = and <16 x i8> %a0, <i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28>
372 %t1 = ashr <16 x i8> %t0, <i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2>
ret <16 x i8> %t1
}
375 define <16 x i8> @test_128_i8_x_16_28_mask_ashr_3(<16 x i8> %a0) {
376 ; X86-SSE2-LABEL: test_128_i8_x_16_28_mask_ashr_3:
378 ; X86-SSE2-NEXT: psrlw $3, %xmm0
379 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
380 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
381 ; X86-SSE2-NEXT: retl
383 ; X86-AVX-LABEL: test_128_i8_x_16_28_mask_ashr_3:
385 ; X86-AVX-NEXT: vpsrlw $3, %xmm0, %xmm0
386 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
387 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
390 ; X64-SSE2-LABEL: test_128_i8_x_16_28_mask_ashr_3:
392 ; X64-SSE2-NEXT: psrlw $3, %xmm0
393 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
394 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
395 ; X64-SSE2-NEXT: retq
397 ; X64-AVX-LABEL: test_128_i8_x_16_28_mask_ashr_3:
399 ; X64-AVX-NEXT: vpsrlw $3, %xmm0, %xmm0
400 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
401 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
403 %t0 = and <16 x i8> %a0, <i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28>
404 %t1 = ashr <16 x i8> %t0, <i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3>
ret <16 x i8> %t1
}
407 define <16 x i8> @test_128_i8_x_16_28_mask_ashr_4(<16 x i8> %a0) {
408 ; X86-SSE2-LABEL: test_128_i8_x_16_28_mask_ashr_4:
410 ; X86-SSE2-NEXT: psrlw $4, %xmm0
411 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
412 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
413 ; X86-SSE2-NEXT: retl
415 ; X86-AVX-LABEL: test_128_i8_x_16_28_mask_ashr_4:
417 ; X86-AVX-NEXT: vpsrlw $4, %xmm0, %xmm0
418 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
419 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
422 ; X64-SSE2-LABEL: test_128_i8_x_16_28_mask_ashr_4:
424 ; X64-SSE2-NEXT: psrlw $4, %xmm0
425 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
426 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
427 ; X64-SSE2-NEXT: retq
429 ; X64-AVX-LABEL: test_128_i8_x_16_28_mask_ashr_4:
431 ; X64-AVX-NEXT: vpsrlw $4, %xmm0, %xmm0
432 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
433 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
435 %t0 = and <16 x i8> %a0, <i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28>
436 %t1 = ashr <16 x i8> %t0, <i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4>
ret <16 x i8> %t1
}
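; In the 224-mask ashr tests that follow, the mask (224 = 0b11100000) keeps the
; sign bit, so a logical shift alone is not enough. SSE2/AVX have no arithmetic
; shift for byte elements, so the lowering emulates one: shift right logically
; by k, apply the mask, then compute (x ^ m) - m with m = 0x80 >> k, which
; sign-extends from the new sign-bit position (hence the pxor/psubb pairs below).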
440 define <16 x i8> @test_128_i8_x_16_224_mask_ashr_1(<16 x i8> %a0) {
441 ; X86-SSE2-LABEL: test_128_i8_x_16_224_mask_ashr_1:
443 ; X86-SSE2-NEXT: psrlw $1, %xmm0
444 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
445 ; X86-SSE2-NEXT: movdqa {{.*#+}} xmm1 = [64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64]
446 ; X86-SSE2-NEXT: pxor %xmm1, %xmm0
447 ; X86-SSE2-NEXT: psubb %xmm1, %xmm0
448 ; X86-SSE2-NEXT: retl
450 ; X86-AVX1-LABEL: test_128_i8_x_16_224_mask_ashr_1:
452 ; X86-AVX1-NEXT: vpsrlw $1, %xmm0, %xmm0
453 ; X86-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
454 ; X86-AVX1-NEXT: vbroadcastss {{.*#+}} xmm1 = [64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64]
455 ; X86-AVX1-NEXT: vpxor %xmm1, %xmm0, %xmm0
456 ; X86-AVX1-NEXT: vpsubb %xmm1, %xmm0, %xmm0
457 ; X86-AVX1-NEXT: retl
459 ; X86-AVX2-LABEL: test_128_i8_x_16_224_mask_ashr_1:
461 ; X86-AVX2-NEXT: vpsrlw $1, %xmm0, %xmm0
462 ; X86-AVX2-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
463 ; X86-AVX2-NEXT: vpbroadcastb {{.*#+}} xmm1 = [64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64]
464 ; X86-AVX2-NEXT: vpxor %xmm1, %xmm0, %xmm0
465 ; X86-AVX2-NEXT: vpsubb %xmm1, %xmm0, %xmm0
466 ; X86-AVX2-NEXT: retl
468 ; X64-SSE2-LABEL: test_128_i8_x_16_224_mask_ashr_1:
470 ; X64-SSE2-NEXT: psrlw $1, %xmm0
471 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
472 ; X64-SSE2-NEXT: movdqa {{.*#+}} xmm1 = [64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64]
473 ; X64-SSE2-NEXT: pxor %xmm1, %xmm0
474 ; X64-SSE2-NEXT: psubb %xmm1, %xmm0
475 ; X64-SSE2-NEXT: retq
477 ; X64-AVX1-LABEL: test_128_i8_x_16_224_mask_ashr_1:
479 ; X64-AVX1-NEXT: vpsrlw $1, %xmm0, %xmm0
480 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
481 ; X64-AVX1-NEXT: vbroadcastss {{.*#+}} xmm1 = [64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64]
482 ; X64-AVX1-NEXT: vpxor %xmm1, %xmm0, %xmm0
483 ; X64-AVX1-NEXT: vpsubb %xmm1, %xmm0, %xmm0
484 ; X64-AVX1-NEXT: retq
486 ; X64-AVX2-LABEL: test_128_i8_x_16_224_mask_ashr_1:
488 ; X64-AVX2-NEXT: vpsrlw $1, %xmm0, %xmm0
489 ; X64-AVX2-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
490 ; X64-AVX2-NEXT: vpbroadcastb {{.*#+}} xmm1 = [64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64]
491 ; X64-AVX2-NEXT: vpxor %xmm1, %xmm0, %xmm0
492 ; X64-AVX2-NEXT: vpsubb %xmm1, %xmm0, %xmm0
493 ; X64-AVX2-NEXT: retq
494 %t0 = and <16 x i8> %a0, <i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224>
495 %t1 = ashr <16 x i8> %t0, <i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1>
ret <16 x i8> %t1
}
498 define <16 x i8> @test_128_i8_x_16_224_mask_ashr_4(<16 x i8> %a0) {
499 ; X86-SSE2-LABEL: test_128_i8_x_16_224_mask_ashr_4:
501 ; X86-SSE2-NEXT: psrlw $4, %xmm0
502 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
503 ; X86-SSE2-NEXT: movdqa {{.*#+}} xmm1 = [8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8]
504 ; X86-SSE2-NEXT: pxor %xmm1, %xmm0
505 ; X86-SSE2-NEXT: psubb %xmm1, %xmm0
506 ; X86-SSE2-NEXT: retl
508 ; X86-AVX1-LABEL: test_128_i8_x_16_224_mask_ashr_4:
510 ; X86-AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
511 ; X86-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
512 ; X86-AVX1-NEXT: vbroadcastss {{.*#+}} xmm1 = [8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8]
513 ; X86-AVX1-NEXT: vpxor %xmm1, %xmm0, %xmm0
514 ; X86-AVX1-NEXT: vpsubb %xmm1, %xmm0, %xmm0
515 ; X86-AVX1-NEXT: retl
517 ; X86-AVX2-LABEL: test_128_i8_x_16_224_mask_ashr_4:
519 ; X86-AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
520 ; X86-AVX2-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
521 ; X86-AVX2-NEXT: vpbroadcastb {{.*#+}} xmm1 = [8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8]
522 ; X86-AVX2-NEXT: vpxor %xmm1, %xmm0, %xmm0
523 ; X86-AVX2-NEXT: vpsubb %xmm1, %xmm0, %xmm0
524 ; X86-AVX2-NEXT: retl
526 ; X64-SSE2-LABEL: test_128_i8_x_16_224_mask_ashr_4:
528 ; X64-SSE2-NEXT: psrlw $4, %xmm0
529 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
530 ; X64-SSE2-NEXT: movdqa {{.*#+}} xmm1 = [8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8]
531 ; X64-SSE2-NEXT: pxor %xmm1, %xmm0
532 ; X64-SSE2-NEXT: psubb %xmm1, %xmm0
533 ; X64-SSE2-NEXT: retq
535 ; X64-AVX1-LABEL: test_128_i8_x_16_224_mask_ashr_4:
537 ; X64-AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0
538 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
539 ; X64-AVX1-NEXT: vbroadcastss {{.*#+}} xmm1 = [8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8]
540 ; X64-AVX1-NEXT: vpxor %xmm1, %xmm0, %xmm0
541 ; X64-AVX1-NEXT: vpsubb %xmm1, %xmm0, %xmm0
542 ; X64-AVX1-NEXT: retq
544 ; X64-AVX2-LABEL: test_128_i8_x_16_224_mask_ashr_4:
546 ; X64-AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0
547 ; X64-AVX2-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
548 ; X64-AVX2-NEXT: vpbroadcastb {{.*#+}} xmm1 = [8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8]
549 ; X64-AVX2-NEXT: vpxor %xmm1, %xmm0, %xmm0
550 ; X64-AVX2-NEXT: vpsubb %xmm1, %xmm0, %xmm0
551 ; X64-AVX2-NEXT: retq
552 %t0 = and <16 x i8> %a0, <i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224>
553 %t1 = ashr <16 x i8> %t0, <i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4>
ret <16 x i8> %t1
}
556 define <16 x i8> @test_128_i8_x_16_224_mask_ashr_5(<16 x i8> %a0) {
557 ; X86-SSE2-LABEL: test_128_i8_x_16_224_mask_ashr_5:
559 ; X86-SSE2-NEXT: psrlw $5, %xmm0
560 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
561 ; X86-SSE2-NEXT: movdqa {{.*#+}} xmm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
562 ; X86-SSE2-NEXT: pxor %xmm1, %xmm0
563 ; X86-SSE2-NEXT: psubb %xmm1, %xmm0
564 ; X86-SSE2-NEXT: retl
566 ; X86-AVX1-LABEL: test_128_i8_x_16_224_mask_ashr_5:
568 ; X86-AVX1-NEXT: vpsrlw $5, %xmm0, %xmm0
569 ; X86-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
570 ; X86-AVX1-NEXT: vbroadcastss {{.*#+}} xmm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
571 ; X86-AVX1-NEXT: vpxor %xmm1, %xmm0, %xmm0
572 ; X86-AVX1-NEXT: vpsubb %xmm1, %xmm0, %xmm0
573 ; X86-AVX1-NEXT: retl
575 ; X86-AVX2-LABEL: test_128_i8_x_16_224_mask_ashr_5:
577 ; X86-AVX2-NEXT: vpsrlw $5, %xmm0, %xmm0
578 ; X86-AVX2-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
579 ; X86-AVX2-NEXT: vpbroadcastb {{.*#+}} xmm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
580 ; X86-AVX2-NEXT: vpxor %xmm1, %xmm0, %xmm0
581 ; X86-AVX2-NEXT: vpsubb %xmm1, %xmm0, %xmm0
582 ; X86-AVX2-NEXT: retl
584 ; X64-SSE2-LABEL: test_128_i8_x_16_224_mask_ashr_5:
586 ; X64-SSE2-NEXT: psrlw $5, %xmm0
587 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
588 ; X64-SSE2-NEXT: movdqa {{.*#+}} xmm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
589 ; X64-SSE2-NEXT: pxor %xmm1, %xmm0
590 ; X64-SSE2-NEXT: psubb %xmm1, %xmm0
591 ; X64-SSE2-NEXT: retq
593 ; X64-AVX1-LABEL: test_128_i8_x_16_224_mask_ashr_5:
595 ; X64-AVX1-NEXT: vpsrlw $5, %xmm0, %xmm0
596 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
597 ; X64-AVX1-NEXT: vbroadcastss {{.*#+}} xmm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
598 ; X64-AVX1-NEXT: vpxor %xmm1, %xmm0, %xmm0
599 ; X64-AVX1-NEXT: vpsubb %xmm1, %xmm0, %xmm0
600 ; X64-AVX1-NEXT: retq
602 ; X64-AVX2-LABEL: test_128_i8_x_16_224_mask_ashr_5:
604 ; X64-AVX2-NEXT: vpsrlw $5, %xmm0, %xmm0
605 ; X64-AVX2-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
606 ; X64-AVX2-NEXT: vpbroadcastb {{.*#+}} xmm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
607 ; X64-AVX2-NEXT: vpxor %xmm1, %xmm0, %xmm0
608 ; X64-AVX2-NEXT: vpsubb %xmm1, %xmm0, %xmm0
609 ; X64-AVX2-NEXT: retq
610 %t0 = and <16 x i8> %a0, <i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224>
611 %t1 = ashr <16 x i8> %t0, <i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5>
ret <16 x i8> %t1
}
614 define <16 x i8> @test_128_i8_x_16_224_mask_ashr_6(<16 x i8> %a0) {
615 ; X86-SSE2-LABEL: test_128_i8_x_16_224_mask_ashr_6:
617 ; X86-SSE2-NEXT: psrlw $6, %xmm0
618 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
619 ; X86-SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2]
620 ; X86-SSE2-NEXT: pxor %xmm1, %xmm0
621 ; X86-SSE2-NEXT: psubb %xmm1, %xmm0
622 ; X86-SSE2-NEXT: retl
624 ; X86-AVX1-LABEL: test_128_i8_x_16_224_mask_ashr_6:
626 ; X86-AVX1-NEXT: vpsrlw $6, %xmm0, %xmm0
627 ; X86-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
628 ; X86-AVX1-NEXT: vbroadcastss {{.*#+}} xmm1 = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2]
629 ; X86-AVX1-NEXT: vpxor %xmm1, %xmm0, %xmm0
630 ; X86-AVX1-NEXT: vpsubb %xmm1, %xmm0, %xmm0
631 ; X86-AVX1-NEXT: retl
633 ; X86-AVX2-LABEL: test_128_i8_x_16_224_mask_ashr_6:
635 ; X86-AVX2-NEXT: vpsrlw $6, %xmm0, %xmm0
636 ; X86-AVX2-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
637 ; X86-AVX2-NEXT: vpbroadcastb {{.*#+}} xmm1 = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2]
638 ; X86-AVX2-NEXT: vpxor %xmm1, %xmm0, %xmm0
639 ; X86-AVX2-NEXT: vpsubb %xmm1, %xmm0, %xmm0
640 ; X86-AVX2-NEXT: retl
642 ; X64-SSE2-LABEL: test_128_i8_x_16_224_mask_ashr_6:
644 ; X64-SSE2-NEXT: psrlw $6, %xmm0
645 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
646 ; X64-SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2]
647 ; X64-SSE2-NEXT: pxor %xmm1, %xmm0
648 ; X64-SSE2-NEXT: psubb %xmm1, %xmm0
649 ; X64-SSE2-NEXT: retq
651 ; X64-AVX1-LABEL: test_128_i8_x_16_224_mask_ashr_6:
653 ; X64-AVX1-NEXT: vpsrlw $6, %xmm0, %xmm0
654 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
655 ; X64-AVX1-NEXT: vbroadcastss {{.*#+}} xmm1 = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2]
656 ; X64-AVX1-NEXT: vpxor %xmm1, %xmm0, %xmm0
657 ; X64-AVX1-NEXT: vpsubb %xmm1, %xmm0, %xmm0
658 ; X64-AVX1-NEXT: retq
660 ; X64-AVX2-LABEL: test_128_i8_x_16_224_mask_ashr_6:
662 ; X64-AVX2-NEXT: vpsrlw $6, %xmm0, %xmm0
663 ; X64-AVX2-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
664 ; X64-AVX2-NEXT: vpbroadcastb {{.*#+}} xmm1 = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2]
665 ; X64-AVX2-NEXT: vpxor %xmm1, %xmm0, %xmm0
666 ; X64-AVX2-NEXT: vpsubb %xmm1, %xmm0, %xmm0
667 ; X64-AVX2-NEXT: retq
668 %t0 = and <16 x i8> %a0, <i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224>
669 %t1 = ashr <16 x i8> %t0, <i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6>
ret <16 x i8> %t1
}
675 define <16 x i8> @test_128_i8_x_16_7_mask_shl_1(<16 x i8> %a0) {
676 ; X86-SSE2-LABEL: test_128_i8_x_16_7_mask_shl_1:
678 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
679 ; X86-SSE2-NEXT: paddb %xmm0, %xmm0
680 ; X86-SSE2-NEXT: retl
682 ; X86-AVX-LABEL: test_128_i8_x_16_7_mask_shl_1:
684 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
685 ; X86-AVX-NEXT: vpaddb %xmm0, %xmm0, %xmm0
688 ; X64-SSE2-LABEL: test_128_i8_x_16_7_mask_shl_1:
690 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
691 ; X64-SSE2-NEXT: paddb %xmm0, %xmm0
692 ; X64-SSE2-NEXT: retq
694 ; X64-AVX-LABEL: test_128_i8_x_16_7_mask_shl_1:
696 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
697 ; X64-AVX-NEXT: vpaddb %xmm0, %xmm0, %xmm0
699 %t0 = and <16 x i8> %a0, <i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7>
700 %t1 = shl <16 x i8> %t0, <i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1>
ret <16 x i8> %t1
}
703 define <16 x i8> @test_128_i8_x_16_7_mask_shl_4(<16 x i8> %a0) {
704 ; X86-SSE2-LABEL: test_128_i8_x_16_7_mask_shl_4:
706 ; X86-SSE2-NEXT: psllw $4, %xmm0
707 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
708 ; X86-SSE2-NEXT: retl
710 ; X86-AVX-LABEL: test_128_i8_x_16_7_mask_shl_4:
712 ; X86-AVX-NEXT: vpsllw $4, %xmm0, %xmm0
713 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
716 ; X64-SSE2-LABEL: test_128_i8_x_16_7_mask_shl_4:
718 ; X64-SSE2-NEXT: psllw $4, %xmm0
719 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
720 ; X64-SSE2-NEXT: retq
722 ; X64-AVX-LABEL: test_128_i8_x_16_7_mask_shl_4:
724 ; X64-AVX-NEXT: vpsllw $4, %xmm0, %xmm0
725 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
727 %t0 = and <16 x i8> %a0, <i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7>
728 %t1 = shl <16 x i8> %t0, <i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4>
ret <16 x i8> %t1
}
731 define <16 x i8> @test_128_i8_x_16_7_mask_shl_5(<16 x i8> %a0) {
732 ; X86-SSE2-LABEL: test_128_i8_x_16_7_mask_shl_5:
734 ; X86-SSE2-NEXT: psllw $5, %xmm0
735 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
736 ; X86-SSE2-NEXT: retl
738 ; X86-AVX-LABEL: test_128_i8_x_16_7_mask_shl_5:
740 ; X86-AVX-NEXT: vpsllw $5, %xmm0, %xmm0
741 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
744 ; X64-SSE2-LABEL: test_128_i8_x_16_7_mask_shl_5:
746 ; X64-SSE2-NEXT: psllw $5, %xmm0
747 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
748 ; X64-SSE2-NEXT: retq
750 ; X64-AVX-LABEL: test_128_i8_x_16_7_mask_shl_5:
752 ; X64-AVX-NEXT: vpsllw $5, %xmm0, %xmm0
753 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
755 %t0 = and <16 x i8> %a0, <i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7>
756 %t1 = shl <16 x i8> %t0, <i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5>
ret <16 x i8> %t1
}
759 define <16 x i8> @test_128_i8_x_16_7_mask_shl_6(<16 x i8> %a0) {
760 ; X86-SSE2-LABEL: test_128_i8_x_16_7_mask_shl_6:
762 ; X86-SSE2-NEXT: psllw $6, %xmm0
763 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
764 ; X86-SSE2-NEXT: retl
766 ; X86-AVX-LABEL: test_128_i8_x_16_7_mask_shl_6:
768 ; X86-AVX-NEXT: vpsllw $6, %xmm0, %xmm0
769 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
772 ; X64-SSE2-LABEL: test_128_i8_x_16_7_mask_shl_6:
774 ; X64-SSE2-NEXT: psllw $6, %xmm0
775 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
776 ; X64-SSE2-NEXT: retq
778 ; X64-AVX-LABEL: test_128_i8_x_16_7_mask_shl_6:
780 ; X64-AVX-NEXT: vpsllw $6, %xmm0, %xmm0
781 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
783 %t0 = and <16 x i8> %a0, <i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7>
784 %t1 = shl <16 x i8> %t0, <i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6, i8 6>
ret <16 x i8> %t1
}
788 define <16 x i8> @test_128_i8_x_16_28_mask_shl_1(<16 x i8> %a0) {
789 ; X86-SSE2-LABEL: test_128_i8_x_16_28_mask_shl_1:
791 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
792 ; X86-SSE2-NEXT: paddb %xmm0, %xmm0
793 ; X86-SSE2-NEXT: retl
795 ; X86-AVX-LABEL: test_128_i8_x_16_28_mask_shl_1:
797 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
798 ; X86-AVX-NEXT: vpaddb %xmm0, %xmm0, %xmm0
801 ; X64-SSE2-LABEL: test_128_i8_x_16_28_mask_shl_1:
803 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
804 ; X64-SSE2-NEXT: paddb %xmm0, %xmm0
805 ; X64-SSE2-NEXT: retq
807 ; X64-AVX-LABEL: test_128_i8_x_16_28_mask_shl_1:
809 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
810 ; X64-AVX-NEXT: vpaddb %xmm0, %xmm0, %xmm0
812 %t0 = and <16 x i8> %a0, <i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28>
813 %t1 = shl <16 x i8> %t0, <i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1>
ret <16 x i8> %t1
}
816 define <16 x i8> @test_128_i8_x_16_28_mask_shl_2(<16 x i8> %a0) {
817 ; X86-SSE2-LABEL: test_128_i8_x_16_28_mask_shl_2:
819 ; X86-SSE2-NEXT: psllw $2, %xmm0
820 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
821 ; X86-SSE2-NEXT: retl
823 ; X86-AVX-LABEL: test_128_i8_x_16_28_mask_shl_2:
825 ; X86-AVX-NEXT: vpsllw $2, %xmm0, %xmm0
826 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
829 ; X64-SSE2-LABEL: test_128_i8_x_16_28_mask_shl_2:
831 ; X64-SSE2-NEXT: psllw $2, %xmm0
832 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
833 ; X64-SSE2-NEXT: retq
835 ; X64-AVX-LABEL: test_128_i8_x_16_28_mask_shl_2:
837 ; X64-AVX-NEXT: vpsllw $2, %xmm0, %xmm0
838 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
840 %t0 = and <16 x i8> %a0, <i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28>
841 %t1 = shl <16 x i8> %t0, <i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2, i8 2>
ret <16 x i8> %t1
}
844 define <16 x i8> @test_128_i8_x_16_28_mask_shl_3(<16 x i8> %a0) {
845 ; X86-SSE2-LABEL: test_128_i8_x_16_28_mask_shl_3:
847 ; X86-SSE2-NEXT: psllw $3, %xmm0
848 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
849 ; X86-SSE2-NEXT: retl
851 ; X86-AVX-LABEL: test_128_i8_x_16_28_mask_shl_3:
853 ; X86-AVX-NEXT: vpsllw $3, %xmm0, %xmm0
854 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
857 ; X64-SSE2-LABEL: test_128_i8_x_16_28_mask_shl_3:
859 ; X64-SSE2-NEXT: psllw $3, %xmm0
860 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
861 ; X64-SSE2-NEXT: retq
863 ; X64-AVX-LABEL: test_128_i8_x_16_28_mask_shl_3:
865 ; X64-AVX-NEXT: vpsllw $3, %xmm0, %xmm0
866 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
868 %t0 = and <16 x i8> %a0, <i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28>
869 %t1 = shl <16 x i8> %t0, <i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3>
ret <16 x i8> %t1
}
872 define <16 x i8> @test_128_i8_x_16_28_mask_shl_4(<16 x i8> %a0) {
873 ; X86-SSE2-LABEL: test_128_i8_x_16_28_mask_shl_4:
875 ; X86-SSE2-NEXT: psllw $4, %xmm0
876 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
877 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
878 ; X86-SSE2-NEXT: retl
880 ; X86-AVX-LABEL: test_128_i8_x_16_28_mask_shl_4:
882 ; X86-AVX-NEXT: vpsllw $4, %xmm0, %xmm0
883 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
884 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
887 ; X64-SSE2-LABEL: test_128_i8_x_16_28_mask_shl_4:
889 ; X64-SSE2-NEXT: psllw $4, %xmm0
890 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
891 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
892 ; X64-SSE2-NEXT: retq
894 ; X64-AVX-LABEL: test_128_i8_x_16_28_mask_shl_4:
896 ; X64-AVX-NEXT: vpsllw $4, %xmm0, %xmm0
897 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
898 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
900 %t0 = and <16 x i8> %a0, <i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28, i8 28>
901 %t1 = shl <16 x i8> %t0, <i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4>
ret <16 x i8> %t1
}
905 define <16 x i8> @test_128_i8_x_16_224_mask_shl_1(<16 x i8> %a0) {
906 ; X86-SSE2-LABEL: test_128_i8_x_16_224_mask_shl_1:
908 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
909 ; X86-SSE2-NEXT: paddb %xmm0, %xmm0
910 ; X86-SSE2-NEXT: retl
912 ; X86-AVX-LABEL: test_128_i8_x_16_224_mask_shl_1:
914 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
915 ; X86-AVX-NEXT: vpaddb %xmm0, %xmm0, %xmm0
918 ; X64-SSE2-LABEL: test_128_i8_x_16_224_mask_shl_1:
920 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
921 ; X64-SSE2-NEXT: paddb %xmm0, %xmm0
922 ; X64-SSE2-NEXT: retq
924 ; X64-AVX-LABEL: test_128_i8_x_16_224_mask_shl_1:
926 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
927 ; X64-AVX-NEXT: vpaddb %xmm0, %xmm0, %xmm0
929 %t0 = and <16 x i8> %a0, <i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224, i8 224>
930 %t1 = shl <16 x i8> %t0, <i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1>
ret <16 x i8> %t1
}
934 ;------------------------------------------------------------------------------;
935 ; 128-bit vector; 16-bit elements = 8 elements
936 ;------------------------------------------------------------------------------;
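; Note: unlike the i8 cases above, 16-bit elements have native psllw/psrlw/psraw
; shifts, so each test in this half is expected to lower to a single shift plus
; at most one mask (the mask drops out entirely once the shift discards all of
; the masked-off bits).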
940 define <8 x i16> @test_128_i16_x_8_127_mask_lshr_1(<8 x i16> %a0) {
941 ; X86-SSE2-LABEL: test_128_i16_x_8_127_mask_lshr_1:
943 ; X86-SSE2-NEXT: psrlw $1, %xmm0
944 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
945 ; X86-SSE2-NEXT: retl
947 ; X86-AVX-LABEL: test_128_i16_x_8_127_mask_lshr_1:
949 ; X86-AVX-NEXT: vpsrlw $1, %xmm0, %xmm0
950 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
953 ; X64-SSE2-LABEL: test_128_i16_x_8_127_mask_lshr_1:
955 ; X64-SSE2-NEXT: psrlw $1, %xmm0
956 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
957 ; X64-SSE2-NEXT: retq
959 ; X64-AVX-LABEL: test_128_i16_x_8_127_mask_lshr_1:
961 ; X64-AVX-NEXT: vpsrlw $1, %xmm0, %xmm0
962 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
964 %t0 = and <8 x i16> %a0, <i16 127, i16 127, i16 127, i16 127, i16 127, i16 127, i16 127, i16 127>
965 %t1 = lshr <8 x i16> %t0, <i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1>
ret <8 x i16> %t1
}
969 define <8 x i16> @test_128_i16_x_8_2032_mask_lshr_3(<8 x i16> %a0) {
970 ; X86-SSE2-LABEL: test_128_i16_x_8_2032_mask_lshr_3:
972 ; X86-SSE2-NEXT: psrlw $3, %xmm0
973 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
974 ; X86-SSE2-NEXT: retl
976 ; X86-AVX-LABEL: test_128_i16_x_8_2032_mask_lshr_3:
978 ; X86-AVX-NEXT: vpsrlw $3, %xmm0, %xmm0
979 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
982 ; X64-SSE2-LABEL: test_128_i16_x_8_2032_mask_lshr_3:
984 ; X64-SSE2-NEXT: psrlw $3, %xmm0
985 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
986 ; X64-SSE2-NEXT: retq
988 ; X64-AVX-LABEL: test_128_i16_x_8_2032_mask_lshr_3:
990 ; X64-AVX-NEXT: vpsrlw $3, %xmm0, %xmm0
991 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
993 %t0 = and <8 x i16> %a0, <i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032>
994 %t1 = lshr <8 x i16> %t0, <i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3>
ret <8 x i16> %t1
}
997 define <8 x i16> @test_128_i16_x_8_2032_mask_lshr_4(<8 x i16> %a0) {
998 ; X86-SSE2-LABEL: test_128_i16_x_8_2032_mask_lshr_4:
1000 ; X86-SSE2-NEXT: psrlw $4, %xmm0
1001 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
1002 ; X86-SSE2-NEXT: retl
1004 ; X86-AVX-LABEL: test_128_i16_x_8_2032_mask_lshr_4:
1006 ; X86-AVX-NEXT: vpsrlw $4, %xmm0, %xmm0
1007 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1008 ; X86-AVX-NEXT: retl
1010 ; X64-SSE2-LABEL: test_128_i16_x_8_2032_mask_lshr_4:
1011 ; X64-SSE2: # %bb.0:
1012 ; X64-SSE2-NEXT: psrlw $4, %xmm0
1013 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
1014 ; X64-SSE2-NEXT: retq
1016 ; X64-AVX-LABEL: test_128_i16_x_8_2032_mask_lshr_4:
1018 ; X64-AVX-NEXT: vpsrlw $4, %xmm0, %xmm0
1019 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1020 ; X64-AVX-NEXT: retq
1021 %t0 = and <8 x i16> %a0, <i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032>
1022 %t1 = lshr <8 x i16> %t0, <i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4>
ret <8 x i16> %t1
}
1025 define <8 x i16> @test_128_i16_x_8_2032_mask_lshr_5(<8 x i16> %a0) {
1026 ; X86-SSE2-LABEL: test_128_i16_x_8_2032_mask_lshr_5:
1027 ; X86-SSE2: # %bb.0:
1028 ; X86-SSE2-NEXT: psrlw $5, %xmm0
1029 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
1030 ; X86-SSE2-NEXT: retl
1032 ; X86-AVX-LABEL: test_128_i16_x_8_2032_mask_lshr_5:
1034 ; X86-AVX-NEXT: vpsrlw $5, %xmm0, %xmm0
1035 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1036 ; X86-AVX-NEXT: retl
1038 ; X64-SSE2-LABEL: test_128_i16_x_8_2032_mask_lshr_5:
1039 ; X64-SSE2: # %bb.0:
1040 ; X64-SSE2-NEXT: psrlw $5, %xmm0
1041 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
1042 ; X64-SSE2-NEXT: retq
1044 ; X64-AVX-LABEL: test_128_i16_x_8_2032_mask_lshr_5:
1046 ; X64-AVX-NEXT: vpsrlw $5, %xmm0, %xmm0
1047 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1048 ; X64-AVX-NEXT: retq
1049 %t0 = and <8 x i16> %a0, <i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032>
1050 %t1 = lshr <8 x i16> %t0, <i16 5, i16 5, i16 5, i16 5, i16 5, i16 5, i16 5, i16 5>
ret <8 x i16> %t1
}
1053 define <8 x i16> @test_128_i16_x_8_2032_mask_lshr_6(<8 x i16> %a0) {
1054 ; X86-SSE2-LABEL: test_128_i16_x_8_2032_mask_lshr_6:
1055 ; X86-SSE2: # %bb.0:
1056 ; X86-SSE2-NEXT: psrlw $6, %xmm0
1057 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
1058 ; X86-SSE2-NEXT: retl
1060 ; X86-AVX-LABEL: test_128_i16_x_8_2032_mask_lshr_6:
1062 ; X86-AVX-NEXT: vpsrlw $6, %xmm0, %xmm0
1063 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1064 ; X86-AVX-NEXT: retl
1066 ; X64-SSE2-LABEL: test_128_i16_x_8_2032_mask_lshr_6:
1067 ; X64-SSE2: # %bb.0:
1068 ; X64-SSE2-NEXT: psrlw $6, %xmm0
1069 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
1070 ; X64-SSE2-NEXT: retq
1072 ; X64-AVX-LABEL: test_128_i16_x_8_2032_mask_lshr_6:
1074 ; X64-AVX-NEXT: vpsrlw $6, %xmm0, %xmm0
1075 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1076 ; X64-AVX-NEXT: retq
1077 %t0 = and <8 x i16> %a0, <i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032>
1078 %t1 = lshr <8 x i16> %t0, <i16 6, i16 6, i16 6, i16 6, i16 6, i16 6, i16 6, i16 6>
ret <8 x i16> %t1
}
1082 define <8 x i16> @test_128_i16_x_8_65024_mask_lshr_1(<8 x i16> %a0) {
1083 ; X86-SSE2-LABEL: test_128_i16_x_8_65024_mask_lshr_1:
1084 ; X86-SSE2: # %bb.0:
1085 ; X86-SSE2-NEXT: psrlw $1, %xmm0
1086 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
1087 ; X86-SSE2-NEXT: retl
1089 ; X86-AVX-LABEL: test_128_i16_x_8_65024_mask_lshr_1:
1091 ; X86-AVX-NEXT: vpsrlw $1, %xmm0, %xmm0
1092 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1093 ; X86-AVX-NEXT: retl
1095 ; X64-SSE2-LABEL: test_128_i16_x_8_65024_mask_lshr_1:
1096 ; X64-SSE2: # %bb.0:
1097 ; X64-SSE2-NEXT: psrlw $1, %xmm0
1098 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
1099 ; X64-SSE2-NEXT: retq
1101 ; X64-AVX-LABEL: test_128_i16_x_8_65024_mask_lshr_1:
1103 ; X64-AVX-NEXT: vpsrlw $1, %xmm0, %xmm0
1104 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1105 ; X64-AVX-NEXT: retq
1106 %t0 = and <8 x i16> %a0, <i16 65024, i16 65024, i16 65024, i16 65024, i16 65024, i16 65024, i16 65024, i16 65024>
1107 %t1 = lshr <8 x i16> %t0, <i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1>
ret <8 x i16> %t1
}
1110 define <8 x i16> @test_128_i16_x_8_65024_mask_lshr_8(<8 x i16> %a0) {
1111 ; X86-SSE2-LABEL: test_128_i16_x_8_65024_mask_lshr_8:
1112 ; X86-SSE2: # %bb.0:
1113 ; X86-SSE2-NEXT: psrlw $8, %xmm0
1114 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
1115 ; X86-SSE2-NEXT: retl
1117 ; X86-AVX-LABEL: test_128_i16_x_8_65024_mask_lshr_8:
1119 ; X86-AVX-NEXT: vpsrlw $8, %xmm0, %xmm0
1120 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1121 ; X86-AVX-NEXT: retl
1123 ; X64-SSE2-LABEL: test_128_i16_x_8_65024_mask_lshr_8:
1124 ; X64-SSE2: # %bb.0:
1125 ; X64-SSE2-NEXT: psrlw $8, %xmm0
1126 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
1127 ; X64-SSE2-NEXT: retq
1129 ; X64-AVX-LABEL: test_128_i16_x_8_65024_mask_lshr_8:
1131 ; X64-AVX-NEXT: vpsrlw $8, %xmm0, %xmm0
1132 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1133 ; X64-AVX-NEXT: retq
1134 %t0 = and <8 x i16> %a0, <i16 65024, i16 65024, i16 65024, i16 65024, i16 65024, i16 65024, i16 65024, i16 65024>
1135 %t1 = lshr <8 x i16> %t0, <i16 8, i16 8, i16 8, i16 8, i16 8, i16 8, i16 8, i16 8>
ret <8 x i16> %t1
}
1138 define <8 x i16> @test_128_i16_x_8_65024_mask_lshr_9(<8 x i16> %a0) {
1139 ; X86-SSE2-LABEL: test_128_i16_x_8_65024_mask_lshr_9:
1140 ; X86-SSE2: # %bb.0:
1141 ; X86-SSE2-NEXT: psrlw $9, %xmm0
1142 ; X86-SSE2-NEXT: retl
1144 ; X86-AVX-LABEL: test_128_i16_x_8_65024_mask_lshr_9:
1146 ; X86-AVX-NEXT: vpsrlw $9, %xmm0, %xmm0
1147 ; X86-AVX-NEXT: retl
1149 ; X64-SSE2-LABEL: test_128_i16_x_8_65024_mask_lshr_9:
1150 ; X64-SSE2: # %bb.0:
1151 ; X64-SSE2-NEXT: psrlw $9, %xmm0
1152 ; X64-SSE2-NEXT: retq
1154 ; X64-AVX-LABEL: test_128_i16_x_8_65024_mask_lshr_9:
1156 ; X64-AVX-NEXT: vpsrlw $9, %xmm0, %xmm0
1157 ; X64-AVX-NEXT: retq
1158 %t0 = and <8 x i16> %a0, <i16 65024, i16 65024, i16 65024, i16 65024, i16 65024, i16 65024, i16 65024, i16 65024>
1159 %t1 = lshr <8 x i16> %t0, <i16 9, i16 9, i16 9, i16 9, i16 9, i16 9, i16 9, i16 9>
ret <8 x i16> %t1
}
1162 define <8 x i16> @test_128_i16_x_8_65024_mask_lshr_10(<8 x i16> %a0) {
1163 ; X86-SSE2-LABEL: test_128_i16_x_8_65024_mask_lshr_10:
1164 ; X86-SSE2: # %bb.0:
1165 ; X86-SSE2-NEXT: psrlw $10, %xmm0
1166 ; X86-SSE2-NEXT: retl
1168 ; X86-AVX-LABEL: test_128_i16_x_8_65024_mask_lshr_10:
1170 ; X86-AVX-NEXT: vpsrlw $10, %xmm0, %xmm0
1171 ; X86-AVX-NEXT: retl
1173 ; X64-SSE2-LABEL: test_128_i16_x_8_65024_mask_lshr_10:
1174 ; X64-SSE2: # %bb.0:
1175 ; X64-SSE2-NEXT: psrlw $10, %xmm0
1176 ; X64-SSE2-NEXT: retq
1178 ; X64-AVX-LABEL: test_128_i16_x_8_65024_mask_lshr_10:
1180 ; X64-AVX-NEXT: vpsrlw $10, %xmm0, %xmm0
1181 ; X64-AVX-NEXT: retq
1182 %t0 = and <8 x i16> %a0, <i16 65024, i16 65024, i16 65024, i16 65024, i16 65024, i16 65024, i16 65024, i16 65024>
1183 %t1 = lshr <8 x i16> %t0, <i16 10, i16 10, i16 10, i16 10, i16 10, i16 10, i16 10, i16 10>
ret <8 x i16> %t1
}
1189 define <8 x i16> @test_128_i16_x_8_127_mask_ashr_1(<8 x i16> %a0) {
1190 ; X86-SSE2-LABEL: test_128_i16_x_8_127_mask_ashr_1:
1191 ; X86-SSE2: # %bb.0:
1192 ; X86-SSE2-NEXT: psrlw $1, %xmm0
1193 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
1194 ; X86-SSE2-NEXT: retl
1196 ; X86-AVX-LABEL: test_128_i16_x_8_127_mask_ashr_1:
1198 ; X86-AVX-NEXT: vpsrlw $1, %xmm0, %xmm0
1199 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1200 ; X86-AVX-NEXT: retl
1202 ; X64-SSE2-LABEL: test_128_i16_x_8_127_mask_ashr_1:
1203 ; X64-SSE2: # %bb.0:
1204 ; X64-SSE2-NEXT: psrlw $1, %xmm0
1205 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
1206 ; X64-SSE2-NEXT: retq
1208 ; X64-AVX-LABEL: test_128_i16_x_8_127_mask_ashr_1:
1210 ; X64-AVX-NEXT: vpsrlw $1, %xmm0, %xmm0
1211 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1212 ; X64-AVX-NEXT: retq
1213 %t0 = and <8 x i16> %a0, <i16 127, i16 127, i16 127, i16 127, i16 127, i16 127, i16 127, i16 127>
1214 %t1 = ashr <8 x i16> %t0, <i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1>
ret <8 x i16> %t1
}
1218 define <8 x i16> @test_128_i16_x_8_2032_mask_ashr_3(<8 x i16> %a0) {
1219 ; X86-SSE2-LABEL: test_128_i16_x_8_2032_mask_ashr_3:
1220 ; X86-SSE2: # %bb.0:
1221 ; X86-SSE2-NEXT: psrlw $3, %xmm0
1222 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
1223 ; X86-SSE2-NEXT: retl
1225 ; X86-AVX-LABEL: test_128_i16_x_8_2032_mask_ashr_3:
1227 ; X86-AVX-NEXT: vpsrlw $3, %xmm0, %xmm0
1228 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1229 ; X86-AVX-NEXT: retl
1231 ; X64-SSE2-LABEL: test_128_i16_x_8_2032_mask_ashr_3:
1232 ; X64-SSE2: # %bb.0:
1233 ; X64-SSE2-NEXT: psrlw $3, %xmm0
1234 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
1235 ; X64-SSE2-NEXT: retq
1237 ; X64-AVX-LABEL: test_128_i16_x_8_2032_mask_ashr_3:
1239 ; X64-AVX-NEXT: vpsrlw $3, %xmm0, %xmm0
1240 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1241 ; X64-AVX-NEXT: retq
1242 %t0 = and <8 x i16> %a0, <i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032>
1243 %t1 = ashr <8 x i16> %t0, <i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3>
ret <8 x i16> %t1
}
1246 define <8 x i16> @test_128_i16_x_8_2032_mask_ashr_4(<8 x i16> %a0) {
1247 ; X86-SSE2-LABEL: test_128_i16_x_8_2032_mask_ashr_4:
1248 ; X86-SSE2: # %bb.0:
1249 ; X86-SSE2-NEXT: psrlw $4, %xmm0
1250 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
1251 ; X86-SSE2-NEXT: retl
1253 ; X86-AVX-LABEL: test_128_i16_x_8_2032_mask_ashr_4:
1255 ; X86-AVX-NEXT: vpsrlw $4, %xmm0, %xmm0
1256 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1257 ; X86-AVX-NEXT: retl
1259 ; X64-SSE2-LABEL: test_128_i16_x_8_2032_mask_ashr_4:
1260 ; X64-SSE2: # %bb.0:
1261 ; X64-SSE2-NEXT: psrlw $4, %xmm0
1262 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
1263 ; X64-SSE2-NEXT: retq
1265 ; X64-AVX-LABEL: test_128_i16_x_8_2032_mask_ashr_4:
1267 ; X64-AVX-NEXT: vpsrlw $4, %xmm0, %xmm0
1268 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1269 ; X64-AVX-NEXT: retq
1270 %t0 = and <8 x i16> %a0, <i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032>
1271 %t1 = ashr <8 x i16> %t0, <i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4>
ret <8 x i16> %t1
}
1274 define <8 x i16> @test_128_i16_x_8_2032_mask_ashr_5(<8 x i16> %a0) {
1275 ; X86-SSE2-LABEL: test_128_i16_x_8_2032_mask_ashr_5:
1276 ; X86-SSE2: # %bb.0:
1277 ; X86-SSE2-NEXT: psrlw $5, %xmm0
1278 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
1279 ; X86-SSE2-NEXT: retl
1281 ; X86-AVX-LABEL: test_128_i16_x_8_2032_mask_ashr_5:
1283 ; X86-AVX-NEXT: vpsrlw $5, %xmm0, %xmm0
1284 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1285 ; X86-AVX-NEXT: retl
1287 ; X64-SSE2-LABEL: test_128_i16_x_8_2032_mask_ashr_5:
1288 ; X64-SSE2: # %bb.0:
1289 ; X64-SSE2-NEXT: psrlw $5, %xmm0
1290 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
1291 ; X64-SSE2-NEXT: retq
1293 ; X64-AVX-LABEL: test_128_i16_x_8_2032_mask_ashr_5:
1295 ; X64-AVX-NEXT: vpsrlw $5, %xmm0, %xmm0
1296 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1297 ; X64-AVX-NEXT: retq
1298 %t0 = and <8 x i16> %a0, <i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032>
1299 %t1 = ashr <8 x i16> %t0, <i16 5, i16 5, i16 5, i16 5, i16 5, i16 5, i16 5, i16 5>
ret <8 x i16> %t1
}
1302 define <8 x i16> @test_128_i16_x_8_2032_mask_ashr_6(<8 x i16> %a0) {
1303 ; X86-SSE2-LABEL: test_128_i16_x_8_2032_mask_ashr_6:
1304 ; X86-SSE2: # %bb.0:
1305 ; X86-SSE2-NEXT: psrlw $6, %xmm0
1306 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
1307 ; X86-SSE2-NEXT: retl
1309 ; X86-AVX-LABEL: test_128_i16_x_8_2032_mask_ashr_6:
1311 ; X86-AVX-NEXT: vpsrlw $6, %xmm0, %xmm0
1312 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1313 ; X86-AVX-NEXT: retl
1315 ; X64-SSE2-LABEL: test_128_i16_x_8_2032_mask_ashr_6:
1316 ; X64-SSE2: # %bb.0:
1317 ; X64-SSE2-NEXT: psrlw $6, %xmm0
1318 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
1319 ; X64-SSE2-NEXT: retq
1321 ; X64-AVX-LABEL: test_128_i16_x_8_2032_mask_ashr_6:
1323 ; X64-AVX-NEXT: vpsrlw $6, %xmm0, %xmm0
1324 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1325 ; X64-AVX-NEXT: retq
1326 %t0 = and <8 x i16> %a0, <i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032>
1327 %t1 = ashr <8 x i16> %t0, <i16 6, i16 6, i16 6, i16 6, i16 6, i16 6, i16 6, i16 6>
ret <8 x i16> %t1
}
1331 define <8 x i16> @test_128_i16_x_8_65024_mask_ashr_1(<8 x i16> %a0) {
1332 ; X86-SSE2-LABEL: test_128_i16_x_8_65024_mask_ashr_1:
1333 ; X86-SSE2: # %bb.0:
1334 ; X86-SSE2-NEXT: psraw $1, %xmm0
1335 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
1336 ; X86-SSE2-NEXT: retl
1338 ; X86-AVX-LABEL: test_128_i16_x_8_65024_mask_ashr_1:
1340 ; X86-AVX-NEXT: vpsraw $1, %xmm0, %xmm0
1341 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1342 ; X86-AVX-NEXT: retl
1344 ; X64-SSE2-LABEL: test_128_i16_x_8_65024_mask_ashr_1:
1345 ; X64-SSE2: # %bb.0:
1346 ; X64-SSE2-NEXT: psraw $1, %xmm0
1347 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
1348 ; X64-SSE2-NEXT: retq
1350 ; X64-AVX-LABEL: test_128_i16_x_8_65024_mask_ashr_1:
1352 ; X64-AVX-NEXT: vpsraw $1, %xmm0, %xmm0
1353 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1354 ; X64-AVX-NEXT: retq
1355 %t0 = and <8 x i16> %a0, <i16 65024, i16 65024, i16 65024, i16 65024, i16 65024, i16 65024, i16 65024, i16 65024>
1356 %t1 = ashr <8 x i16> %t0, <i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1>
ret <8 x i16> %t1
}
1359 define <8 x i16> @test_128_i16_x_8_65024_mask_ashr_8(<8 x i16> %a0) {
1360 ; X86-SSE2-LABEL: test_128_i16_x_8_65024_mask_ashr_8:
1361 ; X86-SSE2: # %bb.0:
1362 ; X86-SSE2-NEXT: psraw $8, %xmm0
1363 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
1364 ; X86-SSE2-NEXT: retl
1366 ; X86-AVX-LABEL: test_128_i16_x_8_65024_mask_ashr_8:
1368 ; X86-AVX-NEXT: vpsraw $8, %xmm0, %xmm0
1369 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1370 ; X86-AVX-NEXT: retl
1372 ; X64-SSE2-LABEL: test_128_i16_x_8_65024_mask_ashr_8:
1373 ; X64-SSE2: # %bb.0:
1374 ; X64-SSE2-NEXT: psraw $8, %xmm0
1375 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
1376 ; X64-SSE2-NEXT: retq
1378 ; X64-AVX-LABEL: test_128_i16_x_8_65024_mask_ashr_8:
1380 ; X64-AVX-NEXT: vpsraw $8, %xmm0, %xmm0
1381 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1382 ; X64-AVX-NEXT: retq
1383 %t0 = and <8 x i16> %a0, <i16 65024, i16 65024, i16 65024, i16 65024, i16 65024, i16 65024, i16 65024, i16 65024>
1384 %t1 = ashr <8 x i16> %t0, <i16 8, i16 8, i16 8, i16 8, i16 8, i16 8, i16 8, i16 8>
ret <8 x i16> %t1
}
1387 define <8 x i16> @test_128_i16_x_8_65024_mask_ashr_9(<8 x i16> %a0) {
1388 ; X86-SSE2-LABEL: test_128_i16_x_8_65024_mask_ashr_9:
1389 ; X86-SSE2: # %bb.0:
1390 ; X86-SSE2-NEXT: psraw $9, %xmm0
1391 ; X86-SSE2-NEXT: retl
1393 ; X86-AVX-LABEL: test_128_i16_x_8_65024_mask_ashr_9:
1395 ; X86-AVX-NEXT: vpsraw $9, %xmm0, %xmm0
1396 ; X86-AVX-NEXT: retl
1398 ; X64-SSE2-LABEL: test_128_i16_x_8_65024_mask_ashr_9:
1399 ; X64-SSE2: # %bb.0:
1400 ; X64-SSE2-NEXT: psraw $9, %xmm0
1401 ; X64-SSE2-NEXT: retq
1403 ; X64-AVX-LABEL: test_128_i16_x_8_65024_mask_ashr_9:
1405 ; X64-AVX-NEXT: vpsraw $9, %xmm0, %xmm0
1406 ; X64-AVX-NEXT: retq
1407 %t0 = and <8 x i16> %a0, <i16 65024, i16 65024, i16 65024, i16 65024, i16 65024, i16 65024, i16 65024, i16 65024>
1408 %t1 = ashr <8 x i16> %t0, <i16 9, i16 9, i16 9, i16 9, i16 9, i16 9, i16 9, i16 9>
ret <8 x i16> %t1
}
1411 define <8 x i16> @test_128_i16_x_8_65024_mask_ashr_10(<8 x i16> %a0) {
1412 ; X86-SSE2-LABEL: test_128_i16_x_8_65024_mask_ashr_10:
1413 ; X86-SSE2: # %bb.0:
1414 ; X86-SSE2-NEXT: psraw $10, %xmm0
1415 ; X86-SSE2-NEXT: retl
1417 ; X86-AVX-LABEL: test_128_i16_x_8_65024_mask_ashr_10:
1419 ; X86-AVX-NEXT: vpsraw $10, %xmm0, %xmm0
1420 ; X86-AVX-NEXT: retl
1422 ; X64-SSE2-LABEL: test_128_i16_x_8_65024_mask_ashr_10:
1423 ; X64-SSE2: # %bb.0:
1424 ; X64-SSE2-NEXT: psraw $10, %xmm0
1425 ; X64-SSE2-NEXT: retq
1427 ; X64-AVX-LABEL: test_128_i16_x_8_65024_mask_ashr_10:
1429 ; X64-AVX-NEXT: vpsraw $10, %xmm0, %xmm0
1430 ; X64-AVX-NEXT: retq
1431 %t0 = and <8 x i16> %a0, <i16 65024, i16 65024, i16 65024, i16 65024, i16 65024, i16 65024, i16 65024, i16 65024>
1432 %t1 = ashr <8 x i16> %t0, <i16 10, i16 10, i16 10, i16 10, i16 10, i16 10, i16 10, i16 10>
ret <8 x i16> %t1
}
1438 define <8 x i16> @test_128_i16_x_8_127_mask_shl_1(<8 x i16> %a0) {
1439 ; X86-SSE2-LABEL: test_128_i16_x_8_127_mask_shl_1:
1440 ; X86-SSE2: # %bb.0:
1441 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
1442 ; X86-SSE2-NEXT: paddw %xmm0, %xmm0
1443 ; X86-SSE2-NEXT: retl
1445 ; X86-AVX-LABEL: test_128_i16_x_8_127_mask_shl_1:
1447 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1448 ; X86-AVX-NEXT: vpaddw %xmm0, %xmm0, %xmm0
1449 ; X86-AVX-NEXT: retl
1451 ; X64-SSE2-LABEL: test_128_i16_x_8_127_mask_shl_1:
1452 ; X64-SSE2: # %bb.0:
1453 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
1454 ; X64-SSE2-NEXT: paddw %xmm0, %xmm0
1455 ; X64-SSE2-NEXT: retq
1457 ; X64-AVX-LABEL: test_128_i16_x_8_127_mask_shl_1:
1459 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1460 ; X64-AVX-NEXT: vpaddw %xmm0, %xmm0, %xmm0
1461 ; X64-AVX-NEXT: retq
1462 %t0 = and <8 x i16> %a0, <i16 127, i16 127, i16 127, i16 127, i16 127, i16 127, i16 127, i16 127>
1463 %t1 = shl <8 x i16> %t0, <i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1>
ret <8 x i16> %t1
}
1466 define <8 x i16> @test_128_i16_x_8_127_mask_shl_8(<8 x i16> %a0) {
1467 ; X86-SSE2-LABEL: test_128_i16_x_8_127_mask_shl_8:
1468 ; X86-SSE2: # %bb.0:
1469 ; X86-SSE2-NEXT: psllw $8, %xmm0
1470 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
1471 ; X86-SSE2-NEXT: retl
1473 ; X86-AVX-LABEL: test_128_i16_x_8_127_mask_shl_8:
1475 ; X86-AVX-NEXT: vpsllw $8, %xmm0, %xmm0
1476 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1477 ; X86-AVX-NEXT: retl
1479 ; X64-SSE2-LABEL: test_128_i16_x_8_127_mask_shl_8:
1480 ; X64-SSE2: # %bb.0:
1481 ; X64-SSE2-NEXT: psllw $8, %xmm0
1482 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
1483 ; X64-SSE2-NEXT: retq
1485 ; X64-AVX-LABEL: test_128_i16_x_8_127_mask_shl_8:
1487 ; X64-AVX-NEXT: vpsllw $8, %xmm0, %xmm0
1488 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1489 ; X64-AVX-NEXT: retq
1490 %t0 = and <8 x i16> %a0, <i16 127, i16 127, i16 127, i16 127, i16 127, i16 127, i16 127, i16 127>
1491 %t1 = shl <8 x i16> %t0, <i16 8, i16 8, i16 8, i16 8, i16 8, i16 8, i16 8, i16 8>
1494 define <8 x i16> @test_128_i16_x_8_127_mask_shl_9(<8 x i16> %a0) {
1495 ; X86-SSE2-LABEL: test_128_i16_x_8_127_mask_shl_9:
1496 ; X86-SSE2: # %bb.0:
1497 ; X86-SSE2-NEXT: psllw $9, %xmm0
1498 ; X86-SSE2-NEXT: retl
1500 ; X86-AVX-LABEL: test_128_i16_x_8_127_mask_shl_9:
1502 ; X86-AVX-NEXT: vpsllw $9, %xmm0, %xmm0
1503 ; X86-AVX-NEXT: retl
1505 ; X64-SSE2-LABEL: test_128_i16_x_8_127_mask_shl_9:
1506 ; X64-SSE2: # %bb.0:
1507 ; X64-SSE2-NEXT: psllw $9, %xmm0
1508 ; X64-SSE2-NEXT: retq
1510 ; X64-AVX-LABEL: test_128_i16_x_8_127_mask_shl_9:
1512 ; X64-AVX-NEXT: vpsllw $9, %xmm0, %xmm0
1513 ; X64-AVX-NEXT: retq
1514 %t0 = and <8 x i16> %a0, <i16 127, i16 127, i16 127, i16 127, i16 127, i16 127, i16 127, i16 127>
1515 %t1 = shl <8 x i16> %t0, <i16 9, i16 9, i16 9, i16 9, i16 9, i16 9, i16 9, i16 9>
1518 define <8 x i16> @test_128_i16_x_8_127_mask_shl_10(<8 x i16> %a0) {
1519 ; X86-SSE2-LABEL: test_128_i16_x_8_127_mask_shl_10:
1520 ; X86-SSE2: # %bb.0:
1521 ; X86-SSE2-NEXT: psllw $10, %xmm0
1522 ; X86-SSE2-NEXT: retl
1524 ; X86-AVX-LABEL: test_128_i16_x_8_127_mask_shl_10:
1526 ; X86-AVX-NEXT: vpsllw $10, %xmm0, %xmm0
1527 ; X86-AVX-NEXT: retl
1529 ; X64-SSE2-LABEL: test_128_i16_x_8_127_mask_shl_10:
1530 ; X64-SSE2: # %bb.0:
1531 ; X64-SSE2-NEXT: psllw $10, %xmm0
1532 ; X64-SSE2-NEXT: retq
1534 ; X64-AVX-LABEL: test_128_i16_x_8_127_mask_shl_10:
1536 ; X64-AVX-NEXT: vpsllw $10, %xmm0, %xmm0
1537 ; X64-AVX-NEXT: retq
1538 %t0 = and <8 x i16> %a0, <i16 127, i16 127, i16 127, i16 127, i16 127, i16 127, i16 127, i16 127>
1539 %t1 = shl <8 x i16> %t0, <i16 10, i16 10, i16 10, i16 10, i16 10, i16 10, i16 10, i16 10>
1543 define <8 x i16> @test_128_i16_x_8_2032_mask_shl_3(<8 x i16> %a0) {
1544 ; X86-SSE2-LABEL: test_128_i16_x_8_2032_mask_shl_3:
1545 ; X86-SSE2: # %bb.0:
1546 ; X86-SSE2-NEXT: psllw $3, %xmm0
1547 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
1548 ; X86-SSE2-NEXT: retl
1550 ; X86-AVX-LABEL: test_128_i16_x_8_2032_mask_shl_3:
1552 ; X86-AVX-NEXT: vpsllw $3, %xmm0, %xmm0
1553 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1554 ; X86-AVX-NEXT: retl
1556 ; X64-SSE2-LABEL: test_128_i16_x_8_2032_mask_shl_3:
1557 ; X64-SSE2: # %bb.0:
1558 ; X64-SSE2-NEXT: psllw $3, %xmm0
1559 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
1560 ; X64-SSE2-NEXT: retq
1562 ; X64-AVX-LABEL: test_128_i16_x_8_2032_mask_shl_3:
1564 ; X64-AVX-NEXT: vpsllw $3, %xmm0, %xmm0
1565 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1566 ; X64-AVX-NEXT: retq
1567 %t0 = and <8 x i16> %a0, <i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032>
1568 %t1 = shl <8 x i16> %t0, <i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3, i16 3>
1571 define <8 x i16> @test_128_i16_x_8_2032_mask_shl_4(<8 x i16> %a0) {
1572 ; X86-SSE2-LABEL: test_128_i16_x_8_2032_mask_shl_4:
1573 ; X86-SSE2: # %bb.0:
1574 ; X86-SSE2-NEXT: psllw $4, %xmm0
1575 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
1576 ; X86-SSE2-NEXT: retl
1578 ; X86-AVX-LABEL: test_128_i16_x_8_2032_mask_shl_4:
1580 ; X86-AVX-NEXT: vpsllw $4, %xmm0, %xmm0
1581 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1582 ; X86-AVX-NEXT: retl
1584 ; X64-SSE2-LABEL: test_128_i16_x_8_2032_mask_shl_4:
1585 ; X64-SSE2: # %bb.0:
1586 ; X64-SSE2-NEXT: psllw $4, %xmm0
1587 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
1588 ; X64-SSE2-NEXT: retq
1590 ; X64-AVX-LABEL: test_128_i16_x_8_2032_mask_shl_4:
1592 ; X64-AVX-NEXT: vpsllw $4, %xmm0, %xmm0
1593 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1594 ; X64-AVX-NEXT: retq
1595 %t0 = and <8 x i16> %a0, <i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032>
1596 %t1 = shl <8 x i16> %t0, <i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4, i16 4>
1599 define <8 x i16> @test_128_i16_x_8_2032_mask_shl_5(<8 x i16> %a0) {
1600 ; X86-SSE2-LABEL: test_128_i16_x_8_2032_mask_shl_5:
1601 ; X86-SSE2: # %bb.0:
1602 ; X86-SSE2-NEXT: psllw $5, %xmm0
1603 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
1604 ; X86-SSE2-NEXT: retl
1606 ; X86-AVX-LABEL: test_128_i16_x_8_2032_mask_shl_5:
1608 ; X86-AVX-NEXT: vpsllw $5, %xmm0, %xmm0
1609 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1610 ; X86-AVX-NEXT: retl
1612 ; X64-SSE2-LABEL: test_128_i16_x_8_2032_mask_shl_5:
1613 ; X64-SSE2: # %bb.0:
1614 ; X64-SSE2-NEXT: psllw $5, %xmm0
1615 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
1616 ; X64-SSE2-NEXT: retq
1618 ; X64-AVX-LABEL: test_128_i16_x_8_2032_mask_shl_5:
1620 ; X64-AVX-NEXT: vpsllw $5, %xmm0, %xmm0
1621 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1622 ; X64-AVX-NEXT: retq
1623 %t0 = and <8 x i16> %a0, <i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032>
1624 %t1 = shl <8 x i16> %t0, <i16 5, i16 5, i16 5, i16 5, i16 5, i16 5, i16 5, i16 5>
1627 define <8 x i16> @test_128_i16_x_8_2032_mask_shl_6(<8 x i16> %a0) {
1628 ; X86-SSE2-LABEL: test_128_i16_x_8_2032_mask_shl_6:
1629 ; X86-SSE2: # %bb.0:
1630 ; X86-SSE2-NEXT: psllw $6, %xmm0
1631 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
1632 ; X86-SSE2-NEXT: retl
1634 ; X86-AVX-LABEL: test_128_i16_x_8_2032_mask_shl_6:
1636 ; X86-AVX-NEXT: vpsllw $6, %xmm0, %xmm0
1637 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1638 ; X86-AVX-NEXT: retl
1640 ; X64-SSE2-LABEL: test_128_i16_x_8_2032_mask_shl_6:
1641 ; X64-SSE2: # %bb.0:
1642 ; X64-SSE2-NEXT: psllw $6, %xmm0
1643 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
1644 ; X64-SSE2-NEXT: retq
1646 ; X64-AVX-LABEL: test_128_i16_x_8_2032_mask_shl_6:
1648 ; X64-AVX-NEXT: vpsllw $6, %xmm0, %xmm0
1649 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1650 ; X64-AVX-NEXT: retq
1651 %t0 = and <8 x i16> %a0, <i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032, i16 2032>
1652 %t1 = shl <8 x i16> %t0, <i16 6, i16 6, i16 6, i16 6, i16 6, i16 6, i16 6, i16 6>
1656 define <8 x i16> @test_128_i16_x_8_65024_mask_shl_1(<8 x i16> %a0) {
1657 ; X86-SSE2-LABEL: test_128_i16_x_8_65024_mask_shl_1:
1658 ; X86-SSE2: # %bb.0:
1659 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
1660 ; X86-SSE2-NEXT: paddw %xmm0, %xmm0
1661 ; X86-SSE2-NEXT: retl
1663 ; X86-AVX-LABEL: test_128_i16_x_8_65024_mask_shl_1:
1665 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1666 ; X86-AVX-NEXT: vpaddw %xmm0, %xmm0, %xmm0
1667 ; X86-AVX-NEXT: retl
1669 ; X64-SSE2-LABEL: test_128_i16_x_8_65024_mask_shl_1:
1670 ; X64-SSE2: # %bb.0:
1671 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
1672 ; X64-SSE2-NEXT: paddw %xmm0, %xmm0
1673 ; X64-SSE2-NEXT: retq
1675 ; X64-AVX-LABEL: test_128_i16_x_8_65024_mask_shl_1:
1677 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1678 ; X64-AVX-NEXT: vpaddw %xmm0, %xmm0, %xmm0
1679 ; X64-AVX-NEXT: retq
1680 %t0 = and <8 x i16> %a0, <i16 65024, i16 65024, i16 65024, i16 65024, i16 65024, i16 65024, i16 65024, i16 65024>
1681 %t1 = shl <8 x i16> %t0, <i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1, i16 1>
1685 ;------------------------------------------------------------------------------;
1686 ; 128-bit vector; 32-bit elements = 4 elements
1687 ;------------------------------------------------------------------------------;
1691 define <4 x i32> @test_128_i32_x_4_32767_mask_lshr_1(<4 x i32> %a0) {
1692 ; X86-SSE2-LABEL: test_128_i32_x_4_32767_mask_lshr_1:
1693 ; X86-SSE2: # %bb.0:
1694 ; X86-SSE2-NEXT: psrld $1, %xmm0
1695 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
1696 ; X86-SSE2-NEXT: retl
1698 ; X86-AVX1-LABEL: test_128_i32_x_4_32767_mask_lshr_1:
1699 ; X86-AVX1: # %bb.0:
1700 ; X86-AVX1-NEXT: vpsrld $1, %xmm0, %xmm0
1701 ; X86-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1702 ; X86-AVX1-NEXT: retl
1704 ; X86-AVX2-LABEL: test_128_i32_x_4_32767_mask_lshr_1:
1705 ; X86-AVX2: # %bb.0:
1706 ; X86-AVX2-NEXT: vpsrld $1, %xmm0, %xmm0
1707 ; X86-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [16383,16383,16383,16383]
1708 ; X86-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
1709 ; X86-AVX2-NEXT: retl
1711 ; X64-SSE2-LABEL: test_128_i32_x_4_32767_mask_lshr_1:
1712 ; X64-SSE2: # %bb.0:
1713 ; X64-SSE2-NEXT: psrld $1, %xmm0
1714 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
1715 ; X64-SSE2-NEXT: retq
1717 ; X64-AVX1-LABEL: test_128_i32_x_4_32767_mask_lshr_1:
1718 ; X64-AVX1: # %bb.0:
1719 ; X64-AVX1-NEXT: vpsrld $1, %xmm0, %xmm0
1720 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1721 ; X64-AVX1-NEXT: retq
1723 ; X64-AVX2-LABEL: test_128_i32_x_4_32767_mask_lshr_1:
1724 ; X64-AVX2: # %bb.0:
1725 ; X64-AVX2-NEXT: vpsrld $1, %xmm0, %xmm0
1726 ; X64-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [16383,16383,16383,16383]
1727 ; X64-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
1728 ; X64-AVX2-NEXT: retq
1729 %t0 = and <4 x i32> %a0, <i32 32767, i32 32767, i32 32767, i32 32767>
1730 %t1 = lshr <4 x i32> %t0, <i32 1, i32 1, i32 1, i32 1>
1734 define <4 x i32> @test_128_i32_x_4_8388352_mask_lshr_7(<4 x i32> %a0) {
1735 ; X86-SSE2-LABEL: test_128_i32_x_4_8388352_mask_lshr_7:
1736 ; X86-SSE2: # %bb.0:
1737 ; X86-SSE2-NEXT: psrld $7, %xmm0
1738 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
1739 ; X86-SSE2-NEXT: retl
1741 ; X86-AVX1-LABEL: test_128_i32_x_4_8388352_mask_lshr_7:
1742 ; X86-AVX1: # %bb.0:
1743 ; X86-AVX1-NEXT: vpsrld $7, %xmm0, %xmm0
1744 ; X86-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1745 ; X86-AVX1-NEXT: retl
1747 ; X86-AVX2-LABEL: test_128_i32_x_4_8388352_mask_lshr_7:
1748 ; X86-AVX2: # %bb.0:
1749 ; X86-AVX2-NEXT: vpsrld $7, %xmm0, %xmm0
1750 ; X86-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [65534,65534,65534,65534]
1751 ; X86-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
1752 ; X86-AVX2-NEXT: retl
1754 ; X64-SSE2-LABEL: test_128_i32_x_4_8388352_mask_lshr_7:
1755 ; X64-SSE2: # %bb.0:
1756 ; X64-SSE2-NEXT: psrld $7, %xmm0
1757 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
1758 ; X64-SSE2-NEXT: retq
1760 ; X64-AVX1-LABEL: test_128_i32_x_4_8388352_mask_lshr_7:
1761 ; X64-AVX1: # %bb.0:
1762 ; X64-AVX1-NEXT: vpsrld $7, %xmm0, %xmm0
1763 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1764 ; X64-AVX1-NEXT: retq
1766 ; X64-AVX2-LABEL: test_128_i32_x_4_8388352_mask_lshr_7:
1767 ; X64-AVX2: # %bb.0:
1768 ; X64-AVX2-NEXT: vpsrld $7, %xmm0, %xmm0
1769 ; X64-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [65534,65534,65534,65534]
1770 ; X64-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
1771 ; X64-AVX2-NEXT: retq
1772 %t0 = and <4 x i32> %a0, <i32 8388352, i32 8388352, i32 8388352, i32 8388352>
1773 %t1 = lshr <4 x i32> %t0, <i32 7, i32 7, i32 7, i32 7>
1776 define <4 x i32> @test_128_i32_x_4_8388352_mask_lshr_8(<4 x i32> %a0) {
1777 ; X86-SSE2-LABEL: test_128_i32_x_4_8388352_mask_lshr_8:
1778 ; X86-SSE2: # %bb.0:
1779 ; X86-SSE2-NEXT: psrld $8, %xmm0
1780 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
1781 ; X86-SSE2-NEXT: retl
1783 ; X86-AVX1-LABEL: test_128_i32_x_4_8388352_mask_lshr_8:
1784 ; X86-AVX1: # %bb.0:
1785 ; X86-AVX1-NEXT: vpsrld $8, %xmm0, %xmm0
1786 ; X86-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1787 ; X86-AVX1-NEXT: retl
1789 ; X86-AVX2-LABEL: test_128_i32_x_4_8388352_mask_lshr_8:
1790 ; X86-AVX2: # %bb.0:
1791 ; X86-AVX2-NEXT: vpsrld $8, %xmm0, %xmm0
1792 ; X86-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [32767,32767,32767,32767]
1793 ; X86-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
1794 ; X86-AVX2-NEXT: retl
1796 ; X64-SSE2-LABEL: test_128_i32_x_4_8388352_mask_lshr_8:
1797 ; X64-SSE2: # %bb.0:
1798 ; X64-SSE2-NEXT: psrld $8, %xmm0
1799 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
1800 ; X64-SSE2-NEXT: retq
1802 ; X64-AVX1-LABEL: test_128_i32_x_4_8388352_mask_lshr_8:
1803 ; X64-AVX1: # %bb.0:
1804 ; X64-AVX1-NEXT: vpsrld $8, %xmm0, %xmm0
1805 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1806 ; X64-AVX1-NEXT: retq
1808 ; X64-AVX2-LABEL: test_128_i32_x_4_8388352_mask_lshr_8:
1809 ; X64-AVX2: # %bb.0:
1810 ; X64-AVX2-NEXT: vpsrld $8, %xmm0, %xmm0
1811 ; X64-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [32767,32767,32767,32767]
1812 ; X64-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
1813 ; X64-AVX2-NEXT: retq
1814 %t0 = and <4 x i32> %a0, <i32 8388352, i32 8388352, i32 8388352, i32 8388352>
1815 %t1 = lshr <4 x i32> %t0, <i32 8, i32 8, i32 8, i32 8>
1818 define <4 x i32> @test_128_i32_x_4_8388352_mask_lshr_9(<4 x i32> %a0) {
1819 ; X86-SSE2-LABEL: test_128_i32_x_4_8388352_mask_lshr_9:
1820 ; X86-SSE2: # %bb.0:
1821 ; X86-SSE2-NEXT: psrld $9, %xmm0
1822 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
1823 ; X86-SSE2-NEXT: retl
1825 ; X86-AVX1-LABEL: test_128_i32_x_4_8388352_mask_lshr_9:
1826 ; X86-AVX1: # %bb.0:
1827 ; X86-AVX1-NEXT: vpsrld $9, %xmm0, %xmm0
1828 ; X86-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1829 ; X86-AVX1-NEXT: retl
1831 ; X86-AVX2-LABEL: test_128_i32_x_4_8388352_mask_lshr_9:
1832 ; X86-AVX2: # %bb.0:
1833 ; X86-AVX2-NEXT: vpsrld $9, %xmm0, %xmm0
1834 ; X86-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [16383,16383,16383,16383]
1835 ; X86-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
1836 ; X86-AVX2-NEXT: retl
1838 ; X64-SSE2-LABEL: test_128_i32_x_4_8388352_mask_lshr_9:
1839 ; X64-SSE2: # %bb.0:
1840 ; X64-SSE2-NEXT: psrld $9, %xmm0
1841 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
1842 ; X64-SSE2-NEXT: retq
1844 ; X64-AVX1-LABEL: test_128_i32_x_4_8388352_mask_lshr_9:
1845 ; X64-AVX1: # %bb.0:
1846 ; X64-AVX1-NEXT: vpsrld $9, %xmm0, %xmm0
1847 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1848 ; X64-AVX1-NEXT: retq
1850 ; X64-AVX2-LABEL: test_128_i32_x_4_8388352_mask_lshr_9:
1851 ; X64-AVX2: # %bb.0:
1852 ; X64-AVX2-NEXT: vpsrld $9, %xmm0, %xmm0
1853 ; X64-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [16383,16383,16383,16383]
1854 ; X64-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
1855 ; X64-AVX2-NEXT: retq
1856 %t0 = and <4 x i32> %a0, <i32 8388352, i32 8388352, i32 8388352, i32 8388352>
1857 %t1 = lshr <4 x i32> %t0, <i32 9, i32 9, i32 9, i32 9>
1860 define <4 x i32> @test_128_i32_x_4_8388352_mask_lshr_10(<4 x i32> %a0) {
1861 ; X86-SSE2-LABEL: test_128_i32_x_4_8388352_mask_lshr_10:
1862 ; X86-SSE2: # %bb.0:
1863 ; X86-SSE2-NEXT: psrld $10, %xmm0
1864 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
1865 ; X86-SSE2-NEXT: retl
1867 ; X86-AVX1-LABEL: test_128_i32_x_4_8388352_mask_lshr_10:
1868 ; X86-AVX1: # %bb.0:
1869 ; X86-AVX1-NEXT: vpsrld $10, %xmm0, %xmm0
1870 ; X86-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1871 ; X86-AVX1-NEXT: retl
1873 ; X86-AVX2-LABEL: test_128_i32_x_4_8388352_mask_lshr_10:
1874 ; X86-AVX2: # %bb.0:
1875 ; X86-AVX2-NEXT: vpsrld $10, %xmm0, %xmm0
1876 ; X86-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [8191,8191,8191,8191]
1877 ; X86-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
1878 ; X86-AVX2-NEXT: retl
1880 ; X64-SSE2-LABEL: test_128_i32_x_4_8388352_mask_lshr_10:
1881 ; X64-SSE2: # %bb.0:
1882 ; X64-SSE2-NEXT: psrld $10, %xmm0
1883 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
1884 ; X64-SSE2-NEXT: retq
1886 ; X64-AVX1-LABEL: test_128_i32_x_4_8388352_mask_lshr_10:
1887 ; X64-AVX1: # %bb.0:
1888 ; X64-AVX1-NEXT: vpsrld $10, %xmm0, %xmm0
1889 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1890 ; X64-AVX1-NEXT: retq
1892 ; X64-AVX2-LABEL: test_128_i32_x_4_8388352_mask_lshr_10:
1893 ; X64-AVX2: # %bb.0:
1894 ; X64-AVX2-NEXT: vpsrld $10, %xmm0, %xmm0
1895 ; X64-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [8191,8191,8191,8191]
1896 ; X64-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
1897 ; X64-AVX2-NEXT: retq
1898 %t0 = and <4 x i32> %a0, <i32 8388352, i32 8388352, i32 8388352, i32 8388352>
1899 %t1 = lshr <4 x i32> %t0, <i32 10, i32 10, i32 10, i32 10>
1903 define <4 x i32> @test_128_i32_x_4_4294836224_mask_lshr_1(<4 x i32> %a0) {
1904 ; X86-SSE2-LABEL: test_128_i32_x_4_4294836224_mask_lshr_1:
1905 ; X86-SSE2: # %bb.0:
1906 ; X86-SSE2-NEXT: psrld $1, %xmm0
1907 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
1908 ; X86-SSE2-NEXT: retl
1910 ; X86-AVX1-LABEL: test_128_i32_x_4_4294836224_mask_lshr_1:
1911 ; X86-AVX1: # %bb.0:
1912 ; X86-AVX1-NEXT: vpsrld $1, %xmm0, %xmm0
1913 ; X86-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1914 ; X86-AVX1-NEXT: retl
1916 ; X86-AVX2-LABEL: test_128_i32_x_4_4294836224_mask_lshr_1:
1917 ; X86-AVX2: # %bb.0:
1918 ; X86-AVX2-NEXT: vpsrld $1, %xmm0, %xmm0
1919 ; X86-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [2147418112,2147418112,2147418112,2147418112]
1920 ; X86-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
1921 ; X86-AVX2-NEXT: retl
1923 ; X64-SSE2-LABEL: test_128_i32_x_4_4294836224_mask_lshr_1:
1924 ; X64-SSE2: # %bb.0:
1925 ; X64-SSE2-NEXT: psrld $1, %xmm0
1926 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
1927 ; X64-SSE2-NEXT: retq
1929 ; X64-AVX1-LABEL: test_128_i32_x_4_4294836224_mask_lshr_1:
1930 ; X64-AVX1: # %bb.0:
1931 ; X64-AVX1-NEXT: vpsrld $1, %xmm0, %xmm0
1932 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1933 ; X64-AVX1-NEXT: retq
1935 ; X64-AVX2-LABEL: test_128_i32_x_4_4294836224_mask_lshr_1:
1936 ; X64-AVX2: # %bb.0:
1937 ; X64-AVX2-NEXT: vpsrld $1, %xmm0, %xmm0
1938 ; X64-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [2147418112,2147418112,2147418112,2147418112]
1939 ; X64-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
1940 ; X64-AVX2-NEXT: retq
1941 %t0 = and <4 x i32> %a0, <i32 4294836224, i32 4294836224, i32 4294836224, i32 4294836224>
1942 %t1 = lshr <4 x i32> %t0, <i32 1, i32 1, i32 1, i32 1>
1945 define <4 x i32> @test_128_i32_x_4_4294836224_mask_lshr_16(<4 x i32> %a0) {
1946 ; X86-SSE2-LABEL: test_128_i32_x_4_4294836224_mask_lshr_16:
1947 ; X86-SSE2: # %bb.0:
1948 ; X86-SSE2-NEXT: psrld $16, %xmm0
1949 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
1950 ; X86-SSE2-NEXT: retl
1952 ; X86-AVX1-LABEL: test_128_i32_x_4_4294836224_mask_lshr_16:
1953 ; X86-AVX1: # %bb.0:
1954 ; X86-AVX1-NEXT: vpsrld $16, %xmm0, %xmm0
1955 ; X86-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1956 ; X86-AVX1-NEXT: retl
1958 ; X86-AVX2-LABEL: test_128_i32_x_4_4294836224_mask_lshr_16:
1959 ; X86-AVX2: # %bb.0:
1960 ; X86-AVX2-NEXT: vpsrld $16, %xmm0, %xmm0
1961 ; X86-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [65534,65534,65534,65534]
1962 ; X86-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
1963 ; X86-AVX2-NEXT: retl
1965 ; X64-SSE2-LABEL: test_128_i32_x_4_4294836224_mask_lshr_16:
1966 ; X64-SSE2: # %bb.0:
1967 ; X64-SSE2-NEXT: psrld $16, %xmm0
1968 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
1969 ; X64-SSE2-NEXT: retq
1971 ; X64-AVX1-LABEL: test_128_i32_x_4_4294836224_mask_lshr_16:
1972 ; X64-AVX1: # %bb.0:
1973 ; X64-AVX1-NEXT: vpsrld $16, %xmm0, %xmm0
1974 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1975 ; X64-AVX1-NEXT: retq
1977 ; X64-AVX2-LABEL: test_128_i32_x_4_4294836224_mask_lshr_16:
1978 ; X64-AVX2: # %bb.0:
1979 ; X64-AVX2-NEXT: vpsrld $16, %xmm0, %xmm0
1980 ; X64-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [65534,65534,65534,65534]
1981 ; X64-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
1982 ; X64-AVX2-NEXT: retq
1983 %t0 = and <4 x i32> %a0, <i32 4294836224, i32 4294836224, i32 4294836224, i32 4294836224>
1984 %t1 = lshr <4 x i32> %t0, <i32 16, i32 16, i32 16, i32 16>
1987 define <4 x i32> @test_128_i32_x_4_4294836224_mask_lshr_17(<4 x i32> %a0) {
1988 ; X86-SSE2-LABEL: test_128_i32_x_4_4294836224_mask_lshr_17:
1989 ; X86-SSE2: # %bb.0:
1990 ; X86-SSE2-NEXT: psrld $17, %xmm0
1991 ; X86-SSE2-NEXT: retl
1993 ; X86-AVX-LABEL: test_128_i32_x_4_4294836224_mask_lshr_17:
1995 ; X86-AVX-NEXT: vpsrld $17, %xmm0, %xmm0
1996 ; X86-AVX-NEXT: retl
1998 ; X64-SSE2-LABEL: test_128_i32_x_4_4294836224_mask_lshr_17:
1999 ; X64-SSE2: # %bb.0:
2000 ; X64-SSE2-NEXT: psrld $17, %xmm0
2001 ; X64-SSE2-NEXT: retq
2003 ; X64-AVX-LABEL: test_128_i32_x_4_4294836224_mask_lshr_17:
2005 ; X64-AVX-NEXT: vpsrld $17, %xmm0, %xmm0
2006 ; X64-AVX-NEXT: retq
2007 %t0 = and <4 x i32> %a0, <i32 4294836224, i32 4294836224, i32 4294836224, i32 4294836224>
2008 %t1 = lshr <4 x i32> %t0, <i32 17, i32 17, i32 17, i32 17>
2011 define <4 x i32> @test_128_i32_x_4_4294836224_mask_lshr_18(<4 x i32> %a0) {
2012 ; X86-SSE2-LABEL: test_128_i32_x_4_4294836224_mask_lshr_18:
2013 ; X86-SSE2: # %bb.0:
2014 ; X86-SSE2-NEXT: psrld $18, %xmm0
2015 ; X86-SSE2-NEXT: retl
2017 ; X86-AVX-LABEL: test_128_i32_x_4_4294836224_mask_lshr_18:
2019 ; X86-AVX-NEXT: vpsrld $18, %xmm0, %xmm0
2020 ; X86-AVX-NEXT: retl
2022 ; X64-SSE2-LABEL: test_128_i32_x_4_4294836224_mask_lshr_18:
2023 ; X64-SSE2: # %bb.0:
2024 ; X64-SSE2-NEXT: psrld $18, %xmm0
2025 ; X64-SSE2-NEXT: retq
2027 ; X64-AVX-LABEL: test_128_i32_x_4_4294836224_mask_lshr_18:
2029 ; X64-AVX-NEXT: vpsrld $18, %xmm0, %xmm0
2030 ; X64-AVX-NEXT: retq
2031 %t0 = and <4 x i32> %a0, <i32 4294836224, i32 4294836224, i32 4294836224, i32 4294836224>
2032 %t1 = lshr <4 x i32> %t0, <i32 18, i32 18, i32 18, i32 18>
2038 define <4 x i32> @test_128_i32_x_4_32767_mask_ashr_1(<4 x i32> %a0) {
2039 ; X86-SSE2-LABEL: test_128_i32_x_4_32767_mask_ashr_1:
2040 ; X86-SSE2: # %bb.0:
2041 ; X86-SSE2-NEXT: psrld $1, %xmm0
2042 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
2043 ; X86-SSE2-NEXT: retl
2045 ; X86-AVX1-LABEL: test_128_i32_x_4_32767_mask_ashr_1:
2046 ; X86-AVX1: # %bb.0:
2047 ; X86-AVX1-NEXT: vpsrld $1, %xmm0, %xmm0
2048 ; X86-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
2049 ; X86-AVX1-NEXT: retl
2051 ; X86-AVX2-LABEL: test_128_i32_x_4_32767_mask_ashr_1:
2052 ; X86-AVX2: # %bb.0:
2053 ; X86-AVX2-NEXT: vpsrld $1, %xmm0, %xmm0
2054 ; X86-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [16383,16383,16383,16383]
2055 ; X86-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
2056 ; X86-AVX2-NEXT: retl
2058 ; X64-SSE2-LABEL: test_128_i32_x_4_32767_mask_ashr_1:
2059 ; X64-SSE2: # %bb.0:
2060 ; X64-SSE2-NEXT: psrld $1, %xmm0
2061 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
2062 ; X64-SSE2-NEXT: retq
2064 ; X64-AVX1-LABEL: test_128_i32_x_4_32767_mask_ashr_1:
2065 ; X64-AVX1: # %bb.0:
2066 ; X64-AVX1-NEXT: vpsrld $1, %xmm0, %xmm0
2067 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2068 ; X64-AVX1-NEXT: retq
2070 ; X64-AVX2-LABEL: test_128_i32_x_4_32767_mask_ashr_1:
2071 ; X64-AVX2: # %bb.0:
2072 ; X64-AVX2-NEXT: vpsrld $1, %xmm0, %xmm0
2073 ; X64-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [16383,16383,16383,16383]
2074 ; X64-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
2075 ; X64-AVX2-NEXT: retq
2076 %t0 = and <4 x i32> %a0, <i32 32767, i32 32767, i32 32767, i32 32767>
2077 %t1 = ashr <4 x i32> %t0, <i32 1, i32 1, i32 1, i32 1>
2081 define <4 x i32> @test_128_i32_x_4_8388352_mask_ashr_7(<4 x i32> %a0) {
2082 ; X86-SSE2-LABEL: test_128_i32_x_4_8388352_mask_ashr_7:
2083 ; X86-SSE2: # %bb.0:
2084 ; X86-SSE2-NEXT: psrld $7, %xmm0
2085 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
2086 ; X86-SSE2-NEXT: retl
2088 ; X86-AVX1-LABEL: test_128_i32_x_4_8388352_mask_ashr_7:
2089 ; X86-AVX1: # %bb.0:
2090 ; X86-AVX1-NEXT: vpsrld $7, %xmm0, %xmm0
2091 ; X86-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
2092 ; X86-AVX1-NEXT: retl
2094 ; X86-AVX2-LABEL: test_128_i32_x_4_8388352_mask_ashr_7:
2095 ; X86-AVX2: # %bb.0:
2096 ; X86-AVX2-NEXT: vpsrld $7, %xmm0, %xmm0
2097 ; X86-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [65534,65534,65534,65534]
2098 ; X86-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
2099 ; X86-AVX2-NEXT: retl
2101 ; X64-SSE2-LABEL: test_128_i32_x_4_8388352_mask_ashr_7:
2102 ; X64-SSE2: # %bb.0:
2103 ; X64-SSE2-NEXT: psrld $7, %xmm0
2104 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
2105 ; X64-SSE2-NEXT: retq
2107 ; X64-AVX1-LABEL: test_128_i32_x_4_8388352_mask_ashr_7:
2108 ; X64-AVX1: # %bb.0:
2109 ; X64-AVX1-NEXT: vpsrld $7, %xmm0, %xmm0
2110 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2111 ; X64-AVX1-NEXT: retq
2113 ; X64-AVX2-LABEL: test_128_i32_x_4_8388352_mask_ashr_7:
2114 ; X64-AVX2: # %bb.0:
2115 ; X64-AVX2-NEXT: vpsrld $7, %xmm0, %xmm0
2116 ; X64-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [65534,65534,65534,65534]
2117 ; X64-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
2118 ; X64-AVX2-NEXT: retq
2119 %t0 = and <4 x i32> %a0, <i32 8388352, i32 8388352, i32 8388352, i32 8388352>
2120 %t1 = ashr <4 x i32> %t0, <i32 7, i32 7, i32 7, i32 7>
2123 define <4 x i32> @test_128_i32_x_4_8388352_mask_ashr_8(<4 x i32> %a0) {
2124 ; X86-SSE2-LABEL: test_128_i32_x_4_8388352_mask_ashr_8:
2125 ; X86-SSE2: # %bb.0:
2126 ; X86-SSE2-NEXT: psrld $8, %xmm0
2127 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
2128 ; X86-SSE2-NEXT: retl
2130 ; X86-AVX1-LABEL: test_128_i32_x_4_8388352_mask_ashr_8:
2131 ; X86-AVX1: # %bb.0:
2132 ; X86-AVX1-NEXT: vpsrld $8, %xmm0, %xmm0
2133 ; X86-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
2134 ; X86-AVX1-NEXT: retl
2136 ; X86-AVX2-LABEL: test_128_i32_x_4_8388352_mask_ashr_8:
2137 ; X86-AVX2: # %bb.0:
2138 ; X86-AVX2-NEXT: vpsrld $8, %xmm0, %xmm0
2139 ; X86-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [32767,32767,32767,32767]
2140 ; X86-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
2141 ; X86-AVX2-NEXT: retl
2143 ; X64-SSE2-LABEL: test_128_i32_x_4_8388352_mask_ashr_8:
2144 ; X64-SSE2: # %bb.0:
2145 ; X64-SSE2-NEXT: psrld $8, %xmm0
2146 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
2147 ; X64-SSE2-NEXT: retq
2149 ; X64-AVX1-LABEL: test_128_i32_x_4_8388352_mask_ashr_8:
2150 ; X64-AVX1: # %bb.0:
2151 ; X64-AVX1-NEXT: vpsrld $8, %xmm0, %xmm0
2152 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2153 ; X64-AVX1-NEXT: retq
2155 ; X64-AVX2-LABEL: test_128_i32_x_4_8388352_mask_ashr_8:
2156 ; X64-AVX2: # %bb.0:
2157 ; X64-AVX2-NEXT: vpsrld $8, %xmm0, %xmm0
2158 ; X64-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [32767,32767,32767,32767]
2159 ; X64-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
2160 ; X64-AVX2-NEXT: retq
2161 %t0 = and <4 x i32> %a0, <i32 8388352, i32 8388352, i32 8388352, i32 8388352>
2162 %t1 = ashr <4 x i32> %t0, <i32 8, i32 8, i32 8, i32 8>
2165 define <4 x i32> @test_128_i32_x_4_8388352_mask_ashr_9(<4 x i32> %a0) {
2166 ; X86-SSE2-LABEL: test_128_i32_x_4_8388352_mask_ashr_9:
2167 ; X86-SSE2: # %bb.0:
2168 ; X86-SSE2-NEXT: psrld $9, %xmm0
2169 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
2170 ; X86-SSE2-NEXT: retl
2172 ; X86-AVX1-LABEL: test_128_i32_x_4_8388352_mask_ashr_9:
2173 ; X86-AVX1: # %bb.0:
2174 ; X86-AVX1-NEXT: vpsrld $9, %xmm0, %xmm0
2175 ; X86-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
2176 ; X86-AVX1-NEXT: retl
2178 ; X86-AVX2-LABEL: test_128_i32_x_4_8388352_mask_ashr_9:
2179 ; X86-AVX2: # %bb.0:
2180 ; X86-AVX2-NEXT: vpsrld $9, %xmm0, %xmm0
2181 ; X86-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [16383,16383,16383,16383]
2182 ; X86-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
2183 ; X86-AVX2-NEXT: retl
2185 ; X64-SSE2-LABEL: test_128_i32_x_4_8388352_mask_ashr_9:
2186 ; X64-SSE2: # %bb.0:
2187 ; X64-SSE2-NEXT: psrld $9, %xmm0
2188 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
2189 ; X64-SSE2-NEXT: retq
2191 ; X64-AVX1-LABEL: test_128_i32_x_4_8388352_mask_ashr_9:
2192 ; X64-AVX1: # %bb.0:
2193 ; X64-AVX1-NEXT: vpsrld $9, %xmm0, %xmm0
2194 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2195 ; X64-AVX1-NEXT: retq
2197 ; X64-AVX2-LABEL: test_128_i32_x_4_8388352_mask_ashr_9:
2198 ; X64-AVX2: # %bb.0:
2199 ; X64-AVX2-NEXT: vpsrld $9, %xmm0, %xmm0
2200 ; X64-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [16383,16383,16383,16383]
2201 ; X64-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
2202 ; X64-AVX2-NEXT: retq
2203 %t0 = and <4 x i32> %a0, <i32 8388352, i32 8388352, i32 8388352, i32 8388352>
2204 %t1 = ashr <4 x i32> %t0, <i32 9, i32 9, i32 9, i32 9>
2207 define <4 x i32> @test_128_i32_x_4_8388352_mask_ashr_10(<4 x i32> %a0) {
2208 ; X86-SSE2-LABEL: test_128_i32_x_4_8388352_mask_ashr_10:
2209 ; X86-SSE2: # %bb.0:
2210 ; X86-SSE2-NEXT: psrld $10, %xmm0
2211 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
2212 ; X86-SSE2-NEXT: retl
2214 ; X86-AVX1-LABEL: test_128_i32_x_4_8388352_mask_ashr_10:
2215 ; X86-AVX1: # %bb.0:
2216 ; X86-AVX1-NEXT: vpsrld $10, %xmm0, %xmm0
2217 ; X86-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
2218 ; X86-AVX1-NEXT: retl
2220 ; X86-AVX2-LABEL: test_128_i32_x_4_8388352_mask_ashr_10:
2221 ; X86-AVX2: # %bb.0:
2222 ; X86-AVX2-NEXT: vpsrld $10, %xmm0, %xmm0
2223 ; X86-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [8191,8191,8191,8191]
2224 ; X86-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
2225 ; X86-AVX2-NEXT: retl
2227 ; X64-SSE2-LABEL: test_128_i32_x_4_8388352_mask_ashr_10:
2228 ; X64-SSE2: # %bb.0:
2229 ; X64-SSE2-NEXT: psrld $10, %xmm0
2230 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
2231 ; X64-SSE2-NEXT: retq
2233 ; X64-AVX1-LABEL: test_128_i32_x_4_8388352_mask_ashr_10:
2234 ; X64-AVX1: # %bb.0:
2235 ; X64-AVX1-NEXT: vpsrld $10, %xmm0, %xmm0
2236 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2237 ; X64-AVX1-NEXT: retq
2239 ; X64-AVX2-LABEL: test_128_i32_x_4_8388352_mask_ashr_10:
2240 ; X64-AVX2: # %bb.0:
2241 ; X64-AVX2-NEXT: vpsrld $10, %xmm0, %xmm0
2242 ; X64-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [8191,8191,8191,8191]
2243 ; X64-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
2244 ; X64-AVX2-NEXT: retq
2245 %t0 = and <4 x i32> %a0, <i32 8388352, i32 8388352, i32 8388352, i32 8388352>
2246 %t1 = ashr <4 x i32> %t0, <i32 10, i32 10, i32 10, i32 10>
2250 define <4 x i32> @test_128_i32_x_4_4294836224_mask_ashr_1(<4 x i32> %a0) {
2251 ; X86-SSE2-LABEL: test_128_i32_x_4_4294836224_mask_ashr_1:
2252 ; X86-SSE2: # %bb.0:
2253 ; X86-SSE2-NEXT: psrad $1, %xmm0
2254 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
2255 ; X86-SSE2-NEXT: retl
2257 ; X86-AVX-LABEL: test_128_i32_x_4_4294836224_mask_ashr_1:
2259 ; X86-AVX-NEXT: vpsrad $1, %xmm0, %xmm0
2260 ; X86-AVX-NEXT: vpxor %xmm1, %xmm1, %xmm1
2261 ; X86-AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0],xmm0[1],xmm1[2],xmm0[3],xmm1[4],xmm0[5],xmm1[6],xmm0[7]
2262 ; X86-AVX-NEXT: retl
2264 ; X64-SSE2-LABEL: test_128_i32_x_4_4294836224_mask_ashr_1:
2265 ; X64-SSE2: # %bb.0:
2266 ; X64-SSE2-NEXT: psrad $1, %xmm0
2267 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
2268 ; X64-SSE2-NEXT: retq
2270 ; X64-AVX-LABEL: test_128_i32_x_4_4294836224_mask_ashr_1:
2272 ; X64-AVX-NEXT: vpsrad $1, %xmm0, %xmm0
2273 ; X64-AVX-NEXT: vpxor %xmm1, %xmm1, %xmm1
2274 ; X64-AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0],xmm0[1],xmm1[2],xmm0[3],xmm1[4],xmm0[5],xmm1[6],xmm0[7]
2275 ; X64-AVX-NEXT: retq
2276 %t0 = and <4 x i32> %a0, <i32 4294836224, i32 4294836224, i32 4294836224, i32 4294836224>
2277 %t1 = ashr <4 x i32> %t0, <i32 1, i32 1, i32 1, i32 1>
2280 define <4 x i32> @test_128_i32_x_4_4294836224_mask_ashr_16(<4 x i32> %a0) {
2281 ; X86-SSE2-LABEL: test_128_i32_x_4_4294836224_mask_ashr_16:
2282 ; X86-SSE2: # %bb.0:
2283 ; X86-SSE2-NEXT: psrad $16, %xmm0
2284 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
2285 ; X86-SSE2-NEXT: retl
2287 ; X86-AVX1-LABEL: test_128_i32_x_4_4294836224_mask_ashr_16:
2288 ; X86-AVX1: # %bb.0:
2289 ; X86-AVX1-NEXT: vpsrad $16, %xmm0, %xmm0
2290 ; X86-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
2291 ; X86-AVX1-NEXT: retl
2293 ; X86-AVX2-LABEL: test_128_i32_x_4_4294836224_mask_ashr_16:
2294 ; X86-AVX2: # %bb.0:
2295 ; X86-AVX2-NEXT: vpsrad $16, %xmm0, %xmm0
2296 ; X86-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [4294967294,4294967294,4294967294,4294967294]
2297 ; X86-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
2298 ; X86-AVX2-NEXT: retl
2300 ; X64-SSE2-LABEL: test_128_i32_x_4_4294836224_mask_ashr_16:
2301 ; X64-SSE2: # %bb.0:
2302 ; X64-SSE2-NEXT: psrad $16, %xmm0
2303 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
2304 ; X64-SSE2-NEXT: retq
2306 ; X64-AVX1-LABEL: test_128_i32_x_4_4294836224_mask_ashr_16:
2307 ; X64-AVX1: # %bb.0:
2308 ; X64-AVX1-NEXT: vpsrad $16, %xmm0, %xmm0
2309 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2310 ; X64-AVX1-NEXT: retq
2312 ; X64-AVX2-LABEL: test_128_i32_x_4_4294836224_mask_ashr_16:
2313 ; X64-AVX2: # %bb.0:
2314 ; X64-AVX2-NEXT: vpsrad $16, %xmm0, %xmm0
2315 ; X64-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [4294967294,4294967294,4294967294,4294967294]
2316 ; X64-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
2317 ; X64-AVX2-NEXT: retq
2318 %t0 = and <4 x i32> %a0, <i32 4294836224, i32 4294836224, i32 4294836224, i32 4294836224>
2319 %t1 = ashr <4 x i32> %t0, <i32 16, i32 16, i32 16, i32 16>
2322 define <4 x i32> @test_128_i32_x_4_4294836224_mask_ashr_17(<4 x i32> %a0) {
2323 ; X86-SSE2-LABEL: test_128_i32_x_4_4294836224_mask_ashr_17:
2324 ; X86-SSE2: # %bb.0:
2325 ; X86-SSE2-NEXT: psrad $17, %xmm0
2326 ; X86-SSE2-NEXT: retl
2328 ; X86-AVX-LABEL: test_128_i32_x_4_4294836224_mask_ashr_17:
2330 ; X86-AVX-NEXT: vpsrad $17, %xmm0, %xmm0
2331 ; X86-AVX-NEXT: retl
2333 ; X64-SSE2-LABEL: test_128_i32_x_4_4294836224_mask_ashr_17:
2334 ; X64-SSE2: # %bb.0:
2335 ; X64-SSE2-NEXT: psrad $17, %xmm0
2336 ; X64-SSE2-NEXT: retq
2338 ; X64-AVX-LABEL: test_128_i32_x_4_4294836224_mask_ashr_17:
2340 ; X64-AVX-NEXT: vpsrad $17, %xmm0, %xmm0
2341 ; X64-AVX-NEXT: retq
2342 %t0 = and <4 x i32> %a0, <i32 4294836224, i32 4294836224, i32 4294836224, i32 4294836224>
2343 %t1 = ashr <4 x i32> %t0, <i32 17, i32 17, i32 17, i32 17>
2346 define <4 x i32> @test_128_i32_x_4_4294836224_mask_ashr_18(<4 x i32> %a0) {
2347 ; X86-SSE2-LABEL: test_128_i32_x_4_4294836224_mask_ashr_18:
2348 ; X86-SSE2: # %bb.0:
2349 ; X86-SSE2-NEXT: psrad $18, %xmm0
2350 ; X86-SSE2-NEXT: retl
2352 ; X86-AVX-LABEL: test_128_i32_x_4_4294836224_mask_ashr_18:
2354 ; X86-AVX-NEXT: vpsrad $18, %xmm0, %xmm0
2355 ; X86-AVX-NEXT: retl
2357 ; X64-SSE2-LABEL: test_128_i32_x_4_4294836224_mask_ashr_18:
2358 ; X64-SSE2: # %bb.0:
2359 ; X64-SSE2-NEXT: psrad $18, %xmm0
2360 ; X64-SSE2-NEXT: retq
2362 ; X64-AVX-LABEL: test_128_i32_x_4_4294836224_mask_ashr_18:
2364 ; X64-AVX-NEXT: vpsrad $18, %xmm0, %xmm0
2365 ; X64-AVX-NEXT: retq
2366 %t0 = and <4 x i32> %a0, <i32 4294836224, i32 4294836224, i32 4294836224, i32 4294836224>
2367 %t1 = ashr <4 x i32> %t0, <i32 18, i32 18, i32 18, i32 18>
2373 define <4 x i32> @test_128_i32_x_4_32767_mask_shl_1(<4 x i32> %a0) {
2374 ; X86-SSE2-LABEL: test_128_i32_x_4_32767_mask_shl_1:
2375 ; X86-SSE2: # %bb.0:
2376 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
2377 ; X86-SSE2-NEXT: paddd %xmm0, %xmm0
2378 ; X86-SSE2-NEXT: retl
2380 ; X86-AVX1-LABEL: test_128_i32_x_4_32767_mask_shl_1:
2381 ; X86-AVX1: # %bb.0:
2382 ; X86-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
2383 ; X86-AVX1-NEXT: vpaddd %xmm0, %xmm0, %xmm0
2384 ; X86-AVX1-NEXT: retl
2386 ; X86-AVX2-LABEL: test_128_i32_x_4_32767_mask_shl_1:
2387 ; X86-AVX2: # %bb.0:
2388 ; X86-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [32767,32767,32767,32767]
2389 ; X86-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
2390 ; X86-AVX2-NEXT: vpaddd %xmm0, %xmm0, %xmm0
2391 ; X86-AVX2-NEXT: retl
2393 ; X64-SSE2-LABEL: test_128_i32_x_4_32767_mask_shl_1:
2394 ; X64-SSE2: # %bb.0:
2395 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
2396 ; X64-SSE2-NEXT: paddd %xmm0, %xmm0
2397 ; X64-SSE2-NEXT: retq
2399 ; X64-AVX1-LABEL: test_128_i32_x_4_32767_mask_shl_1:
2400 ; X64-AVX1: # %bb.0:
2401 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2402 ; X64-AVX1-NEXT: vpaddd %xmm0, %xmm0, %xmm0
2403 ; X64-AVX1-NEXT: retq
2405 ; X64-AVX2-LABEL: test_128_i32_x_4_32767_mask_shl_1:
2406 ; X64-AVX2: # %bb.0:
2407 ; X64-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [32767,32767,32767,32767]
2408 ; X64-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
2409 ; X64-AVX2-NEXT: vpaddd %xmm0, %xmm0, %xmm0
2410 ; X64-AVX2-NEXT: retq
2411 %t0 = and <4 x i32> %a0, <i32 32767, i32 32767, i32 32767, i32 32767>
2412 %t1 = shl <4 x i32> %t0, <i32 1, i32 1, i32 1, i32 1>
2415 define <4 x i32> @test_128_i32_x_4_32767_mask_shl_16(<4 x i32> %a0) {
2416 ; X86-SSE2-LABEL: test_128_i32_x_4_32767_mask_shl_16:
2417 ; X86-SSE2: # %bb.0:
2418 ; X86-SSE2-NEXT: pslld $16, %xmm0
2419 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
2420 ; X86-SSE2-NEXT: retl
2422 ; X86-AVX1-LABEL: test_128_i32_x_4_32767_mask_shl_16:
2423 ; X86-AVX1: # %bb.0:
2424 ; X86-AVX1-NEXT: vpslld $16, %xmm0, %xmm0
2425 ; X86-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
2426 ; X86-AVX1-NEXT: retl
2428 ; X86-AVX2-LABEL: test_128_i32_x_4_32767_mask_shl_16:
2429 ; X86-AVX2: # %bb.0:
2430 ; X86-AVX2-NEXT: vpslld $16, %xmm0, %xmm0
2431 ; X86-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [2147418112,2147418112,2147418112,2147418112]
2432 ; X86-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
2433 ; X86-AVX2-NEXT: retl
2435 ; X64-SSE2-LABEL: test_128_i32_x_4_32767_mask_shl_16:
2436 ; X64-SSE2: # %bb.0:
2437 ; X64-SSE2-NEXT: pslld $16, %xmm0
2438 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
2439 ; X64-SSE2-NEXT: retq
2441 ; X64-AVX1-LABEL: test_128_i32_x_4_32767_mask_shl_16:
2442 ; X64-AVX1: # %bb.0:
2443 ; X64-AVX1-NEXT: vpslld $16, %xmm0, %xmm0
2444 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2445 ; X64-AVX1-NEXT: retq
2447 ; X64-AVX2-LABEL: test_128_i32_x_4_32767_mask_shl_16:
2448 ; X64-AVX2: # %bb.0:
2449 ; X64-AVX2-NEXT: vpslld $16, %xmm0, %xmm0
2450 ; X64-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [2147418112,2147418112,2147418112,2147418112]
2451 ; X64-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
2452 ; X64-AVX2-NEXT: retq
2453 %t0 = and <4 x i32> %a0, <i32 32767, i32 32767, i32 32767, i32 32767>
2454 %t1 = shl <4 x i32> %t0, <i32 16, i32 16, i32 16, i32 16>
2457 define <4 x i32> @test_128_i32_x_4_32767_mask_shl_17(<4 x i32> %a0) {
2458 ; X86-SSE2-LABEL: test_128_i32_x_4_32767_mask_shl_17:
2459 ; X86-SSE2: # %bb.0:
2460 ; X86-SSE2-NEXT: pslld $17, %xmm0
2461 ; X86-SSE2-NEXT: retl
2463 ; X86-AVX-LABEL: test_128_i32_x_4_32767_mask_shl_17:
2465 ; X86-AVX-NEXT: vpslld $17, %xmm0, %xmm0
2466 ; X86-AVX-NEXT: retl
2468 ; X64-SSE2-LABEL: test_128_i32_x_4_32767_mask_shl_17:
2469 ; X64-SSE2: # %bb.0:
2470 ; X64-SSE2-NEXT: pslld $17, %xmm0
2471 ; X64-SSE2-NEXT: retq
2473 ; X64-AVX-LABEL: test_128_i32_x_4_32767_mask_shl_17:
2475 ; X64-AVX-NEXT: vpslld $17, %xmm0, %xmm0
2476 ; X64-AVX-NEXT: retq
2477 %t0 = and <4 x i32> %a0, <i32 32767, i32 32767, i32 32767, i32 32767>
2478 %t1 = shl <4 x i32> %t0, <i32 17, i32 17, i32 17, i32 17>
2481 define <4 x i32> @test_128_i32_x_4_32767_mask_shl_18(<4 x i32> %a0) {
2482 ; X86-SSE2-LABEL: test_128_i32_x_4_32767_mask_shl_18:
2483 ; X86-SSE2: # %bb.0:
2484 ; X86-SSE2-NEXT: pslld $18, %xmm0
2485 ; X86-SSE2-NEXT: retl
2487 ; X86-AVX-LABEL: test_128_i32_x_4_32767_mask_shl_18:
2489 ; X86-AVX-NEXT: vpslld $18, %xmm0, %xmm0
2490 ; X86-AVX-NEXT: retl
2492 ; X64-SSE2-LABEL: test_128_i32_x_4_32767_mask_shl_18:
2493 ; X64-SSE2: # %bb.0:
2494 ; X64-SSE2-NEXT: pslld $18, %xmm0
2495 ; X64-SSE2-NEXT: retq
2497 ; X64-AVX-LABEL: test_128_i32_x_4_32767_mask_shl_18:
2499 ; X64-AVX-NEXT: vpslld $18, %xmm0, %xmm0
2500 ; X64-AVX-NEXT: retq
2501 %t0 = and <4 x i32> %a0, <i32 32767, i32 32767, i32 32767, i32 32767>
2502 %t1 = shl <4 x i32> %t0, <i32 18, i32 18, i32 18, i32 18>
2506 define <4 x i32> @test_128_i32_x_4_8388352_mask_shl_7(<4 x i32> %a0) {
2507 ; X86-SSE2-LABEL: test_128_i32_x_4_8388352_mask_shl_7:
2508 ; X86-SSE2: # %bb.0:
2509 ; X86-SSE2-NEXT: pslld $7, %xmm0
2510 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
2511 ; X86-SSE2-NEXT: retl
2513 ; X86-AVX1-LABEL: test_128_i32_x_4_8388352_mask_shl_7:
2514 ; X86-AVX1: # %bb.0:
2515 ; X86-AVX1-NEXT: vpslld $7, %xmm0, %xmm0
2516 ; X86-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
2517 ; X86-AVX1-NEXT: retl
2519 ; X86-AVX2-LABEL: test_128_i32_x_4_8388352_mask_shl_7:
2520 ; X86-AVX2: # %bb.0:
2521 ; X86-AVX2-NEXT: vpslld $7, %xmm0, %xmm0
2522 ; X86-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [1073709056,1073709056,1073709056,1073709056]
2523 ; X86-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
2524 ; X86-AVX2-NEXT: retl
2526 ; X64-SSE2-LABEL: test_128_i32_x_4_8388352_mask_shl_7:
2527 ; X64-SSE2: # %bb.0:
2528 ; X64-SSE2-NEXT: pslld $7, %xmm0
2529 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
2530 ; X64-SSE2-NEXT: retq
2532 ; X64-AVX1-LABEL: test_128_i32_x_4_8388352_mask_shl_7:
2533 ; X64-AVX1: # %bb.0:
2534 ; X64-AVX1-NEXT: vpslld $7, %xmm0, %xmm0
2535 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2536 ; X64-AVX1-NEXT: retq
2538 ; X64-AVX2-LABEL: test_128_i32_x_4_8388352_mask_shl_7:
2539 ; X64-AVX2: # %bb.0:
2540 ; X64-AVX2-NEXT: vpslld $7, %xmm0, %xmm0
2541 ; X64-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [1073709056,1073709056,1073709056,1073709056]
2542 ; X64-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
2543 ; X64-AVX2-NEXT: retq
2544 %t0 = and <4 x i32> %a0, <i32 8388352, i32 8388352, i32 8388352, i32 8388352>
2545 %t1 = shl <4 x i32> %t0, <i32 7, i32 7, i32 7, i32 7>
2548 define <4 x i32> @test_128_i32_x_4_8388352_mask_shl_8(<4 x i32> %a0) {
2549 ; X86-SSE2-LABEL: test_128_i32_x_4_8388352_mask_shl_8:
2550 ; X86-SSE2: # %bb.0:
2551 ; X86-SSE2-NEXT: pslld $8, %xmm0
2552 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
2553 ; X86-SSE2-NEXT: retl
2555 ; X86-AVX1-LABEL: test_128_i32_x_4_8388352_mask_shl_8:
2556 ; X86-AVX1: # %bb.0:
2557 ; X86-AVX1-NEXT: vpslld $8, %xmm0, %xmm0
2558 ; X86-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
2559 ; X86-AVX1-NEXT: retl
2561 ; X86-AVX2-LABEL: test_128_i32_x_4_8388352_mask_shl_8:
2562 ; X86-AVX2: # %bb.0:
2563 ; X86-AVX2-NEXT: vpslld $8, %xmm0, %xmm0
2564 ; X86-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [2147418112,2147418112,2147418112,2147418112]
2565 ; X86-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
2566 ; X86-AVX2-NEXT: retl
2568 ; X64-SSE2-LABEL: test_128_i32_x_4_8388352_mask_shl_8:
2569 ; X64-SSE2: # %bb.0:
2570 ; X64-SSE2-NEXT: pslld $8, %xmm0
2571 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
2572 ; X64-SSE2-NEXT: retq
2574 ; X64-AVX1-LABEL: test_128_i32_x_4_8388352_mask_shl_8:
2575 ; X64-AVX1: # %bb.0:
2576 ; X64-AVX1-NEXT: vpslld $8, %xmm0, %xmm0
2577 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2578 ; X64-AVX1-NEXT: retq
2580 ; X64-AVX2-LABEL: test_128_i32_x_4_8388352_mask_shl_8:
2581 ; X64-AVX2: # %bb.0:
2582 ; X64-AVX2-NEXT: vpslld $8, %xmm0, %xmm0
2583 ; X64-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [2147418112,2147418112,2147418112,2147418112]
2584 ; X64-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
2585 ; X64-AVX2-NEXT: retq
2586 %t0 = and <4 x i32> %a0, <i32 8388352, i32 8388352, i32 8388352, i32 8388352>
2587 %t1 = shl <4 x i32> %t0, <i32 8, i32 8, i32 8, i32 8>
2590 define <4 x i32> @test_128_i32_x_4_8388352_mask_shl_9(<4 x i32> %a0) {
2591 ; X86-SSE2-LABEL: test_128_i32_x_4_8388352_mask_shl_9:
2592 ; X86-SSE2: # %bb.0:
2593 ; X86-SSE2-NEXT: pslld $9, %xmm0
2594 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
2595 ; X86-SSE2-NEXT: retl
2597 ; X86-AVX1-LABEL: test_128_i32_x_4_8388352_mask_shl_9:
2598 ; X86-AVX1: # %bb.0:
2599 ; X86-AVX1-NEXT: vpslld $9, %xmm0, %xmm0
2600 ; X86-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
2601 ; X86-AVX1-NEXT: retl
2603 ; X86-AVX2-LABEL: test_128_i32_x_4_8388352_mask_shl_9:
2604 ; X86-AVX2: # %bb.0:
2605 ; X86-AVX2-NEXT: vpslld $9, %xmm0, %xmm0
2606 ; X86-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [4294836224,4294836224,4294836224,4294836224]
2607 ; X86-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
2608 ; X86-AVX2-NEXT: retl
2610 ; X64-SSE2-LABEL: test_128_i32_x_4_8388352_mask_shl_9:
2611 ; X64-SSE2: # %bb.0:
2612 ; X64-SSE2-NEXT: pslld $9, %xmm0
2613 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
2614 ; X64-SSE2-NEXT: retq
2616 ; X64-AVX1-LABEL: test_128_i32_x_4_8388352_mask_shl_9:
2617 ; X64-AVX1: # %bb.0:
2618 ; X64-AVX1-NEXT: vpslld $9, %xmm0, %xmm0
2619 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2620 ; X64-AVX1-NEXT: retq
2622 ; X64-AVX2-LABEL: test_128_i32_x_4_8388352_mask_shl_9:
2623 ; X64-AVX2: # %bb.0:
2624 ; X64-AVX2-NEXT: vpslld $9, %xmm0, %xmm0
2625 ; X64-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [4294836224,4294836224,4294836224,4294836224]
2626 ; X64-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
2627 ; X64-AVX2-NEXT: retq
2628 %t0 = and <4 x i32> %a0, <i32 8388352, i32 8388352, i32 8388352, i32 8388352>
2629 %t1 = shl <4 x i32> %t0, <i32 9, i32 9, i32 9, i32 9>
2632 define <4 x i32> @test_128_i32_x_4_8388352_mask_shl_10(<4 x i32> %a0) {
2633 ; X86-SSE2-LABEL: test_128_i32_x_4_8388352_mask_shl_10:
2634 ; X86-SSE2: # %bb.0:
2635 ; X86-SSE2-NEXT: pslld $10, %xmm0
2636 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
2637 ; X86-SSE2-NEXT: retl
2639 ; X86-AVX1-LABEL: test_128_i32_x_4_8388352_mask_shl_10:
2640 ; X86-AVX1: # %bb.0:
2641 ; X86-AVX1-NEXT: vpslld $10, %xmm0, %xmm0
2642 ; X86-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
2643 ; X86-AVX1-NEXT: retl
2645 ; X86-AVX2-LABEL: test_128_i32_x_4_8388352_mask_shl_10:
2646 ; X86-AVX2: # %bb.0:
2647 ; X86-AVX2-NEXT: vpslld $10, %xmm0, %xmm0
2648 ; X86-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [4294705152,4294705152,4294705152,4294705152]
2649 ; X86-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
2650 ; X86-AVX2-NEXT: retl
2652 ; X64-SSE2-LABEL: test_128_i32_x_4_8388352_mask_shl_10:
2653 ; X64-SSE2: # %bb.0:
2654 ; X64-SSE2-NEXT: pslld $10, %xmm0
2655 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
2656 ; X64-SSE2-NEXT: retq
2658 ; X64-AVX1-LABEL: test_128_i32_x_4_8388352_mask_shl_10:
2659 ; X64-AVX1: # %bb.0:
2660 ; X64-AVX1-NEXT: vpslld $10, %xmm0, %xmm0
2661 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2662 ; X64-AVX1-NEXT: retq
2664 ; X64-AVX2-LABEL: test_128_i32_x_4_8388352_mask_shl_10:
2665 ; X64-AVX2: # %bb.0:
2666 ; X64-AVX2-NEXT: vpslld $10, %xmm0, %xmm0
2667 ; X64-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [4294705152,4294705152,4294705152,4294705152]
2668 ; X64-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
2669 ; X64-AVX2-NEXT: retq
2670 %t0 = and <4 x i32> %a0, <i32 8388352, i32 8388352, i32 8388352, i32 8388352>
2671 %t1 = shl <4 x i32> %t0, <i32 10, i32 10, i32 10, i32 10>
2675 define <4 x i32> @test_128_i32_x_4_4294836224_mask_shl_1(<4 x i32> %a0) {
2676 ; X86-SSE2-LABEL: test_128_i32_x_4_4294836224_mask_shl_1:
2677 ; X86-SSE2: # %bb.0:
2678 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
2679 ; X86-SSE2-NEXT: paddd %xmm0, %xmm0
2680 ; X86-SSE2-NEXT: retl
2682 ; X86-AVX1-LABEL: test_128_i32_x_4_4294836224_mask_shl_1:
2683 ; X86-AVX1: # %bb.0:
2684 ; X86-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
2685 ; X86-AVX1-NEXT: vpaddd %xmm0, %xmm0, %xmm0
2686 ; X86-AVX1-NEXT: retl
2688 ; X86-AVX2-LABEL: test_128_i32_x_4_4294836224_mask_shl_1:
2689 ; X86-AVX2: # %bb.0:
2690 ; X86-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [4294836224,4294836224,4294836224,4294836224]
2691 ; X86-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
2692 ; X86-AVX2-NEXT: vpaddd %xmm0, %xmm0, %xmm0
2693 ; X86-AVX2-NEXT: retl
2695 ; X64-SSE2-LABEL: test_128_i32_x_4_4294836224_mask_shl_1:
2696 ; X64-SSE2: # %bb.0:
2697 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
2698 ; X64-SSE2-NEXT: paddd %xmm0, %xmm0
2699 ; X64-SSE2-NEXT: retq
2701 ; X64-AVX1-LABEL: test_128_i32_x_4_4294836224_mask_shl_1:
2702 ; X64-AVX1: # %bb.0:
2703 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2704 ; X64-AVX1-NEXT: vpaddd %xmm0, %xmm0, %xmm0
2705 ; X64-AVX1-NEXT: retq
2707 ; X64-AVX2-LABEL: test_128_i32_x_4_4294836224_mask_shl_1:
2708 ; X64-AVX2: # %bb.0:
2709 ; X64-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [4294836224,4294836224,4294836224,4294836224]
2710 ; X64-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
2711 ; X64-AVX2-NEXT: vpaddd %xmm0, %xmm0, %xmm0
2712 ; X64-AVX2-NEXT: retq
2713 %t0 = and <4 x i32> %a0, <i32 4294836224, i32 4294836224, i32 4294836224, i32 4294836224>
2714 %t1 = shl <4 x i32> %t0, <i32 1, i32 1, i32 1, i32 1>
2718 ;------------------------------------------------------------------------------;
2719 ; 128-bit vector; 64-bit elements = 2 elements
2720 ;------------------------------------------------------------------------------;
2724 define <2 x i64> @test_128_i64_x_2_2147483647_mask_lshr_1(<2 x i64> %a0) {
2725 ; X86-SSE2-LABEL: test_128_i64_x_2_2147483647_mask_lshr_1:
2726 ; X86-SSE2: # %bb.0:
2727 ; X86-SSE2-NEXT: psrlq $1, %xmm0
2728 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
2729 ; X86-SSE2-NEXT: retl
2731 ; X86-AVX-LABEL: test_128_i64_x_2_2147483647_mask_lshr_1:
2733 ; X86-AVX-NEXT: vpsrlq $1, %xmm0, %xmm0
2734 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
2735 ; X86-AVX-NEXT: retl
2737 ; X64-SSE2-LABEL: test_128_i64_x_2_2147483647_mask_lshr_1:
2738 ; X64-SSE2: # %bb.0:
2739 ; X64-SSE2-NEXT: psrlq $1, %xmm0
2740 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
2741 ; X64-SSE2-NEXT: retq
2743 ; X64-AVX-LABEL: test_128_i64_x_2_2147483647_mask_lshr_1:
2745 ; X64-AVX-NEXT: vpsrlq $1, %xmm0, %xmm0
2746 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2747 ; X64-AVX-NEXT: retq
2748 %t0 = and <2 x i64> %a0, <i64 2147483647, i64 2147483647>
2749 %t1 = lshr <2 x i64> %t0, <i64 1, i64 1>
2753 define <2 x i64> @test_128_i64_x_2_140737488289792_mask_lshr_15(<2 x i64> %a0) {
2754 ; X86-SSE2-LABEL: test_128_i64_x_2_140737488289792_mask_lshr_15:
2755 ; X86-SSE2: # %bb.0:
2756 ; X86-SSE2-NEXT: psrlq $15, %xmm0
2757 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
2758 ; X86-SSE2-NEXT: retl
2760 ; X86-AVX-LABEL: test_128_i64_x_2_140737488289792_mask_lshr_15:
2762 ; X86-AVX-NEXT: vpsrlq $15, %xmm0, %xmm0
2763 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
2764 ; X86-AVX-NEXT: retl
2766 ; X64-SSE2-LABEL: test_128_i64_x_2_140737488289792_mask_lshr_15:
2767 ; X64-SSE2: # %bb.0:
2768 ; X64-SSE2-NEXT: psrlq $15, %xmm0
2769 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
2770 ; X64-SSE2-NEXT: retq
2772 ; X64-AVX-LABEL: test_128_i64_x_2_140737488289792_mask_lshr_15:
2774 ; X64-AVX-NEXT: vpsrlq $15, %xmm0, %xmm0
2775 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2776 ; X64-AVX-NEXT: retq
2777 %t0 = and <2 x i64> %a0, <i64 140737488289792, i64 140737488289792>
2778 %t1 = lshr <2 x i64> %t0, <i64 15, i64 15>
2781 define <2 x i64> @test_128_i64_x_2_140737488289792_mask_lshr_16(<2 x i64> %a0) {
2782 ; X86-SSE2-LABEL: test_128_i64_x_2_140737488289792_mask_lshr_16:
2783 ; X86-SSE2: # %bb.0:
2784 ; X86-SSE2-NEXT: psrlq $16, %xmm0
2785 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
2786 ; X86-SSE2-NEXT: retl
2788 ; X86-AVX-LABEL: test_128_i64_x_2_140737488289792_mask_lshr_16:
2790 ; X86-AVX-NEXT: vpsrlq $16, %xmm0, %xmm0
2791 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
2792 ; X86-AVX-NEXT: retl
2794 ; X64-SSE2-LABEL: test_128_i64_x_2_140737488289792_mask_lshr_16:
2795 ; X64-SSE2: # %bb.0:
2796 ; X64-SSE2-NEXT: psrlq $16, %xmm0
2797 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
2798 ; X64-SSE2-NEXT: retq
2800 ; X64-AVX-LABEL: test_128_i64_x_2_140737488289792_mask_lshr_16:
2802 ; X64-AVX-NEXT: vpsrlq $16, %xmm0, %xmm0
2803 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2804 ; X64-AVX-NEXT: retq
2805 %t0 = and <2 x i64> %a0, <i64 140737488289792, i64 140737488289792>
2806 %t1 = lshr <2 x i64> %t0, <i64 16, i64 16>
2809 define <2 x i64> @test_128_i64_x_2_140737488289792_mask_lshr_17(<2 x i64> %a0) {
2810 ; X86-SSE2-LABEL: test_128_i64_x_2_140737488289792_mask_lshr_17:
2811 ; X86-SSE2: # %bb.0:
2812 ; X86-SSE2-NEXT: psrlq $17, %xmm0
2813 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
2814 ; X86-SSE2-NEXT: retl
2816 ; X86-AVX-LABEL: test_128_i64_x_2_140737488289792_mask_lshr_17:
2818 ; X86-AVX-NEXT: vpsrlq $17, %xmm0, %xmm0
2819 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
2820 ; X86-AVX-NEXT: retl
2822 ; X64-SSE2-LABEL: test_128_i64_x_2_140737488289792_mask_lshr_17:
2823 ; X64-SSE2: # %bb.0:
2824 ; X64-SSE2-NEXT: psrlq $17, %xmm0
2825 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
2826 ; X64-SSE2-NEXT: retq
2828 ; X64-AVX-LABEL: test_128_i64_x_2_140737488289792_mask_lshr_17:
2830 ; X64-AVX-NEXT: vpsrlq $17, %xmm0, %xmm0
2831 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2832 ; X64-AVX-NEXT: retq
2833 %t0 = and <2 x i64> %a0, <i64 140737488289792, i64 140737488289792>
2834 %t1 = lshr <2 x i64> %t0, <i64 17, i64 17>
2837 define <2 x i64> @test_128_i64_x_2_140737488289792_mask_lshr_18(<2 x i64> %a0) {
2838 ; X86-SSE2-LABEL: test_128_i64_x_2_140737488289792_mask_lshr_18:
2839 ; X86-SSE2: # %bb.0:
2840 ; X86-SSE2-NEXT: psrlq $18, %xmm0
2841 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
2842 ; X86-SSE2-NEXT: retl
2844 ; X86-AVX-LABEL: test_128_i64_x_2_140737488289792_mask_lshr_18:
2846 ; X86-AVX-NEXT: vpsrlq $18, %xmm0, %xmm0
2847 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
2848 ; X86-AVX-NEXT: retl
2850 ; X64-SSE2-LABEL: test_128_i64_x_2_140737488289792_mask_lshr_18:
2851 ; X64-SSE2: # %bb.0:
2852 ; X64-SSE2-NEXT: psrlq $18, %xmm0
2853 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
2854 ; X64-SSE2-NEXT: retq
2856 ; X64-AVX-LABEL: test_128_i64_x_2_140737488289792_mask_lshr_18:
2858 ; X64-AVX-NEXT: vpsrlq $18, %xmm0, %xmm0
2859 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2860 ; X64-AVX-NEXT: retq
2861 %t0 = and <2 x i64> %a0, <i64 140737488289792, i64 140737488289792>
2862 %t1 = lshr <2 x i64> %t0, <i64 18, i64 18>
2866 define <2 x i64> @test_128_i64_x_2_18446744065119617024_mask_lshr_1(<2 x i64> %a0) {
2867 ; X86-SSE2-LABEL: test_128_i64_x_2_18446744065119617024_mask_lshr_1:
2868 ; X86-SSE2: # %bb.0:
2869 ; X86-SSE2-NEXT: psrlq $1, %xmm0
2870 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
2871 ; X86-SSE2-NEXT: retl
2873 ; X86-AVX-LABEL: test_128_i64_x_2_18446744065119617024_mask_lshr_1:
2875 ; X86-AVX-NEXT: vpsrlq $1, %xmm0, %xmm0
2876 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
2877 ; X86-AVX-NEXT: retl
2879 ; X64-SSE2-LABEL: test_128_i64_x_2_18446744065119617024_mask_lshr_1:
2880 ; X64-SSE2: # %bb.0:
2881 ; X64-SSE2-NEXT: psrlq $1, %xmm0
2882 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
2883 ; X64-SSE2-NEXT: retq
2885 ; X64-AVX-LABEL: test_128_i64_x_2_18446744065119617024_mask_lshr_1:
2887 ; X64-AVX-NEXT: vpsrlq $1, %xmm0, %xmm0
2888 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2889 ; X64-AVX-NEXT: retq
2890 %t0 = and <2 x i64> %a0, <i64 18446744065119617024, i64 18446744065119617024>
2891 %t1 = lshr <2 x i64> %t0, <i64 1, i64 1>
2894 define <2 x i64> @test_128_i64_x_2_18446744065119617024_mask_lshr_32(<2 x i64> %a0) {
2895 ; X86-SSE2-LABEL: test_128_i64_x_2_18446744065119617024_mask_lshr_32:
2896 ; X86-SSE2: # %bb.0:
2897 ; X86-SSE2-NEXT: psrlq $32, %xmm0
2898 ; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
2899 ; X86-SSE2-NEXT: retl
2901 ; X86-AVX-LABEL: test_128_i64_x_2_18446744065119617024_mask_lshr_32:
2903 ; X86-AVX-NEXT: vpsrlq $32, %xmm0, %xmm0
2904 ; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
2905 ; X86-AVX-NEXT: retl
2907 ; X64-SSE2-LABEL: test_128_i64_x_2_18446744065119617024_mask_lshr_32:
2908 ; X64-SSE2: # %bb.0:
2909 ; X64-SSE2-NEXT: psrlq $32, %xmm0
2910 ; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
2911 ; X64-SSE2-NEXT: retq
2913 ; X64-AVX-LABEL: test_128_i64_x_2_18446744065119617024_mask_lshr_32:
2915 ; X64-AVX-NEXT: vpsrlq $32, %xmm0, %xmm0
2916 ; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2917 ; X64-AVX-NEXT: retq
2918 %t0 = and <2 x i64> %a0, <i64 18446744065119617024, i64 18446744065119617024>
2919 %t1 = lshr <2 x i64> %t0, <i64 32, i64 32>
2922 define <2 x i64> @test_128_i64_x_2_18446744065119617024_mask_lshr_33(<2 x i64> %a0) {
2923 ; X86-SSE2-LABEL: test_128_i64_x_2_18446744065119617024_mask_lshr_33:
2924 ; X86-SSE2: # %bb.0:
2925 ; X86-SSE2-NEXT: psrlq $33, %xmm0
2926 ; X86-SSE2-NEXT: retl
2928 ; X86-AVX-LABEL: test_128_i64_x_2_18446744065119617024_mask_lshr_33:
2930 ; X86-AVX-NEXT: vpsrlq $33, %xmm0, %xmm0
2931 ; X86-AVX-NEXT: retl
2933 ; X64-SSE2-LABEL: test_128_i64_x_2_18446744065119617024_mask_lshr_33:
2934 ; X64-SSE2: # %bb.0:
2935 ; X64-SSE2-NEXT: psrlq $33, %xmm0
2936 ; X64-SSE2-NEXT: retq
2938 ; X64-AVX-LABEL: test_128_i64_x_2_18446744065119617024_mask_lshr_33:
2940 ; X64-AVX-NEXT: vpsrlq $33, %xmm0, %xmm0
2941 ; X64-AVX-NEXT: retq
2942 %t0 = and <2 x i64> %a0, <i64 18446744065119617024, i64 18446744065119617024>
2943 %t1 = lshr <2 x i64> %t0, <i64 33, i64 33>
define <2 x i64> @test_128_i64_x_2_18446744065119617024_mask_lshr_34(<2 x i64> %a0) {
; X86-SSE2-LABEL: test_128_i64_x_2_18446744065119617024_mask_lshr_34:
; X86-SSE2: # %bb.0:
; X86-SSE2-NEXT: psrlq $34, %xmm0
; X86-SSE2-NEXT: retl
;
; X86-AVX-LABEL: test_128_i64_x_2_18446744065119617024_mask_lshr_34:
; X86-AVX: # %bb.0:
; X86-AVX-NEXT: vpsrlq $34, %xmm0, %xmm0
; X86-AVX-NEXT: retl
;
; X64-SSE2-LABEL: test_128_i64_x_2_18446744065119617024_mask_lshr_34:
; X64-SSE2: # %bb.0:
; X64-SSE2-NEXT: psrlq $34, %xmm0
; X64-SSE2-NEXT: retq
;
; X64-AVX-LABEL: test_128_i64_x_2_18446744065119617024_mask_lshr_34:
; X64-AVX: # %bb.0:
; X64-AVX-NEXT: vpsrlq $34, %xmm0, %xmm0
; X64-AVX-NEXT: retq
%t0 = and <2 x i64> %a0, <i64 18446744065119617024, i64 18446744065119617024>
%t1 = lshr <2 x i64> %t0, <i64 34, i64 34>
ret <2 x i64> %t1
}

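; ashr
;
; For the masks that clear bit 63 (0x7FFFFFFF and 0x00007FFFFFFF0000) an
; arithmetic shift of the masked value is equivalent to a logical one, so the
; tests below expect the same psrlq+pand lowering as the lshr cases. The
; 0xFFFFFFFE00000000 mask keeps the sign bit, and since SSE2/AVX/AVX2 have no
; 64-bit arithmetic shift, those cases are expanded with psrad plus blends or
; shuffles.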
define <2 x i64> @test_128_i64_x_2_2147483647_mask_ashr_1(<2 x i64> %a0) {
; X86-SSE2-LABEL: test_128_i64_x_2_2147483647_mask_ashr_1:
; X86-SSE2: # %bb.0:
; X86-SSE2-NEXT: psrlq $1, %xmm0
; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
; X86-SSE2-NEXT: retl
;
; X86-AVX-LABEL: test_128_i64_x_2_2147483647_mask_ashr_1:
; X86-AVX: # %bb.0:
; X86-AVX-NEXT: vpsrlq $1, %xmm0, %xmm0
; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
; X86-AVX-NEXT: retl
;
; X64-SSE2-LABEL: test_128_i64_x_2_2147483647_mask_ashr_1:
; X64-SSE2: # %bb.0:
; X64-SSE2-NEXT: psrlq $1, %xmm0
; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
; X64-SSE2-NEXT: retq
;
; X64-AVX-LABEL: test_128_i64_x_2_2147483647_mask_ashr_1:
; X64-AVX: # %bb.0:
; X64-AVX-NEXT: vpsrlq $1, %xmm0, %xmm0
; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
; X64-AVX-NEXT: retq
%t0 = and <2 x i64> %a0, <i64 2147483647, i64 2147483647>
%t1 = ashr <2 x i64> %t0, <i64 1, i64 1>
ret <2 x i64> %t1
}

define <2 x i64> @test_128_i64_x_2_140737488289792_mask_ashr_15(<2 x i64> %a0) {
; X86-SSE2-LABEL: test_128_i64_x_2_140737488289792_mask_ashr_15:
; X86-SSE2: # %bb.0:
; X86-SSE2-NEXT: psrlq $15, %xmm0
; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
; X86-SSE2-NEXT: retl
;
; X86-AVX-LABEL: test_128_i64_x_2_140737488289792_mask_ashr_15:
; X86-AVX: # %bb.0:
; X86-AVX-NEXT: vpsrlq $15, %xmm0, %xmm0
; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
; X86-AVX-NEXT: retl
;
; X64-SSE2-LABEL: test_128_i64_x_2_140737488289792_mask_ashr_15:
; X64-SSE2: # %bb.0:
; X64-SSE2-NEXT: psrlq $15, %xmm0
; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
; X64-SSE2-NEXT: retq
;
; X64-AVX-LABEL: test_128_i64_x_2_140737488289792_mask_ashr_15:
; X64-AVX: # %bb.0:
; X64-AVX-NEXT: vpsrlq $15, %xmm0, %xmm0
; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
; X64-AVX-NEXT: retq
%t0 = and <2 x i64> %a0, <i64 140737488289792, i64 140737488289792>
%t1 = ashr <2 x i64> %t0, <i64 15, i64 15>
ret <2 x i64> %t1
}

define <2 x i64> @test_128_i64_x_2_140737488289792_mask_ashr_16(<2 x i64> %a0) {
; X86-SSE2-LABEL: test_128_i64_x_2_140737488289792_mask_ashr_16:
; X86-SSE2: # %bb.0:
; X86-SSE2-NEXT: psrlq $16, %xmm0
; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
; X86-SSE2-NEXT: retl
;
; X86-AVX-LABEL: test_128_i64_x_2_140737488289792_mask_ashr_16:
; X86-AVX: # %bb.0:
; X86-AVX-NEXT: vpsrlq $16, %xmm0, %xmm0
; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
; X86-AVX-NEXT: retl
;
; X64-SSE2-LABEL: test_128_i64_x_2_140737488289792_mask_ashr_16:
; X64-SSE2: # %bb.0:
; X64-SSE2-NEXT: psrlq $16, %xmm0
; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
; X64-SSE2-NEXT: retq
;
; X64-AVX-LABEL: test_128_i64_x_2_140737488289792_mask_ashr_16:
; X64-AVX: # %bb.0:
; X64-AVX-NEXT: vpsrlq $16, %xmm0, %xmm0
; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
; X64-AVX-NEXT: retq
%t0 = and <2 x i64> %a0, <i64 140737488289792, i64 140737488289792>
%t1 = ashr <2 x i64> %t0, <i64 16, i64 16>
ret <2 x i64> %t1
}

define <2 x i64> @test_128_i64_x_2_140737488289792_mask_ashr_17(<2 x i64> %a0) {
; X86-SSE2-LABEL: test_128_i64_x_2_140737488289792_mask_ashr_17:
; X86-SSE2: # %bb.0:
; X86-SSE2-NEXT: psrlq $17, %xmm0
; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
; X86-SSE2-NEXT: retl
;
; X86-AVX-LABEL: test_128_i64_x_2_140737488289792_mask_ashr_17:
; X86-AVX: # %bb.0:
; X86-AVX-NEXT: vpsrlq $17, %xmm0, %xmm0
; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
; X86-AVX-NEXT: retl
;
; X64-SSE2-LABEL: test_128_i64_x_2_140737488289792_mask_ashr_17:
; X64-SSE2: # %bb.0:
; X64-SSE2-NEXT: psrlq $17, %xmm0
; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
; X64-SSE2-NEXT: retq
;
; X64-AVX-LABEL: test_128_i64_x_2_140737488289792_mask_ashr_17:
; X64-AVX: # %bb.0:
; X64-AVX-NEXT: vpsrlq $17, %xmm0, %xmm0
; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
; X64-AVX-NEXT: retq
%t0 = and <2 x i64> %a0, <i64 140737488289792, i64 140737488289792>
%t1 = ashr <2 x i64> %t0, <i64 17, i64 17>
ret <2 x i64> %t1
}

define <2 x i64> @test_128_i64_x_2_140737488289792_mask_ashr_18(<2 x i64> %a0) {
; X86-SSE2-LABEL: test_128_i64_x_2_140737488289792_mask_ashr_18:
; X86-SSE2: # %bb.0:
; X86-SSE2-NEXT: psrlq $18, %xmm0
; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
; X86-SSE2-NEXT: retl
;
; X86-AVX-LABEL: test_128_i64_x_2_140737488289792_mask_ashr_18:
; X86-AVX: # %bb.0:
; X86-AVX-NEXT: vpsrlq $18, %xmm0, %xmm0
; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
; X86-AVX-NEXT: retl
;
; X64-SSE2-LABEL: test_128_i64_x_2_140737488289792_mask_ashr_18:
; X64-SSE2: # %bb.0:
; X64-SSE2-NEXT: psrlq $18, %xmm0
; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
; X64-SSE2-NEXT: retq
;
; X64-AVX-LABEL: test_128_i64_x_2_140737488289792_mask_ashr_18:
; X64-AVX: # %bb.0:
; X64-AVX-NEXT: vpsrlq $18, %xmm0, %xmm0
; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
; X64-AVX-NEXT: retq
%t0 = and <2 x i64> %a0, <i64 140737488289792, i64 140737488289792>
%t1 = ashr <2 x i64> %t0, <i64 18, i64 18>
ret <2 x i64> %t1
}

define <2 x i64> @test_128_i64_x_2_18446744065119617024_mask_ashr_1(<2 x i64> %a0) {
; X86-SSE2-LABEL: test_128_i64_x_2_18446744065119617024_mask_ashr_1:
; X86-SSE2: # %bb.0:
; X86-SSE2-NEXT: psrad $1, %xmm0
; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
; X86-SSE2-NEXT: retl
;
; X86-AVX1-LABEL: test_128_i64_x_2_18446744065119617024_mask_ashr_1:
; X86-AVX1: # %bb.0:
; X86-AVX1-NEXT: vpsrad $1, %xmm0, %xmm0
; X86-AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
; X86-AVX1-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0,1],xmm0[2,3],xmm1[4,5],xmm0[6,7]
; X86-AVX1-NEXT: retl
;
; X86-AVX2-LABEL: test_128_i64_x_2_18446744065119617024_mask_ashr_1:
; X86-AVX2: # %bb.0:
; X86-AVX2-NEXT: vpsrad $1, %xmm0, %xmm0
; X86-AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
; X86-AVX2-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0],xmm0[1],xmm1[2],xmm0[3]
; X86-AVX2-NEXT: retl
;
; X64-SSE2-LABEL: test_128_i64_x_2_18446744065119617024_mask_ashr_1:
; X64-SSE2: # %bb.0:
; X64-SSE2-NEXT: psrad $1, %xmm0
; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
; X64-SSE2-NEXT: retq
;
; X64-AVX1-LABEL: test_128_i64_x_2_18446744065119617024_mask_ashr_1:
; X64-AVX1: # %bb.0:
; X64-AVX1-NEXT: vpsrad $1, %xmm0, %xmm0
; X64-AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
; X64-AVX1-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0,1],xmm0[2,3],xmm1[4,5],xmm0[6,7]
; X64-AVX1-NEXT: retq
;
; X64-AVX2-LABEL: test_128_i64_x_2_18446744065119617024_mask_ashr_1:
; X64-AVX2: # %bb.0:
; X64-AVX2-NEXT: vpsrad $1, %xmm0, %xmm0
; X64-AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
; X64-AVX2-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0],xmm0[1],xmm1[2],xmm0[3]
; X64-AVX2-NEXT: retq
%t0 = and <2 x i64> %a0, <i64 18446744065119617024, i64 18446744065119617024>
%t1 = ashr <2 x i64> %t0, <i64 1, i64 1>
ret <2 x i64> %t1
}

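; For arithmetic shift counts of 32 or more, only the upper 32-bit half of
; each element contributes, so the expansion below moves the high dwords down
; with a shuffle, shifts them with psrad, and pairs them with a psrad $31 copy
; that supplies the sign bits. The 'pand' survives only in the count-of-32
; case, where bit 32 (cleared by the mask) still reaches the result.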
define <2 x i64> @test_128_i64_x_2_18446744065119617024_mask_ashr_32(<2 x i64> %a0) {
; X86-SSE2-LABEL: test_128_i64_x_2_18446744065119617024_mask_ashr_32:
; X86-SSE2: # %bb.0:
; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
; X86-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,3,2,3]
; X86-SSE2-NEXT: movdqa %xmm0, %xmm1
; X86-SSE2-NEXT: psrad $31, %xmm1
; X86-SSE2-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; X86-SSE2-NEXT: retl
;
; X86-AVX1-LABEL: test_128_i64_x_2_18446744065119617024_mask_ashr_32:
; X86-AVX1: # %bb.0:
; X86-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
; X86-AVX1-NEXT: vpsrad $31, %xmm0, %xmm1
; X86-AVX1-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
; X86-AVX1-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3],xmm0[4,5],xmm1[6,7]
; X86-AVX1-NEXT: retl
;
; X86-AVX2-LABEL: test_128_i64_x_2_18446744065119617024_mask_ashr_32:
; X86-AVX2: # %bb.0:
; X86-AVX2-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
; X86-AVX2-NEXT: vpsrad $31, %xmm0, %xmm1
; X86-AVX2-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
; X86-AVX2-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2],xmm1[3]
; X86-AVX2-NEXT: retl
;
; X64-SSE2-LABEL: test_128_i64_x_2_18446744065119617024_mask_ashr_32:
; X64-SSE2: # %bb.0:
; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
; X64-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,3,2,3]
; X64-SSE2-NEXT: movdqa %xmm0, %xmm1
; X64-SSE2-NEXT: psrad $31, %xmm1
; X64-SSE2-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; X64-SSE2-NEXT: retq
;
; X64-AVX1-LABEL: test_128_i64_x_2_18446744065119617024_mask_ashr_32:
; X64-AVX1: # %bb.0:
; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
; X64-AVX1-NEXT: vpsrad $31, %xmm0, %xmm1
; X64-AVX1-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
; X64-AVX1-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3],xmm0[4,5],xmm1[6,7]
; X64-AVX1-NEXT: retq
;
; X64-AVX2-LABEL: test_128_i64_x_2_18446744065119617024_mask_ashr_32:
; X64-AVX2: # %bb.0:
; X64-AVX2-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
; X64-AVX2-NEXT: vpsrad $31, %xmm0, %xmm1
; X64-AVX2-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
; X64-AVX2-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2],xmm1[3]
; X64-AVX2-NEXT: retq
%t0 = and <2 x i64> %a0, <i64 18446744065119617024, i64 18446744065119617024>
%t1 = ashr <2 x i64> %t0, <i64 32, i64 32>
ret <2 x i64> %t1
}

define <2 x i64> @test_128_i64_x_2_18446744065119617024_mask_ashr_33(<2 x i64> %a0) {
; X86-SSE2-LABEL: test_128_i64_x_2_18446744065119617024_mask_ashr_33:
; X86-SSE2: # %bb.0:
; X86-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,3,2,3]
; X86-SSE2-NEXT: movdqa %xmm0, %xmm1
; X86-SSE2-NEXT: psrad $31, %xmm1
; X86-SSE2-NEXT: psrad $1, %xmm0
; X86-SSE2-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; X86-SSE2-NEXT: retl
;
; X86-AVX1-LABEL: test_128_i64_x_2_18446744065119617024_mask_ashr_33:
; X86-AVX1: # %bb.0:
; X86-AVX1-NEXT: vpsrad $31, %xmm0, %xmm1
; X86-AVX1-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
; X86-AVX1-NEXT: vpsrad $1, %xmm0, %xmm0
; X86-AVX1-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3],xmm0[4,5],xmm1[6,7]
; X86-AVX1-NEXT: retl
;
; X86-AVX2-LABEL: test_128_i64_x_2_18446744065119617024_mask_ashr_33:
; X86-AVX2: # %bb.0:
; X86-AVX2-NEXT: vpsrad $31, %xmm0, %xmm1
; X86-AVX2-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
; X86-AVX2-NEXT: vpsrad $1, %xmm0, %xmm0
; X86-AVX2-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2],xmm1[3]
; X86-AVX2-NEXT: retl
;
; X64-SSE2-LABEL: test_128_i64_x_2_18446744065119617024_mask_ashr_33:
; X64-SSE2: # %bb.0:
; X64-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,3,2,3]
; X64-SSE2-NEXT: movdqa %xmm0, %xmm1
; X64-SSE2-NEXT: psrad $31, %xmm1
; X64-SSE2-NEXT: psrad $1, %xmm0
; X64-SSE2-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; X64-SSE2-NEXT: retq
;
; X64-AVX1-LABEL: test_128_i64_x_2_18446744065119617024_mask_ashr_33:
; X64-AVX1: # %bb.0:
; X64-AVX1-NEXT: vpsrad $31, %xmm0, %xmm1
; X64-AVX1-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
; X64-AVX1-NEXT: vpsrad $1, %xmm0, %xmm0
; X64-AVX1-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3],xmm0[4,5],xmm1[6,7]
; X64-AVX1-NEXT: retq
;
; X64-AVX2-LABEL: test_128_i64_x_2_18446744065119617024_mask_ashr_33:
; X64-AVX2: # %bb.0:
; X64-AVX2-NEXT: vpsrad $31, %xmm0, %xmm1
; X64-AVX2-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
; X64-AVX2-NEXT: vpsrad $1, %xmm0, %xmm0
; X64-AVX2-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2],xmm1[3]
; X64-AVX2-NEXT: retq
%t0 = and <2 x i64> %a0, <i64 18446744065119617024, i64 18446744065119617024>
%t1 = ashr <2 x i64> %t0, <i64 33, i64 33>
ret <2 x i64> %t1
}

define <2 x i64> @test_128_i64_x_2_18446744065119617024_mask_ashr_34(<2 x i64> %a0) {
; X86-SSE2-LABEL: test_128_i64_x_2_18446744065119617024_mask_ashr_34:
; X86-SSE2: # %bb.0:
; X86-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,3,2,3]
; X86-SSE2-NEXT: movdqa %xmm0, %xmm1
; X86-SSE2-NEXT: psrad $31, %xmm1
; X86-SSE2-NEXT: psrad $2, %xmm0
; X86-SSE2-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; X86-SSE2-NEXT: retl
;
; X86-AVX1-LABEL: test_128_i64_x_2_18446744065119617024_mask_ashr_34:
; X86-AVX1: # %bb.0:
; X86-AVX1-NEXT: vpsrad $31, %xmm0, %xmm1
; X86-AVX1-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
; X86-AVX1-NEXT: vpsrad $2, %xmm0, %xmm0
; X86-AVX1-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3],xmm0[4,5],xmm1[6,7]
; X86-AVX1-NEXT: retl
;
; X86-AVX2-LABEL: test_128_i64_x_2_18446744065119617024_mask_ashr_34:
; X86-AVX2: # %bb.0:
; X86-AVX2-NEXT: vpsrad $31, %xmm0, %xmm1
; X86-AVX2-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
; X86-AVX2-NEXT: vpsrad $2, %xmm0, %xmm0
; X86-AVX2-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2],xmm1[3]
; X86-AVX2-NEXT: retl
;
; X64-SSE2-LABEL: test_128_i64_x_2_18446744065119617024_mask_ashr_34:
; X64-SSE2: # %bb.0:
; X64-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,3,2,3]
; X64-SSE2-NEXT: movdqa %xmm0, %xmm1
; X64-SSE2-NEXT: psrad $31, %xmm1
; X64-SSE2-NEXT: psrad $2, %xmm0
; X64-SSE2-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; X64-SSE2-NEXT: retq
;
; X64-AVX1-LABEL: test_128_i64_x_2_18446744065119617024_mask_ashr_34:
; X64-AVX1: # %bb.0:
; X64-AVX1-NEXT: vpsrad $31, %xmm0, %xmm1
; X64-AVX1-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
; X64-AVX1-NEXT: vpsrad $2, %xmm0, %xmm0
; X64-AVX1-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3],xmm0[4,5],xmm1[6,7]
; X64-AVX1-NEXT: retq
;
; X64-AVX2-LABEL: test_128_i64_x_2_18446744065119617024_mask_ashr_34:
; X64-AVX2: # %bb.0:
; X64-AVX2-NEXT: vpsrad $31, %xmm0, %xmm1
; X64-AVX2-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
; X64-AVX2-NEXT: vpsrad $2, %xmm0, %xmm0
; X64-AVX2-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2],xmm1[3]
; X64-AVX2-NEXT: retq
%t0 = and <2 x i64> %a0, <i64 18446744065119617024, i64 18446744065119617024>
%t1 = ashr <2 x i64> %t0, <i64 34, i64 34>
ret <2 x i64> %t1
}

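; shl
;
; A left shift by one is lowered to an add of the value to itself (paddq), so
; the shl-by-1 tests expect pand followed by paddq; the larger shift amounts
; use psllq, with the mask constant shifted so the 'and' can be applied after
; the shift.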
define <2 x i64> @test_128_i64_x_2_2147483647_mask_shl_1(<2 x i64> %a0) {
; X86-SSE2-LABEL: test_128_i64_x_2_2147483647_mask_shl_1:
; X86-SSE2: # %bb.0:
; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
; X86-SSE2-NEXT: paddq %xmm0, %xmm0
; X86-SSE2-NEXT: retl
;
; X86-AVX-LABEL: test_128_i64_x_2_2147483647_mask_shl_1:
; X86-AVX: # %bb.0:
; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
; X86-AVX-NEXT: vpaddq %xmm0, %xmm0, %xmm0
; X86-AVX-NEXT: retl
;
; X64-SSE2-LABEL: test_128_i64_x_2_2147483647_mask_shl_1:
; X64-SSE2: # %bb.0:
; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
; X64-SSE2-NEXT: paddq %xmm0, %xmm0
; X64-SSE2-NEXT: retq
;
; X64-AVX-LABEL: test_128_i64_x_2_2147483647_mask_shl_1:
; X64-AVX: # %bb.0:
; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
; X64-AVX-NEXT: vpaddq %xmm0, %xmm0, %xmm0
; X64-AVX-NEXT: retq
%t0 = and <2 x i64> %a0, <i64 2147483647, i64 2147483647>
%t1 = shl <2 x i64> %t0, <i64 1, i64 1>
ret <2 x i64> %t1
}

define <2 x i64> @test_128_i64_x_2_2147483647_mask_shl_32(<2 x i64> %a0) {
; X86-SSE2-LABEL: test_128_i64_x_2_2147483647_mask_shl_32:
; X86-SSE2: # %bb.0:
; X86-SSE2-NEXT: psllq $32, %xmm0
; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
; X86-SSE2-NEXT: retl
;
; X86-AVX-LABEL: test_128_i64_x_2_2147483647_mask_shl_32:
; X86-AVX: # %bb.0:
; X86-AVX-NEXT: vpsllq $32, %xmm0, %xmm0
; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
; X86-AVX-NEXT: retl
;
; X64-SSE2-LABEL: test_128_i64_x_2_2147483647_mask_shl_32:
; X64-SSE2: # %bb.0:
; X64-SSE2-NEXT: psllq $32, %xmm0
; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
; X64-SSE2-NEXT: retq
;
; X64-AVX-LABEL: test_128_i64_x_2_2147483647_mask_shl_32:
; X64-AVX: # %bb.0:
; X64-AVX-NEXT: vpsllq $32, %xmm0, %xmm0
; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
; X64-AVX-NEXT: retq
%t0 = and <2 x i64> %a0, <i64 2147483647, i64 2147483647>
%t1 = shl <2 x i64> %t0, <i64 32, i64 32>
ret <2 x i64> %t1
}

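; The 0x7FFFFFFF mask only clears bits 31..63, and a left shift by 33 or more
; discards those bits anyway, so the next two tests expect no 'pand' at all.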
define <2 x i64> @test_128_i64_x_2_2147483647_mask_shl_33(<2 x i64> %a0) {
; X86-SSE2-LABEL: test_128_i64_x_2_2147483647_mask_shl_33:
; X86-SSE2: # %bb.0:
; X86-SSE2-NEXT: psllq $33, %xmm0
; X86-SSE2-NEXT: retl
;
; X86-AVX-LABEL: test_128_i64_x_2_2147483647_mask_shl_33:
; X86-AVX: # %bb.0:
; X86-AVX-NEXT: vpsllq $33, %xmm0, %xmm0
; X86-AVX-NEXT: retl
;
; X64-SSE2-LABEL: test_128_i64_x_2_2147483647_mask_shl_33:
; X64-SSE2: # %bb.0:
; X64-SSE2-NEXT: psllq $33, %xmm0
; X64-SSE2-NEXT: retq
;
; X64-AVX-LABEL: test_128_i64_x_2_2147483647_mask_shl_33:
; X64-AVX: # %bb.0:
; X64-AVX-NEXT: vpsllq $33, %xmm0, %xmm0
; X64-AVX-NEXT: retq
%t0 = and <2 x i64> %a0, <i64 2147483647, i64 2147483647>
%t1 = shl <2 x i64> %t0, <i64 33, i64 33>
ret <2 x i64> %t1
}

define <2 x i64> @test_128_i64_x_2_2147483647_mask_shl_34(<2 x i64> %a0) {
; X86-SSE2-LABEL: test_128_i64_x_2_2147483647_mask_shl_34:
; X86-SSE2: # %bb.0:
; X86-SSE2-NEXT: psllq $34, %xmm0
; X86-SSE2-NEXT: retl
;
; X86-AVX-LABEL: test_128_i64_x_2_2147483647_mask_shl_34:
; X86-AVX: # %bb.0:
; X86-AVX-NEXT: vpsllq $34, %xmm0, %xmm0
; X86-AVX-NEXT: retl
;
; X64-SSE2-LABEL: test_128_i64_x_2_2147483647_mask_shl_34:
; X64-SSE2: # %bb.0:
; X64-SSE2-NEXT: psllq $34, %xmm0
; X64-SSE2-NEXT: retq
;
; X64-AVX-LABEL: test_128_i64_x_2_2147483647_mask_shl_34:
; X64-AVX: # %bb.0:
; X64-AVX-NEXT: vpsllq $34, %xmm0, %xmm0
; X64-AVX-NEXT: retq
%t0 = and <2 x i64> %a0, <i64 2147483647, i64 2147483647>
%t1 = shl <2 x i64> %t0, <i64 34, i64 34>
ret <2 x i64> %t1
}

define <2 x i64> @test_128_i64_x_2_140737488289792_mask_shl_15(<2 x i64> %a0) {
; X86-SSE2-LABEL: test_128_i64_x_2_140737488289792_mask_shl_15:
; X86-SSE2: # %bb.0:
; X86-SSE2-NEXT: psllq $15, %xmm0
; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
; X86-SSE2-NEXT: retl
;
; X86-AVX-LABEL: test_128_i64_x_2_140737488289792_mask_shl_15:
; X86-AVX: # %bb.0:
; X86-AVX-NEXT: vpsllq $15, %xmm0, %xmm0
; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
; X86-AVX-NEXT: retl
;
; X64-SSE2-LABEL: test_128_i64_x_2_140737488289792_mask_shl_15:
; X64-SSE2: # %bb.0:
; X64-SSE2-NEXT: psllq $15, %xmm0
; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
; X64-SSE2-NEXT: retq
;
; X64-AVX-LABEL: test_128_i64_x_2_140737488289792_mask_shl_15:
; X64-AVX: # %bb.0:
; X64-AVX-NEXT: vpsllq $15, %xmm0, %xmm0
; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
; X64-AVX-NEXT: retq
%t0 = and <2 x i64> %a0, <i64 140737488289792, i64 140737488289792>
%t1 = shl <2 x i64> %t0, <i64 15, i64 15>
ret <2 x i64> %t1
}

define <2 x i64> @test_128_i64_x_2_140737488289792_mask_shl_16(<2 x i64> %a0) {
; X86-SSE2-LABEL: test_128_i64_x_2_140737488289792_mask_shl_16:
; X86-SSE2: # %bb.0:
; X86-SSE2-NEXT: psllq $16, %xmm0
; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
; X86-SSE2-NEXT: retl
;
; X86-AVX-LABEL: test_128_i64_x_2_140737488289792_mask_shl_16:
; X86-AVX: # %bb.0:
; X86-AVX-NEXT: vpsllq $16, %xmm0, %xmm0
; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
; X86-AVX-NEXT: retl
;
; X64-SSE2-LABEL: test_128_i64_x_2_140737488289792_mask_shl_16:
; X64-SSE2: # %bb.0:
; X64-SSE2-NEXT: psllq $16, %xmm0
; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
; X64-SSE2-NEXT: retq
;
; X64-AVX-LABEL: test_128_i64_x_2_140737488289792_mask_shl_16:
; X64-AVX: # %bb.0:
; X64-AVX-NEXT: vpsllq $16, %xmm0, %xmm0
; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
; X64-AVX-NEXT: retq
%t0 = and <2 x i64> %a0, <i64 140737488289792, i64 140737488289792>
%t1 = shl <2 x i64> %t0, <i64 16, i64 16>
ret <2 x i64> %t1
}

define <2 x i64> @test_128_i64_x_2_140737488289792_mask_shl_17(<2 x i64> %a0) {
; X86-SSE2-LABEL: test_128_i64_x_2_140737488289792_mask_shl_17:
; X86-SSE2: # %bb.0:
; X86-SSE2-NEXT: psllq $17, %xmm0
; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
; X86-SSE2-NEXT: retl
;
; X86-AVX-LABEL: test_128_i64_x_2_140737488289792_mask_shl_17:
; X86-AVX: # %bb.0:
; X86-AVX-NEXT: vpsllq $17, %xmm0, %xmm0
; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
; X86-AVX-NEXT: retl
;
; X64-SSE2-LABEL: test_128_i64_x_2_140737488289792_mask_shl_17:
; X64-SSE2: # %bb.0:
; X64-SSE2-NEXT: psllq $17, %xmm0
; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
; X64-SSE2-NEXT: retq
;
; X64-AVX-LABEL: test_128_i64_x_2_140737488289792_mask_shl_17:
; X64-AVX: # %bb.0:
; X64-AVX-NEXT: vpsllq $17, %xmm0, %xmm0
; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
; X64-AVX-NEXT: retq
%t0 = and <2 x i64> %a0, <i64 140737488289792, i64 140737488289792>
%t1 = shl <2 x i64> %t0, <i64 17, i64 17>
ret <2 x i64> %t1
}

define <2 x i64> @test_128_i64_x_2_140737488289792_mask_shl_18(<2 x i64> %a0) {
; X86-SSE2-LABEL: test_128_i64_x_2_140737488289792_mask_shl_18:
; X86-SSE2: # %bb.0:
; X86-SSE2-NEXT: psllq $18, %xmm0
; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
; X86-SSE2-NEXT: retl
;
; X86-AVX-LABEL: test_128_i64_x_2_140737488289792_mask_shl_18:
; X86-AVX: # %bb.0:
; X86-AVX-NEXT: vpsllq $18, %xmm0, %xmm0
; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
; X86-AVX-NEXT: retl
;
; X64-SSE2-LABEL: test_128_i64_x_2_140737488289792_mask_shl_18:
; X64-SSE2: # %bb.0:
; X64-SSE2-NEXT: psllq $18, %xmm0
; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
; X64-SSE2-NEXT: retq
;
; X64-AVX-LABEL: test_128_i64_x_2_140737488289792_mask_shl_18:
; X64-AVX: # %bb.0:
; X64-AVX-NEXT: vpsllq $18, %xmm0, %xmm0
; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
; X64-AVX-NEXT: retq
%t0 = and <2 x i64> %a0, <i64 140737488289792, i64 140737488289792>
%t1 = shl <2 x i64> %t0, <i64 18, i64 18>
ret <2 x i64> %t1
}

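; With the 0xFFFFFFFE00000000 mask a shift left by one is again emitted as an
; add: the checks below expect the value to be masked first and then doubled
; with paddq.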
define <2 x i64> @test_128_i64_x_2_18446744065119617024_mask_shl_1(<2 x i64> %a0) {
; X86-SSE2-LABEL: test_128_i64_x_2_18446744065119617024_mask_shl_1:
; X86-SSE2: # %bb.0:
; X86-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
; X86-SSE2-NEXT: paddq %xmm0, %xmm0
; X86-SSE2-NEXT: retl
;
; X86-AVX-LABEL: test_128_i64_x_2_18446744065119617024_mask_shl_1:
; X86-AVX: # %bb.0:
; X86-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
; X86-AVX-NEXT: vpaddq %xmm0, %xmm0, %xmm0
; X86-AVX-NEXT: retl
;
; X64-SSE2-LABEL: test_128_i64_x_2_18446744065119617024_mask_shl_1:
; X64-SSE2: # %bb.0:
; X64-SSE2-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
; X64-SSE2-NEXT: paddq %xmm0, %xmm0
; X64-SSE2-NEXT: retq
;
; X64-AVX-LABEL: test_128_i64_x_2_18446744065119617024_mask_shl_1:
; X64-AVX: # %bb.0:
; X64-AVX-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
; X64-AVX-NEXT: vpaddq %xmm0, %xmm0, %xmm0
; X64-AVX-NEXT: retq
%t0 = and <2 x i64> %a0, <i64 18446744065119617024, i64 18446744065119617024>
%t1 = shl <2 x i64> %t0, <i64 1, i64 1>
ret <2 x i64> %t1
}
