1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc < %s -x86-experimental-vector-widening-legalization -mtriple=x86_64-unknown-unknown -mattr=+sse2 | FileCheck %s --check-prefix=ALL --check-prefix=SSE --check-prefix=SSE2
3 ; RUN: llc < %s -x86-experimental-vector-widening-legalization -mtriple=x86_64-unknown-unknown -mattr=+sse4.1 | FileCheck %s --check-prefix=ALL --check-prefix=SSE --check-prefix=SSE41
4 ; RUN: llc < %s -x86-experimental-vector-widening-legalization -mtriple=x86_64-unknown-unknown -mattr=+avx | FileCheck %s --check-prefix=ALL --check-prefix=AVX --check-prefix=AVX1
5 ; RUN: llc < %s -x86-experimental-vector-widening-legalization -mtriple=x86_64-unknown-unknown -mattr=+avx2 | FileCheck %s --check-prefix=ALL --check-prefix=AVX --check-prefix=AVX2
6 ; RUN: llc < %s -x86-experimental-vector-widening-legalization -mtriple=x86_64-unknown-unknown -mattr=+xop,+avx | FileCheck %s --check-prefix=ALL --check-prefix=XOP --check-prefix=XOPAVX1
7 ; RUN: llc < %s -x86-experimental-vector-widening-legalization -mtriple=x86_64-unknown-unknown -mattr=+xop,+avx2 | FileCheck %s --check-prefix=ALL --check-prefix=XOP --check-prefix=XOPAVX2
8 ; RUN: llc < %s -x86-experimental-vector-widening-legalization -mtriple=x86_64-unknown-unknown -mattr=+avx512dq | FileCheck %s --check-prefix=ALL --check-prefix=AVX512 --check-prefix=AVX512DQ
9 ; RUN: llc < %s -x86-experimental-vector-widening-legalization -mtriple=x86_64-unknown-unknown -mattr=+avx512bw | FileCheck %s --check-prefix=ALL --check-prefix=AVX512 --check-prefix=AVX512BW
10 ; RUN: llc < %s -x86-experimental-vector-widening-legalization -mtriple=x86_64-unknown-unknown -mattr=+avx512dq,+avx512vl | FileCheck %s --check-prefix=ALL --check-prefix=AVX512VL --check-prefix=AVX512DQVL
11 ; RUN: llc < %s -x86-experimental-vector-widening-legalization -mtriple=x86_64-unknown-unknown -mattr=+avx512bw,+avx512vl | FileCheck %s --check-prefix=ALL --check-prefix=AVX512VL --check-prefix=AVX512BWVL
13 ; Just one 32-bit run to make sure we do reasonable things for i64 shifts.
14 ; RUN: llc < %s -x86-experimental-vector-widening-legalization -mtriple=i686-unknown-unknown -mattr=+sse2 | FileCheck %s --check-prefix=ALL --check-prefix=X32-SSE --check-prefix=X32-SSE2
20 define <2 x i32> @var_shift_v2i32(<2 x i32> %a, <2 x i32> %b) nounwind {
21 ; SSE2-LABEL: var_shift_v2i32:
23 ; SSE2-NEXT: pslld $23, %xmm1
24 ; SSE2-NEXT: paddd {{.*}}(%rip), %xmm1
25 ; SSE2-NEXT: cvttps2dq %xmm1, %xmm1
26 ; SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm0[1,1,3,3]
27 ; SSE2-NEXT: pmuludq %xmm1, %xmm0
28 ; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
29 ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
30 ; SSE2-NEXT: pmuludq %xmm2, %xmm1
31 ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
32 ; SSE2-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
35 ; SSE41-LABEL: var_shift_v2i32:
37 ; SSE41-NEXT: pslld $23, %xmm1
38 ; SSE41-NEXT: paddd {{.*}}(%rip), %xmm1
39 ; SSE41-NEXT: cvttps2dq %xmm1, %xmm1
40 ; SSE41-NEXT: pmulld %xmm1, %xmm0
43 ; AVX1-LABEL: var_shift_v2i32:
45 ; AVX1-NEXT: vpslld $23, %xmm1, %xmm1
46 ; AVX1-NEXT: vpaddd {{.*}}(%rip), %xmm1, %xmm1
47 ; AVX1-NEXT: vcvttps2dq %xmm1, %xmm1
48 ; AVX1-NEXT: vpmulld %xmm1, %xmm0, %xmm0
51 ; AVX2-LABEL: var_shift_v2i32:
53 ; AVX2-NEXT: vpsllvd %xmm1, %xmm0, %xmm0
56 ; XOPAVX1-LABEL: var_shift_v2i32:
58 ; XOPAVX1-NEXT: vpshld %xmm1, %xmm0, %xmm0
61 ; XOPAVX2-LABEL: var_shift_v2i32:
63 ; XOPAVX2-NEXT: vpsllvd %xmm1, %xmm0, %xmm0
66 ; AVX512-LABEL: var_shift_v2i32:
68 ; AVX512-NEXT: vpsllvd %xmm1, %xmm0, %xmm0
71 ; AVX512VL-LABEL: var_shift_v2i32:
73 ; AVX512VL-NEXT: vpsllvd %xmm1, %xmm0, %xmm0
76 ; X32-SSE-LABEL: var_shift_v2i32:
78 ; X32-SSE-NEXT: pslld $23, %xmm1
79 ; X32-SSE-NEXT: paddd {{\.LCPI.*}}, %xmm1
80 ; X32-SSE-NEXT: cvttps2dq %xmm1, %xmm1
81 ; X32-SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm0[1,1,3,3]
82 ; X32-SSE-NEXT: pmuludq %xmm1, %xmm0
83 ; X32-SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
84 ; X32-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
85 ; X32-SSE-NEXT: pmuludq %xmm2, %xmm1
86 ; X32-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
87 ; X32-SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
89 %shift = shl <2 x i32> %a, %b
93 define <4 x i16> @var_shift_v4i16(<4 x i16> %a, <4 x i16> %b) nounwind {
94 ; SSE2-LABEL: var_shift_v4i16:
96 ; SSE2-NEXT: pxor %xmm2, %xmm2
97 ; SSE2-NEXT: movdqa %xmm1, %xmm3
98 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
99 ; SSE2-NEXT: pslld $23, %xmm3
100 ; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [1065353216,1065353216,1065353216,1065353216]
101 ; SSE2-NEXT: paddd %xmm4, %xmm3
102 ; SSE2-NEXT: cvttps2dq %xmm3, %xmm3
103 ; SSE2-NEXT: pshuflw {{.*#+}} xmm3 = xmm3[0,2,2,3,4,5,6,7]
104 ; SSE2-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,6,6,7]
105 ; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,2,2,3]
106 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
107 ; SSE2-NEXT: pslld $23, %xmm1
108 ; SSE2-NEXT: paddd %xmm4, %xmm1
109 ; SSE2-NEXT: cvttps2dq %xmm1, %xmm1
110 ; SSE2-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,2,2,3,4,5,6,7]
111 ; SSE2-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,6,6,7]
112 ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
113 ; SSE2-NEXT: punpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm3[0]
114 ; SSE2-NEXT: pmullw %xmm1, %xmm0
117 ; SSE41-LABEL: var_shift_v4i16:
119 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm2 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero
120 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
121 ; SSE41-NEXT: pslld $23, %xmm1
122 ; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [1065353216,1065353216,1065353216,1065353216]
123 ; SSE41-NEXT: paddd %xmm3, %xmm1
124 ; SSE41-NEXT: cvttps2dq %xmm1, %xmm1
125 ; SSE41-NEXT: pslld $23, %xmm2
126 ; SSE41-NEXT: paddd %xmm3, %xmm2
127 ; SSE41-NEXT: cvttps2dq %xmm2, %xmm2
128 ; SSE41-NEXT: packusdw %xmm1, %xmm2
129 ; SSE41-NEXT: pmullw %xmm2, %xmm0
132 ; AVX1-LABEL: var_shift_v4i16:
134 ; AVX1-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
135 ; AVX1-NEXT: vpslld $23, %xmm2, %xmm2
136 ; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [1065353216,1065353216,1065353216,1065353216]
137 ; AVX1-NEXT: vpaddd %xmm3, %xmm2, %xmm2
138 ; AVX1-NEXT: vcvttps2dq %xmm2, %xmm2
139 ; AVX1-NEXT: vpmovzxwd {{.*#+}} xmm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero
140 ; AVX1-NEXT: vpslld $23, %xmm1, %xmm1
141 ; AVX1-NEXT: vpaddd %xmm3, %xmm1, %xmm1
142 ; AVX1-NEXT: vcvttps2dq %xmm1, %xmm1
143 ; AVX1-NEXT: vpackusdw %xmm2, %xmm1, %xmm1
144 ; AVX1-NEXT: vpmullw %xmm1, %xmm0, %xmm0
147 ; AVX2-LABEL: var_shift_v4i16:
149 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
150 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
151 ; AVX2-NEXT: vpsllvd %ymm1, %ymm0, %ymm0
152 ; AVX2-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[0,1,4,5,8,9,12,13,8,9,12,13,12,13,14,15,16,17,20,21,24,25,28,29,24,25,28,29,28,29,30,31]
153 ; AVX2-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,2,3]
154 ; AVX2-NEXT: # kill: def $xmm0 killed $xmm0 killed $ymm0
155 ; AVX2-NEXT: vzeroupper
158 ; XOP-LABEL: var_shift_v4i16:
160 ; XOP-NEXT: vpshlw %xmm1, %xmm0, %xmm0
163 ; AVX512DQ-LABEL: var_shift_v4i16:
165 ; AVX512DQ-NEXT: vpmovzxwd {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
166 ; AVX512DQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
167 ; AVX512DQ-NEXT: vpsllvd %ymm1, %ymm0, %ymm0
168 ; AVX512DQ-NEXT: vpmovdw %zmm0, %ymm0
169 ; AVX512DQ-NEXT: # kill: def $xmm0 killed $xmm0 killed $ymm0
170 ; AVX512DQ-NEXT: vzeroupper
171 ; AVX512DQ-NEXT: retq
173 ; AVX512BW-LABEL: var_shift_v4i16:
175 ; AVX512BW-NEXT: # kill: def $xmm1 killed $xmm1 def $zmm1
176 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
177 ; AVX512BW-NEXT: vpsllvw %zmm1, %zmm0, %zmm0
178 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 killed $zmm0
179 ; AVX512BW-NEXT: vzeroupper
180 ; AVX512BW-NEXT: retq
182 ; AVX512DQVL-LABEL: var_shift_v4i16:
183 ; AVX512DQVL: # %bb.0:
184 ; AVX512DQVL-NEXT: vpmovzxwd {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
185 ; AVX512DQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
186 ; AVX512DQVL-NEXT: vpsllvd %ymm1, %ymm0, %ymm0
187 ; AVX512DQVL-NEXT: vpmovdw %ymm0, %xmm0
188 ; AVX512DQVL-NEXT: vzeroupper
189 ; AVX512DQVL-NEXT: retq
191 ; AVX512BWVL-LABEL: var_shift_v4i16:
192 ; AVX512BWVL: # %bb.0:
193 ; AVX512BWVL-NEXT: vpsllvw %xmm1, %xmm0, %xmm0
194 ; AVX512BWVL-NEXT: retq
196 ; X32-SSE-LABEL: var_shift_v4i16:
198 ; X32-SSE-NEXT: pxor %xmm2, %xmm2
199 ; X32-SSE-NEXT: movdqa %xmm1, %xmm3
200 ; X32-SSE-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
201 ; X32-SSE-NEXT: pslld $23, %xmm3
202 ; X32-SSE-NEXT: movdqa {{.*#+}} xmm4 = [1065353216,1065353216,1065353216,1065353216]
203 ; X32-SSE-NEXT: paddd %xmm4, %xmm3
204 ; X32-SSE-NEXT: cvttps2dq %xmm3, %xmm3
205 ; X32-SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm3[0,2,2,3,4,5,6,7]
206 ; X32-SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,6,6,7]
207 ; X32-SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,2,2,3]
208 ; X32-SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
209 ; X32-SSE-NEXT: pslld $23, %xmm1
210 ; X32-SSE-NEXT: paddd %xmm4, %xmm1
211 ; X32-SSE-NEXT: cvttps2dq %xmm1, %xmm1
212 ; X32-SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,2,2,3,4,5,6,7]
213 ; X32-SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,6,6,7]
214 ; X32-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
215 ; X32-SSE-NEXT: punpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm3[0]
216 ; X32-SSE-NEXT: pmullw %xmm1, %xmm0
218 %shift = shl <4 x i16> %a, %b
222 define <2 x i16> @var_shift_v2i16(<2 x i16> %a, <2 x i16> %b) nounwind {
223 ; SSE2-LABEL: var_shift_v2i16:
225 ; SSE2-NEXT: pxor %xmm2, %xmm2
226 ; SSE2-NEXT: movdqa %xmm1, %xmm3
227 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
228 ; SSE2-NEXT: pslld $23, %xmm3
229 ; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [1065353216,1065353216,1065353216,1065353216]
230 ; SSE2-NEXT: paddd %xmm4, %xmm3
231 ; SSE2-NEXT: cvttps2dq %xmm3, %xmm3
232 ; SSE2-NEXT: pshuflw {{.*#+}} xmm3 = xmm3[0,2,2,3,4,5,6,7]
233 ; SSE2-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,6,6,7]
234 ; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,2,2,3]
235 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
236 ; SSE2-NEXT: pslld $23, %xmm1
237 ; SSE2-NEXT: paddd %xmm4, %xmm1
238 ; SSE2-NEXT: cvttps2dq %xmm1, %xmm1
239 ; SSE2-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,2,2,3,4,5,6,7]
240 ; SSE2-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,6,6,7]
241 ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
242 ; SSE2-NEXT: punpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm3[0]
243 ; SSE2-NEXT: pmullw %xmm1, %xmm0
246 ; SSE41-LABEL: var_shift_v2i16:
248 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm2 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero
249 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
250 ; SSE41-NEXT: pslld $23, %xmm1
251 ; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [1065353216,1065353216,1065353216,1065353216]
252 ; SSE41-NEXT: paddd %xmm3, %xmm1
253 ; SSE41-NEXT: cvttps2dq %xmm1, %xmm1
254 ; SSE41-NEXT: pslld $23, %xmm2
255 ; SSE41-NEXT: paddd %xmm3, %xmm2
256 ; SSE41-NEXT: cvttps2dq %xmm2, %xmm2
257 ; SSE41-NEXT: packusdw %xmm1, %xmm2
258 ; SSE41-NEXT: pmullw %xmm2, %xmm0
261 ; AVX1-LABEL: var_shift_v2i16:
263 ; AVX1-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
264 ; AVX1-NEXT: vpslld $23, %xmm2, %xmm2
265 ; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [1065353216,1065353216,1065353216,1065353216]
266 ; AVX1-NEXT: vpaddd %xmm3, %xmm2, %xmm2
267 ; AVX1-NEXT: vcvttps2dq %xmm2, %xmm2
268 ; AVX1-NEXT: vpmovzxwd {{.*#+}} xmm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero
269 ; AVX1-NEXT: vpslld $23, %xmm1, %xmm1
270 ; AVX1-NEXT: vpaddd %xmm3, %xmm1, %xmm1
271 ; AVX1-NEXT: vcvttps2dq %xmm1, %xmm1
272 ; AVX1-NEXT: vpackusdw %xmm2, %xmm1, %xmm1
273 ; AVX1-NEXT: vpmullw %xmm1, %xmm0, %xmm0
276 ; AVX2-LABEL: var_shift_v2i16:
278 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
279 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
280 ; AVX2-NEXT: vpsllvd %ymm1, %ymm0, %ymm0
281 ; AVX2-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[0,1,4,5,8,9,12,13,8,9,12,13,12,13,14,15,16,17,20,21,24,25,28,29,24,25,28,29,28,29,30,31]
282 ; AVX2-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,2,3]
283 ; AVX2-NEXT: # kill: def $xmm0 killed $xmm0 killed $ymm0
284 ; AVX2-NEXT: vzeroupper
287 ; XOP-LABEL: var_shift_v2i16:
289 ; XOP-NEXT: vpshlw %xmm1, %xmm0, %xmm0
292 ; AVX512DQ-LABEL: var_shift_v2i16:
294 ; AVX512DQ-NEXT: vpmovzxwd {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
295 ; AVX512DQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
296 ; AVX512DQ-NEXT: vpsllvd %ymm1, %ymm0, %ymm0
297 ; AVX512DQ-NEXT: vpmovdw %zmm0, %ymm0
298 ; AVX512DQ-NEXT: # kill: def $xmm0 killed $xmm0 killed $ymm0
299 ; AVX512DQ-NEXT: vzeroupper
300 ; AVX512DQ-NEXT: retq
302 ; AVX512BW-LABEL: var_shift_v2i16:
304 ; AVX512BW-NEXT: # kill: def $xmm1 killed $xmm1 def $zmm1
305 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
306 ; AVX512BW-NEXT: vpsllvw %zmm1, %zmm0, %zmm0
307 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 killed $zmm0
308 ; AVX512BW-NEXT: vzeroupper
309 ; AVX512BW-NEXT: retq
311 ; AVX512DQVL-LABEL: var_shift_v2i16:
312 ; AVX512DQVL: # %bb.0:
313 ; AVX512DQVL-NEXT: vpmovzxwd {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
314 ; AVX512DQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
315 ; AVX512DQVL-NEXT: vpsllvd %ymm1, %ymm0, %ymm0
316 ; AVX512DQVL-NEXT: vpmovdw %ymm0, %xmm0
317 ; AVX512DQVL-NEXT: vzeroupper
318 ; AVX512DQVL-NEXT: retq
320 ; AVX512BWVL-LABEL: var_shift_v2i16:
321 ; AVX512BWVL: # %bb.0:
322 ; AVX512BWVL-NEXT: vpsllvw %xmm1, %xmm0, %xmm0
323 ; AVX512BWVL-NEXT: retq
325 ; X32-SSE-LABEL: var_shift_v2i16:
327 ; X32-SSE-NEXT: pxor %xmm2, %xmm2
328 ; X32-SSE-NEXT: movdqa %xmm1, %xmm3
329 ; X32-SSE-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
330 ; X32-SSE-NEXT: pslld $23, %xmm3
331 ; X32-SSE-NEXT: movdqa {{.*#+}} xmm4 = [1065353216,1065353216,1065353216,1065353216]
332 ; X32-SSE-NEXT: paddd %xmm4, %xmm3
333 ; X32-SSE-NEXT: cvttps2dq %xmm3, %xmm3
334 ; X32-SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm3[0,2,2,3,4,5,6,7]
335 ; X32-SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,6,6,7]
336 ; X32-SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,2,2,3]
337 ; X32-SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
338 ; X32-SSE-NEXT: pslld $23, %xmm1
339 ; X32-SSE-NEXT: paddd %xmm4, %xmm1
340 ; X32-SSE-NEXT: cvttps2dq %xmm1, %xmm1
341 ; X32-SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,2,2,3,4,5,6,7]
342 ; X32-SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,6,6,7]
343 ; X32-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
344 ; X32-SSE-NEXT: punpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm3[0]
345 ; X32-SSE-NEXT: pmullw %xmm1, %xmm0
347 %shift = shl <2 x i16> %a, %b
351 define <8 x i8> @var_shift_v8i8(<8 x i8> %a, <8 x i8> %b) nounwind {
352 ; SSE2-LABEL: var_shift_v8i8:
354 ; SSE2-NEXT: psllw $5, %xmm1
355 ; SSE2-NEXT: pxor %xmm2, %xmm2
356 ; SSE2-NEXT: pxor %xmm3, %xmm3
357 ; SSE2-NEXT: pcmpgtb %xmm1, %xmm3
358 ; SSE2-NEXT: movdqa %xmm3, %xmm4
359 ; SSE2-NEXT: pandn %xmm0, %xmm4
360 ; SSE2-NEXT: psllw $4, %xmm0
361 ; SSE2-NEXT: pand %xmm3, %xmm0
362 ; SSE2-NEXT: pand {{.*}}(%rip), %xmm0
363 ; SSE2-NEXT: por %xmm4, %xmm0
364 ; SSE2-NEXT: paddb %xmm1, %xmm1
365 ; SSE2-NEXT: pxor %xmm3, %xmm3
366 ; SSE2-NEXT: pcmpgtb %xmm1, %xmm3
367 ; SSE2-NEXT: movdqa %xmm3, %xmm4
368 ; SSE2-NEXT: pandn %xmm0, %xmm4
369 ; SSE2-NEXT: psllw $2, %xmm0
370 ; SSE2-NEXT: pand %xmm3, %xmm0
371 ; SSE2-NEXT: pand {{.*}}(%rip), %xmm0
372 ; SSE2-NEXT: por %xmm4, %xmm0
373 ; SSE2-NEXT: paddb %xmm1, %xmm1
374 ; SSE2-NEXT: pcmpgtb %xmm1, %xmm2
375 ; SSE2-NEXT: movdqa %xmm2, %xmm1
376 ; SSE2-NEXT: pandn %xmm0, %xmm1
377 ; SSE2-NEXT: paddb %xmm0, %xmm0
378 ; SSE2-NEXT: pand %xmm2, %xmm0
379 ; SSE2-NEXT: por %xmm1, %xmm0
382 ; SSE41-LABEL: var_shift_v8i8:
384 ; SSE41-NEXT: movdqa %xmm0, %xmm2
385 ; SSE41-NEXT: psllw $5, %xmm1
386 ; SSE41-NEXT: movdqa %xmm0, %xmm3
387 ; SSE41-NEXT: psllw $4, %xmm3
388 ; SSE41-NEXT: pand {{.*}}(%rip), %xmm3
389 ; SSE41-NEXT: movdqa %xmm1, %xmm0
390 ; SSE41-NEXT: pblendvb %xmm0, %xmm3, %xmm2
391 ; SSE41-NEXT: movdqa %xmm2, %xmm3
392 ; SSE41-NEXT: psllw $2, %xmm3
393 ; SSE41-NEXT: pand {{.*}}(%rip), %xmm3
394 ; SSE41-NEXT: paddb %xmm1, %xmm1
395 ; SSE41-NEXT: movdqa %xmm1, %xmm0
396 ; SSE41-NEXT: pblendvb %xmm0, %xmm3, %xmm2
397 ; SSE41-NEXT: movdqa %xmm2, %xmm3
398 ; SSE41-NEXT: paddb %xmm2, %xmm3
399 ; SSE41-NEXT: paddb %xmm1, %xmm1
400 ; SSE41-NEXT: movdqa %xmm1, %xmm0
401 ; SSE41-NEXT: pblendvb %xmm0, %xmm3, %xmm2
402 ; SSE41-NEXT: movdqa %xmm2, %xmm0
405 ; AVX-LABEL: var_shift_v8i8:
407 ; AVX-NEXT: vpsllw $5, %xmm1, %xmm1
408 ; AVX-NEXT: vpsllw $4, %xmm0, %xmm2
409 ; AVX-NEXT: vpand {{.*}}(%rip), %xmm2, %xmm2
410 ; AVX-NEXT: vpblendvb %xmm1, %xmm2, %xmm0, %xmm0
411 ; AVX-NEXT: vpsllw $2, %xmm0, %xmm2
412 ; AVX-NEXT: vpand {{.*}}(%rip), %xmm2, %xmm2
413 ; AVX-NEXT: vpaddb %xmm1, %xmm1, %xmm1
414 ; AVX-NEXT: vpblendvb %xmm1, %xmm2, %xmm0, %xmm0
415 ; AVX-NEXT: vpaddb %xmm0, %xmm0, %xmm2
416 ; AVX-NEXT: vpaddb %xmm1, %xmm1, %xmm1
417 ; AVX-NEXT: vpblendvb %xmm1, %xmm2, %xmm0, %xmm0
420 ; XOP-LABEL: var_shift_v8i8:
422 ; XOP-NEXT: vpshlb %xmm1, %xmm0, %xmm0
425 ; AVX512DQ-LABEL: var_shift_v8i8:
427 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
428 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
429 ; AVX512DQ-NEXT: vpsllvd %zmm1, %zmm0, %zmm0
430 ; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
431 ; AVX512DQ-NEXT: vzeroupper
432 ; AVX512DQ-NEXT: retq
434 ; AVX512BW-LABEL: var_shift_v8i8:
436 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero,xmm1[8],zero,xmm1[9],zero,xmm1[10],zero,xmm1[11],zero,xmm1[12],zero,xmm1[13],zero,xmm1[14],zero,xmm1[15],zero
437 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
438 ; AVX512BW-NEXT: vpsllvw %zmm1, %zmm0, %zmm0
439 ; AVX512BW-NEXT: vpmovwb %zmm0, %ymm0
440 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 killed $ymm0
441 ; AVX512BW-NEXT: vzeroupper
442 ; AVX512BW-NEXT: retq
444 ; AVX512DQVL-LABEL: var_shift_v8i8:
445 ; AVX512DQVL: # %bb.0:
446 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
447 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
448 ; AVX512DQVL-NEXT: vpsllvd %zmm1, %zmm0, %zmm0
449 ; AVX512DQVL-NEXT: vpmovdb %zmm0, %xmm0
450 ; AVX512DQVL-NEXT: vzeroupper
451 ; AVX512DQVL-NEXT: retq
453 ; AVX512BWVL-LABEL: var_shift_v8i8:
454 ; AVX512BWVL: # %bb.0:
455 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero,xmm1[8],zero,xmm1[9],zero,xmm1[10],zero,xmm1[11],zero,xmm1[12],zero,xmm1[13],zero,xmm1[14],zero,xmm1[15],zero
456 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
457 ; AVX512BWVL-NEXT: vpsllvw %ymm1, %ymm0, %ymm0
458 ; AVX512BWVL-NEXT: vpmovwb %ymm0, %xmm0
459 ; AVX512BWVL-NEXT: vzeroupper
460 ; AVX512BWVL-NEXT: retq
462 ; X32-SSE-LABEL: var_shift_v8i8:
464 ; X32-SSE-NEXT: psllw $5, %xmm1
465 ; X32-SSE-NEXT: pxor %xmm2, %xmm2
466 ; X32-SSE-NEXT: pxor %xmm3, %xmm3
467 ; X32-SSE-NEXT: pcmpgtb %xmm1, %xmm3
468 ; X32-SSE-NEXT: movdqa %xmm3, %xmm4
469 ; X32-SSE-NEXT: pandn %xmm0, %xmm4
470 ; X32-SSE-NEXT: psllw $4, %xmm0
471 ; X32-SSE-NEXT: pand %xmm3, %xmm0
472 ; X32-SSE-NEXT: pand {{\.LCPI.*}}, %xmm0
473 ; X32-SSE-NEXT: por %xmm4, %xmm0
474 ; X32-SSE-NEXT: paddb %xmm1, %xmm1
475 ; X32-SSE-NEXT: pxor %xmm3, %xmm3
476 ; X32-SSE-NEXT: pcmpgtb %xmm1, %xmm3
477 ; X32-SSE-NEXT: movdqa %xmm3, %xmm4
478 ; X32-SSE-NEXT: pandn %xmm0, %xmm4
479 ; X32-SSE-NEXT: psllw $2, %xmm0
480 ; X32-SSE-NEXT: pand %xmm3, %xmm0
481 ; X32-SSE-NEXT: pand {{\.LCPI.*}}, %xmm0
482 ; X32-SSE-NEXT: por %xmm4, %xmm0
483 ; X32-SSE-NEXT: paddb %xmm1, %xmm1
484 ; X32-SSE-NEXT: pcmpgtb %xmm1, %xmm2
485 ; X32-SSE-NEXT: movdqa %xmm2, %xmm1
486 ; X32-SSE-NEXT: pandn %xmm0, %xmm1
487 ; X32-SSE-NEXT: paddb %xmm0, %xmm0
488 ; X32-SSE-NEXT: pand %xmm2, %xmm0
489 ; X32-SSE-NEXT: por %xmm1, %xmm0
491 %shift = shl <8 x i8> %a, %b
495 define <4 x i8> @var_shift_v4i8(<4 x i8> %a, <4 x i8> %b) nounwind {
496 ; SSE2-LABEL: var_shift_v4i8:
498 ; SSE2-NEXT: psllw $5, %xmm1
499 ; SSE2-NEXT: pxor %xmm2, %xmm2
500 ; SSE2-NEXT: pxor %xmm3, %xmm3
501 ; SSE2-NEXT: pcmpgtb %xmm1, %xmm3
502 ; SSE2-NEXT: movdqa %xmm3, %xmm4
503 ; SSE2-NEXT: pandn %xmm0, %xmm4
504 ; SSE2-NEXT: psllw $4, %xmm0
505 ; SSE2-NEXT: pand %xmm3, %xmm0
506 ; SSE2-NEXT: pand {{.*}}(%rip), %xmm0
507 ; SSE2-NEXT: por %xmm4, %xmm0
508 ; SSE2-NEXT: paddb %xmm1, %xmm1
509 ; SSE2-NEXT: pxor %xmm3, %xmm3
510 ; SSE2-NEXT: pcmpgtb %xmm1, %xmm3
511 ; SSE2-NEXT: movdqa %xmm3, %xmm4
512 ; SSE2-NEXT: pandn %xmm0, %xmm4
513 ; SSE2-NEXT: psllw $2, %xmm0
514 ; SSE2-NEXT: pand %xmm3, %xmm0
515 ; SSE2-NEXT: pand {{.*}}(%rip), %xmm0
516 ; SSE2-NEXT: por %xmm4, %xmm0
517 ; SSE2-NEXT: paddb %xmm1, %xmm1
518 ; SSE2-NEXT: pcmpgtb %xmm1, %xmm2
519 ; SSE2-NEXT: movdqa %xmm2, %xmm1
520 ; SSE2-NEXT: pandn %xmm0, %xmm1
521 ; SSE2-NEXT: paddb %xmm0, %xmm0
522 ; SSE2-NEXT: pand %xmm2, %xmm0
523 ; SSE2-NEXT: por %xmm1, %xmm0
526 ; SSE41-LABEL: var_shift_v4i8:
528 ; SSE41-NEXT: movdqa %xmm0, %xmm2
529 ; SSE41-NEXT: psllw $5, %xmm1
530 ; SSE41-NEXT: movdqa %xmm0, %xmm3
531 ; SSE41-NEXT: psllw $4, %xmm3
532 ; SSE41-NEXT: pand {{.*}}(%rip), %xmm3
533 ; SSE41-NEXT: movdqa %xmm1, %xmm0
534 ; SSE41-NEXT: pblendvb %xmm0, %xmm3, %xmm2
535 ; SSE41-NEXT: movdqa %xmm2, %xmm3
536 ; SSE41-NEXT: psllw $2, %xmm3
537 ; SSE41-NEXT: pand {{.*}}(%rip), %xmm3
538 ; SSE41-NEXT: paddb %xmm1, %xmm1
539 ; SSE41-NEXT: movdqa %xmm1, %xmm0
540 ; SSE41-NEXT: pblendvb %xmm0, %xmm3, %xmm2
541 ; SSE41-NEXT: movdqa %xmm2, %xmm3
542 ; SSE41-NEXT: paddb %xmm2, %xmm3
543 ; SSE41-NEXT: paddb %xmm1, %xmm1
544 ; SSE41-NEXT: movdqa %xmm1, %xmm0
545 ; SSE41-NEXT: pblendvb %xmm0, %xmm3, %xmm2
546 ; SSE41-NEXT: movdqa %xmm2, %xmm0
549 ; AVX-LABEL: var_shift_v4i8:
551 ; AVX-NEXT: vpsllw $5, %xmm1, %xmm1
552 ; AVX-NEXT: vpsllw $4, %xmm0, %xmm2
553 ; AVX-NEXT: vpand {{.*}}(%rip), %xmm2, %xmm2
554 ; AVX-NEXT: vpblendvb %xmm1, %xmm2, %xmm0, %xmm0
555 ; AVX-NEXT: vpsllw $2, %xmm0, %xmm2
556 ; AVX-NEXT: vpand {{.*}}(%rip), %xmm2, %xmm2
557 ; AVX-NEXT: vpaddb %xmm1, %xmm1, %xmm1
558 ; AVX-NEXT: vpblendvb %xmm1, %xmm2, %xmm0, %xmm0
559 ; AVX-NEXT: vpaddb %xmm0, %xmm0, %xmm2
560 ; AVX-NEXT: vpaddb %xmm1, %xmm1, %xmm1
561 ; AVX-NEXT: vpblendvb %xmm1, %xmm2, %xmm0, %xmm0
564 ; XOP-LABEL: var_shift_v4i8:
566 ; XOP-NEXT: vpshlb %xmm1, %xmm0, %xmm0
569 ; AVX512DQ-LABEL: var_shift_v4i8:
571 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
572 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
573 ; AVX512DQ-NEXT: vpsllvd %zmm1, %zmm0, %zmm0
574 ; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
575 ; AVX512DQ-NEXT: vzeroupper
576 ; AVX512DQ-NEXT: retq
578 ; AVX512BW-LABEL: var_shift_v4i8:
580 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero,xmm1[8],zero,xmm1[9],zero,xmm1[10],zero,xmm1[11],zero,xmm1[12],zero,xmm1[13],zero,xmm1[14],zero,xmm1[15],zero
581 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
582 ; AVX512BW-NEXT: vpsllvw %zmm1, %zmm0, %zmm0
583 ; AVX512BW-NEXT: vpmovwb %zmm0, %ymm0
584 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 killed $ymm0
585 ; AVX512BW-NEXT: vzeroupper
586 ; AVX512BW-NEXT: retq
588 ; AVX512DQVL-LABEL: var_shift_v4i8:
589 ; AVX512DQVL: # %bb.0:
590 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
591 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
592 ; AVX512DQVL-NEXT: vpsllvd %zmm1, %zmm0, %zmm0
593 ; AVX512DQVL-NEXT: vpmovdb %zmm0, %xmm0
594 ; AVX512DQVL-NEXT: vzeroupper
595 ; AVX512DQVL-NEXT: retq
597 ; AVX512BWVL-LABEL: var_shift_v4i8:
598 ; AVX512BWVL: # %bb.0:
599 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero,xmm1[8],zero,xmm1[9],zero,xmm1[10],zero,xmm1[11],zero,xmm1[12],zero,xmm1[13],zero,xmm1[14],zero,xmm1[15],zero
600 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
601 ; AVX512BWVL-NEXT: vpsllvw %ymm1, %ymm0, %ymm0
602 ; AVX512BWVL-NEXT: vpmovwb %ymm0, %xmm0
603 ; AVX512BWVL-NEXT: vzeroupper
604 ; AVX512BWVL-NEXT: retq
606 ; X32-SSE-LABEL: var_shift_v4i8:
608 ; X32-SSE-NEXT: psllw $5, %xmm1
609 ; X32-SSE-NEXT: pxor %xmm2, %xmm2
610 ; X32-SSE-NEXT: pxor %xmm3, %xmm3
611 ; X32-SSE-NEXT: pcmpgtb %xmm1, %xmm3
612 ; X32-SSE-NEXT: movdqa %xmm3, %xmm4
613 ; X32-SSE-NEXT: pandn %xmm0, %xmm4
614 ; X32-SSE-NEXT: psllw $4, %xmm0
615 ; X32-SSE-NEXT: pand %xmm3, %xmm0
616 ; X32-SSE-NEXT: pand {{\.LCPI.*}}, %xmm0
617 ; X32-SSE-NEXT: por %xmm4, %xmm0
618 ; X32-SSE-NEXT: paddb %xmm1, %xmm1
619 ; X32-SSE-NEXT: pxor %xmm3, %xmm3
620 ; X32-SSE-NEXT: pcmpgtb %xmm1, %xmm3
621 ; X32-SSE-NEXT: movdqa %xmm3, %xmm4
622 ; X32-SSE-NEXT: pandn %xmm0, %xmm4
623 ; X32-SSE-NEXT: psllw $2, %xmm0
624 ; X32-SSE-NEXT: pand %xmm3, %xmm0
625 ; X32-SSE-NEXT: pand {{\.LCPI.*}}, %xmm0
626 ; X32-SSE-NEXT: por %xmm4, %xmm0
627 ; X32-SSE-NEXT: paddb %xmm1, %xmm1
628 ; X32-SSE-NEXT: pcmpgtb %xmm1, %xmm2
629 ; X32-SSE-NEXT: movdqa %xmm2, %xmm1
630 ; X32-SSE-NEXT: pandn %xmm0, %xmm1
631 ; X32-SSE-NEXT: paddb %xmm0, %xmm0
632 ; X32-SSE-NEXT: pand %xmm2, %xmm0
633 ; X32-SSE-NEXT: por %xmm1, %xmm0
635 %shift = shl <4 x i8> %a, %b
639 define <2 x i8> @var_shift_v2i8(<2 x i8> %a, <2 x i8> %b) nounwind {
640 ; SSE2-LABEL: var_shift_v2i8:
642 ; SSE2-NEXT: psllw $5, %xmm1
643 ; SSE2-NEXT: pxor %xmm2, %xmm2
644 ; SSE2-NEXT: pxor %xmm3, %xmm3
645 ; SSE2-NEXT: pcmpgtb %xmm1, %xmm3
646 ; SSE2-NEXT: movdqa %xmm3, %xmm4
647 ; SSE2-NEXT: pandn %xmm0, %xmm4
648 ; SSE2-NEXT: psllw $4, %xmm0
649 ; SSE2-NEXT: pand %xmm3, %xmm0
650 ; SSE2-NEXT: pand {{.*}}(%rip), %xmm0
651 ; SSE2-NEXT: por %xmm4, %xmm0
652 ; SSE2-NEXT: paddb %xmm1, %xmm1
653 ; SSE2-NEXT: pxor %xmm3, %xmm3
654 ; SSE2-NEXT: pcmpgtb %xmm1, %xmm3
655 ; SSE2-NEXT: movdqa %xmm3, %xmm4
656 ; SSE2-NEXT: pandn %xmm0, %xmm4
657 ; SSE2-NEXT: psllw $2, %xmm0
658 ; SSE2-NEXT: pand %xmm3, %xmm0
659 ; SSE2-NEXT: pand {{.*}}(%rip), %xmm0
660 ; SSE2-NEXT: por %xmm4, %xmm0
661 ; SSE2-NEXT: paddb %xmm1, %xmm1
662 ; SSE2-NEXT: pcmpgtb %xmm1, %xmm2
663 ; SSE2-NEXT: movdqa %xmm2, %xmm1
664 ; SSE2-NEXT: pandn %xmm0, %xmm1
665 ; SSE2-NEXT: paddb %xmm0, %xmm0
666 ; SSE2-NEXT: pand %xmm2, %xmm0
667 ; SSE2-NEXT: por %xmm1, %xmm0
670 ; SSE41-LABEL: var_shift_v2i8:
672 ; SSE41-NEXT: movdqa %xmm0, %xmm2
673 ; SSE41-NEXT: psllw $5, %xmm1
674 ; SSE41-NEXT: movdqa %xmm0, %xmm3
675 ; SSE41-NEXT: psllw $4, %xmm3
676 ; SSE41-NEXT: pand {{.*}}(%rip), %xmm3
677 ; SSE41-NEXT: movdqa %xmm1, %xmm0
678 ; SSE41-NEXT: pblendvb %xmm0, %xmm3, %xmm2
679 ; SSE41-NEXT: movdqa %xmm2, %xmm3
680 ; SSE41-NEXT: psllw $2, %xmm3
681 ; SSE41-NEXT: pand {{.*}}(%rip), %xmm3
682 ; SSE41-NEXT: paddb %xmm1, %xmm1
683 ; SSE41-NEXT: movdqa %xmm1, %xmm0
684 ; SSE41-NEXT: pblendvb %xmm0, %xmm3, %xmm2
685 ; SSE41-NEXT: movdqa %xmm2, %xmm3
686 ; SSE41-NEXT: paddb %xmm2, %xmm3
687 ; SSE41-NEXT: paddb %xmm1, %xmm1
688 ; SSE41-NEXT: movdqa %xmm1, %xmm0
689 ; SSE41-NEXT: pblendvb %xmm0, %xmm3, %xmm2
690 ; SSE41-NEXT: movdqa %xmm2, %xmm0
693 ; AVX-LABEL: var_shift_v2i8:
695 ; AVX-NEXT: vpsllw $5, %xmm1, %xmm1
696 ; AVX-NEXT: vpsllw $4, %xmm0, %xmm2
697 ; AVX-NEXT: vpand {{.*}}(%rip), %xmm2, %xmm2
698 ; AVX-NEXT: vpblendvb %xmm1, %xmm2, %xmm0, %xmm0
699 ; AVX-NEXT: vpsllw $2, %xmm0, %xmm2
700 ; AVX-NEXT: vpand {{.*}}(%rip), %xmm2, %xmm2
701 ; AVX-NEXT: vpaddb %xmm1, %xmm1, %xmm1
702 ; AVX-NEXT: vpblendvb %xmm1, %xmm2, %xmm0, %xmm0
703 ; AVX-NEXT: vpaddb %xmm0, %xmm0, %xmm2
704 ; AVX-NEXT: vpaddb %xmm1, %xmm1, %xmm1
705 ; AVX-NEXT: vpblendvb %xmm1, %xmm2, %xmm0, %xmm0
708 ; XOP-LABEL: var_shift_v2i8:
710 ; XOP-NEXT: vpshlb %xmm1, %xmm0, %xmm0
713 ; AVX512DQ-LABEL: var_shift_v2i8:
715 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
716 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
717 ; AVX512DQ-NEXT: vpsllvd %zmm1, %zmm0, %zmm0
718 ; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
719 ; AVX512DQ-NEXT: vzeroupper
720 ; AVX512DQ-NEXT: retq
722 ; AVX512BW-LABEL: var_shift_v2i8:
724 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero,xmm1[8],zero,xmm1[9],zero,xmm1[10],zero,xmm1[11],zero,xmm1[12],zero,xmm1[13],zero,xmm1[14],zero,xmm1[15],zero
725 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
726 ; AVX512BW-NEXT: vpsllvw %zmm1, %zmm0, %zmm0
727 ; AVX512BW-NEXT: vpmovwb %zmm0, %ymm0
728 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 killed $ymm0
729 ; AVX512BW-NEXT: vzeroupper
730 ; AVX512BW-NEXT: retq
732 ; AVX512DQVL-LABEL: var_shift_v2i8:
733 ; AVX512DQVL: # %bb.0:
734 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
735 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
736 ; AVX512DQVL-NEXT: vpsllvd %zmm1, %zmm0, %zmm0
737 ; AVX512DQVL-NEXT: vpmovdb %zmm0, %xmm0
738 ; AVX512DQVL-NEXT: vzeroupper
739 ; AVX512DQVL-NEXT: retq
741 ; AVX512BWVL-LABEL: var_shift_v2i8:
742 ; AVX512BWVL: # %bb.0:
743 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero,xmm1[8],zero,xmm1[9],zero,xmm1[10],zero,xmm1[11],zero,xmm1[12],zero,xmm1[13],zero,xmm1[14],zero,xmm1[15],zero
744 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
745 ; AVX512BWVL-NEXT: vpsllvw %ymm1, %ymm0, %ymm0
746 ; AVX512BWVL-NEXT: vpmovwb %ymm0, %xmm0
747 ; AVX512BWVL-NEXT: vzeroupper
748 ; AVX512BWVL-NEXT: retq
750 ; X32-SSE-LABEL: var_shift_v2i8:
752 ; X32-SSE-NEXT: psllw $5, %xmm1
753 ; X32-SSE-NEXT: pxor %xmm2, %xmm2
754 ; X32-SSE-NEXT: pxor %xmm3, %xmm3
755 ; X32-SSE-NEXT: pcmpgtb %xmm1, %xmm3
756 ; X32-SSE-NEXT: movdqa %xmm3, %xmm4
757 ; X32-SSE-NEXT: pandn %xmm0, %xmm4
758 ; X32-SSE-NEXT: psllw $4, %xmm0
759 ; X32-SSE-NEXT: pand %xmm3, %xmm0
760 ; X32-SSE-NEXT: pand {{\.LCPI.*}}, %xmm0
761 ; X32-SSE-NEXT: por %xmm4, %xmm0
762 ; X32-SSE-NEXT: paddb %xmm1, %xmm1
763 ; X32-SSE-NEXT: pxor %xmm3, %xmm3
764 ; X32-SSE-NEXT: pcmpgtb %xmm1, %xmm3
765 ; X32-SSE-NEXT: movdqa %xmm3, %xmm4
766 ; X32-SSE-NEXT: pandn %xmm0, %xmm4
767 ; X32-SSE-NEXT: psllw $2, %xmm0
768 ; X32-SSE-NEXT: pand %xmm3, %xmm0
769 ; X32-SSE-NEXT: pand {{\.LCPI.*}}, %xmm0
770 ; X32-SSE-NEXT: por %xmm4, %xmm0
771 ; X32-SSE-NEXT: paddb %xmm1, %xmm1
772 ; X32-SSE-NEXT: pcmpgtb %xmm1, %xmm2
773 ; X32-SSE-NEXT: movdqa %xmm2, %xmm1
774 ; X32-SSE-NEXT: pandn %xmm0, %xmm1
775 ; X32-SSE-NEXT: paddb %xmm0, %xmm0
776 ; X32-SSE-NEXT: pand %xmm2, %xmm0
777 ; X32-SSE-NEXT: por %xmm1, %xmm0
779 %shift = shl <2 x i8> %a, %b
784 ; Uniform Variable Shifts
787 define <2 x i32> @splatvar_shift_v2i32(<2 x i32> %a, <2 x i32> %b) nounwind {
788 ; SSE2-LABEL: splatvar_shift_v2i32:
790 ; SSE2-NEXT: xorps %xmm2, %xmm2
791 ; SSE2-NEXT: movss {{.*#+}} xmm2 = xmm1[0],xmm2[1,2,3]
792 ; SSE2-NEXT: pslld %xmm2, %xmm0
795 ; SSE41-LABEL: splatvar_shift_v2i32:
797 ; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm1[0],zero,xmm1[1],zero
798 ; SSE41-NEXT: pslld %xmm1, %xmm0
801 ; AVX-LABEL: splatvar_shift_v2i32:
803 ; AVX-NEXT: vpmovzxdq {{.*#+}} xmm1 = xmm1[0],zero,xmm1[1],zero
804 ; AVX-NEXT: vpslld %xmm1, %xmm0, %xmm0
807 ; XOP-LABEL: splatvar_shift_v2i32:
809 ; XOP-NEXT: vpmovzxdq {{.*#+}} xmm1 = xmm1[0],zero,xmm1[1],zero
810 ; XOP-NEXT: vpslld %xmm1, %xmm0, %xmm0
813 ; AVX512-LABEL: splatvar_shift_v2i32:
815 ; AVX512-NEXT: vpmovzxdq {{.*#+}} xmm1 = xmm1[0],zero,xmm1[1],zero
816 ; AVX512-NEXT: vpslld %xmm1, %xmm0, %xmm0
819 ; AVX512VL-LABEL: splatvar_shift_v2i32:
821 ; AVX512VL-NEXT: vpmovzxdq {{.*#+}} xmm1 = xmm1[0],zero,xmm1[1],zero
822 ; AVX512VL-NEXT: vpslld %xmm1, %xmm0, %xmm0
823 ; AVX512VL-NEXT: retq
825 ; X32-SSE-LABEL: splatvar_shift_v2i32:
827 ; X32-SSE-NEXT: xorps %xmm2, %xmm2
828 ; X32-SSE-NEXT: movss {{.*#+}} xmm2 = xmm1[0],xmm2[1,2,3]
829 ; X32-SSE-NEXT: pslld %xmm2, %xmm0
831 %splat = shufflevector <2 x i32> %b, <2 x i32> undef, <2 x i32> zeroinitializer
832 %shift = shl <2 x i32> %a, %splat
836 define <4 x i16> @splatvar_shift_v4i16(<4 x i16> %a, <4 x i16> %b) nounwind {
837 ; SSE2-LABEL: splatvar_shift_v4i16:
839 ; SSE2-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm1[0,1]
840 ; SSE2-NEXT: psrldq {{.*#+}} xmm1 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
841 ; SSE2-NEXT: psllw %xmm1, %xmm0
844 ; SSE41-LABEL: splatvar_shift_v4i16:
846 ; SSE41-NEXT: pmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
847 ; SSE41-NEXT: psllw %xmm1, %xmm0
850 ; AVX-LABEL: splatvar_shift_v4i16:
852 ; AVX-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
853 ; AVX-NEXT: vpsllw %xmm1, %xmm0, %xmm0
856 ; XOP-LABEL: splatvar_shift_v4i16:
858 ; XOP-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
859 ; XOP-NEXT: vpsllw %xmm1, %xmm0, %xmm0
862 ; AVX512-LABEL: splatvar_shift_v4i16:
864 ; AVX512-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
865 ; AVX512-NEXT: vpsllw %xmm1, %xmm0, %xmm0
868 ; AVX512VL-LABEL: splatvar_shift_v4i16:
870 ; AVX512VL-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
871 ; AVX512VL-NEXT: vpsllw %xmm1, %xmm0, %xmm0
872 ; AVX512VL-NEXT: retq
874 ; X32-SSE-LABEL: splatvar_shift_v4i16:
876 ; X32-SSE-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm1[0,1]
877 ; X32-SSE-NEXT: psrldq {{.*#+}} xmm1 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
878 ; X32-SSE-NEXT: psllw %xmm1, %xmm0
880 %splat = shufflevector <4 x i16> %b, <4 x i16> undef, <4 x i32> zeroinitializer
881 %shift = shl <4 x i16> %a, %splat
885 define <2 x i16> @splatvar_shift_v2i16(<2 x i16> %a, <2 x i16> %b) nounwind {
886 ; SSE2-LABEL: splatvar_shift_v2i16:
888 ; SSE2-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm1[0,1]
889 ; SSE2-NEXT: psrldq {{.*#+}} xmm1 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
890 ; SSE2-NEXT: psllw %xmm1, %xmm0
893 ; SSE41-LABEL: splatvar_shift_v2i16:
895 ; SSE41-NEXT: pmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
896 ; SSE41-NEXT: psllw %xmm1, %xmm0
899 ; AVX-LABEL: splatvar_shift_v2i16:
901 ; AVX-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
902 ; AVX-NEXT: vpsllw %xmm1, %xmm0, %xmm0
905 ; XOP-LABEL: splatvar_shift_v2i16:
907 ; XOP-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
908 ; XOP-NEXT: vpsllw %xmm1, %xmm0, %xmm0
911 ; AVX512-LABEL: splatvar_shift_v2i16:
913 ; AVX512-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
914 ; AVX512-NEXT: vpsllw %xmm1, %xmm0, %xmm0
917 ; AVX512VL-LABEL: splatvar_shift_v2i16:
919 ; AVX512VL-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
920 ; AVX512VL-NEXT: vpsllw %xmm1, %xmm0, %xmm0
921 ; AVX512VL-NEXT: retq
923 ; X32-SSE-LABEL: splatvar_shift_v2i16:
925 ; X32-SSE-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm1[0,1]
926 ; X32-SSE-NEXT: psrldq {{.*#+}} xmm1 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
927 ; X32-SSE-NEXT: psllw %xmm1, %xmm0
929 %splat = shufflevector <2 x i16> %b, <2 x i16> undef, <2 x i32> zeroinitializer
930 %shift = shl <2 x i16> %a, %splat
934 define <8 x i8> @splatvar_shift_v8i8(<8 x i8> %a, <8 x i8> %b) nounwind {
935 ; SSE2-LABEL: splatvar_shift_v8i8:
937 ; SSE2-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm1[0]
938 ; SSE2-NEXT: psrldq {{.*#+}} xmm1 = xmm1[15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
939 ; SSE2-NEXT: psllw %xmm1, %xmm0
940 ; SSE2-NEXT: pcmpeqd %xmm2, %xmm2
941 ; SSE2-NEXT: psllw %xmm1, %xmm2
942 ; SSE2-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
943 ; SSE2-NEXT: pshuflw {{.*#+}} xmm1 = xmm2[0,0,2,3,4,5,6,7]
944 ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,0,0]
945 ; SSE2-NEXT: pand %xmm1, %xmm0
948 ; SSE41-LABEL: splatvar_shift_v8i8:
950 ; SSE41-NEXT: pmovzxbq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,zero,zero,zero,zero,xmm1[1],zero,zero,zero,zero,zero,zero,zero
951 ; SSE41-NEXT: psllw %xmm1, %xmm0
952 ; SSE41-NEXT: pcmpeqd %xmm2, %xmm2
953 ; SSE41-NEXT: psllw %xmm1, %xmm2
954 ; SSE41-NEXT: pxor %xmm1, %xmm1
955 ; SSE41-NEXT: pshufb %xmm1, %xmm2
956 ; SSE41-NEXT: pand %xmm2, %xmm0
959 ; AVX1-LABEL: splatvar_shift_v8i8:
961 ; AVX1-NEXT: vpmovzxbq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,zero,zero,zero,zero,xmm1[1],zero,zero,zero,zero,zero,zero,zero
962 ; AVX1-NEXT: vpsllw %xmm1, %xmm0, %xmm0
963 ; AVX1-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2
964 ; AVX1-NEXT: vpsllw %xmm1, %xmm2, %xmm1
965 ; AVX1-NEXT: vpxor %xmm2, %xmm2, %xmm2
966 ; AVX1-NEXT: vpshufb %xmm2, %xmm1, %xmm1
967 ; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
970 ; AVX2-LABEL: splatvar_shift_v8i8:
972 ; AVX2-NEXT: vpmovzxbq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,zero,zero,zero,zero,xmm1[1],zero,zero,zero,zero,zero,zero,zero
973 ; AVX2-NEXT: vpsllw %xmm1, %xmm0, %xmm0
974 ; AVX2-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2
975 ; AVX2-NEXT: vpsllw %xmm1, %xmm2, %xmm1
976 ; AVX2-NEXT: vpbroadcastb %xmm1, %xmm1
977 ; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
980 ; XOPAVX1-LABEL: splatvar_shift_v8i8:
982 ; XOPAVX1-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm1[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
983 ; XOPAVX1-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,0,0,0,4,5,6,7]
984 ; XOPAVX1-NEXT: vpshlb %xmm1, %xmm0, %xmm0
987 ; XOPAVX2-LABEL: splatvar_shift_v8i8:
989 ; XOPAVX2-NEXT: vpbroadcastb %xmm1, %xmm1
990 ; XOPAVX2-NEXT: vpshlb %xmm1, %xmm0, %xmm0
993 ; AVX512DQ-LABEL: splatvar_shift_v8i8:
995 ; AVX512DQ-NEXT: vpbroadcastb %xmm1, %xmm1
996 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
997 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
998 ; AVX512DQ-NEXT: vpsllvd %zmm1, %zmm0, %zmm0
999 ; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
1000 ; AVX512DQ-NEXT: vzeroupper
1001 ; AVX512DQ-NEXT: retq
1003 ; AVX512BW-LABEL: splatvar_shift_v8i8:
1004 ; AVX512BW: # %bb.0:
1005 ; AVX512BW-NEXT: vpbroadcastb %xmm1, %xmm1
1006 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1007 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero,xmm1[8],zero,xmm1[9],zero,xmm1[10],zero,xmm1[11],zero,xmm1[12],zero,xmm1[13],zero,xmm1[14],zero,xmm1[15],zero
1008 ; AVX512BW-NEXT: vpsllvw %zmm1, %zmm0, %zmm0
1009 ; AVX512BW-NEXT: vpmovwb %zmm0, %ymm0
1010 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 killed $ymm0
1011 ; AVX512BW-NEXT: vzeroupper
1012 ; AVX512BW-NEXT: retq
1014 ; AVX512DQVL-LABEL: splatvar_shift_v8i8:
1015 ; AVX512DQVL: # %bb.0:
1016 ; AVX512DQVL-NEXT: vpbroadcastb %xmm1, %xmm1
1017 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
1018 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
1019 ; AVX512DQVL-NEXT: vpsllvd %zmm1, %zmm0, %zmm0
1020 ; AVX512DQVL-NEXT: vpmovdb %zmm0, %xmm0
1021 ; AVX512DQVL-NEXT: vzeroupper
1022 ; AVX512DQVL-NEXT: retq
1024 ; AVX512BWVL-LABEL: splatvar_shift_v8i8:
1025 ; AVX512BWVL: # %bb.0:
1026 ; AVX512BWVL-NEXT: vpbroadcastb %xmm1, %xmm1
1027 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1028 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero,xmm1[8],zero,xmm1[9],zero,xmm1[10],zero,xmm1[11],zero,xmm1[12],zero,xmm1[13],zero,xmm1[14],zero,xmm1[15],zero
1029 ; AVX512BWVL-NEXT: vpsllvw %ymm1, %ymm0, %ymm0
1030 ; AVX512BWVL-NEXT: vpmovwb %ymm0, %xmm0
1031 ; AVX512BWVL-NEXT: vzeroupper
1032 ; AVX512BWVL-NEXT: retq
1034 ; X32-SSE-LABEL: splatvar_shift_v8i8:
1036 ; X32-SSE-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm1[0]
1037 ; X32-SSE-NEXT: psrldq {{.*#+}} xmm1 = xmm1[15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1038 ; X32-SSE-NEXT: psllw %xmm1, %xmm0
1039 ; X32-SSE-NEXT: pcmpeqd %xmm2, %xmm2
1040 ; X32-SSE-NEXT: psllw %xmm1, %xmm2
1041 ; X32-SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1042 ; X32-SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm2[0,0,2,3,4,5,6,7]
1043 ; X32-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,0,0]
1044 ; X32-SSE-NEXT: pand %xmm1, %xmm0
1045 ; X32-SSE-NEXT: retl
1046 %splat = shufflevector <8 x i8> %b, <8 x i8> undef, <8 x i32> zeroinitializer
1047 %shift = shl <8 x i8> %a, %splat
1051 define <4 x i8> @splatvar_shift_v4i8(<4 x i8> %a, <4 x i8> %b) nounwind {
1052 ; SSE2-LABEL: splatvar_shift_v4i8:
1054 ; SSE2-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm1[0]
1055 ; SSE2-NEXT: psrldq {{.*#+}} xmm1 = xmm1[15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1056 ; SSE2-NEXT: psllw %xmm1, %xmm0
1057 ; SSE2-NEXT: pcmpeqd %xmm2, %xmm2
1058 ; SSE2-NEXT: psllw %xmm1, %xmm2
1059 ; SSE2-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1060 ; SSE2-NEXT: pshuflw {{.*#+}} xmm1 = xmm2[0,0,2,3,4,5,6,7]
1061 ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,0,0]
1062 ; SSE2-NEXT: pand %xmm1, %xmm0
1065 ; SSE41-LABEL: splatvar_shift_v4i8:
1067 ; SSE41-NEXT: pmovzxbq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,zero,zero,zero,zero,xmm1[1],zero,zero,zero,zero,zero,zero,zero
1068 ; SSE41-NEXT: psllw %xmm1, %xmm0
1069 ; SSE41-NEXT: pcmpeqd %xmm2, %xmm2
1070 ; SSE41-NEXT: psllw %xmm1, %xmm2
1071 ; SSE41-NEXT: pxor %xmm1, %xmm1
1072 ; SSE41-NEXT: pshufb %xmm1, %xmm2
1073 ; SSE41-NEXT: pand %xmm2, %xmm0
1076 ; AVX1-LABEL: splatvar_shift_v4i8:
1078 ; AVX1-NEXT: vpmovzxbq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,zero,zero,zero,zero,xmm1[1],zero,zero,zero,zero,zero,zero,zero
1079 ; AVX1-NEXT: vpsllw %xmm1, %xmm0, %xmm0
1080 ; AVX1-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2
1081 ; AVX1-NEXT: vpsllw %xmm1, %xmm2, %xmm1
1082 ; AVX1-NEXT: vpxor %xmm2, %xmm2, %xmm2
1083 ; AVX1-NEXT: vpshufb %xmm2, %xmm1, %xmm1
1084 ; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
1087 ; AVX2-LABEL: splatvar_shift_v4i8:
1089 ; AVX2-NEXT: vpmovzxbq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,zero,zero,zero,zero,xmm1[1],zero,zero,zero,zero,zero,zero,zero
1090 ; AVX2-NEXT: vpsllw %xmm1, %xmm0, %xmm0
1091 ; AVX2-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2
1092 ; AVX2-NEXT: vpsllw %xmm1, %xmm2, %xmm1
1093 ; AVX2-NEXT: vpbroadcastb %xmm1, %xmm1
1094 ; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
1097 ; XOPAVX1-LABEL: splatvar_shift_v4i8:
1099 ; XOPAVX1-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm1[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1100 ; XOPAVX1-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,0,2,3,4,5,6,7]
1101 ; XOPAVX1-NEXT: vpshlb %xmm1, %xmm0, %xmm0
1102 ; XOPAVX1-NEXT: retq
1104 ; XOPAVX2-LABEL: splatvar_shift_v4i8:
1106 ; XOPAVX2-NEXT: vpbroadcastb %xmm1, %xmm1
1107 ; XOPAVX2-NEXT: vpshlb %xmm1, %xmm0, %xmm0
1108 ; XOPAVX2-NEXT: retq
1110 ; AVX512DQ-LABEL: splatvar_shift_v4i8:
1111 ; AVX512DQ: # %bb.0:
1112 ; AVX512DQ-NEXT: vpbroadcastb %xmm1, %xmm1
1113 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
1114 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
1115 ; AVX512DQ-NEXT: vpsllvd %zmm1, %zmm0, %zmm0
1116 ; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
1117 ; AVX512DQ-NEXT: vzeroupper
1118 ; AVX512DQ-NEXT: retq
1120 ; AVX512BW-LABEL: splatvar_shift_v4i8:
1121 ; AVX512BW: # %bb.0:
1122 ; AVX512BW-NEXT: vpbroadcastb %xmm1, %xmm1
1123 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1124 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero,xmm1[8],zero,xmm1[9],zero,xmm1[10],zero,xmm1[11],zero,xmm1[12],zero,xmm1[13],zero,xmm1[14],zero,xmm1[15],zero
1125 ; AVX512BW-NEXT: vpsllvw %zmm1, %zmm0, %zmm0
1126 ; AVX512BW-NEXT: vpmovwb %zmm0, %ymm0
1127 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 killed $ymm0
1128 ; AVX512BW-NEXT: vzeroupper
1129 ; AVX512BW-NEXT: retq
1131 ; AVX512DQVL-LABEL: splatvar_shift_v4i8:
1132 ; AVX512DQVL: # %bb.0:
1133 ; AVX512DQVL-NEXT: vpbroadcastb %xmm1, %xmm1
1134 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
1135 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
1136 ; AVX512DQVL-NEXT: vpsllvd %zmm1, %zmm0, %zmm0
1137 ; AVX512DQVL-NEXT: vpmovdb %zmm0, %xmm0
1138 ; AVX512DQVL-NEXT: vzeroupper
1139 ; AVX512DQVL-NEXT: retq
1141 ; AVX512BWVL-LABEL: splatvar_shift_v4i8:
1142 ; AVX512BWVL: # %bb.0:
1143 ; AVX512BWVL-NEXT: vpbroadcastb %xmm1, %xmm1
1144 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1145 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero,xmm1[8],zero,xmm1[9],zero,xmm1[10],zero,xmm1[11],zero,xmm1[12],zero,xmm1[13],zero,xmm1[14],zero,xmm1[15],zero
1146 ; AVX512BWVL-NEXT: vpsllvw %ymm1, %ymm0, %ymm0
1147 ; AVX512BWVL-NEXT: vpmovwb %ymm0, %xmm0
1148 ; AVX512BWVL-NEXT: vzeroupper
1149 ; AVX512BWVL-NEXT: retq
1151 ; X32-SSE-LABEL: splatvar_shift_v4i8:
1153 ; X32-SSE-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm1[0]
1154 ; X32-SSE-NEXT: psrldq {{.*#+}} xmm1 = xmm1[15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1155 ; X32-SSE-NEXT: psllw %xmm1, %xmm0
1156 ; X32-SSE-NEXT: pcmpeqd %xmm2, %xmm2
1157 ; X32-SSE-NEXT: psllw %xmm1, %xmm2
1158 ; X32-SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1159 ; X32-SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm2[0,0,2,3,4,5,6,7]
1160 ; X32-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,0,0]
1161 ; X32-SSE-NEXT: pand %xmm1, %xmm0
1162 ; X32-SSE-NEXT: retl
1163 %splat = shufflevector <4 x i8> %b, <4 x i8> undef, <4 x i32> zeroinitializer
1164 %shift = shl <4 x i8> %a, %splat
1168 define <2 x i8> @splatvar_shift_v2i8(<2 x i8> %a, <2 x i8> %b) nounwind {
1169 ; SSE2-LABEL: splatvar_shift_v2i8:
1171 ; SSE2-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm1[0]
1172 ; SSE2-NEXT: psrldq {{.*#+}} xmm1 = xmm1[15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1173 ; SSE2-NEXT: psllw %xmm1, %xmm0
1174 ; SSE2-NEXT: pcmpeqd %xmm2, %xmm2
1175 ; SSE2-NEXT: psllw %xmm1, %xmm2
1176 ; SSE2-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1177 ; SSE2-NEXT: pshuflw {{.*#+}} xmm1 = xmm2[0,0,2,3,4,5,6,7]
1178 ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,0,0]
1179 ; SSE2-NEXT: pand %xmm1, %xmm0
1182 ; SSE41-LABEL: splatvar_shift_v2i8:
1184 ; SSE41-NEXT: pmovzxbq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,zero,zero,zero,zero,xmm1[1],zero,zero,zero,zero,zero,zero,zero
1185 ; SSE41-NEXT: psllw %xmm1, %xmm0
1186 ; SSE41-NEXT: pcmpeqd %xmm2, %xmm2
1187 ; SSE41-NEXT: psllw %xmm1, %xmm2
1188 ; SSE41-NEXT: pxor %xmm1, %xmm1
1189 ; SSE41-NEXT: pshufb %xmm1, %xmm2
1190 ; SSE41-NEXT: pand %xmm2, %xmm0
1193 ; AVX1-LABEL: splatvar_shift_v2i8:
1195 ; AVX1-NEXT: vpmovzxbq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,zero,zero,zero,zero,xmm1[1],zero,zero,zero,zero,zero,zero,zero
1196 ; AVX1-NEXT: vpsllw %xmm1, %xmm0, %xmm0
1197 ; AVX1-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2
1198 ; AVX1-NEXT: vpsllw %xmm1, %xmm2, %xmm1
1199 ; AVX1-NEXT: vpxor %xmm2, %xmm2, %xmm2
1200 ; AVX1-NEXT: vpshufb %xmm2, %xmm1, %xmm1
1201 ; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
1204 ; AVX2-LABEL: splatvar_shift_v2i8:
1206 ; AVX2-NEXT: vpmovzxbq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,zero,zero,zero,zero,xmm1[1],zero,zero,zero,zero,zero,zero,zero
1207 ; AVX2-NEXT: vpsllw %xmm1, %xmm0, %xmm0
1208 ; AVX2-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2
1209 ; AVX2-NEXT: vpsllw %xmm1, %xmm2, %xmm1
1210 ; AVX2-NEXT: vpbroadcastb %xmm1, %xmm1
1211 ; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
1214 ; XOP-LABEL: splatvar_shift_v2i8:
1216 ; XOP-NEXT: insertq {{.*#+}} xmm1 = xmm1[0,0,2,3,4,5,6,7,u,u,u,u,u,u,u,u]
1217 ; XOP-NEXT: vpshlb %xmm1, %xmm0, %xmm0
1220 ; AVX512DQ-LABEL: splatvar_shift_v2i8:
1221 ; AVX512DQ: # %bb.0:
1222 ; AVX512DQ-NEXT: vpbroadcastb %xmm1, %xmm1
1223 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
1224 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
1225 ; AVX512DQ-NEXT: vpsllvd %zmm1, %zmm0, %zmm0
1226 ; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
1227 ; AVX512DQ-NEXT: vzeroupper
1228 ; AVX512DQ-NEXT: retq
1230 ; AVX512BW-LABEL: splatvar_shift_v2i8:
1231 ; AVX512BW: # %bb.0:
1232 ; AVX512BW-NEXT: vpbroadcastb %xmm1, %xmm1
1233 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1234 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero,xmm1[8],zero,xmm1[9],zero,xmm1[10],zero,xmm1[11],zero,xmm1[12],zero,xmm1[13],zero,xmm1[14],zero,xmm1[15],zero
1235 ; AVX512BW-NEXT: vpsllvw %zmm1, %zmm0, %zmm0
1236 ; AVX512BW-NEXT: vpmovwb %zmm0, %ymm0
1237 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 killed $ymm0
1238 ; AVX512BW-NEXT: vzeroupper
1239 ; AVX512BW-NEXT: retq
1241 ; AVX512DQVL-LABEL: splatvar_shift_v2i8:
1242 ; AVX512DQVL: # %bb.0:
1243 ; AVX512DQVL-NEXT: vpbroadcastb %xmm1, %xmm1
1244 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
1245 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
1246 ; AVX512DQVL-NEXT: vpsllvd %zmm1, %zmm0, %zmm0
1247 ; AVX512DQVL-NEXT: vpmovdb %zmm0, %xmm0
1248 ; AVX512DQVL-NEXT: vzeroupper
1249 ; AVX512DQVL-NEXT: retq
1251 ; AVX512BWVL-LABEL: splatvar_shift_v2i8:
1252 ; AVX512BWVL: # %bb.0:
1253 ; AVX512BWVL-NEXT: vpbroadcastb %xmm1, %xmm1
1254 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1255 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero,xmm1[8],zero,xmm1[9],zero,xmm1[10],zero,xmm1[11],zero,xmm1[12],zero,xmm1[13],zero,xmm1[14],zero,xmm1[15],zero
1256 ; AVX512BWVL-NEXT: vpsllvw %ymm1, %ymm0, %ymm0
1257 ; AVX512BWVL-NEXT: vpmovwb %ymm0, %xmm0
1258 ; AVX512BWVL-NEXT: vzeroupper
1259 ; AVX512BWVL-NEXT: retq
1261 ; X32-SSE-LABEL: splatvar_shift_v2i8:
1263 ; X32-SSE-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm1[0]
1264 ; X32-SSE-NEXT: psrldq {{.*#+}} xmm1 = xmm1[15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1265 ; X32-SSE-NEXT: psllw %xmm1, %xmm0
1266 ; X32-SSE-NEXT: pcmpeqd %xmm2, %xmm2
1267 ; X32-SSE-NEXT: psllw %xmm1, %xmm2
1268 ; X32-SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1269 ; X32-SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm2[0,0,2,3,4,5,6,7]
1270 ; X32-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,0,0]
1271 ; X32-SSE-NEXT: pand %xmm1, %xmm0
1272 ; X32-SSE-NEXT: retl
1273 %splat = shufflevector <2 x i8> %b, <2 x i8> undef, <2 x i32> zeroinitializer
1274 %shift = shl <2 x i8> %a, %splat
; Non-uniform constant shift: shl <2 x i32> by <4, 5>. Pre-SSE4.1 targets
; split/shuffle; vector-shift-capable targets use a single variable shift.
; NOTE(review): assertions are autogenerated by update_llc_test_checks.py -
; regenerate with that script rather than editing the -NEXT lines by hand.
1282 define <2 x i32> @constant_shift_v2i32(<2 x i32> %a) nounwind {
1283 ; SSE2-LABEL: constant_shift_v2i32:
1285 ; SSE2-NEXT: movdqa %xmm0, %xmm1
1286 ; SSE2-NEXT: pslld $4, %xmm1
1287 ; SSE2-NEXT: pslld $5, %xmm0
1288 ; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,2,3]
1289 ; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
1290 ; SSE2-NEXT: movdqa %xmm1, %xmm0
1293 ; SSE41-LABEL: constant_shift_v2i32:
1295 ; SSE41-NEXT: movdqa %xmm0, %xmm1
1296 ; SSE41-NEXT: pslld $5, %xmm1
1297 ; SSE41-NEXT: pslld $4, %xmm0
1298 ; SSE41-NEXT: pblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3],xmm0[4,5,6,7]
1301 ; AVX1-LABEL: constant_shift_v2i32:
1303 ; AVX1-NEXT: vpslld $5, %xmm0, %xmm1
1304 ; AVX1-NEXT: vpslld $4, %xmm0, %xmm0
1305 ; AVX1-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3],xmm0[4,5,6,7]
1308 ; AVX2-LABEL: constant_shift_v2i32:
1310 ; AVX2-NEXT: vpsllvd {{.*}}(%rip), %xmm0, %xmm0
1313 ; XOPAVX1-LABEL: constant_shift_v2i32:
1315 ; XOPAVX1-NEXT: vpshld {{.*}}(%rip), %xmm0, %xmm0
1316 ; XOPAVX1-NEXT: retq
1318 ; XOPAVX2-LABEL: constant_shift_v2i32:
1320 ; XOPAVX2-NEXT: vpsllvd {{.*}}(%rip), %xmm0, %xmm0
1321 ; XOPAVX2-NEXT: retq
1323 ; AVX512-LABEL: constant_shift_v2i32:
1325 ; AVX512-NEXT: vpsllvd {{.*}}(%rip), %xmm0, %xmm0
1328 ; AVX512VL-LABEL: constant_shift_v2i32:
1329 ; AVX512VL: # %bb.0:
1330 ; AVX512VL-NEXT: vpsllvd {{.*}}(%rip), %xmm0, %xmm0
1331 ; AVX512VL-NEXT: retq
1333 ; X32-SSE-LABEL: constant_shift_v2i32:
1335 ; X32-SSE-NEXT: movdqa %xmm0, %xmm1
1336 ; X32-SSE-NEXT: pslld $4, %xmm1
1337 ; X32-SSE-NEXT: pslld $5, %xmm0
1338 ; X32-SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,2,3]
1339 ; X32-SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
1340 ; X32-SSE-NEXT: movdqa %xmm1, %xmm0
1341 ; X32-SSE-NEXT: retl
1342 %shift = shl <2 x i32> %a, <i32 4, i32 5>
1343 ret <2 x i32> %shift
; Non-uniform constant shift: shl <4 x i16> by <0, 1, 2, 3>. Most targets
; lower this to a multiply by powers of two (pmullw); BW-capable AVX-512
; targets use the native 16-bit variable shift (vpsllvw).
; NOTE(review): assertions are autogenerated by update_llc_test_checks.py -
; regenerate with that script rather than editing the -NEXT lines by hand.
1346 define <4 x i16> @constant_shift_v4i16(<4 x i16> %a) nounwind {
1347 ; SSE-LABEL: constant_shift_v4i16:
1349 ; SSE-NEXT: pmullw {{.*}}(%rip), %xmm0
1352 ; AVX-LABEL: constant_shift_v4i16:
1354 ; AVX-NEXT: vpmullw {{.*}}(%rip), %xmm0, %xmm0
1357 ; XOP-LABEL: constant_shift_v4i16:
1359 ; XOP-NEXT: vpshlw {{.*}}(%rip), %xmm0, %xmm0
1362 ; AVX512DQ-LABEL: constant_shift_v4i16:
1363 ; AVX512DQ: # %bb.0:
1364 ; AVX512DQ-NEXT: vpmullw {{.*}}(%rip), %xmm0, %xmm0
1365 ; AVX512DQ-NEXT: retq
1367 ; AVX512BW-LABEL: constant_shift_v4i16:
1368 ; AVX512BW: # %bb.0:
1369 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
1370 ; AVX512BW-NEXT: vmovdqa {{.*#+}} xmm1 = <0,1,2,3,u,u,u,u>
1371 ; AVX512BW-NEXT: vpsllvw %zmm1, %zmm0, %zmm0
1372 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 killed $zmm0
1373 ; AVX512BW-NEXT: vzeroupper
1374 ; AVX512BW-NEXT: retq
1376 ; AVX512DQVL-LABEL: constant_shift_v4i16:
1377 ; AVX512DQVL: # %bb.0:
1378 ; AVX512DQVL-NEXT: vpmullw {{.*}}(%rip), %xmm0, %xmm0
1379 ; AVX512DQVL-NEXT: retq
1381 ; AVX512BWVL-LABEL: constant_shift_v4i16:
1382 ; AVX512BWVL: # %bb.0:
1383 ; AVX512BWVL-NEXT: vpsllvw {{.*}}(%rip), %xmm0, %xmm0
1384 ; AVX512BWVL-NEXT: retq
1386 ; X32-SSE-LABEL: constant_shift_v4i16:
1388 ; X32-SSE-NEXT: pmullw {{\.LCPI.*}}, %xmm0
1389 ; X32-SSE-NEXT: retl
1390 %shift = shl <4 x i16> %a, <i16 0, i16 1, i16 2, i16 3>
1391 ret <4 x i16> %shift
; Non-uniform constant shift: shl <2 x i16> by <2, 3>. Lowered as a
; power-of-two multiply, a pair of immediate shifts plus a blend, or a
; native 16-bit variable shift, depending on the target features.
; NOTE(review): assertions are autogenerated by update_llc_test_checks.py -
; regenerate with that script rather than editing the -NEXT lines by hand.
1394 define <2 x i16> @constant_shift_v2i16(<2 x i16> %a) nounwind {
1395 ; SSE2-LABEL: constant_shift_v2i16:
1397 ; SSE2-NEXT: pmullw {{.*}}(%rip), %xmm0
1400 ; SSE41-LABEL: constant_shift_v2i16:
1402 ; SSE41-NEXT: movdqa %xmm0, %xmm1
1403 ; SSE41-NEXT: psllw $3, %xmm1
1404 ; SSE41-NEXT: psllw $2, %xmm0
1405 ; SSE41-NEXT: pblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3,4,5,6,7]
1408 ; AVX-LABEL: constant_shift_v2i16:
1410 ; AVX-NEXT: vpsllw $3, %xmm0, %xmm1
1411 ; AVX-NEXT: vpsllw $2, %xmm0, %xmm0
1412 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3,4,5,6,7]
1415 ; XOP-LABEL: constant_shift_v2i16:
1417 ; XOP-NEXT: vpshlw {{.*}}(%rip), %xmm0, %xmm0
1420 ; AVX512DQ-LABEL: constant_shift_v2i16:
1421 ; AVX512DQ: # %bb.0:
1422 ; AVX512DQ-NEXT: vpsllw $3, %xmm0, %xmm1
1423 ; AVX512DQ-NEXT: vpsllw $2, %xmm0, %xmm0
1424 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3,4,5,6,7]
1425 ; AVX512DQ-NEXT: retq
1427 ; AVX512BW-LABEL: constant_shift_v2i16:
1428 ; AVX512BW: # %bb.0:
1429 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
1430 ; AVX512BW-NEXT: vmovdqa {{.*#+}} xmm1 = <2,3,u,u,u,u,u,u>
1431 ; AVX512BW-NEXT: vpsllvw %zmm1, %zmm0, %zmm0
1432 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 killed $zmm0
1433 ; AVX512BW-NEXT: vzeroupper
1434 ; AVX512BW-NEXT: retq
1436 ; AVX512DQVL-LABEL: constant_shift_v2i16:
1437 ; AVX512DQVL: # %bb.0:
1438 ; AVX512DQVL-NEXT: vpsllw $3, %xmm0, %xmm1
1439 ; AVX512DQVL-NEXT: vpsllw $2, %xmm0, %xmm0
1440 ; AVX512DQVL-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3,4,5,6,7]
1441 ; AVX512DQVL-NEXT: retq
1443 ; AVX512BWVL-LABEL: constant_shift_v2i16:
1444 ; AVX512BWVL: # %bb.0:
1445 ; AVX512BWVL-NEXT: vpsllvw {{.*}}(%rip), %xmm0, %xmm0
1446 ; AVX512BWVL-NEXT: retq
1448 ; X32-SSE-LABEL: constant_shift_v2i16:
1450 ; X32-SSE-NEXT: pmullw {{\.LCPI.*}}, %xmm0
1451 ; X32-SSE-NEXT: retl
1452 %shift = shl <2 x i16> %a, <i16 2, i16 3>
1453 ret <2 x i16> %shift
; Non-uniform constant shift: shl <8 x i8> by <0..7>. x86 has no 8-bit
; vector shifts, so elements are widened (to i16 or i32), shifted or
; multiplied, then narrowed back (pack / truncate).
; NOTE(review): assertions are autogenerated by update_llc_test_checks.py -
; regenerate with that script rather than editing the -NEXT lines by hand.
; NOTE(review): this function's 'ret' line appears to be missing from this
; excerpt - confirm against the full generated file.
1456 define <8 x i8> @constant_shift_v8i8(<8 x i8> %a) nounwind {
1457 ; SSE2-LABEL: constant_shift_v8i8:
1459 ; SSE2-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1460 ; SSE2-NEXT: pmullw {{.*}}(%rip), %xmm0
1461 ; SSE2-NEXT: pand {{.*}}(%rip), %xmm0
1462 ; SSE2-NEXT: pxor %xmm1, %xmm1
1463 ; SSE2-NEXT: packuswb %xmm1, %xmm0
1466 ; SSE41-LABEL: constant_shift_v8i8:
1468 ; SSE41-NEXT: pmovzxbw {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
1469 ; SSE41-NEXT: pmullw {{.*}}(%rip), %xmm0
1470 ; SSE41-NEXT: pand {{.*}}(%rip), %xmm0
1471 ; SSE41-NEXT: pxor %xmm1, %xmm1
1472 ; SSE41-NEXT: packuswb %xmm1, %xmm0
1475 ; AVX1-LABEL: constant_shift_v8i8:
1477 ; AVX1-NEXT: vpmovzxbw {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
1478 ; AVX1-NEXT: vpmullw {{.*}}(%rip), %xmm0, %xmm0
1479 ; AVX1-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
1480 ; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
1481 ; AVX1-NEXT: vpackuswb %xmm1, %xmm0, %xmm0
1484 ; AVX2-LABEL: constant_shift_v8i8:
1486 ; AVX2-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1487 ; AVX2-NEXT: vpmullw {{.*}}(%rip), %ymm0, %ymm0
1488 ; AVX2-NEXT: vpand {{.*}}(%rip), %ymm0, %ymm0
1489 ; AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
1490 ; AVX2-NEXT: vpackuswb %xmm1, %xmm0, %xmm0
1491 ; AVX2-NEXT: vzeroupper
1494 ; XOP-LABEL: constant_shift_v8i8:
1496 ; XOP-NEXT: vpshlb {{.*}}(%rip), %xmm0, %xmm0
1499 ; AVX512DQ-LABEL: constant_shift_v8i8:
1500 ; AVX512DQ: # %bb.0:
1501 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
1502 ; AVX512DQ-NEXT: vpsllvd {{.*}}(%rip), %zmm0, %zmm0
1503 ; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
1504 ; AVX512DQ-NEXT: vzeroupper
1505 ; AVX512DQ-NEXT: retq
1507 ; AVX512BW-LABEL: constant_shift_v8i8:
1508 ; AVX512BW: # %bb.0:
1509 ; AVX512BW-NEXT: vmovdqa {{.*#+}} ymm1 = [0,1,2,3,4,5,6,7,0,0,0,0,0,0,0,0]
1510 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1511 ; AVX512BW-NEXT: vpsllvw %zmm1, %zmm0, %zmm0
1512 ; AVX512BW-NEXT: vpmovwb %zmm0, %ymm0
1513 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 killed $ymm0
1514 ; AVX512BW-NEXT: vzeroupper
1515 ; AVX512BW-NEXT: retq
1517 ; AVX512DQVL-LABEL: constant_shift_v8i8:
1518 ; AVX512DQVL: # %bb.0:
1519 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
1520 ; AVX512DQVL-NEXT: vpsllvd {{.*}}(%rip), %zmm0, %zmm0
1521 ; AVX512DQVL-NEXT: vpmovdb %zmm0, %xmm0
1522 ; AVX512DQVL-NEXT: vzeroupper
1523 ; AVX512DQVL-NEXT: retq
1525 ; AVX512BWVL-LABEL: constant_shift_v8i8:
1526 ; AVX512BWVL: # %bb.0:
1527 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1528 ; AVX512BWVL-NEXT: vpsllvw {{.*}}(%rip), %ymm0, %ymm0
1529 ; AVX512BWVL-NEXT: vpmovwb %ymm0, %xmm0
1530 ; AVX512BWVL-NEXT: vzeroupper
1531 ; AVX512BWVL-NEXT: retq
1533 ; X32-SSE-LABEL: constant_shift_v8i8:
1535 ; X32-SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1536 ; X32-SSE-NEXT: pmullw {{\.LCPI.*}}, %xmm0
1537 ; X32-SSE-NEXT: pand {{\.LCPI.*}}, %xmm0
1538 ; X32-SSE-NEXT: pxor %xmm1, %xmm1
1539 ; X32-SSE-NEXT: packuswb %xmm1, %xmm0
1540 ; X32-SSE-NEXT: retl
1541 %shift = shl <8 x i8> %a, <i8 0, i8 1, i8 2, i8 3, i8 4, i8 5, i8 6, i8 7>
; Non-uniform constant shift: shl <4 x i8> by <0, 1, 2, 3>. Same widen /
; shift-or-multiply / narrow strategy as the v8i8 case, since x86 lacks
; 8-bit vector shifts.
; NOTE(review): assertions are autogenerated by update_llc_test_checks.py -
; regenerate with that script rather than editing the -NEXT lines by hand.
; NOTE(review): this function's 'ret' line appears to be missing from this
; excerpt - confirm against the full generated file.
1545 define <4 x i8> @constant_shift_v4i8(<4 x i8> %a) nounwind {
1546 ; SSE2-LABEL: constant_shift_v4i8:
1548 ; SSE2-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1549 ; SSE2-NEXT: pmullw {{.*}}(%rip), %xmm0
1550 ; SSE2-NEXT: pand {{.*}}(%rip), %xmm0
1551 ; SSE2-NEXT: pxor %xmm1, %xmm1
1552 ; SSE2-NEXT: packuswb %xmm1, %xmm0
1555 ; SSE41-LABEL: constant_shift_v4i8:
1557 ; SSE41-NEXT: pmovzxbw {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
1558 ; SSE41-NEXT: pmullw {{.*}}(%rip), %xmm0
1559 ; SSE41-NEXT: pand {{.*}}(%rip), %xmm0
1560 ; SSE41-NEXT: pxor %xmm1, %xmm1
1561 ; SSE41-NEXT: packuswb %xmm1, %xmm0
1564 ; AVX1-LABEL: constant_shift_v4i8:
1566 ; AVX1-NEXT: vpmovzxbw {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
1567 ; AVX1-NEXT: vpmullw {{.*}}(%rip), %xmm0, %xmm0
1568 ; AVX1-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
1569 ; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
1570 ; AVX1-NEXT: vpackuswb %xmm1, %xmm0, %xmm0
1573 ; AVX2-LABEL: constant_shift_v4i8:
1575 ; AVX2-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1576 ; AVX2-NEXT: vpmullw {{.*}}(%rip), %ymm0, %ymm0
1577 ; AVX2-NEXT: vpand {{.*}}(%rip), %ymm0, %ymm0
1578 ; AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
1579 ; AVX2-NEXT: vpackuswb %xmm1, %xmm0, %xmm0
1580 ; AVX2-NEXT: vzeroupper
1583 ; XOP-LABEL: constant_shift_v4i8:
1585 ; XOP-NEXT: vpshlb {{.*}}(%rip), %xmm0, %xmm0
1588 ; AVX512DQ-LABEL: constant_shift_v4i8:
1589 ; AVX512DQ: # %bb.0:
1590 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
1591 ; AVX512DQ-NEXT: vpsllvd {{.*}}(%rip), %zmm0, %zmm0
1592 ; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
1593 ; AVX512DQ-NEXT: vzeroupper
1594 ; AVX512DQ-NEXT: retq
1596 ; AVX512BW-LABEL: constant_shift_v4i8:
1597 ; AVX512BW: # %bb.0:
1598 ; AVX512BW-NEXT: vmovdqa {{.*#+}} ymm1 = [0,1,2,3,0,0,0,0,0,0,0,0,0,0,0,0]
1599 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1600 ; AVX512BW-NEXT: vpsllvw %zmm1, %zmm0, %zmm0
1601 ; AVX512BW-NEXT: vpmovwb %zmm0, %ymm0
1602 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 killed $ymm0
1603 ; AVX512BW-NEXT: vzeroupper
1604 ; AVX512BW-NEXT: retq
1606 ; AVX512DQVL-LABEL: constant_shift_v4i8:
1607 ; AVX512DQVL: # %bb.0:
1608 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
1609 ; AVX512DQVL-NEXT: vpsllvd {{.*}}(%rip), %zmm0, %zmm0
1610 ; AVX512DQVL-NEXT: vpmovdb %zmm0, %xmm0
1611 ; AVX512DQVL-NEXT: vzeroupper
1612 ; AVX512DQVL-NEXT: retq
1614 ; AVX512BWVL-LABEL: constant_shift_v4i8:
1615 ; AVX512BWVL: # %bb.0:
1616 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1617 ; AVX512BWVL-NEXT: vpsllvw {{.*}}(%rip), %ymm0, %ymm0
1618 ; AVX512BWVL-NEXT: vpmovwb %ymm0, %xmm0
1619 ; AVX512BWVL-NEXT: vzeroupper
1620 ; AVX512BWVL-NEXT: retq
1622 ; X32-SSE-LABEL: constant_shift_v4i8:
1624 ; X32-SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1625 ; X32-SSE-NEXT: pmullw {{\.LCPI.*}}, %xmm0
1626 ; X32-SSE-NEXT: pand {{\.LCPI.*}}, %xmm0
1627 ; X32-SSE-NEXT: pxor %xmm1, %xmm1
1628 ; X32-SSE-NEXT: packuswb %xmm1, %xmm0
1629 ; X32-SSE-NEXT: retl
1630 %shift = shl <4 x i8> %a, <i8 0, i8 1, i8 2, i8 3>
; Non-uniform constant shift: shl <2 x i8> by <2, 3>. Same widen /
; shift-or-multiply / narrow strategy as the larger i8 vector cases.
; NOTE(review): assertions are autogenerated by update_llc_test_checks.py -
; regenerate with that script rather than editing the -NEXT lines by hand.
; NOTE(review): this function's 'ret' line appears to be missing from this
; excerpt - confirm against the full generated file.
1634 define <2 x i8> @constant_shift_v2i8(<2 x i8> %a) nounwind {
1635 ; SSE2-LABEL: constant_shift_v2i8:
1637 ; SSE2-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1638 ; SSE2-NEXT: pmullw {{.*}}(%rip), %xmm0
1639 ; SSE2-NEXT: pand {{.*}}(%rip), %xmm0
1640 ; SSE2-NEXT: pxor %xmm1, %xmm1
1641 ; SSE2-NEXT: packuswb %xmm1, %xmm0
1644 ; SSE41-LABEL: constant_shift_v2i8:
1646 ; SSE41-NEXT: pmovzxbw {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
1647 ; SSE41-NEXT: pmullw {{.*}}(%rip), %xmm0
1648 ; SSE41-NEXT: pand {{.*}}(%rip), %xmm0
1649 ; SSE41-NEXT: pxor %xmm1, %xmm1
1650 ; SSE41-NEXT: packuswb %xmm1, %xmm0
1653 ; AVX1-LABEL: constant_shift_v2i8:
1655 ; AVX1-NEXT: vpmovzxbw {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
1656 ; AVX1-NEXT: vpmullw {{.*}}(%rip), %xmm0, %xmm0
1657 ; AVX1-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
1658 ; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
1659 ; AVX1-NEXT: vpackuswb %xmm1, %xmm0, %xmm0
1662 ; AVX2-LABEL: constant_shift_v2i8:
1664 ; AVX2-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1665 ; AVX2-NEXT: vpmullw {{.*}}(%rip), %ymm0, %ymm0
1666 ; AVX2-NEXT: vpand {{.*}}(%rip), %ymm0, %ymm0
1667 ; AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
1668 ; AVX2-NEXT: vpackuswb %xmm1, %xmm0, %xmm0
1669 ; AVX2-NEXT: vzeroupper
1672 ; XOP-LABEL: constant_shift_v2i8:
1674 ; XOP-NEXT: vpshlb {{.*}}(%rip), %xmm0, %xmm0
1677 ; AVX512DQ-LABEL: constant_shift_v2i8:
1678 ; AVX512DQ: # %bb.0:
1679 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
1680 ; AVX512DQ-NEXT: vpsllvd {{.*}}(%rip), %zmm0, %zmm0
1681 ; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
1682 ; AVX512DQ-NEXT: vzeroupper
1683 ; AVX512DQ-NEXT: retq
1685 ; AVX512BW-LABEL: constant_shift_v2i8:
1686 ; AVX512BW: # %bb.0:
1687 ; AVX512BW-NEXT: vmovdqa {{.*#+}} ymm1 = [2,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
1688 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1689 ; AVX512BW-NEXT: vpsllvw %zmm1, %zmm0, %zmm0
1690 ; AVX512BW-NEXT: vpmovwb %zmm0, %ymm0
1691 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 killed $ymm0
1692 ; AVX512BW-NEXT: vzeroupper
1693 ; AVX512BW-NEXT: retq
1695 ; AVX512DQVL-LABEL: constant_shift_v2i8:
1696 ; AVX512DQVL: # %bb.0:
1697 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
1698 ; AVX512DQVL-NEXT: vpsllvd {{.*}}(%rip), %zmm0, %zmm0
1699 ; AVX512DQVL-NEXT: vpmovdb %zmm0, %xmm0
1700 ; AVX512DQVL-NEXT: vzeroupper
1701 ; AVX512DQVL-NEXT: retq
1703 ; AVX512BWVL-LABEL: constant_shift_v2i8:
1704 ; AVX512BWVL: # %bb.0:
1705 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1706 ; AVX512BWVL-NEXT: vpsllvw {{.*}}(%rip), %ymm0, %ymm0
1707 ; AVX512BWVL-NEXT: vpmovwb %ymm0, %xmm0
1708 ; AVX512BWVL-NEXT: vzeroupper
1709 ; AVX512BWVL-NEXT: retq
1711 ; X32-SSE-LABEL: constant_shift_v2i8:
1713 ; X32-SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1714 ; X32-SSE-NEXT: pmullw {{\.LCPI.*}}, %xmm0
1715 ; X32-SSE-NEXT: pand {{\.LCPI.*}}, %xmm0
1716 ; X32-SSE-NEXT: pxor %xmm1, %xmm1
1717 ; X32-SSE-NEXT: packuswb %xmm1, %xmm0
1718 ; X32-SSE-NEXT: retl
1719 %shift = shl <2 x i8> %a, <i8 2, i8 3>
1724 ; Uniform Constant Shifts
; Uniform (splat) constant shift: shl <2 x i32> by 5. Every target lowers
; this to a single immediate dword shift.
; NOTE(review): assertions are autogenerated by update_llc_test_checks.py -
; regenerate with that script rather than editing the -NEXT lines by hand.
1727 define <2 x i32> @splatconstant_shift_v2i32(<2 x i32> %a) nounwind {
1728 ; SSE-LABEL: splatconstant_shift_v2i32:
1730 ; SSE-NEXT: pslld $5, %xmm0
1733 ; AVX-LABEL: splatconstant_shift_v2i32:
1735 ; AVX-NEXT: vpslld $5, %xmm0, %xmm0
1738 ; XOP-LABEL: splatconstant_shift_v2i32:
1740 ; XOP-NEXT: vpslld $5, %xmm0, %xmm0
1743 ; AVX512-LABEL: splatconstant_shift_v2i32:
1745 ; AVX512-NEXT: vpslld $5, %xmm0, %xmm0
1748 ; AVX512VL-LABEL: splatconstant_shift_v2i32:
1749 ; AVX512VL: # %bb.0:
1750 ; AVX512VL-NEXT: vpslld $5, %xmm0, %xmm0
1751 ; AVX512VL-NEXT: retq
1753 ; X32-SSE-LABEL: splatconstant_shift_v2i32:
1755 ; X32-SSE-NEXT: pslld $5, %xmm0
1756 ; X32-SSE-NEXT: retl
1757 %shift = shl <2 x i32> %a, <i32 5, i32 5>
1758 ret <2 x i32> %shift
; Uniform (splat) constant shift: shl <4 x i16> by 3. Every target lowers
; this to a single immediate word shift.
; NOTE(review): assertions are autogenerated by update_llc_test_checks.py -
; regenerate with that script rather than editing the -NEXT lines by hand.
1761 define <4 x i16> @splatconstant_shift_v4i16(<4 x i16> %a) nounwind {
1762 ; SSE-LABEL: splatconstant_shift_v4i16:
1764 ; SSE-NEXT: psllw $3, %xmm0
1767 ; AVX-LABEL: splatconstant_shift_v4i16:
1769 ; AVX-NEXT: vpsllw $3, %xmm0, %xmm0
1772 ; XOP-LABEL: splatconstant_shift_v4i16:
1774 ; XOP-NEXT: vpsllw $3, %xmm0, %xmm0
1777 ; AVX512-LABEL: splatconstant_shift_v4i16:
1779 ; AVX512-NEXT: vpsllw $3, %xmm0, %xmm0
1782 ; AVX512VL-LABEL: splatconstant_shift_v4i16:
1783 ; AVX512VL: # %bb.0:
1784 ; AVX512VL-NEXT: vpsllw $3, %xmm0, %xmm0
1785 ; AVX512VL-NEXT: retq
1787 ; X32-SSE-LABEL: splatconstant_shift_v4i16:
1789 ; X32-SSE-NEXT: psllw $3, %xmm0
1790 ; X32-SSE-NEXT: retl
1791 %shift = shl <4 x i16> %a, <i16 3, i16 3, i16 3, i16 3>
1792 ret <4 x i16> %shift
; Uniform (splat) constant shift: shl <2 x i16> by 3. Every target lowers
; this to a single immediate word shift.
; NOTE(review): assertions are autogenerated by update_llc_test_checks.py -
; regenerate with that script rather than editing the -NEXT lines by hand.
1795 define <2 x i16> @splatconstant_shift_v2i16(<2 x i16> %a) nounwind {
1796 ; SSE-LABEL: splatconstant_shift_v2i16:
1798 ; SSE-NEXT: psllw $3, %xmm0
1801 ; AVX-LABEL: splatconstant_shift_v2i16:
1803 ; AVX-NEXT: vpsllw $3, %xmm0, %xmm0
1806 ; XOP-LABEL: splatconstant_shift_v2i16:
1808 ; XOP-NEXT: vpsllw $3, %xmm0, %xmm0
1811 ; AVX512-LABEL: splatconstant_shift_v2i16:
1813 ; AVX512-NEXT: vpsllw $3, %xmm0, %xmm0
1816 ; AVX512VL-LABEL: splatconstant_shift_v2i16:
1817 ; AVX512VL: # %bb.0:
1818 ; AVX512VL-NEXT: vpsllw $3, %xmm0, %xmm0
1819 ; AVX512VL-NEXT: retq
1821 ; X32-SSE-LABEL: splatconstant_shift_v2i16:
1823 ; X32-SSE-NEXT: psllw $3, %xmm0
1824 ; X32-SSE-NEXT: retl
1825 %shift = shl <2 x i16> %a, <i16 3, i16 3>
1826 ret <2 x i16> %shift
; Uniform (splat) constant shift: shl <8 x i8> by 3. With no 8-bit vector
; shift available, targets do a word shift then mask off the bits shifted
; in across byte boundaries (psllw + pand); XOP has a true byte shift.
; NOTE(review): assertions are autogenerated by update_llc_test_checks.py -
; regenerate with that script rather than editing the -NEXT lines by hand.
; NOTE(review): this function's 'ret' line appears to be missing from this
; excerpt - confirm against the full generated file.
1829 define <8 x i8> @splatconstant_shift_v8i8(<8 x i8> %a) nounwind {
1830 ; SSE-LABEL: splatconstant_shift_v8i8:
1832 ; SSE-NEXT: psllw $3, %xmm0
1833 ; SSE-NEXT: pand {{.*}}(%rip), %xmm0
1836 ; AVX-LABEL: splatconstant_shift_v8i8:
1838 ; AVX-NEXT: vpsllw $3, %xmm0, %xmm0
1839 ; AVX-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
1842 ; XOP-LABEL: splatconstant_shift_v8i8:
1844 ; XOP-NEXT: vpshlb {{.*}}(%rip), %xmm0, %xmm0
1847 ; AVX512-LABEL: splatconstant_shift_v8i8:
1849 ; AVX512-NEXT: vpsllw $3, %xmm0, %xmm0
1850 ; AVX512-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
1853 ; AVX512VL-LABEL: splatconstant_shift_v8i8:
1854 ; AVX512VL: # %bb.0:
1855 ; AVX512VL-NEXT: vpsllw $3, %xmm0, %xmm0
1856 ; AVX512VL-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
1857 ; AVX512VL-NEXT: retq
1859 ; X32-SSE-LABEL: splatconstant_shift_v8i8:
1861 ; X32-SSE-NEXT: psllw $3, %xmm0
1862 ; X32-SSE-NEXT: pand {{\.LCPI.*}}, %xmm0
1863 ; X32-SSE-NEXT: retl
1864 %shift = shl <8 x i8> %a, <i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3>
; Uniform (splat) constant shift: shl <4 x i8> by 3. Same word-shift-plus-
; mask lowering as the v8i8 case; XOP uses its native byte shift.
; NOTE(review): assertions are autogenerated by update_llc_test_checks.py -
; regenerate with that script rather than editing the -NEXT lines by hand.
; NOTE(review): this function's 'ret' line appears to be missing from this
; excerpt - confirm against the full generated file.
1868 define <4 x i8> @splatconstant_shift_v4i8(<4 x i8> %a) nounwind {
1869 ; SSE-LABEL: splatconstant_shift_v4i8:
1871 ; SSE-NEXT: psllw $3, %xmm0
1872 ; SSE-NEXT: pand {{.*}}(%rip), %xmm0
1875 ; AVX-LABEL: splatconstant_shift_v4i8:
1877 ; AVX-NEXT: vpsllw $3, %xmm0, %xmm0
1878 ; AVX-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
1881 ; XOP-LABEL: splatconstant_shift_v4i8:
1883 ; XOP-NEXT: vpshlb {{.*}}(%rip), %xmm0, %xmm0
1886 ; AVX512-LABEL: splatconstant_shift_v4i8:
1888 ; AVX512-NEXT: vpsllw $3, %xmm0, %xmm0
1889 ; AVX512-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
1892 ; AVX512VL-LABEL: splatconstant_shift_v4i8:
1893 ; AVX512VL: # %bb.0:
1894 ; AVX512VL-NEXT: vpsllw $3, %xmm0, %xmm0
1895 ; AVX512VL-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
1896 ; AVX512VL-NEXT: retq
1898 ; X32-SSE-LABEL: splatconstant_shift_v4i8:
1900 ; X32-SSE-NEXT: psllw $3, %xmm0
1901 ; X32-SSE-NEXT: pand {{\.LCPI.*}}, %xmm0
1902 ; X32-SSE-NEXT: retl
1903 %shift = shl <4 x i8> %a, <i8 3, i8 3, i8 3, i8 3>
1907 define <2 x i8> @splatconstant_shift_v2i8(<2 x i8> %a) nounwind {
1908 ; SSE-LABEL: splatconstant_shift_v2i8:
1910 ; SSE-NEXT: psllw $3, %xmm0
1911 ; SSE-NEXT: pand {{.*}}(%rip), %xmm0
1914 ; AVX-LABEL: splatconstant_shift_v2i8:
1916 ; AVX-NEXT: vpsllw $3, %xmm0, %xmm0
1917 ; AVX-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
1920 ; XOP-LABEL: splatconstant_shift_v2i8:
1922 ; XOP-NEXT: vpshlb {{.*}}(%rip), %xmm0, %xmm0
1925 ; AVX512-LABEL: splatconstant_shift_v2i8:
1927 ; AVX512-NEXT: vpsllw $3, %xmm0, %xmm0
1928 ; AVX512-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
1931 ; AVX512VL-LABEL: splatconstant_shift_v2i8:
1932 ; AVX512VL: # %bb.0:
1933 ; AVX512VL-NEXT: vpsllw $3, %xmm0, %xmm0
1934 ; AVX512VL-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
1935 ; AVX512VL-NEXT: retq
1937 ; X32-SSE-LABEL: splatconstant_shift_v2i8:
1939 ; X32-SSE-NEXT: psllw $3, %xmm0
1940 ; X32-SSE-NEXT: pand {{\.LCPI.*}}, %xmm0
1941 ; X32-SSE-NEXT: retl
1942 %shift = shl <2 x i8> %a, <i8 3, i8 3>