1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+sse2 | FileCheck %s --check-prefix=ALL --check-prefix=SSE --check-prefix=SSE2
3 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+sse4.1 | FileCheck %s --check-prefix=ALL --check-prefix=SSE --check-prefix=SSE41
4 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx | FileCheck %s --check-prefix=ALL --check-prefix=AVX --check-prefix=AVX1
5 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx2 | FileCheck %s --check-prefix=ALL --check-prefix=AVX --check-prefix=AVX2
6 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+xop,+avx | FileCheck %s --check-prefix=ALL --check-prefix=XOP --check-prefix=XOPAVX1
7 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+xop,+avx2 | FileCheck %s --check-prefix=ALL --check-prefix=XOP --check-prefix=XOPAVX2
8 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512dq | FileCheck %s --check-prefix=ALL --check-prefix=AVX512 --check-prefix=AVX512DQ
9 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512bw | FileCheck %s --check-prefix=ALL --check-prefix=AVX512 --check-prefix=AVX512BW
10 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512dq,+avx512vl | FileCheck %s --check-prefix=ALL --check-prefix=AVX512VL --check-prefix=AVX512DQVL
11 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512bw,+avx512vl | FileCheck %s --check-prefix=ALL --check-prefix=AVX512VL --check-prefix=AVX512BWVL
13 ; Just one 32-bit run to make sure we do reasonable things for these sub-128-bit vector shifts.
14 ; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+sse2 | FileCheck %s --check-prefix=ALL --check-prefix=X32-SSE --check-prefix=X32-SSE2
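;
; Variable Shifts
;

; v2i32: there is no per-element i32 shift before AVX2, so the SSE2/SSE41/AVX1
; paths below extract each lane's count, perform one PSRLD per count, and blend
; the partial results back together. AVX2/AVX512 use a single VPSRLVD; XOPAVX1
; negates the counts and uses VPSHLD (XOP shifts left by a signed count).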
20 define <2 x i32> @var_shift_v2i32(<2 x i32> %a, <2 x i32> %b) nounwind {
21 ; SSE2-LABEL: var_shift_v2i32:
23 ; SSE2-NEXT: pshuflw {{.*#+}} xmm2 = xmm1[2,3,3,3,4,5,6,7]
24 ; SSE2-NEXT: movdqa %xmm0, %xmm3
25 ; SSE2-NEXT: psrld %xmm2, %xmm3
26 ; SSE2-NEXT: pshuflw {{.*#+}} xmm4 = xmm1[0,1,1,1,4,5,6,7]
27 ; SSE2-NEXT: movdqa %xmm0, %xmm2
28 ; SSE2-NEXT: psrld %xmm4, %xmm2
29 ; SSE2-NEXT: punpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm3[0]
30 ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,3,0,1]
31 ; SSE2-NEXT: pshuflw {{.*#+}} xmm3 = xmm1[2,3,3,3,4,5,6,7]
32 ; SSE2-NEXT: movdqa %xmm0, %xmm4
33 ; SSE2-NEXT: psrld %xmm3, %xmm4
34 ; SSE2-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,1,1,1,4,5,6,7]
35 ; SSE2-NEXT: psrld %xmm1, %xmm0
36 ; SSE2-NEXT: punpckhqdq {{.*#+}} xmm0 = xmm0[1],xmm4[1]
37 ; SSE2-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,3],xmm0[0,3]
38 ; SSE2-NEXT: movaps %xmm2, %xmm0
41 ; SSE41-LABEL: var_shift_v2i32:
43 ; SSE41-NEXT: pshuflw {{.*#+}} xmm2 = xmm1[2,3,3,3,4,5,6,7]
44 ; SSE41-NEXT: movdqa %xmm0, %xmm3
45 ; SSE41-NEXT: psrld %xmm2, %xmm3
46 ; SSE41-NEXT: pshufd {{.*#+}} xmm2 = xmm1[2,3,0,1]
47 ; SSE41-NEXT: pshuflw {{.*#+}} xmm4 = xmm2[2,3,3,3,4,5,6,7]
48 ; SSE41-NEXT: movdqa %xmm0, %xmm5
49 ; SSE41-NEXT: psrld %xmm4, %xmm5
50 ; SSE41-NEXT: pblendw {{.*#+}} xmm5 = xmm3[0,1,2,3],xmm5[4,5,6,7]
51 ; SSE41-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,1,1,1,4,5,6,7]
52 ; SSE41-NEXT: movdqa %xmm0, %xmm3
53 ; SSE41-NEXT: psrld %xmm1, %xmm3
54 ; SSE41-NEXT: pshuflw {{.*#+}} xmm1 = xmm2[0,1,1,1,4,5,6,7]
55 ; SSE41-NEXT: psrld %xmm1, %xmm0
56 ; SSE41-NEXT: pblendw {{.*#+}} xmm0 = xmm3[0,1,2,3],xmm0[4,5,6,7]
57 ; SSE41-NEXT: pblendw {{.*#+}} xmm0 = xmm0[0,1],xmm5[2,3],xmm0[4,5],xmm5[6,7]
60 ; AVX1-LABEL: var_shift_v2i32:
62 ; AVX1-NEXT: vpsrldq {{.*#+}} xmm2 = xmm1[12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
63 ; AVX1-NEXT: vpsrld %xmm2, %xmm0, %xmm2
64 ; AVX1-NEXT: vpsrlq $32, %xmm1, %xmm3
65 ; AVX1-NEXT: vpsrld %xmm3, %xmm0, %xmm3
66 ; AVX1-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1,2,3],xmm2[4,5,6,7]
67 ; AVX1-NEXT: vpxor %xmm3, %xmm3, %xmm3
68 ; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm3 = xmm1[2],xmm3[2],xmm1[3],xmm3[3]
69 ; AVX1-NEXT: vpsrld %xmm3, %xmm0, %xmm3
70 ; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm1 = xmm1[0],zero,xmm1[1],zero
71 ; AVX1-NEXT: vpsrld %xmm1, %xmm0, %xmm0
72 ; AVX1-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3],xmm3[4,5,6,7]
73 ; AVX1-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm2[2,3],xmm0[4,5],xmm2[6,7]
76 ; AVX2-LABEL: var_shift_v2i32:
78 ; AVX2-NEXT: vpsrlvd %xmm1, %xmm0, %xmm0
81 ; XOPAVX1-LABEL: var_shift_v2i32:
83 ; XOPAVX1-NEXT: vpxor %xmm2, %xmm2, %xmm2
84 ; XOPAVX1-NEXT: vpsubd %xmm1, %xmm2, %xmm1
85 ; XOPAVX1-NEXT: vpshld %xmm1, %xmm0, %xmm0
88 ; XOPAVX2-LABEL: var_shift_v2i32:
90 ; XOPAVX2-NEXT: vpsrlvd %xmm1, %xmm0, %xmm0
93 ; AVX512-LABEL: var_shift_v2i32:
95 ; AVX512-NEXT: vpsrlvd %xmm1, %xmm0, %xmm0
98 ; AVX512VL-LABEL: var_shift_v2i32:
100 ; AVX512VL-NEXT: vpsrlvd %xmm1, %xmm0, %xmm0
101 ; AVX512VL-NEXT: retq
103 ; X32-SSE-LABEL: var_shift_v2i32:
105 ; X32-SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm1[2,3,3,3,4,5,6,7]
106 ; X32-SSE-NEXT: movdqa %xmm0, %xmm3
107 ; X32-SSE-NEXT: psrld %xmm2, %xmm3
108 ; X32-SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm1[0,1,1,1,4,5,6,7]
109 ; X32-SSE-NEXT: movdqa %xmm0, %xmm2
110 ; X32-SSE-NEXT: psrld %xmm4, %xmm2
111 ; X32-SSE-NEXT: punpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm3[0]
112 ; X32-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,3,0,1]
113 ; X32-SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm1[2,3,3,3,4,5,6,7]
114 ; X32-SSE-NEXT: movdqa %xmm0, %xmm4
115 ; X32-SSE-NEXT: psrld %xmm3, %xmm4
116 ; X32-SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,1,1,1,4,5,6,7]
117 ; X32-SSE-NEXT: psrld %xmm1, %xmm0
118 ; X32-SSE-NEXT: punpckhqdq {{.*#+}} xmm0 = xmm0[1],xmm4[1]
119 ; X32-SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,3],xmm0[0,3]
120 ; X32-SSE-NEXT: movaps %xmm2, %xmm0
122 %shift = lshr <2 x i32> %a, %b
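; v4i16: no variable per-element i16 shift exists before AVX512BW/XOP. SSE2
; tests one bit of the count at a time: psllw $12 moves the bit of interest
; into the sign bit, psraw $15 turns it into a lane mask, and the mask selects
; between the current value and a copy shifted right by 8/4/2/1, with paddw
; advancing to the next bit. SSE41/AVX1 do the same selection with pblendvb.
; AVX2 and AVX512DQ(VL) zero-extend to i32 lanes, use vpsrlvd and truncate
; back; AVX512BW(VL) use vpsrlvw directly; XOP negates the counts and uses
; vpshlw.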
126 define <4 x i16> @var_shift_v4i16(<4 x i16> %a, <4 x i16> %b) nounwind {
127 ; SSE2-LABEL: var_shift_v4i16:
129 ; SSE2-NEXT: psllw $12, %xmm1
130 ; SSE2-NEXT: movdqa %xmm1, %xmm2
131 ; SSE2-NEXT: psraw $15, %xmm2
132 ; SSE2-NEXT: movdqa %xmm2, %xmm3
133 ; SSE2-NEXT: pandn %xmm0, %xmm3
134 ; SSE2-NEXT: psrlw $8, %xmm0
135 ; SSE2-NEXT: pand %xmm2, %xmm0
136 ; SSE2-NEXT: por %xmm3, %xmm0
137 ; SSE2-NEXT: paddw %xmm1, %xmm1
138 ; SSE2-NEXT: movdqa %xmm1, %xmm2
139 ; SSE2-NEXT: psraw $15, %xmm2
140 ; SSE2-NEXT: movdqa %xmm2, %xmm3
141 ; SSE2-NEXT: pandn %xmm0, %xmm3
142 ; SSE2-NEXT: psrlw $4, %xmm0
143 ; SSE2-NEXT: pand %xmm2, %xmm0
144 ; SSE2-NEXT: por %xmm3, %xmm0
145 ; SSE2-NEXT: paddw %xmm1, %xmm1
146 ; SSE2-NEXT: movdqa %xmm1, %xmm2
147 ; SSE2-NEXT: psraw $15, %xmm2
148 ; SSE2-NEXT: movdqa %xmm2, %xmm3
149 ; SSE2-NEXT: pandn %xmm0, %xmm3
150 ; SSE2-NEXT: psrlw $2, %xmm0
151 ; SSE2-NEXT: pand %xmm2, %xmm0
152 ; SSE2-NEXT: por %xmm3, %xmm0
153 ; SSE2-NEXT: paddw %xmm1, %xmm1
154 ; SSE2-NEXT: psraw $15, %xmm1
155 ; SSE2-NEXT: movdqa %xmm1, %xmm2
156 ; SSE2-NEXT: pandn %xmm0, %xmm2
157 ; SSE2-NEXT: psrlw $1, %xmm0
158 ; SSE2-NEXT: pand %xmm1, %xmm0
159 ; SSE2-NEXT: por %xmm2, %xmm0
162 ; SSE41-LABEL: var_shift_v4i16:
164 ; SSE41-NEXT: movdqa %xmm1, %xmm2
165 ; SSE41-NEXT: movdqa %xmm0, %xmm1
166 ; SSE41-NEXT: movdqa %xmm2, %xmm0
167 ; SSE41-NEXT: psllw $12, %xmm0
168 ; SSE41-NEXT: psllw $4, %xmm2
169 ; SSE41-NEXT: por %xmm0, %xmm2
170 ; SSE41-NEXT: movdqa %xmm2, %xmm3
171 ; SSE41-NEXT: paddw %xmm2, %xmm3
172 ; SSE41-NEXT: movdqa %xmm1, %xmm4
173 ; SSE41-NEXT: psrlw $8, %xmm4
174 ; SSE41-NEXT: movdqa %xmm2, %xmm0
175 ; SSE41-NEXT: pblendvb %xmm0, %xmm4, %xmm1
176 ; SSE41-NEXT: movdqa %xmm1, %xmm2
177 ; SSE41-NEXT: psrlw $4, %xmm2
178 ; SSE41-NEXT: movdqa %xmm3, %xmm0
179 ; SSE41-NEXT: pblendvb %xmm0, %xmm2, %xmm1
180 ; SSE41-NEXT: movdqa %xmm1, %xmm2
181 ; SSE41-NEXT: psrlw $2, %xmm2
182 ; SSE41-NEXT: paddw %xmm3, %xmm3
183 ; SSE41-NEXT: movdqa %xmm3, %xmm0
184 ; SSE41-NEXT: pblendvb %xmm0, %xmm2, %xmm1
185 ; SSE41-NEXT: movdqa %xmm1, %xmm2
186 ; SSE41-NEXT: psrlw $1, %xmm2
187 ; SSE41-NEXT: paddw %xmm3, %xmm3
188 ; SSE41-NEXT: movdqa %xmm3, %xmm0
189 ; SSE41-NEXT: pblendvb %xmm0, %xmm2, %xmm1
190 ; SSE41-NEXT: movdqa %xmm1, %xmm0
193 ; AVX1-LABEL: var_shift_v4i16:
195 ; AVX1-NEXT: vpsllw $12, %xmm1, %xmm2
196 ; AVX1-NEXT: vpsllw $4, %xmm1, %xmm1
197 ; AVX1-NEXT: vpor %xmm2, %xmm1, %xmm1
198 ; AVX1-NEXT: vpaddw %xmm1, %xmm1, %xmm2
199 ; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm3
200 ; AVX1-NEXT: vpblendvb %xmm1, %xmm3, %xmm0, %xmm0
201 ; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm1
202 ; AVX1-NEXT: vpblendvb %xmm2, %xmm1, %xmm0, %xmm0
203 ; AVX1-NEXT: vpsrlw $2, %xmm0, %xmm1
204 ; AVX1-NEXT: vpaddw %xmm2, %xmm2, %xmm2
205 ; AVX1-NEXT: vpblendvb %xmm2, %xmm1, %xmm0, %xmm0
206 ; AVX1-NEXT: vpsrlw $1, %xmm0, %xmm1
207 ; AVX1-NEXT: vpaddw %xmm2, %xmm2, %xmm2
208 ; AVX1-NEXT: vpblendvb %xmm2, %xmm1, %xmm0, %xmm0
211 ; AVX2-LABEL: var_shift_v4i16:
213 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
214 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
215 ; AVX2-NEXT: vpsrlvd %ymm1, %ymm0, %ymm0
216 ; AVX2-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[0,1,4,5,8,9,12,13,8,9,12,13,12,13,14,15,16,17,20,21,24,25,28,29,24,25,28,29,28,29,30,31]
217 ; AVX2-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,2,3]
218 ; AVX2-NEXT: # kill: def $xmm0 killed $xmm0 killed $ymm0
219 ; AVX2-NEXT: vzeroupper
222 ; XOP-LABEL: var_shift_v4i16:
224 ; XOP-NEXT: vpxor %xmm2, %xmm2, %xmm2
225 ; XOP-NEXT: vpsubw %xmm1, %xmm2, %xmm1
226 ; XOP-NEXT: vpshlw %xmm1, %xmm0, %xmm0
229 ; AVX512DQ-LABEL: var_shift_v4i16:
231 ; AVX512DQ-NEXT: vpmovzxwd {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
232 ; AVX512DQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
233 ; AVX512DQ-NEXT: vpsrlvd %ymm1, %ymm0, %ymm0
234 ; AVX512DQ-NEXT: vpmovdw %zmm0, %ymm0
235 ; AVX512DQ-NEXT: # kill: def $xmm0 killed $xmm0 killed $ymm0
236 ; AVX512DQ-NEXT: vzeroupper
237 ; AVX512DQ-NEXT: retq
239 ; AVX512BW-LABEL: var_shift_v4i16:
241 ; AVX512BW-NEXT: # kill: def $xmm1 killed $xmm1 def $zmm1
242 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
243 ; AVX512BW-NEXT: vpsrlvw %zmm1, %zmm0, %zmm0
244 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 killed $zmm0
245 ; AVX512BW-NEXT: vzeroupper
246 ; AVX512BW-NEXT: retq
248 ; AVX512DQVL-LABEL: var_shift_v4i16:
249 ; AVX512DQVL: # %bb.0:
250 ; AVX512DQVL-NEXT: vpmovzxwd {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
251 ; AVX512DQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
252 ; AVX512DQVL-NEXT: vpsrlvd %ymm1, %ymm0, %ymm0
253 ; AVX512DQVL-NEXT: vpmovdw %ymm0, %xmm0
254 ; AVX512DQVL-NEXT: vzeroupper
255 ; AVX512DQVL-NEXT: retq
257 ; AVX512BWVL-LABEL: var_shift_v4i16:
258 ; AVX512BWVL: # %bb.0:
259 ; AVX512BWVL-NEXT: vpsrlvw %xmm1, %xmm0, %xmm0
260 ; AVX512BWVL-NEXT: retq
262 ; X32-SSE-LABEL: var_shift_v4i16:
264 ; X32-SSE-NEXT: psllw $12, %xmm1
265 ; X32-SSE-NEXT: movdqa %xmm1, %xmm2
266 ; X32-SSE-NEXT: psraw $15, %xmm2
267 ; X32-SSE-NEXT: movdqa %xmm2, %xmm3
268 ; X32-SSE-NEXT: pandn %xmm0, %xmm3
269 ; X32-SSE-NEXT: psrlw $8, %xmm0
270 ; X32-SSE-NEXT: pand %xmm2, %xmm0
271 ; X32-SSE-NEXT: por %xmm3, %xmm0
272 ; X32-SSE-NEXT: paddw %xmm1, %xmm1
273 ; X32-SSE-NEXT: movdqa %xmm1, %xmm2
274 ; X32-SSE-NEXT: psraw $15, %xmm2
275 ; X32-SSE-NEXT: movdqa %xmm2, %xmm3
276 ; X32-SSE-NEXT: pandn %xmm0, %xmm3
277 ; X32-SSE-NEXT: psrlw $4, %xmm0
278 ; X32-SSE-NEXT: pand %xmm2, %xmm0
279 ; X32-SSE-NEXT: por %xmm3, %xmm0
280 ; X32-SSE-NEXT: paddw %xmm1, %xmm1
281 ; X32-SSE-NEXT: movdqa %xmm1, %xmm2
282 ; X32-SSE-NEXT: psraw $15, %xmm2
283 ; X32-SSE-NEXT: movdqa %xmm2, %xmm3
284 ; X32-SSE-NEXT: pandn %xmm0, %xmm3
285 ; X32-SSE-NEXT: psrlw $2, %xmm0
286 ; X32-SSE-NEXT: pand %xmm2, %xmm0
287 ; X32-SSE-NEXT: por %xmm3, %xmm0
288 ; X32-SSE-NEXT: paddw %xmm1, %xmm1
289 ; X32-SSE-NEXT: psraw $15, %xmm1
290 ; X32-SSE-NEXT: movdqa %xmm1, %xmm2
291 ; X32-SSE-NEXT: pandn %xmm0, %xmm2
292 ; X32-SSE-NEXT: psrlw $1, %xmm0
293 ; X32-SSE-NEXT: pand %xmm1, %xmm0
294 ; X32-SSE-NEXT: por %xmm2, %xmm0
296 %shift = lshr <4 x i16> %a, %b
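; v2i16: takes the same path as v4i16 above; the generated code is identical.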
300 define <2 x i16> @var_shift_v2i16(<2 x i16> %a, <2 x i16> %b) nounwind {
301 ; SSE2-LABEL: var_shift_v2i16:
303 ; SSE2-NEXT: psllw $12, %xmm1
304 ; SSE2-NEXT: movdqa %xmm1, %xmm2
305 ; SSE2-NEXT: psraw $15, %xmm2
306 ; SSE2-NEXT: movdqa %xmm2, %xmm3
307 ; SSE2-NEXT: pandn %xmm0, %xmm3
308 ; SSE2-NEXT: psrlw $8, %xmm0
309 ; SSE2-NEXT: pand %xmm2, %xmm0
310 ; SSE2-NEXT: por %xmm3, %xmm0
311 ; SSE2-NEXT: paddw %xmm1, %xmm1
312 ; SSE2-NEXT: movdqa %xmm1, %xmm2
313 ; SSE2-NEXT: psraw $15, %xmm2
314 ; SSE2-NEXT: movdqa %xmm2, %xmm3
315 ; SSE2-NEXT: pandn %xmm0, %xmm3
316 ; SSE2-NEXT: psrlw $4, %xmm0
317 ; SSE2-NEXT: pand %xmm2, %xmm0
318 ; SSE2-NEXT: por %xmm3, %xmm0
319 ; SSE2-NEXT: paddw %xmm1, %xmm1
320 ; SSE2-NEXT: movdqa %xmm1, %xmm2
321 ; SSE2-NEXT: psraw $15, %xmm2
322 ; SSE2-NEXT: movdqa %xmm2, %xmm3
323 ; SSE2-NEXT: pandn %xmm0, %xmm3
324 ; SSE2-NEXT: psrlw $2, %xmm0
325 ; SSE2-NEXT: pand %xmm2, %xmm0
326 ; SSE2-NEXT: por %xmm3, %xmm0
327 ; SSE2-NEXT: paddw %xmm1, %xmm1
328 ; SSE2-NEXT: psraw $15, %xmm1
329 ; SSE2-NEXT: movdqa %xmm1, %xmm2
330 ; SSE2-NEXT: pandn %xmm0, %xmm2
331 ; SSE2-NEXT: psrlw $1, %xmm0
332 ; SSE2-NEXT: pand %xmm1, %xmm0
333 ; SSE2-NEXT: por %xmm2, %xmm0
336 ; SSE41-LABEL: var_shift_v2i16:
338 ; SSE41-NEXT: movdqa %xmm1, %xmm2
339 ; SSE41-NEXT: movdqa %xmm0, %xmm1
340 ; SSE41-NEXT: movdqa %xmm2, %xmm0
341 ; SSE41-NEXT: psllw $12, %xmm0
342 ; SSE41-NEXT: psllw $4, %xmm2
343 ; SSE41-NEXT: por %xmm0, %xmm2
344 ; SSE41-NEXT: movdqa %xmm2, %xmm3
345 ; SSE41-NEXT: paddw %xmm2, %xmm3
346 ; SSE41-NEXT: movdqa %xmm1, %xmm4
347 ; SSE41-NEXT: psrlw $8, %xmm4
348 ; SSE41-NEXT: movdqa %xmm2, %xmm0
349 ; SSE41-NEXT: pblendvb %xmm0, %xmm4, %xmm1
350 ; SSE41-NEXT: movdqa %xmm1, %xmm2
351 ; SSE41-NEXT: psrlw $4, %xmm2
352 ; SSE41-NEXT: movdqa %xmm3, %xmm0
353 ; SSE41-NEXT: pblendvb %xmm0, %xmm2, %xmm1
354 ; SSE41-NEXT: movdqa %xmm1, %xmm2
355 ; SSE41-NEXT: psrlw $2, %xmm2
356 ; SSE41-NEXT: paddw %xmm3, %xmm3
357 ; SSE41-NEXT: movdqa %xmm3, %xmm0
358 ; SSE41-NEXT: pblendvb %xmm0, %xmm2, %xmm1
359 ; SSE41-NEXT: movdqa %xmm1, %xmm2
360 ; SSE41-NEXT: psrlw $1, %xmm2
361 ; SSE41-NEXT: paddw %xmm3, %xmm3
362 ; SSE41-NEXT: movdqa %xmm3, %xmm0
363 ; SSE41-NEXT: pblendvb %xmm0, %xmm2, %xmm1
364 ; SSE41-NEXT: movdqa %xmm1, %xmm0
367 ; AVX1-LABEL: var_shift_v2i16:
369 ; AVX1-NEXT: vpsllw $12, %xmm1, %xmm2
370 ; AVX1-NEXT: vpsllw $4, %xmm1, %xmm1
371 ; AVX1-NEXT: vpor %xmm2, %xmm1, %xmm1
372 ; AVX1-NEXT: vpaddw %xmm1, %xmm1, %xmm2
373 ; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm3
374 ; AVX1-NEXT: vpblendvb %xmm1, %xmm3, %xmm0, %xmm0
375 ; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm1
376 ; AVX1-NEXT: vpblendvb %xmm2, %xmm1, %xmm0, %xmm0
377 ; AVX1-NEXT: vpsrlw $2, %xmm0, %xmm1
378 ; AVX1-NEXT: vpaddw %xmm2, %xmm2, %xmm2
379 ; AVX1-NEXT: vpblendvb %xmm2, %xmm1, %xmm0, %xmm0
380 ; AVX1-NEXT: vpsrlw $1, %xmm0, %xmm1
381 ; AVX1-NEXT: vpaddw %xmm2, %xmm2, %xmm2
382 ; AVX1-NEXT: vpblendvb %xmm2, %xmm1, %xmm0, %xmm0
385 ; AVX2-LABEL: var_shift_v2i16:
387 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
388 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
389 ; AVX2-NEXT: vpsrlvd %ymm1, %ymm0, %ymm0
390 ; AVX2-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[0,1,4,5,8,9,12,13,8,9,12,13,12,13,14,15,16,17,20,21,24,25,28,29,24,25,28,29,28,29,30,31]
391 ; AVX2-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,2,3]
392 ; AVX2-NEXT: # kill: def $xmm0 killed $xmm0 killed $ymm0
393 ; AVX2-NEXT: vzeroupper
396 ; XOP-LABEL: var_shift_v2i16:
398 ; XOP-NEXT: vpxor %xmm2, %xmm2, %xmm2
399 ; XOP-NEXT: vpsubw %xmm1, %xmm2, %xmm1
400 ; XOP-NEXT: vpshlw %xmm1, %xmm0, %xmm0
403 ; AVX512DQ-LABEL: var_shift_v2i16:
405 ; AVX512DQ-NEXT: vpmovzxwd {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
406 ; AVX512DQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
407 ; AVX512DQ-NEXT: vpsrlvd %ymm1, %ymm0, %ymm0
408 ; AVX512DQ-NEXT: vpmovdw %zmm0, %ymm0
409 ; AVX512DQ-NEXT: # kill: def $xmm0 killed $xmm0 killed $ymm0
410 ; AVX512DQ-NEXT: vzeroupper
411 ; AVX512DQ-NEXT: retq
413 ; AVX512BW-LABEL: var_shift_v2i16:
415 ; AVX512BW-NEXT: # kill: def $xmm1 killed $xmm1 def $zmm1
416 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
417 ; AVX512BW-NEXT: vpsrlvw %zmm1, %zmm0, %zmm0
418 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 killed $zmm0
419 ; AVX512BW-NEXT: vzeroupper
420 ; AVX512BW-NEXT: retq
422 ; AVX512DQVL-LABEL: var_shift_v2i16:
423 ; AVX512DQVL: # %bb.0:
424 ; AVX512DQVL-NEXT: vpmovzxwd {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
425 ; AVX512DQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
426 ; AVX512DQVL-NEXT: vpsrlvd %ymm1, %ymm0, %ymm0
427 ; AVX512DQVL-NEXT: vpmovdw %ymm0, %xmm0
428 ; AVX512DQVL-NEXT: vzeroupper
429 ; AVX512DQVL-NEXT: retq
431 ; AVX512BWVL-LABEL: var_shift_v2i16:
432 ; AVX512BWVL: # %bb.0:
433 ; AVX512BWVL-NEXT: vpsrlvw %xmm1, %xmm0, %xmm0
434 ; AVX512BWVL-NEXT: retq
436 ; X32-SSE-LABEL: var_shift_v2i16:
438 ; X32-SSE-NEXT: psllw $12, %xmm1
439 ; X32-SSE-NEXT: movdqa %xmm1, %xmm2
440 ; X32-SSE-NEXT: psraw $15, %xmm2
441 ; X32-SSE-NEXT: movdqa %xmm2, %xmm3
442 ; X32-SSE-NEXT: pandn %xmm0, %xmm3
443 ; X32-SSE-NEXT: psrlw $8, %xmm0
444 ; X32-SSE-NEXT: pand %xmm2, %xmm0
445 ; X32-SSE-NEXT: por %xmm3, %xmm0
446 ; X32-SSE-NEXT: paddw %xmm1, %xmm1
447 ; X32-SSE-NEXT: movdqa %xmm1, %xmm2
448 ; X32-SSE-NEXT: psraw $15, %xmm2
449 ; X32-SSE-NEXT: movdqa %xmm2, %xmm3
450 ; X32-SSE-NEXT: pandn %xmm0, %xmm3
451 ; X32-SSE-NEXT: psrlw $4, %xmm0
452 ; X32-SSE-NEXT: pand %xmm2, %xmm0
453 ; X32-SSE-NEXT: por %xmm3, %xmm0
454 ; X32-SSE-NEXT: paddw %xmm1, %xmm1
455 ; X32-SSE-NEXT: movdqa %xmm1, %xmm2
456 ; X32-SSE-NEXT: psraw $15, %xmm2
457 ; X32-SSE-NEXT: movdqa %xmm2, %xmm3
458 ; X32-SSE-NEXT: pandn %xmm0, %xmm3
459 ; X32-SSE-NEXT: psrlw $2, %xmm0
460 ; X32-SSE-NEXT: pand %xmm2, %xmm0
461 ; X32-SSE-NEXT: por %xmm3, %xmm0
462 ; X32-SSE-NEXT: paddw %xmm1, %xmm1
463 ; X32-SSE-NEXT: psraw $15, %xmm1
464 ; X32-SSE-NEXT: movdqa %xmm1, %xmm2
465 ; X32-SSE-NEXT: pandn %xmm0, %xmm2
466 ; X32-SSE-NEXT: psrlw $1, %xmm0
467 ; X32-SSE-NEXT: pand %xmm1, %xmm0
468 ; X32-SSE-NEXT: por %xmm2, %xmm0
470 %shift = lshr <2 x i16> %a, %b
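; v8i8: SSE/AVX provide no vector byte shifts. The counts are scaled into each
; byte's sign bit with psllw $5, and the result is built from word shifts by
; 4/2/1 whose cross-byte bits are cleared with a constant mask; pcmpgtb (SSE2)
; or pblendvb (SSE41/AVX) performs the per-byte selection. XOP uses vpshlb with
; negated counts. AVX512DQ(VL) zero-extend to i32 lanes and use vpsrlvd;
; AVX512BW(VL) zero-extend to i16 lanes and use vpsrlvw, truncating back
; afterwards.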
474 define <8 x i8> @var_shift_v8i8(<8 x i8> %a, <8 x i8> %b) nounwind {
475 ; SSE2-LABEL: var_shift_v8i8:
477 ; SSE2-NEXT: psllw $5, %xmm1
478 ; SSE2-NEXT: pxor %xmm2, %xmm2
479 ; SSE2-NEXT: pxor %xmm3, %xmm3
480 ; SSE2-NEXT: pcmpgtb %xmm1, %xmm3
481 ; SSE2-NEXT: movdqa %xmm3, %xmm4
482 ; SSE2-NEXT: pandn %xmm0, %xmm4
483 ; SSE2-NEXT: psrlw $4, %xmm0
484 ; SSE2-NEXT: pand %xmm3, %xmm0
485 ; SSE2-NEXT: pand {{.*}}(%rip), %xmm0
486 ; SSE2-NEXT: por %xmm4, %xmm0
487 ; SSE2-NEXT: paddb %xmm1, %xmm1
488 ; SSE2-NEXT: pxor %xmm3, %xmm3
489 ; SSE2-NEXT: pcmpgtb %xmm1, %xmm3
490 ; SSE2-NEXT: movdqa %xmm3, %xmm4
491 ; SSE2-NEXT: pandn %xmm0, %xmm4
492 ; SSE2-NEXT: psrlw $2, %xmm0
493 ; SSE2-NEXT: pand %xmm3, %xmm0
494 ; SSE2-NEXT: pand {{.*}}(%rip), %xmm0
495 ; SSE2-NEXT: por %xmm4, %xmm0
496 ; SSE2-NEXT: paddb %xmm1, %xmm1
497 ; SSE2-NEXT: pcmpgtb %xmm1, %xmm2
498 ; SSE2-NEXT: movdqa %xmm2, %xmm1
499 ; SSE2-NEXT: pandn %xmm0, %xmm1
500 ; SSE2-NEXT: psrlw $1, %xmm0
501 ; SSE2-NEXT: pand %xmm2, %xmm0
502 ; SSE2-NEXT: pand {{.*}}(%rip), %xmm0
503 ; SSE2-NEXT: por %xmm1, %xmm0
506 ; SSE41-LABEL: var_shift_v8i8:
508 ; SSE41-NEXT: movdqa %xmm0, %xmm2
509 ; SSE41-NEXT: psllw $5, %xmm1
510 ; SSE41-NEXT: movdqa %xmm0, %xmm3
511 ; SSE41-NEXT: psrlw $4, %xmm3
512 ; SSE41-NEXT: pand {{.*}}(%rip), %xmm3
513 ; SSE41-NEXT: movdqa %xmm1, %xmm0
514 ; SSE41-NEXT: pblendvb %xmm0, %xmm3, %xmm2
515 ; SSE41-NEXT: movdqa %xmm2, %xmm3
516 ; SSE41-NEXT: psrlw $2, %xmm3
517 ; SSE41-NEXT: pand {{.*}}(%rip), %xmm3
518 ; SSE41-NEXT: paddb %xmm1, %xmm1
519 ; SSE41-NEXT: movdqa %xmm1, %xmm0
520 ; SSE41-NEXT: pblendvb %xmm0, %xmm3, %xmm2
521 ; SSE41-NEXT: movdqa %xmm2, %xmm3
522 ; SSE41-NEXT: psrlw $1, %xmm3
523 ; SSE41-NEXT: pand {{.*}}(%rip), %xmm3
524 ; SSE41-NEXT: paddb %xmm1, %xmm1
525 ; SSE41-NEXT: movdqa %xmm1, %xmm0
526 ; SSE41-NEXT: pblendvb %xmm0, %xmm3, %xmm2
527 ; SSE41-NEXT: movdqa %xmm2, %xmm0
530 ; AVX-LABEL: var_shift_v8i8:
532 ; AVX-NEXT: vpsllw $5, %xmm1, %xmm1
533 ; AVX-NEXT: vpsrlw $4, %xmm0, %xmm2
534 ; AVX-NEXT: vpand {{.*}}(%rip), %xmm2, %xmm2
535 ; AVX-NEXT: vpblendvb %xmm1, %xmm2, %xmm0, %xmm0
536 ; AVX-NEXT: vpsrlw $2, %xmm0, %xmm2
537 ; AVX-NEXT: vpand {{.*}}(%rip), %xmm2, %xmm2
538 ; AVX-NEXT: vpaddb %xmm1, %xmm1, %xmm1
539 ; AVX-NEXT: vpblendvb %xmm1, %xmm2, %xmm0, %xmm0
540 ; AVX-NEXT: vpsrlw $1, %xmm0, %xmm2
541 ; AVX-NEXT: vpand {{.*}}(%rip), %xmm2, %xmm2
542 ; AVX-NEXT: vpaddb %xmm1, %xmm1, %xmm1
543 ; AVX-NEXT: vpblendvb %xmm1, %xmm2, %xmm0, %xmm0
546 ; XOP-LABEL: var_shift_v8i8:
548 ; XOP-NEXT: vpxor %xmm2, %xmm2, %xmm2
549 ; XOP-NEXT: vpsubb %xmm1, %xmm2, %xmm1
550 ; XOP-NEXT: vpshlb %xmm1, %xmm0, %xmm0
553 ; AVX512DQ-LABEL: var_shift_v8i8:
555 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
556 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
557 ; AVX512DQ-NEXT: vpsrlvd %zmm1, %zmm0, %zmm0
558 ; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
559 ; AVX512DQ-NEXT: vzeroupper
560 ; AVX512DQ-NEXT: retq
562 ; AVX512BW-LABEL: var_shift_v8i8:
564 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero,xmm1[8],zero,xmm1[9],zero,xmm1[10],zero,xmm1[11],zero,xmm1[12],zero,xmm1[13],zero,xmm1[14],zero,xmm1[15],zero
565 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
566 ; AVX512BW-NEXT: vpsrlvw %zmm1, %zmm0, %zmm0
567 ; AVX512BW-NEXT: vpmovwb %zmm0, %ymm0
568 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 killed $ymm0
569 ; AVX512BW-NEXT: vzeroupper
570 ; AVX512BW-NEXT: retq
572 ; AVX512DQVL-LABEL: var_shift_v8i8:
573 ; AVX512DQVL: # %bb.0:
574 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
575 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
576 ; AVX512DQVL-NEXT: vpsrlvd %zmm1, %zmm0, %zmm0
577 ; AVX512DQVL-NEXT: vpmovdb %zmm0, %xmm0
578 ; AVX512DQVL-NEXT: vzeroupper
579 ; AVX512DQVL-NEXT: retq
581 ; AVX512BWVL-LABEL: var_shift_v8i8:
582 ; AVX512BWVL: # %bb.0:
583 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero,xmm1[8],zero,xmm1[9],zero,xmm1[10],zero,xmm1[11],zero,xmm1[12],zero,xmm1[13],zero,xmm1[14],zero,xmm1[15],zero
584 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
585 ; AVX512BWVL-NEXT: vpsrlvw %ymm1, %ymm0, %ymm0
586 ; AVX512BWVL-NEXT: vpmovwb %ymm0, %xmm0
587 ; AVX512BWVL-NEXT: vzeroupper
588 ; AVX512BWVL-NEXT: retq
590 ; X32-SSE-LABEL: var_shift_v8i8:
592 ; X32-SSE-NEXT: psllw $5, %xmm1
593 ; X32-SSE-NEXT: pxor %xmm2, %xmm2
594 ; X32-SSE-NEXT: pxor %xmm3, %xmm3
595 ; X32-SSE-NEXT: pcmpgtb %xmm1, %xmm3
596 ; X32-SSE-NEXT: movdqa %xmm3, %xmm4
597 ; X32-SSE-NEXT: pandn %xmm0, %xmm4
598 ; X32-SSE-NEXT: psrlw $4, %xmm0
599 ; X32-SSE-NEXT: pand %xmm3, %xmm0
600 ; X32-SSE-NEXT: pand {{\.LCPI.*}}, %xmm0
601 ; X32-SSE-NEXT: por %xmm4, %xmm0
602 ; X32-SSE-NEXT: paddb %xmm1, %xmm1
603 ; X32-SSE-NEXT: pxor %xmm3, %xmm3
604 ; X32-SSE-NEXT: pcmpgtb %xmm1, %xmm3
605 ; X32-SSE-NEXT: movdqa %xmm3, %xmm4
606 ; X32-SSE-NEXT: pandn %xmm0, %xmm4
607 ; X32-SSE-NEXT: psrlw $2, %xmm0
608 ; X32-SSE-NEXT: pand %xmm3, %xmm0
609 ; X32-SSE-NEXT: pand {{\.LCPI.*}}, %xmm0
610 ; X32-SSE-NEXT: por %xmm4, %xmm0
611 ; X32-SSE-NEXT: paddb %xmm1, %xmm1
612 ; X32-SSE-NEXT: pcmpgtb %xmm1, %xmm2
613 ; X32-SSE-NEXT: movdqa %xmm2, %xmm1
614 ; X32-SSE-NEXT: pandn %xmm0, %xmm1
615 ; X32-SSE-NEXT: psrlw $1, %xmm0
616 ; X32-SSE-NEXT: pand %xmm2, %xmm0
617 ; X32-SSE-NEXT: pand {{\.LCPI.*}}, %xmm0
618 ; X32-SSE-NEXT: por %xmm1, %xmm0
620 %shift = lshr <8 x i8> %a, %b
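; v4i8: same byte-shift emulation as v8i8 above; the generated code is identical.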
624 define <4 x i8> @var_shift_v4i8(<4 x i8> %a, <4 x i8> %b) nounwind {
625 ; SSE2-LABEL: var_shift_v4i8:
627 ; SSE2-NEXT: psllw $5, %xmm1
628 ; SSE2-NEXT: pxor %xmm2, %xmm2
629 ; SSE2-NEXT: pxor %xmm3, %xmm3
630 ; SSE2-NEXT: pcmpgtb %xmm1, %xmm3
631 ; SSE2-NEXT: movdqa %xmm3, %xmm4
632 ; SSE2-NEXT: pandn %xmm0, %xmm4
633 ; SSE2-NEXT: psrlw $4, %xmm0
634 ; SSE2-NEXT: pand %xmm3, %xmm0
635 ; SSE2-NEXT: pand {{.*}}(%rip), %xmm0
636 ; SSE2-NEXT: por %xmm4, %xmm0
637 ; SSE2-NEXT: paddb %xmm1, %xmm1
638 ; SSE2-NEXT: pxor %xmm3, %xmm3
639 ; SSE2-NEXT: pcmpgtb %xmm1, %xmm3
640 ; SSE2-NEXT: movdqa %xmm3, %xmm4
641 ; SSE2-NEXT: pandn %xmm0, %xmm4
642 ; SSE2-NEXT: psrlw $2, %xmm0
643 ; SSE2-NEXT: pand %xmm3, %xmm0
644 ; SSE2-NEXT: pand {{.*}}(%rip), %xmm0
645 ; SSE2-NEXT: por %xmm4, %xmm0
646 ; SSE2-NEXT: paddb %xmm1, %xmm1
647 ; SSE2-NEXT: pcmpgtb %xmm1, %xmm2
648 ; SSE2-NEXT: movdqa %xmm2, %xmm1
649 ; SSE2-NEXT: pandn %xmm0, %xmm1
650 ; SSE2-NEXT: psrlw $1, %xmm0
651 ; SSE2-NEXT: pand %xmm2, %xmm0
652 ; SSE2-NEXT: pand {{.*}}(%rip), %xmm0
653 ; SSE2-NEXT: por %xmm1, %xmm0
656 ; SSE41-LABEL: var_shift_v4i8:
658 ; SSE41-NEXT: movdqa %xmm0, %xmm2
659 ; SSE41-NEXT: psllw $5, %xmm1
660 ; SSE41-NEXT: movdqa %xmm0, %xmm3
661 ; SSE41-NEXT: psrlw $4, %xmm3
662 ; SSE41-NEXT: pand {{.*}}(%rip), %xmm3
663 ; SSE41-NEXT: movdqa %xmm1, %xmm0
664 ; SSE41-NEXT: pblendvb %xmm0, %xmm3, %xmm2
665 ; SSE41-NEXT: movdqa %xmm2, %xmm3
666 ; SSE41-NEXT: psrlw $2, %xmm3
667 ; SSE41-NEXT: pand {{.*}}(%rip), %xmm3
668 ; SSE41-NEXT: paddb %xmm1, %xmm1
669 ; SSE41-NEXT: movdqa %xmm1, %xmm0
670 ; SSE41-NEXT: pblendvb %xmm0, %xmm3, %xmm2
671 ; SSE41-NEXT: movdqa %xmm2, %xmm3
672 ; SSE41-NEXT: psrlw $1, %xmm3
673 ; SSE41-NEXT: pand {{.*}}(%rip), %xmm3
674 ; SSE41-NEXT: paddb %xmm1, %xmm1
675 ; SSE41-NEXT: movdqa %xmm1, %xmm0
676 ; SSE41-NEXT: pblendvb %xmm0, %xmm3, %xmm2
677 ; SSE41-NEXT: movdqa %xmm2, %xmm0
680 ; AVX-LABEL: var_shift_v4i8:
682 ; AVX-NEXT: vpsllw $5, %xmm1, %xmm1
683 ; AVX-NEXT: vpsrlw $4, %xmm0, %xmm2
684 ; AVX-NEXT: vpand {{.*}}(%rip), %xmm2, %xmm2
685 ; AVX-NEXT: vpblendvb %xmm1, %xmm2, %xmm0, %xmm0
686 ; AVX-NEXT: vpsrlw $2, %xmm0, %xmm2
687 ; AVX-NEXT: vpand {{.*}}(%rip), %xmm2, %xmm2
688 ; AVX-NEXT: vpaddb %xmm1, %xmm1, %xmm1
689 ; AVX-NEXT: vpblendvb %xmm1, %xmm2, %xmm0, %xmm0
690 ; AVX-NEXT: vpsrlw $1, %xmm0, %xmm2
691 ; AVX-NEXT: vpand {{.*}}(%rip), %xmm2, %xmm2
692 ; AVX-NEXT: vpaddb %xmm1, %xmm1, %xmm1
693 ; AVX-NEXT: vpblendvb %xmm1, %xmm2, %xmm0, %xmm0
696 ; XOP-LABEL: var_shift_v4i8:
698 ; XOP-NEXT: vpxor %xmm2, %xmm2, %xmm2
699 ; XOP-NEXT: vpsubb %xmm1, %xmm2, %xmm1
700 ; XOP-NEXT: vpshlb %xmm1, %xmm0, %xmm0
703 ; AVX512DQ-LABEL: var_shift_v4i8:
705 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
706 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
707 ; AVX512DQ-NEXT: vpsrlvd %zmm1, %zmm0, %zmm0
708 ; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
709 ; AVX512DQ-NEXT: vzeroupper
710 ; AVX512DQ-NEXT: retq
712 ; AVX512BW-LABEL: var_shift_v4i8:
714 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero,xmm1[8],zero,xmm1[9],zero,xmm1[10],zero,xmm1[11],zero,xmm1[12],zero,xmm1[13],zero,xmm1[14],zero,xmm1[15],zero
715 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
716 ; AVX512BW-NEXT: vpsrlvw %zmm1, %zmm0, %zmm0
717 ; AVX512BW-NEXT: vpmovwb %zmm0, %ymm0
718 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 killed $ymm0
719 ; AVX512BW-NEXT: vzeroupper
720 ; AVX512BW-NEXT: retq
722 ; AVX512DQVL-LABEL: var_shift_v4i8:
723 ; AVX512DQVL: # %bb.0:
724 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
725 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
726 ; AVX512DQVL-NEXT: vpsrlvd %zmm1, %zmm0, %zmm0
727 ; AVX512DQVL-NEXT: vpmovdb %zmm0, %xmm0
728 ; AVX512DQVL-NEXT: vzeroupper
729 ; AVX512DQVL-NEXT: retq
731 ; AVX512BWVL-LABEL: var_shift_v4i8:
732 ; AVX512BWVL: # %bb.0:
733 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero,xmm1[8],zero,xmm1[9],zero,xmm1[10],zero,xmm1[11],zero,xmm1[12],zero,xmm1[13],zero,xmm1[14],zero,xmm1[15],zero
734 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
735 ; AVX512BWVL-NEXT: vpsrlvw %ymm1, %ymm0, %ymm0
736 ; AVX512BWVL-NEXT: vpmovwb %ymm0, %xmm0
737 ; AVX512BWVL-NEXT: vzeroupper
738 ; AVX512BWVL-NEXT: retq
740 ; X32-SSE-LABEL: var_shift_v4i8:
742 ; X32-SSE-NEXT: psllw $5, %xmm1
743 ; X32-SSE-NEXT: pxor %xmm2, %xmm2
744 ; X32-SSE-NEXT: pxor %xmm3, %xmm3
745 ; X32-SSE-NEXT: pcmpgtb %xmm1, %xmm3
746 ; X32-SSE-NEXT: movdqa %xmm3, %xmm4
747 ; X32-SSE-NEXT: pandn %xmm0, %xmm4
748 ; X32-SSE-NEXT: psrlw $4, %xmm0
749 ; X32-SSE-NEXT: pand %xmm3, %xmm0
750 ; X32-SSE-NEXT: pand {{\.LCPI.*}}, %xmm0
751 ; X32-SSE-NEXT: por %xmm4, %xmm0
752 ; X32-SSE-NEXT: paddb %xmm1, %xmm1
753 ; X32-SSE-NEXT: pxor %xmm3, %xmm3
754 ; X32-SSE-NEXT: pcmpgtb %xmm1, %xmm3
755 ; X32-SSE-NEXT: movdqa %xmm3, %xmm4
756 ; X32-SSE-NEXT: pandn %xmm0, %xmm4
757 ; X32-SSE-NEXT: psrlw $2, %xmm0
758 ; X32-SSE-NEXT: pand %xmm3, %xmm0
759 ; X32-SSE-NEXT: pand {{\.LCPI.*}}, %xmm0
760 ; X32-SSE-NEXT: por %xmm4, %xmm0
761 ; X32-SSE-NEXT: paddb %xmm1, %xmm1
762 ; X32-SSE-NEXT: pcmpgtb %xmm1, %xmm2
763 ; X32-SSE-NEXT: movdqa %xmm2, %xmm1
764 ; X32-SSE-NEXT: pandn %xmm0, %xmm1
765 ; X32-SSE-NEXT: psrlw $1, %xmm0
766 ; X32-SSE-NEXT: pand %xmm2, %xmm0
767 ; X32-SSE-NEXT: pand {{\.LCPI.*}}, %xmm0
768 ; X32-SSE-NEXT: por %xmm1, %xmm0
770 %shift = lshr <4 x i8> %a, %b
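; v2i8: same byte-shift emulation as v8i8 above; the generated code is identical.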
774 define <2 x i8> @var_shift_v2i8(<2 x i8> %a, <2 x i8> %b) nounwind {
775 ; SSE2-LABEL: var_shift_v2i8:
777 ; SSE2-NEXT: psllw $5, %xmm1
778 ; SSE2-NEXT: pxor %xmm2, %xmm2
779 ; SSE2-NEXT: pxor %xmm3, %xmm3
780 ; SSE2-NEXT: pcmpgtb %xmm1, %xmm3
781 ; SSE2-NEXT: movdqa %xmm3, %xmm4
782 ; SSE2-NEXT: pandn %xmm0, %xmm4
783 ; SSE2-NEXT: psrlw $4, %xmm0
784 ; SSE2-NEXT: pand %xmm3, %xmm0
785 ; SSE2-NEXT: pand {{.*}}(%rip), %xmm0
786 ; SSE2-NEXT: por %xmm4, %xmm0
787 ; SSE2-NEXT: paddb %xmm1, %xmm1
788 ; SSE2-NEXT: pxor %xmm3, %xmm3
789 ; SSE2-NEXT: pcmpgtb %xmm1, %xmm3
790 ; SSE2-NEXT: movdqa %xmm3, %xmm4
791 ; SSE2-NEXT: pandn %xmm0, %xmm4
792 ; SSE2-NEXT: psrlw $2, %xmm0
793 ; SSE2-NEXT: pand %xmm3, %xmm0
794 ; SSE2-NEXT: pand {{.*}}(%rip), %xmm0
795 ; SSE2-NEXT: por %xmm4, %xmm0
796 ; SSE2-NEXT: paddb %xmm1, %xmm1
797 ; SSE2-NEXT: pcmpgtb %xmm1, %xmm2
798 ; SSE2-NEXT: movdqa %xmm2, %xmm1
799 ; SSE2-NEXT: pandn %xmm0, %xmm1
800 ; SSE2-NEXT: psrlw $1, %xmm0
801 ; SSE2-NEXT: pand %xmm2, %xmm0
802 ; SSE2-NEXT: pand {{.*}}(%rip), %xmm0
803 ; SSE2-NEXT: por %xmm1, %xmm0
806 ; SSE41-LABEL: var_shift_v2i8:
808 ; SSE41-NEXT: movdqa %xmm0, %xmm2
809 ; SSE41-NEXT: psllw $5, %xmm1
810 ; SSE41-NEXT: movdqa %xmm0, %xmm3
811 ; SSE41-NEXT: psrlw $4, %xmm3
812 ; SSE41-NEXT: pand {{.*}}(%rip), %xmm3
813 ; SSE41-NEXT: movdqa %xmm1, %xmm0
814 ; SSE41-NEXT: pblendvb %xmm0, %xmm3, %xmm2
815 ; SSE41-NEXT: movdqa %xmm2, %xmm3
816 ; SSE41-NEXT: psrlw $2, %xmm3
817 ; SSE41-NEXT: pand {{.*}}(%rip), %xmm3
818 ; SSE41-NEXT: paddb %xmm1, %xmm1
819 ; SSE41-NEXT: movdqa %xmm1, %xmm0
820 ; SSE41-NEXT: pblendvb %xmm0, %xmm3, %xmm2
821 ; SSE41-NEXT: movdqa %xmm2, %xmm3
822 ; SSE41-NEXT: psrlw $1, %xmm3
823 ; SSE41-NEXT: pand {{.*}}(%rip), %xmm3
824 ; SSE41-NEXT: paddb %xmm1, %xmm1
825 ; SSE41-NEXT: movdqa %xmm1, %xmm0
826 ; SSE41-NEXT: pblendvb %xmm0, %xmm3, %xmm2
827 ; SSE41-NEXT: movdqa %xmm2, %xmm0
830 ; AVX-LABEL: var_shift_v2i8:
832 ; AVX-NEXT: vpsllw $5, %xmm1, %xmm1
833 ; AVX-NEXT: vpsrlw $4, %xmm0, %xmm2
834 ; AVX-NEXT: vpand {{.*}}(%rip), %xmm2, %xmm2
835 ; AVX-NEXT: vpblendvb %xmm1, %xmm2, %xmm0, %xmm0
836 ; AVX-NEXT: vpsrlw $2, %xmm0, %xmm2
837 ; AVX-NEXT: vpand {{.*}}(%rip), %xmm2, %xmm2
838 ; AVX-NEXT: vpaddb %xmm1, %xmm1, %xmm1
839 ; AVX-NEXT: vpblendvb %xmm1, %xmm2, %xmm0, %xmm0
840 ; AVX-NEXT: vpsrlw $1, %xmm0, %xmm2
841 ; AVX-NEXT: vpand {{.*}}(%rip), %xmm2, %xmm2
842 ; AVX-NEXT: vpaddb %xmm1, %xmm1, %xmm1
843 ; AVX-NEXT: vpblendvb %xmm1, %xmm2, %xmm0, %xmm0
846 ; XOP-LABEL: var_shift_v2i8:
848 ; XOP-NEXT: vpxor %xmm2, %xmm2, %xmm2
849 ; XOP-NEXT: vpsubb %xmm1, %xmm2, %xmm1
850 ; XOP-NEXT: vpshlb %xmm1, %xmm0, %xmm0
853 ; AVX512DQ-LABEL: var_shift_v2i8:
855 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
856 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
857 ; AVX512DQ-NEXT: vpsrlvd %zmm1, %zmm0, %zmm0
858 ; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
859 ; AVX512DQ-NEXT: vzeroupper
860 ; AVX512DQ-NEXT: retq
862 ; AVX512BW-LABEL: var_shift_v2i8:
864 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero,xmm1[8],zero,xmm1[9],zero,xmm1[10],zero,xmm1[11],zero,xmm1[12],zero,xmm1[13],zero,xmm1[14],zero,xmm1[15],zero
865 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
866 ; AVX512BW-NEXT: vpsrlvw %zmm1, %zmm0, %zmm0
867 ; AVX512BW-NEXT: vpmovwb %zmm0, %ymm0
868 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 killed $ymm0
869 ; AVX512BW-NEXT: vzeroupper
870 ; AVX512BW-NEXT: retq
872 ; AVX512DQVL-LABEL: var_shift_v2i8:
873 ; AVX512DQVL: # %bb.0:
874 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
875 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
876 ; AVX512DQVL-NEXT: vpsrlvd %zmm1, %zmm0, %zmm0
877 ; AVX512DQVL-NEXT: vpmovdb %zmm0, %xmm0
878 ; AVX512DQVL-NEXT: vzeroupper
879 ; AVX512DQVL-NEXT: retq
881 ; AVX512BWVL-LABEL: var_shift_v2i8:
882 ; AVX512BWVL: # %bb.0:
883 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero,xmm1[8],zero,xmm1[9],zero,xmm1[10],zero,xmm1[11],zero,xmm1[12],zero,xmm1[13],zero,xmm1[14],zero,xmm1[15],zero
884 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
885 ; AVX512BWVL-NEXT: vpsrlvw %ymm1, %ymm0, %ymm0
886 ; AVX512BWVL-NEXT: vpmovwb %ymm0, %xmm0
887 ; AVX512BWVL-NEXT: vzeroupper
888 ; AVX512BWVL-NEXT: retq
890 ; X32-SSE-LABEL: var_shift_v2i8:
892 ; X32-SSE-NEXT: psllw $5, %xmm1
893 ; X32-SSE-NEXT: pxor %xmm2, %xmm2
894 ; X32-SSE-NEXT: pxor %xmm3, %xmm3
895 ; X32-SSE-NEXT: pcmpgtb %xmm1, %xmm3
896 ; X32-SSE-NEXT: movdqa %xmm3, %xmm4
897 ; X32-SSE-NEXT: pandn %xmm0, %xmm4
898 ; X32-SSE-NEXT: psrlw $4, %xmm0
899 ; X32-SSE-NEXT: pand %xmm3, %xmm0
900 ; X32-SSE-NEXT: pand {{\.LCPI.*}}, %xmm0
901 ; X32-SSE-NEXT: por %xmm4, %xmm0
902 ; X32-SSE-NEXT: paddb %xmm1, %xmm1
903 ; X32-SSE-NEXT: pxor %xmm3, %xmm3
904 ; X32-SSE-NEXT: pcmpgtb %xmm1, %xmm3
905 ; X32-SSE-NEXT: movdqa %xmm3, %xmm4
906 ; X32-SSE-NEXT: pandn %xmm0, %xmm4
907 ; X32-SSE-NEXT: psrlw $2, %xmm0
908 ; X32-SSE-NEXT: pand %xmm3, %xmm0
909 ; X32-SSE-NEXT: pand {{\.LCPI.*}}, %xmm0
910 ; X32-SSE-NEXT: por %xmm4, %xmm0
911 ; X32-SSE-NEXT: paddb %xmm1, %xmm1
912 ; X32-SSE-NEXT: pcmpgtb %xmm1, %xmm2
913 ; X32-SSE-NEXT: movdqa %xmm2, %xmm1
914 ; X32-SSE-NEXT: pandn %xmm0, %xmm1
915 ; X32-SSE-NEXT: psrlw $1, %xmm0
916 ; X32-SSE-NEXT: pand %xmm2, %xmm0
917 ; X32-SSE-NEXT: pand {{\.LCPI.*}}, %xmm0
918 ; X32-SSE-NEXT: por %xmm1, %xmm0
920 %shift = lshr <2 x i8> %a, %b
925 ; Uniform Variable Shifts
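; splatvar_shift_v2i32: with a uniform (splatted) count a single hardware shift
; suffices. The count is zero-extended into the low 64 bits of an XMM register
; (movss from a zeroed register on SSE2, pmovzxdq elsewhere) and one
; psrld/vpsrld shifts every element.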
928 define <2 x i32> @splatvar_shift_v2i32(<2 x i32> %a, <2 x i32> %b) nounwind {
929 ; SSE2-LABEL: splatvar_shift_v2i32:
931 ; SSE2-NEXT: xorps %xmm2, %xmm2
932 ; SSE2-NEXT: movss {{.*#+}} xmm2 = xmm1[0],xmm2[1,2,3]
933 ; SSE2-NEXT: psrld %xmm2, %xmm0
936 ; SSE41-LABEL: splatvar_shift_v2i32:
938 ; SSE41-NEXT: pmovzxdq {{.*#+}} xmm1 = xmm1[0],zero,xmm1[1],zero
939 ; SSE41-NEXT: psrld %xmm1, %xmm0
942 ; AVX-LABEL: splatvar_shift_v2i32:
944 ; AVX-NEXT: vpmovzxdq {{.*#+}} xmm1 = xmm1[0],zero,xmm1[1],zero
945 ; AVX-NEXT: vpsrld %xmm1, %xmm0, %xmm0
948 ; XOP-LABEL: splatvar_shift_v2i32:
950 ; XOP-NEXT: vpmovzxdq {{.*#+}} xmm1 = xmm1[0],zero,xmm1[1],zero
951 ; XOP-NEXT: vpsrld %xmm1, %xmm0, %xmm0
954 ; AVX512-LABEL: splatvar_shift_v2i32:
956 ; AVX512-NEXT: vpmovzxdq {{.*#+}} xmm1 = xmm1[0],zero,xmm1[1],zero
957 ; AVX512-NEXT: vpsrld %xmm1, %xmm0, %xmm0
960 ; AVX512VL-LABEL: splatvar_shift_v2i32:
962 ; AVX512VL-NEXT: vpmovzxdq {{.*#+}} xmm1 = xmm1[0],zero,xmm1[1],zero
963 ; AVX512VL-NEXT: vpsrld %xmm1, %xmm0, %xmm0
964 ; AVX512VL-NEXT: retq
966 ; X32-SSE-LABEL: splatvar_shift_v2i32:
968 ; X32-SSE-NEXT: xorps %xmm2, %xmm2
969 ; X32-SSE-NEXT: movss {{.*#+}} xmm2 = xmm1[0],xmm2[1,2,3]
970 ; X32-SSE-NEXT: psrld %xmm2, %xmm0
972 %splat = shufflevector <2 x i32> %b, <2 x i32> undef, <2 x i32> zeroinitializer
973 %shift = lshr <2 x i32> %a, %splat
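; splatvar_shift_v4i16: the splatted word count is moved to the bottom of the
; register with the upper bits cleared (pslldq/psrldq on SSE2, pmovzxwq
; elsewhere) and a single psrlw/vpsrlw shifts all lanes.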
977 define <4 x i16> @splatvar_shift_v4i16(<4 x i16> %a, <4 x i16> %b) nounwind {
978 ; SSE2-LABEL: splatvar_shift_v4i16:
980 ; SSE2-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm1[0,1]
981 ; SSE2-NEXT: psrldq {{.*#+}} xmm1 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
982 ; SSE2-NEXT: psrlw %xmm1, %xmm0
985 ; SSE41-LABEL: splatvar_shift_v4i16:
987 ; SSE41-NEXT: pmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
988 ; SSE41-NEXT: psrlw %xmm1, %xmm0
991 ; AVX-LABEL: splatvar_shift_v4i16:
993 ; AVX-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
994 ; AVX-NEXT: vpsrlw %xmm1, %xmm0, %xmm0
997 ; XOP-LABEL: splatvar_shift_v4i16:
999 ; XOP-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
1000 ; XOP-NEXT: vpsrlw %xmm1, %xmm0, %xmm0
1003 ; AVX512-LABEL: splatvar_shift_v4i16:
1005 ; AVX512-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
1006 ; AVX512-NEXT: vpsrlw %xmm1, %xmm0, %xmm0
1009 ; AVX512VL-LABEL: splatvar_shift_v4i16:
1010 ; AVX512VL: # %bb.0:
1011 ; AVX512VL-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
1012 ; AVX512VL-NEXT: vpsrlw %xmm1, %xmm0, %xmm0
1013 ; AVX512VL-NEXT: retq
1015 ; X32-SSE-LABEL: splatvar_shift_v4i16:
1017 ; X32-SSE-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm1[0,1]
1018 ; X32-SSE-NEXT: psrldq {{.*#+}} xmm1 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1019 ; X32-SSE-NEXT: psrlw %xmm1, %xmm0
1020 ; X32-SSE-NEXT: retl
1021 %splat = shufflevector <4 x i16> %b, <4 x i16> undef, <4 x i32> zeroinitializer
1022 %shift = lshr <4 x i16> %a, %splat
1023 ret <4 x i16> %shift
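; splatvar_shift_v2i16: same single-psrlw lowering as splatvar_shift_v4i16 above.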
1026 define <2 x i16> @splatvar_shift_v2i16(<2 x i16> %a, <2 x i16> %b) nounwind {
1027 ; SSE2-LABEL: splatvar_shift_v2i16:
1029 ; SSE2-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm1[0,1]
1030 ; SSE2-NEXT: psrldq {{.*#+}} xmm1 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1031 ; SSE2-NEXT: psrlw %xmm1, %xmm0
1034 ; SSE41-LABEL: splatvar_shift_v2i16:
1036 ; SSE41-NEXT: pmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
1037 ; SSE41-NEXT: psrlw %xmm1, %xmm0
1040 ; AVX-LABEL: splatvar_shift_v2i16:
1042 ; AVX-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
1043 ; AVX-NEXT: vpsrlw %xmm1, %xmm0, %xmm0
1046 ; XOP-LABEL: splatvar_shift_v2i16:
1048 ; XOP-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
1049 ; XOP-NEXT: vpsrlw %xmm1, %xmm0, %xmm0
1052 ; AVX512-LABEL: splatvar_shift_v2i16:
1054 ; AVX512-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
1055 ; AVX512-NEXT: vpsrlw %xmm1, %xmm0, %xmm0
1058 ; AVX512VL-LABEL: splatvar_shift_v2i16:
1059 ; AVX512VL: # %bb.0:
1060 ; AVX512VL-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
1061 ; AVX512VL-NEXT: vpsrlw %xmm1, %xmm0, %xmm0
1062 ; AVX512VL-NEXT: retq
1064 ; X32-SSE-LABEL: splatvar_shift_v2i16:
1066 ; X32-SSE-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm1[0,1]
1067 ; X32-SSE-NEXT: psrldq {{.*#+}} xmm1 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1068 ; X32-SSE-NEXT: psrlw %xmm1, %xmm0
1069 ; X32-SSE-NEXT: retl
1070 %splat = shufflevector <2 x i16> %b, <2 x i16> undef, <2 x i32> zeroinitializer
1071 %shift = lshr <2 x i16> %a, %splat
1072 ret <2 x i16> %shift
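; splatvar_shift_v8i8: a single psrlw shifts whole words, and an all-ones
; vector shifted by the same count supplies a mask that clears the bits pulled
; in across byte boundaries. AVX512 paths broadcast the byte count, zero-extend,
; use a variable dword/word shift and truncate back; XOP splats the count and
; uses vpshlb with a negated count.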
1075 define <8 x i8> @splatvar_shift_v8i8(<8 x i8> %a, <8 x i8> %b) nounwind {
1076 ; SSE2-LABEL: splatvar_shift_v8i8:
1078 ; SSE2-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm1[0]
1079 ; SSE2-NEXT: psrldq {{.*#+}} xmm1 = xmm1[15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1080 ; SSE2-NEXT: psrlw %xmm1, %xmm0
1081 ; SSE2-NEXT: pcmpeqd %xmm2, %xmm2
1082 ; SSE2-NEXT: psrlw %xmm1, %xmm2
1083 ; SSE2-NEXT: psrlw $8, %xmm2
1084 ; SSE2-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1085 ; SSE2-NEXT: pshuflw {{.*#+}} xmm1 = xmm2[0,0,2,3,4,5,6,7]
1086 ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,0,0]
1087 ; SSE2-NEXT: pand %xmm1, %xmm0
1090 ; SSE41-LABEL: splatvar_shift_v8i8:
1092 ; SSE41-NEXT: pmovzxbq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,zero,zero,zero,zero,xmm1[1],zero,zero,zero,zero,zero,zero,zero
1093 ; SSE41-NEXT: psrlw %xmm1, %xmm0
1094 ; SSE41-NEXT: pcmpeqd %xmm2, %xmm2
1095 ; SSE41-NEXT: psrlw %xmm1, %xmm2
1096 ; SSE41-NEXT: pshufb {{.*#+}} xmm2 = xmm2[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
1097 ; SSE41-NEXT: pand %xmm2, %xmm0
1100 ; AVX1-LABEL: splatvar_shift_v8i8:
1102 ; AVX1-NEXT: vpmovzxbq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,zero,zero,zero,zero,xmm1[1],zero,zero,zero,zero,zero,zero,zero
1103 ; AVX1-NEXT: vpsrlw %xmm1, %xmm0, %xmm0
1104 ; AVX1-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2
1105 ; AVX1-NEXT: vpsrlw %xmm1, %xmm2, %xmm1
1106 ; AVX1-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
1107 ; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
1110 ; AVX2-LABEL: splatvar_shift_v8i8:
1112 ; AVX2-NEXT: vpmovzxbq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,zero,zero,zero,zero,xmm1[1],zero,zero,zero,zero,zero,zero,zero
1113 ; AVX2-NEXT: vpsrlw %xmm1, %xmm0, %xmm0
1114 ; AVX2-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2
1115 ; AVX2-NEXT: vpsrlw %xmm1, %xmm2, %xmm1
1116 ; AVX2-NEXT: vpsrlw $8, %xmm1, %xmm1
1117 ; AVX2-NEXT: vpbroadcastb %xmm1, %xmm1
1118 ; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
1121 ; XOPAVX1-LABEL: splatvar_shift_v8i8:
1123 ; XOPAVX1-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm1[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1124 ; XOPAVX1-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,0,0,0,4,5,6,7]
1125 ; XOPAVX1-NEXT: vpxor %xmm2, %xmm2, %xmm2
1126 ; XOPAVX1-NEXT: vpsubb %xmm1, %xmm2, %xmm1
1127 ; XOPAVX1-NEXT: vpshlb %xmm1, %xmm0, %xmm0
1128 ; XOPAVX1-NEXT: retq
1130 ; XOPAVX2-LABEL: splatvar_shift_v8i8:
1132 ; XOPAVX2-NEXT: vpbroadcastb %xmm1, %xmm1
1133 ; XOPAVX2-NEXT: vpxor %xmm2, %xmm2, %xmm2
1134 ; XOPAVX2-NEXT: vpsubb %xmm1, %xmm2, %xmm1
1135 ; XOPAVX2-NEXT: vpshlb %xmm1, %xmm0, %xmm0
1136 ; XOPAVX2-NEXT: retq
1138 ; AVX512DQ-LABEL: splatvar_shift_v8i8:
1139 ; AVX512DQ: # %bb.0:
1140 ; AVX512DQ-NEXT: vpbroadcastb %xmm1, %xmm1
1141 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
1142 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
1143 ; AVX512DQ-NEXT: vpsrlvd %zmm1, %zmm0, %zmm0
1144 ; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
1145 ; AVX512DQ-NEXT: vzeroupper
1146 ; AVX512DQ-NEXT: retq
1148 ; AVX512BW-LABEL: splatvar_shift_v8i8:
1149 ; AVX512BW: # %bb.0:
1150 ; AVX512BW-NEXT: vpbroadcastb %xmm1, %xmm1
1151 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1152 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero,xmm1[8],zero,xmm1[9],zero,xmm1[10],zero,xmm1[11],zero,xmm1[12],zero,xmm1[13],zero,xmm1[14],zero,xmm1[15],zero
1153 ; AVX512BW-NEXT: vpsrlvw %zmm1, %zmm0, %zmm0
1154 ; AVX512BW-NEXT: vpmovwb %zmm0, %ymm0
1155 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 killed $ymm0
1156 ; AVX512BW-NEXT: vzeroupper
1157 ; AVX512BW-NEXT: retq
1159 ; AVX512DQVL-LABEL: splatvar_shift_v8i8:
1160 ; AVX512DQVL: # %bb.0:
1161 ; AVX512DQVL-NEXT: vpbroadcastb %xmm1, %xmm1
1162 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
1163 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
1164 ; AVX512DQVL-NEXT: vpsrlvd %zmm1, %zmm0, %zmm0
1165 ; AVX512DQVL-NEXT: vpmovdb %zmm0, %xmm0
1166 ; AVX512DQVL-NEXT: vzeroupper
1167 ; AVX512DQVL-NEXT: retq
1169 ; AVX512BWVL-LABEL: splatvar_shift_v8i8:
1170 ; AVX512BWVL: # %bb.0:
1171 ; AVX512BWVL-NEXT: vpbroadcastb %xmm1, %xmm1
1172 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1173 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero,xmm1[8],zero,xmm1[9],zero,xmm1[10],zero,xmm1[11],zero,xmm1[12],zero,xmm1[13],zero,xmm1[14],zero,xmm1[15],zero
1174 ; AVX512BWVL-NEXT: vpsrlvw %ymm1, %ymm0, %ymm0
1175 ; AVX512BWVL-NEXT: vpmovwb %ymm0, %xmm0
1176 ; AVX512BWVL-NEXT: vzeroupper
1177 ; AVX512BWVL-NEXT: retq
1179 ; X32-SSE-LABEL: splatvar_shift_v8i8:
1181 ; X32-SSE-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm1[0]
1182 ; X32-SSE-NEXT: psrldq {{.*#+}} xmm1 = xmm1[15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1183 ; X32-SSE-NEXT: psrlw %xmm1, %xmm0
1184 ; X32-SSE-NEXT: pcmpeqd %xmm2, %xmm2
1185 ; X32-SSE-NEXT: psrlw %xmm1, %xmm2
1186 ; X32-SSE-NEXT: psrlw $8, %xmm2
1187 ; X32-SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1188 ; X32-SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm2[0,0,2,3,4,5,6,7]
1189 ; X32-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,0,0]
1190 ; X32-SSE-NEXT: pand %xmm1, %xmm0
1191 ; X32-SSE-NEXT: retl
1192 %splat = shufflevector <8 x i8> %b, <8 x i8> undef, <8 x i32> zeroinitializer
1193 %shift = lshr <8 x i8> %a, %splat
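; splatvar_shift_v4i8: same psrlw-plus-mask lowering as splatvar_shift_v8i8
; above.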
1197 define <4 x i8> @splatvar_shift_v4i8(<4 x i8> %a, <4 x i8> %b) nounwind {
1198 ; SSE2-LABEL: splatvar_shift_v4i8:
1200 ; SSE2-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm1[0]
1201 ; SSE2-NEXT: psrldq {{.*#+}} xmm1 = xmm1[15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1202 ; SSE2-NEXT: psrlw %xmm1, %xmm0
1203 ; SSE2-NEXT: pcmpeqd %xmm2, %xmm2
1204 ; SSE2-NEXT: psrlw %xmm1, %xmm2
1205 ; SSE2-NEXT: psrlw $8, %xmm2
1206 ; SSE2-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1207 ; SSE2-NEXT: pshuflw {{.*#+}} xmm1 = xmm2[0,0,2,3,4,5,6,7]
1208 ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,0,0]
1209 ; SSE2-NEXT: pand %xmm1, %xmm0
1212 ; SSE41-LABEL: splatvar_shift_v4i8:
1214 ; SSE41-NEXT: pmovzxbq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,zero,zero,zero,zero,xmm1[1],zero,zero,zero,zero,zero,zero,zero
1215 ; SSE41-NEXT: psrlw %xmm1, %xmm0
1216 ; SSE41-NEXT: pcmpeqd %xmm2, %xmm2
1217 ; SSE41-NEXT: psrlw %xmm1, %xmm2
1218 ; SSE41-NEXT: pshufb {{.*#+}} xmm2 = xmm2[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
1219 ; SSE41-NEXT: pand %xmm2, %xmm0
1222 ; AVX1-LABEL: splatvar_shift_v4i8:
1224 ; AVX1-NEXT: vpmovzxbq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,zero,zero,zero,zero,xmm1[1],zero,zero,zero,zero,zero,zero,zero
1225 ; AVX1-NEXT: vpsrlw %xmm1, %xmm0, %xmm0
1226 ; AVX1-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2
1227 ; AVX1-NEXT: vpsrlw %xmm1, %xmm2, %xmm1
1228 ; AVX1-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
1229 ; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
1232 ; AVX2-LABEL: splatvar_shift_v4i8:
1234 ; AVX2-NEXT: vpmovzxbq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,zero,zero,zero,zero,xmm1[1],zero,zero,zero,zero,zero,zero,zero
1235 ; AVX2-NEXT: vpsrlw %xmm1, %xmm0, %xmm0
1236 ; AVX2-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2
1237 ; AVX2-NEXT: vpsrlw %xmm1, %xmm2, %xmm1
1238 ; AVX2-NEXT: vpsrlw $8, %xmm1, %xmm1
1239 ; AVX2-NEXT: vpbroadcastb %xmm1, %xmm1
1240 ; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
1243 ; XOPAVX1-LABEL: splatvar_shift_v4i8:
1245 ; XOPAVX1-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm1[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1246 ; XOPAVX1-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,0,2,3,4,5,6,7]
1247 ; XOPAVX1-NEXT: vpxor %xmm2, %xmm2, %xmm2
1248 ; XOPAVX1-NEXT: vpsubb %xmm1, %xmm2, %xmm1
1249 ; XOPAVX1-NEXT: vpshlb %xmm1, %xmm0, %xmm0
1250 ; XOPAVX1-NEXT: retq
1252 ; XOPAVX2-LABEL: splatvar_shift_v4i8:
1254 ; XOPAVX2-NEXT: vpbroadcastb %xmm1, %xmm1
1255 ; XOPAVX2-NEXT: vpxor %xmm2, %xmm2, %xmm2
1256 ; XOPAVX2-NEXT: vpsubb %xmm1, %xmm2, %xmm1
1257 ; XOPAVX2-NEXT: vpshlb %xmm1, %xmm0, %xmm0
1258 ; XOPAVX2-NEXT: retq
1260 ; AVX512DQ-LABEL: splatvar_shift_v4i8:
1261 ; AVX512DQ: # %bb.0:
1262 ; AVX512DQ-NEXT: vpbroadcastb %xmm1, %xmm1
1263 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
1264 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
1265 ; AVX512DQ-NEXT: vpsrlvd %zmm1, %zmm0, %zmm0
1266 ; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
1267 ; AVX512DQ-NEXT: vzeroupper
1268 ; AVX512DQ-NEXT: retq
1270 ; AVX512BW-LABEL: splatvar_shift_v4i8:
1271 ; AVX512BW: # %bb.0:
1272 ; AVX512BW-NEXT: vpbroadcastb %xmm1, %xmm1
1273 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1274 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero,xmm1[8],zero,xmm1[9],zero,xmm1[10],zero,xmm1[11],zero,xmm1[12],zero,xmm1[13],zero,xmm1[14],zero,xmm1[15],zero
1275 ; AVX512BW-NEXT: vpsrlvw %zmm1, %zmm0, %zmm0
1276 ; AVX512BW-NEXT: vpmovwb %zmm0, %ymm0
1277 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 killed $ymm0
1278 ; AVX512BW-NEXT: vzeroupper
1279 ; AVX512BW-NEXT: retq
1281 ; AVX512DQVL-LABEL: splatvar_shift_v4i8:
1282 ; AVX512DQVL: # %bb.0:
1283 ; AVX512DQVL-NEXT: vpbroadcastb %xmm1, %xmm1
1284 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
1285 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
1286 ; AVX512DQVL-NEXT: vpsrlvd %zmm1, %zmm0, %zmm0
1287 ; AVX512DQVL-NEXT: vpmovdb %zmm0, %xmm0
1288 ; AVX512DQVL-NEXT: vzeroupper
1289 ; AVX512DQVL-NEXT: retq
1291 ; AVX512BWVL-LABEL: splatvar_shift_v4i8:
1292 ; AVX512BWVL: # %bb.0:
1293 ; AVX512BWVL-NEXT: vpbroadcastb %xmm1, %xmm1
1294 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1295 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero,xmm1[8],zero,xmm1[9],zero,xmm1[10],zero,xmm1[11],zero,xmm1[12],zero,xmm1[13],zero,xmm1[14],zero,xmm1[15],zero
1296 ; AVX512BWVL-NEXT: vpsrlvw %ymm1, %ymm0, %ymm0
1297 ; AVX512BWVL-NEXT: vpmovwb %ymm0, %xmm0
1298 ; AVX512BWVL-NEXT: vzeroupper
1299 ; AVX512BWVL-NEXT: retq
1301 ; X32-SSE-LABEL: splatvar_shift_v4i8:
1303 ; X32-SSE-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm1[0]
1304 ; X32-SSE-NEXT: psrldq {{.*#+}} xmm1 = xmm1[15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1305 ; X32-SSE-NEXT: psrlw %xmm1, %xmm0
1306 ; X32-SSE-NEXT: pcmpeqd %xmm2, %xmm2
1307 ; X32-SSE-NEXT: psrlw %xmm1, %xmm2
1308 ; X32-SSE-NEXT: psrlw $8, %xmm2
1309 ; X32-SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1310 ; X32-SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm2[0,0,2,3,4,5,6,7]
1311 ; X32-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,0,0]
1312 ; X32-SSE-NEXT: pand %xmm1, %xmm0
1313 ; X32-SSE-NEXT: retl
1314 %splat = shufflevector <4 x i8> %b, <4 x i8> undef, <4 x i32> zeroinitializer
1315 %shift = lshr <4 x i8> %a, %splat
1316 ret <4 x i8> %shift
1317 }
1319 define <2 x i8> @splatvar_shift_v2i8(<2 x i8> %a, <2 x i8> %b) nounwind {
1320 ; SSE2-LABEL: splatvar_shift_v2i8:
1322 ; SSE2-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm1[0]
1323 ; SSE2-NEXT: psrldq {{.*#+}} xmm1 = xmm1[15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1324 ; SSE2-NEXT: psrlw %xmm1, %xmm0
1325 ; SSE2-NEXT: pcmpeqd %xmm2, %xmm2
1326 ; SSE2-NEXT: psrlw %xmm1, %xmm2
1327 ; SSE2-NEXT: psrlw $8, %xmm2
1328 ; SSE2-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1329 ; SSE2-NEXT: pshuflw {{.*#+}} xmm1 = xmm2[0,0,2,3,4,5,6,7]
1330 ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,0,0]
1331 ; SSE2-NEXT: pand %xmm1, %xmm0
1334 ; SSE41-LABEL: splatvar_shift_v2i8:
1336 ; SSE41-NEXT: pmovzxbq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,zero,zero,zero,zero,xmm1[1],zero,zero,zero,zero,zero,zero,zero
1337 ; SSE41-NEXT: psrlw %xmm1, %xmm0
1338 ; SSE41-NEXT: pcmpeqd %xmm2, %xmm2
1339 ; SSE41-NEXT: psrlw %xmm1, %xmm2
1340 ; SSE41-NEXT: pshufb {{.*#+}} xmm2 = xmm2[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
1341 ; SSE41-NEXT: pand %xmm2, %xmm0
1344 ; AVX1-LABEL: splatvar_shift_v2i8:
1346 ; AVX1-NEXT: vpmovzxbq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,zero,zero,zero,zero,xmm1[1],zero,zero,zero,zero,zero,zero,zero
1347 ; AVX1-NEXT: vpsrlw %xmm1, %xmm0, %xmm0
1348 ; AVX1-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2
1349 ; AVX1-NEXT: vpsrlw %xmm1, %xmm2, %xmm1
1350 ; AVX1-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
1351 ; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
1354 ; AVX2-LABEL: splatvar_shift_v2i8:
1356 ; AVX2-NEXT: vpmovzxbq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,zero,zero,zero,zero,xmm1[1],zero,zero,zero,zero,zero,zero,zero
1357 ; AVX2-NEXT: vpsrlw %xmm1, %xmm0, %xmm0
1358 ; AVX2-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2
1359 ; AVX2-NEXT: vpsrlw %xmm1, %xmm2, %xmm1
1360 ; AVX2-NEXT: vpsrlw $8, %xmm1, %xmm1
1361 ; AVX2-NEXT: vpbroadcastb %xmm1, %xmm1
1362 ; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
1365 ; XOP-LABEL: splatvar_shift_v2i8:
1367 ; XOP-NEXT: insertq {{.*#+}} xmm1 = xmm1[0,0,2,3,4,5,6,7,u,u,u,u,u,u,u,u]
1368 ; XOP-NEXT: vpxor %xmm2, %xmm2, %xmm2
1369 ; XOP-NEXT: vpsubb %xmm1, %xmm2, %xmm1
1370 ; XOP-NEXT: vpshlb %xmm1, %xmm0, %xmm0
1373 ; AVX512DQ-LABEL: splatvar_shift_v2i8:
1374 ; AVX512DQ: # %bb.0:
1375 ; AVX512DQ-NEXT: vpbroadcastb %xmm1, %xmm1
1376 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
1377 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
1378 ; AVX512DQ-NEXT: vpsrlvd %zmm1, %zmm0, %zmm0
1379 ; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
1380 ; AVX512DQ-NEXT: vzeroupper
1381 ; AVX512DQ-NEXT: retq
1383 ; AVX512BW-LABEL: splatvar_shift_v2i8:
1384 ; AVX512BW: # %bb.0:
1385 ; AVX512BW-NEXT: vpbroadcastb %xmm1, %xmm1
1386 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1387 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero,xmm1[8],zero,xmm1[9],zero,xmm1[10],zero,xmm1[11],zero,xmm1[12],zero,xmm1[13],zero,xmm1[14],zero,xmm1[15],zero
1388 ; AVX512BW-NEXT: vpsrlvw %zmm1, %zmm0, %zmm0
1389 ; AVX512BW-NEXT: vpmovwb %zmm0, %ymm0
1390 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 killed $ymm0
1391 ; AVX512BW-NEXT: vzeroupper
1392 ; AVX512BW-NEXT: retq
1394 ; AVX512DQVL-LABEL: splatvar_shift_v2i8:
1395 ; AVX512DQVL: # %bb.0:
1396 ; AVX512DQVL-NEXT: vpbroadcastb %xmm1, %xmm1
1397 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
1398 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
1399 ; AVX512DQVL-NEXT: vpsrlvd %zmm1, %zmm0, %zmm0
1400 ; AVX512DQVL-NEXT: vpmovdb %zmm0, %xmm0
1401 ; AVX512DQVL-NEXT: vzeroupper
1402 ; AVX512DQVL-NEXT: retq
1404 ; AVX512BWVL-LABEL: splatvar_shift_v2i8:
1405 ; AVX512BWVL: # %bb.0:
1406 ; AVX512BWVL-NEXT: vpbroadcastb %xmm1, %xmm1
1407 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1408 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero,xmm1[8],zero,xmm1[9],zero,xmm1[10],zero,xmm1[11],zero,xmm1[12],zero,xmm1[13],zero,xmm1[14],zero,xmm1[15],zero
1409 ; AVX512BWVL-NEXT: vpsrlvw %ymm1, %ymm0, %ymm0
1410 ; AVX512BWVL-NEXT: vpmovwb %ymm0, %xmm0
1411 ; AVX512BWVL-NEXT: vzeroupper
1412 ; AVX512BWVL-NEXT: retq
1414 ; X32-SSE-LABEL: splatvar_shift_v2i8:
1416 ; X32-SSE-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm1[0]
1417 ; X32-SSE-NEXT: psrldq {{.*#+}} xmm1 = xmm1[15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1418 ; X32-SSE-NEXT: psrlw %xmm1, %xmm0
1419 ; X32-SSE-NEXT: pcmpeqd %xmm2, %xmm2
1420 ; X32-SSE-NEXT: psrlw %xmm1, %xmm2
1421 ; X32-SSE-NEXT: psrlw $8, %xmm2
1422 ; X32-SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1423 ; X32-SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm2[0,0,2,3,4,5,6,7]
1424 ; X32-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,0,0]
1425 ; X32-SSE-NEXT: pand %xmm1, %xmm0
1426 ; X32-SSE-NEXT: retl
1427 %splat = shufflevector <2 x i8> %b, <2 x i8> undef, <2 x i32> zeroinitializer
1428 %shift = lshr <2 x i8> %a, %splat
1429 ret <2 x i8> %shift
1430 }
1432 ;
1433 ; Constant Shifts
1434 ;
1436 define <2 x i32> @constant_shift_v2i32(<2 x i32> %a) nounwind {
1437 ; SSE2-LABEL: constant_shift_v2i32:
1439 ; SSE2-NEXT: movdqa %xmm0, %xmm1
1440 ; SSE2-NEXT: psrld $4, %xmm1
1441 ; SSE2-NEXT: psrld $5, %xmm0
1442 ; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,2,3]
1443 ; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
1444 ; SSE2-NEXT: movdqa %xmm1, %xmm0
1447 ; SSE41-LABEL: constant_shift_v2i32:
1449 ; SSE41-NEXT: movdqa %xmm0, %xmm1
1450 ; SSE41-NEXT: psrld $5, %xmm1
1451 ; SSE41-NEXT: psrld $4, %xmm0
1452 ; SSE41-NEXT: pblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3],xmm0[4,5,6,7]
1455 ; AVX1-LABEL: constant_shift_v2i32:
1457 ; AVX1-NEXT: vpsrld $5, %xmm0, %xmm1
1458 ; AVX1-NEXT: vpsrld $4, %xmm0, %xmm0
1459 ; AVX1-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3],xmm0[4,5,6,7]
1462 ; AVX2-LABEL: constant_shift_v2i32:
1464 ; AVX2-NEXT: vpsrlvd {{.*}}(%rip), %xmm0, %xmm0
1467 ; XOPAVX1-LABEL: constant_shift_v2i32:
1469 ; XOPAVX1-NEXT: vpshld {{.*}}(%rip), %xmm0, %xmm0
1470 ; XOPAVX1-NEXT: retq
1472 ; XOPAVX2-LABEL: constant_shift_v2i32:
1474 ; XOPAVX2-NEXT: vpsrlvd {{.*}}(%rip), %xmm0, %xmm0
1475 ; XOPAVX2-NEXT: retq
1477 ; AVX512-LABEL: constant_shift_v2i32:
1479 ; AVX512-NEXT: vpsrlvd {{.*}}(%rip), %xmm0, %xmm0
1482 ; AVX512VL-LABEL: constant_shift_v2i32:
1483 ; AVX512VL: # %bb.0:
1484 ; AVX512VL-NEXT: vpsrlvd {{.*}}(%rip), %xmm0, %xmm0
1485 ; AVX512VL-NEXT: retq
1487 ; X32-SSE-LABEL: constant_shift_v2i32:
1489 ; X32-SSE-NEXT: movdqa %xmm0, %xmm1
1490 ; X32-SSE-NEXT: psrld $4, %xmm1
1491 ; X32-SSE-NEXT: psrld $5, %xmm0
1492 ; X32-SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,2,3]
1493 ; X32-SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
1494 ; X32-SSE-NEXT: movdqa %xmm1, %xmm0
1495 ; X32-SSE-NEXT: retl
1496 %shift = lshr <2 x i32> %a, <i32 4, i32 5>
1497 ret <2 x i32> %shift
1498 }
1500 define <4 x i16> @constant_shift_v4i16(<4 x i16> %a) nounwind {
1501 ; SSE2-LABEL: constant_shift_v4i16:
1503 ; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [0,65535,65535,65535,65535,65535,65535,65535]
1504 ; SSE2-NEXT: movdqa %xmm1, %xmm2
1505 ; SSE2-NEXT: pandn %xmm0, %xmm2
1506 ; SSE2-NEXT: pmulhuw {{.*}}(%rip), %xmm0
1507 ; SSE2-NEXT: pand %xmm1, %xmm0
1508 ; SSE2-NEXT: por %xmm2, %xmm0
1511 ; SSE41-LABEL: constant_shift_v4i16:
1513 ; SSE41-NEXT: movdqa {{.*#+}} xmm1 = <u,32768,16384,8192,u,u,u,u>
1514 ; SSE41-NEXT: pmulhuw %xmm0, %xmm1
1515 ; SSE41-NEXT: pblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1,2,3,4,5,6,7]
1518 ; AVX-LABEL: constant_shift_v4i16:
1520 ; AVX-NEXT: vpmulhuw {{.*}}(%rip), %xmm0, %xmm1
1521 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1,2,3,4,5,6,7]
1524 ; XOP-LABEL: constant_shift_v4i16:
1526 ; XOP-NEXT: vpshlw {{.*}}(%rip), %xmm0, %xmm0
1529 ; AVX512DQ-LABEL: constant_shift_v4i16:
1530 ; AVX512DQ: # %bb.0:
1531 ; AVX512DQ-NEXT: vpmulhuw {{.*}}(%rip), %xmm0, %xmm1
1532 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1,2,3,4,5,6,7]
1533 ; AVX512DQ-NEXT: retq
1535 ; AVX512BW-LABEL: constant_shift_v4i16:
1536 ; AVX512BW: # %bb.0:
1537 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
1538 ; AVX512BW-NEXT: vmovdqa {{.*#+}} xmm1 = <0,1,2,3,u,u,u,u>
1539 ; AVX512BW-NEXT: vpsrlvw %zmm1, %zmm0, %zmm0
1540 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 killed $zmm0
1541 ; AVX512BW-NEXT: vzeroupper
1542 ; AVX512BW-NEXT: retq
1544 ; AVX512DQVL-LABEL: constant_shift_v4i16:
1545 ; AVX512DQVL: # %bb.0:
1546 ; AVX512DQVL-NEXT: vpmulhuw {{.*}}(%rip), %xmm0, %xmm1
1547 ; AVX512DQVL-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1,2,3,4,5,6,7]
1548 ; AVX512DQVL-NEXT: retq
1550 ; AVX512BWVL-LABEL: constant_shift_v4i16:
1551 ; AVX512BWVL: # %bb.0:
1552 ; AVX512BWVL-NEXT: vpsrlvw {{.*}}(%rip), %xmm0, %xmm0
1553 ; AVX512BWVL-NEXT: retq
1555 ; X32-SSE-LABEL: constant_shift_v4i16:
1557 ; X32-SSE-NEXT: movdqa {{.*#+}} xmm1 = [0,65535,65535,65535,65535,65535,65535,65535]
1558 ; X32-SSE-NEXT: movdqa %xmm1, %xmm2
1559 ; X32-SSE-NEXT: pandn %xmm0, %xmm2
1560 ; X32-SSE-NEXT: pmulhuw {{\.LCPI.*}}, %xmm0
1561 ; X32-SSE-NEXT: pand %xmm1, %xmm0
1562 ; X32-SSE-NEXT: por %xmm2, %xmm0
1563 ; X32-SSE-NEXT: retl
1564 %shift = lshr <4 x i16> %a, <i16 0, i16 1, i16 2, i16 3>
1565 ret <4 x i16> %shift
1566 }
1568 define <2 x i16> @constant_shift_v2i16(<2 x i16> %a) nounwind {
1569 ; SSE2-LABEL: constant_shift_v2i16:
1571 ; SSE2-NEXT: movdqa %xmm0, %xmm1
1572 ; SSE2-NEXT: psrlw $3, %xmm1
1573 ; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [65535,0,65535,65535,65535,65535,65535,65535]
1574 ; SSE2-NEXT: psrlw $2, %xmm0
1575 ; SSE2-NEXT: pand %xmm2, %xmm0
1576 ; SSE2-NEXT: pandn %xmm1, %xmm2
1577 ; SSE2-NEXT: por %xmm2, %xmm0
1580 ; SSE41-LABEL: constant_shift_v2i16:
1582 ; SSE41-NEXT: movdqa %xmm0, %xmm1
1583 ; SSE41-NEXT: psrlw $3, %xmm1
1584 ; SSE41-NEXT: psrlw $2, %xmm0
1585 ; SSE41-NEXT: pblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3,4,5,6,7]
1588 ; AVX-LABEL: constant_shift_v2i16:
1590 ; AVX-NEXT: vpsrlw $3, %xmm0, %xmm1
1591 ; AVX-NEXT: vpsrlw $2, %xmm0, %xmm0
1592 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3,4,5,6,7]
1595 ; XOP-LABEL: constant_shift_v2i16:
1597 ; XOP-NEXT: vpshlw {{.*}}(%rip), %xmm0, %xmm0
1600 ; AVX512DQ-LABEL: constant_shift_v2i16:
1601 ; AVX512DQ: # %bb.0:
1602 ; AVX512DQ-NEXT: vpsrlw $3, %xmm0, %xmm1
1603 ; AVX512DQ-NEXT: vpsrlw $2, %xmm0, %xmm0
1604 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3,4,5,6,7]
1605 ; AVX512DQ-NEXT: retq
1607 ; AVX512BW-LABEL: constant_shift_v2i16:
1608 ; AVX512BW: # %bb.0:
1609 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
1610 ; AVX512BW-NEXT: vmovdqa {{.*#+}} xmm1 = <2,3,u,u,u,u,u,u>
1611 ; AVX512BW-NEXT: vpsrlvw %zmm1, %zmm0, %zmm0
1612 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 killed $zmm0
1613 ; AVX512BW-NEXT: vzeroupper
1614 ; AVX512BW-NEXT: retq
1616 ; AVX512DQVL-LABEL: constant_shift_v2i16:
1617 ; AVX512DQVL: # %bb.0:
1618 ; AVX512DQVL-NEXT: vpsrlw $3, %xmm0, %xmm1
1619 ; AVX512DQVL-NEXT: vpsrlw $2, %xmm0, %xmm0
1620 ; AVX512DQVL-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3,4,5,6,7]
1621 ; AVX512DQVL-NEXT: retq
1623 ; AVX512BWVL-LABEL: constant_shift_v2i16:
1624 ; AVX512BWVL: # %bb.0:
1625 ; AVX512BWVL-NEXT: vpsrlvw {{.*}}(%rip), %xmm0, %xmm0
1626 ; AVX512BWVL-NEXT: retq
1628 ; X32-SSE-LABEL: constant_shift_v2i16:
1630 ; X32-SSE-NEXT: movdqa %xmm0, %xmm1
1631 ; X32-SSE-NEXT: psrlw $3, %xmm1
1632 ; X32-SSE-NEXT: movdqa {{.*#+}} xmm2 = [65535,0,65535,65535,65535,65535,65535,65535]
1633 ; X32-SSE-NEXT: psrlw $2, %xmm0
1634 ; X32-SSE-NEXT: pand %xmm2, %xmm0
1635 ; X32-SSE-NEXT: pandn %xmm1, %xmm2
1636 ; X32-SSE-NEXT: por %xmm2, %xmm0
1637 ; X32-SSE-NEXT: retl
1638 %shift = lshr <2 x i16> %a, <i16 2, i16 3>
1639 ret <2 x i16> %shift
1640 }
1642 define <8 x i8> @constant_shift_v8i8(<8 x i8> %a) nounwind {
1643 ; SSE2-LABEL: constant_shift_v8i8:
1645 ; SSE2-NEXT: pxor %xmm1, %xmm1
1646 ; SSE2-NEXT: movdqa %xmm0, %xmm2
1647 ; SSE2-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm1[8],xmm2[9],xmm1[9],xmm2[10],xmm1[10],xmm2[11],xmm1[11],xmm2[12],xmm1[12],xmm2[13],xmm1[13],xmm2[14],xmm1[14],xmm2[15],xmm1[15]
1648 ; SSE2-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
1649 ; SSE2-NEXT: pmullw {{.*}}(%rip), %xmm0
1650 ; SSE2-NEXT: psrlw $8, %xmm0
1651 ; SSE2-NEXT: packuswb %xmm2, %xmm0
1654 ; SSE41-LABEL: constant_shift_v8i8:
1656 ; SSE41-NEXT: pxor %xmm2, %xmm2
1657 ; SSE41-NEXT: pmovzxbw {{.*#+}} xmm1 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
1658 ; SSE41-NEXT: punpckhbw {{.*#+}} xmm0 = xmm0[8],xmm2[8],xmm0[9],xmm2[9],xmm0[10],xmm2[10],xmm0[11],xmm2[11],xmm0[12],xmm2[12],xmm0[13],xmm2[13],xmm0[14],xmm2[14],xmm0[15],xmm2[15]
1659 ; SSE41-NEXT: pmullw {{.*}}(%rip), %xmm1
1660 ; SSE41-NEXT: psrlw $8, %xmm1
1661 ; SSE41-NEXT: packuswb %xmm0, %xmm1
1662 ; SSE41-NEXT: movdqa %xmm1, %xmm0
1665 ; AVX1-LABEL: constant_shift_v8i8:
1667 ; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
1668 ; AVX1-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm0[8],xmm1[8],xmm0[9],xmm1[9],xmm0[10],xmm1[10],xmm0[11],xmm1[11],xmm0[12],xmm1[12],xmm0[13],xmm1[13],xmm0[14],xmm1[14],xmm0[15],xmm1[15]
1669 ; AVX1-NEXT: vpmovzxbw {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
1670 ; AVX1-NEXT: vpmullw {{.*}}(%rip), %xmm0, %xmm0
1671 ; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm0
1672 ; AVX1-NEXT: vpackuswb %xmm1, %xmm0, %xmm0
1675 ; AVX2-LABEL: constant_shift_v8i8:
1677 ; AVX2-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1678 ; AVX2-NEXT: vpmullw {{.*}}(%rip), %ymm0, %ymm0
1679 ; AVX2-NEXT: vpsrlw $8, %ymm0, %ymm0
1680 ; AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
1681 ; AVX2-NEXT: vpackuswb %xmm1, %xmm0, %xmm0
1682 ; AVX2-NEXT: vzeroupper
1685 ; XOP-LABEL: constant_shift_v8i8:
1687 ; XOP-NEXT: vpshlb {{.*}}(%rip), %xmm0, %xmm0
1690 ; AVX512DQ-LABEL: constant_shift_v8i8:
1691 ; AVX512DQ: # %bb.0:
1692 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
1693 ; AVX512DQ-NEXT: vpsrlvd {{.*}}(%rip), %zmm0, %zmm0
1694 ; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
1695 ; AVX512DQ-NEXT: vzeroupper
1696 ; AVX512DQ-NEXT: retq
1698 ; AVX512BW-LABEL: constant_shift_v8i8:
1699 ; AVX512BW: # %bb.0:
1700 ; AVX512BW-NEXT: vmovdqa {{.*#+}} ymm1 = [0,1,2,3,4,5,6,7,0,0,0,0,0,0,0,0]
1701 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1702 ; AVX512BW-NEXT: vpsrlvw %zmm1, %zmm0, %zmm0
1703 ; AVX512BW-NEXT: vpmovwb %zmm0, %ymm0
1704 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 killed $ymm0
1705 ; AVX512BW-NEXT: vzeroupper
1706 ; AVX512BW-NEXT: retq
1708 ; AVX512DQVL-LABEL: constant_shift_v8i8:
1709 ; AVX512DQVL: # %bb.0:
1710 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
1711 ; AVX512DQVL-NEXT: vpsrlvd {{.*}}(%rip), %zmm0, %zmm0
1712 ; AVX512DQVL-NEXT: vpmovdb %zmm0, %xmm0
1713 ; AVX512DQVL-NEXT: vzeroupper
1714 ; AVX512DQVL-NEXT: retq
1716 ; AVX512BWVL-LABEL: constant_shift_v8i8:
1717 ; AVX512BWVL: # %bb.0:
1718 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1719 ; AVX512BWVL-NEXT: vpsrlvw {{.*}}(%rip), %ymm0, %ymm0
1720 ; AVX512BWVL-NEXT: vpmovwb %ymm0, %xmm0
1721 ; AVX512BWVL-NEXT: vzeroupper
1722 ; AVX512BWVL-NEXT: retq
1724 ; X32-SSE-LABEL: constant_shift_v8i8:
1726 ; X32-SSE-NEXT: pxor %xmm1, %xmm1
1727 ; X32-SSE-NEXT: movdqa %xmm0, %xmm2
1728 ; X32-SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm1[8],xmm2[9],xmm1[9],xmm2[10],xmm1[10],xmm2[11],xmm1[11],xmm2[12],xmm1[12],xmm2[13],xmm1[13],xmm2[14],xmm1[14],xmm2[15],xmm1[15]
1729 ; X32-SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
1730 ; X32-SSE-NEXT: pmullw {{\.LCPI.*}}, %xmm0
1731 ; X32-SSE-NEXT: psrlw $8, %xmm0
1732 ; X32-SSE-NEXT: packuswb %xmm2, %xmm0
1733 ; X32-SSE-NEXT: retl
1734 %shift = lshr <8 x i8> %a, <i8 0, i8 1, i8 2, i8 3, i8 4, i8 5, i8 6, i8 7>
1735 ret <8 x i8> %shift
1736 }
1738 define <4 x i8> @constant_shift_v4i8(<4 x i8> %a) nounwind {
1739 ; SSE2-LABEL: constant_shift_v4i8:
1741 ; SSE2-NEXT: pxor %xmm1, %xmm1
1742 ; SSE2-NEXT: movdqa %xmm0, %xmm2
1743 ; SSE2-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm1[8],xmm2[9],xmm1[9],xmm2[10],xmm1[10],xmm2[11],xmm1[11],xmm2[12],xmm1[12],xmm2[13],xmm1[13],xmm2[14],xmm1[14],xmm2[15],xmm1[15]
1744 ; SSE2-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
1745 ; SSE2-NEXT: pmullw {{.*}}(%rip), %xmm0
1746 ; SSE2-NEXT: psrlw $8, %xmm0
1747 ; SSE2-NEXT: packuswb %xmm2, %xmm0
1750 ; SSE41-LABEL: constant_shift_v4i8:
1752 ; SSE41-NEXT: pxor %xmm2, %xmm2
1753 ; SSE41-NEXT: pmovzxbw {{.*#+}} xmm1 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
1754 ; SSE41-NEXT: punpckhbw {{.*#+}} xmm0 = xmm0[8],xmm2[8],xmm0[9],xmm2[9],xmm0[10],xmm2[10],xmm0[11],xmm2[11],xmm0[12],xmm2[12],xmm0[13],xmm2[13],xmm0[14],xmm2[14],xmm0[15],xmm2[15]
1755 ; SSE41-NEXT: pmullw {{.*}}(%rip), %xmm1
1756 ; SSE41-NEXT: psrlw $8, %xmm1
1757 ; SSE41-NEXT: packuswb %xmm0, %xmm1
1758 ; SSE41-NEXT: movdqa %xmm1, %xmm0
1761 ; AVX1-LABEL: constant_shift_v4i8:
1763 ; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
1764 ; AVX1-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm0[8],xmm1[8],xmm0[9],xmm1[9],xmm0[10],xmm1[10],xmm0[11],xmm1[11],xmm0[12],xmm1[12],xmm0[13],xmm1[13],xmm0[14],xmm1[14],xmm0[15],xmm1[15]
1765 ; AVX1-NEXT: vpmovzxbw {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
1766 ; AVX1-NEXT: vpmullw {{.*}}(%rip), %xmm0, %xmm0
1767 ; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm0
1768 ; AVX1-NEXT: vpackuswb %xmm1, %xmm0, %xmm0
1771 ; AVX2-LABEL: constant_shift_v4i8:
1773 ; AVX2-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1774 ; AVX2-NEXT: vpmullw {{.*}}(%rip), %ymm0, %ymm0
1775 ; AVX2-NEXT: vpsrlw $8, %ymm0, %ymm0
1776 ; AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
1777 ; AVX2-NEXT: vpackuswb %xmm1, %xmm0, %xmm0
1778 ; AVX2-NEXT: vzeroupper
1781 ; XOP-LABEL: constant_shift_v4i8:
1783 ; XOP-NEXT: vpshlb {{.*}}(%rip), %xmm0, %xmm0
1786 ; AVX512DQ-LABEL: constant_shift_v4i8:
1787 ; AVX512DQ: # %bb.0:
1788 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
1789 ; AVX512DQ-NEXT: vpsrlvd {{.*}}(%rip), %zmm0, %zmm0
1790 ; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
1791 ; AVX512DQ-NEXT: vzeroupper
1792 ; AVX512DQ-NEXT: retq
1794 ; AVX512BW-LABEL: constant_shift_v4i8:
1795 ; AVX512BW: # %bb.0:
1796 ; AVX512BW-NEXT: vmovdqa {{.*#+}} ymm1 = [0,1,2,3,0,0,0,0,0,0,0,0,0,0,0,0]
1797 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1798 ; AVX512BW-NEXT: vpsrlvw %zmm1, %zmm0, %zmm0
1799 ; AVX512BW-NEXT: vpmovwb %zmm0, %ymm0
1800 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 killed $ymm0
1801 ; AVX512BW-NEXT: vzeroupper
1802 ; AVX512BW-NEXT: retq
1804 ; AVX512DQVL-LABEL: constant_shift_v4i8:
1805 ; AVX512DQVL: # %bb.0:
1806 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
1807 ; AVX512DQVL-NEXT: vpsrlvd {{.*}}(%rip), %zmm0, %zmm0
1808 ; AVX512DQVL-NEXT: vpmovdb %zmm0, %xmm0
1809 ; AVX512DQVL-NEXT: vzeroupper
1810 ; AVX512DQVL-NEXT: retq
1812 ; AVX512BWVL-LABEL: constant_shift_v4i8:
1813 ; AVX512BWVL: # %bb.0:
1814 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1815 ; AVX512BWVL-NEXT: vpsrlvw {{.*}}(%rip), %ymm0, %ymm0
1816 ; AVX512BWVL-NEXT: vpmovwb %ymm0, %xmm0
1817 ; AVX512BWVL-NEXT: vzeroupper
1818 ; AVX512BWVL-NEXT: retq
1820 ; X32-SSE-LABEL: constant_shift_v4i8:
1822 ; X32-SSE-NEXT: pxor %xmm1, %xmm1
1823 ; X32-SSE-NEXT: movdqa %xmm0, %xmm2
1824 ; X32-SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm1[8],xmm2[9],xmm1[9],xmm2[10],xmm1[10],xmm2[11],xmm1[11],xmm2[12],xmm1[12],xmm2[13],xmm1[13],xmm2[14],xmm1[14],xmm2[15],xmm1[15]
1825 ; X32-SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
1826 ; X32-SSE-NEXT: pmullw {{\.LCPI.*}}, %xmm0
1827 ; X32-SSE-NEXT: psrlw $8, %xmm0
1828 ; X32-SSE-NEXT: packuswb %xmm2, %xmm0
1829 ; X32-SSE-NEXT: retl
1830 %shift = lshr <4 x i8> %a, <i8 0, i8 1, i8 2, i8 3>
1831 ret <4 x i8> %shift
1832 }
1834 define <2 x i8> @constant_shift_v2i8(<2 x i8> %a) nounwind {
1835 ; SSE2-LABEL: constant_shift_v2i8:
1837 ; SSE2-NEXT: pxor %xmm1, %xmm1
1838 ; SSE2-NEXT: movdqa %xmm0, %xmm2
1839 ; SSE2-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm1[8],xmm2[9],xmm1[9],xmm2[10],xmm1[10],xmm2[11],xmm1[11],xmm2[12],xmm1[12],xmm2[13],xmm1[13],xmm2[14],xmm1[14],xmm2[15],xmm1[15]
1840 ; SSE2-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
1841 ; SSE2-NEXT: pmullw {{.*}}(%rip), %xmm0
1842 ; SSE2-NEXT: psrlw $8, %xmm0
1843 ; SSE2-NEXT: packuswb %xmm2, %xmm0
1846 ; SSE41-LABEL: constant_shift_v2i8:
1848 ; SSE41-NEXT: pxor %xmm2, %xmm2
1849 ; SSE41-NEXT: pmovzxbw {{.*#+}} xmm1 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
1850 ; SSE41-NEXT: punpckhbw {{.*#+}} xmm0 = xmm0[8],xmm2[8],xmm0[9],xmm2[9],xmm0[10],xmm2[10],xmm0[11],xmm2[11],xmm0[12],xmm2[12],xmm0[13],xmm2[13],xmm0[14],xmm2[14],xmm0[15],xmm2[15]
1851 ; SSE41-NEXT: pmullw {{.*}}(%rip), %xmm1
1852 ; SSE41-NEXT: psrlw $8, %xmm1
1853 ; SSE41-NEXT: packuswb %xmm0, %xmm1
1854 ; SSE41-NEXT: movdqa %xmm1, %xmm0
1857 ; AVX1-LABEL: constant_shift_v2i8:
1859 ; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
1860 ; AVX1-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm0[8],xmm1[8],xmm0[9],xmm1[9],xmm0[10],xmm1[10],xmm0[11],xmm1[11],xmm0[12],xmm1[12],xmm0[13],xmm1[13],xmm0[14],xmm1[14],xmm0[15],xmm1[15]
1861 ; AVX1-NEXT: vpmovzxbw {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
1862 ; AVX1-NEXT: vpmullw {{.*}}(%rip), %xmm0, %xmm0
1863 ; AVX1-NEXT: vpsrlw $8, %xmm0, %xmm0
1864 ; AVX1-NEXT: vpackuswb %xmm1, %xmm0, %xmm0
1867 ; AVX2-LABEL: constant_shift_v2i8:
1869 ; AVX2-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1870 ; AVX2-NEXT: vpmullw {{.*}}(%rip), %ymm0, %ymm0
1871 ; AVX2-NEXT: vpsrlw $8, %ymm0, %ymm0
1872 ; AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
1873 ; AVX2-NEXT: vpackuswb %xmm1, %xmm0, %xmm0
1874 ; AVX2-NEXT: vzeroupper
1877 ; XOP-LABEL: constant_shift_v2i8:
1879 ; XOP-NEXT: vpshlb {{.*}}(%rip), %xmm0, %xmm0
1882 ; AVX512DQ-LABEL: constant_shift_v2i8:
1883 ; AVX512DQ: # %bb.0:
1884 ; AVX512DQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
1885 ; AVX512DQ-NEXT: vpsrlvd {{.*}}(%rip), %zmm0, %zmm0
1886 ; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
1887 ; AVX512DQ-NEXT: vzeroupper
1888 ; AVX512DQ-NEXT: retq
1890 ; AVX512BW-LABEL: constant_shift_v2i8:
1891 ; AVX512BW: # %bb.0:
1892 ; AVX512BW-NEXT: vmovdqa {{.*#+}} ymm1 = [2,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
1893 ; AVX512BW-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1894 ; AVX512BW-NEXT: vpsrlvw %zmm1, %zmm0, %zmm0
1895 ; AVX512BW-NEXT: vpmovwb %zmm0, %ymm0
1896 ; AVX512BW-NEXT: # kill: def $xmm0 killed $xmm0 killed $ymm0
1897 ; AVX512BW-NEXT: vzeroupper
1898 ; AVX512BW-NEXT: retq
1900 ; AVX512DQVL-LABEL: constant_shift_v2i8:
1901 ; AVX512DQVL: # %bb.0:
1902 ; AVX512DQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
1903 ; AVX512DQVL-NEXT: vpsrlvd {{.*}}(%rip), %zmm0, %zmm0
1904 ; AVX512DQVL-NEXT: vpmovdb %zmm0, %xmm0
1905 ; AVX512DQVL-NEXT: vzeroupper
1906 ; AVX512DQVL-NEXT: retq
1908 ; AVX512BWVL-LABEL: constant_shift_v2i8:
1909 ; AVX512BWVL: # %bb.0:
1910 ; AVX512BWVL-NEXT: vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
1911 ; AVX512BWVL-NEXT: vpsrlvw {{.*}}(%rip), %ymm0, %ymm0
1912 ; AVX512BWVL-NEXT: vpmovwb %ymm0, %xmm0
1913 ; AVX512BWVL-NEXT: vzeroupper
1914 ; AVX512BWVL-NEXT: retq
1916 ; X32-SSE-LABEL: constant_shift_v2i8:
1918 ; X32-SSE-NEXT: pxor %xmm1, %xmm1
1919 ; X32-SSE-NEXT: movdqa %xmm0, %xmm2
1920 ; X32-SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm1[8],xmm2[9],xmm1[9],xmm2[10],xmm1[10],xmm2[11],xmm1[11],xmm2[12],xmm1[12],xmm2[13],xmm1[13],xmm2[14],xmm1[14],xmm2[15],xmm1[15]
1921 ; X32-SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
1922 ; X32-SSE-NEXT: pmullw {{\.LCPI.*}}, %xmm0
1923 ; X32-SSE-NEXT: psrlw $8, %xmm0
1924 ; X32-SSE-NEXT: packuswb %xmm2, %xmm0
1925 ; X32-SSE-NEXT: retl
1926 %shift = lshr <2 x i8> %a, <i8 2, i8 3>
1927 ret <2 x i8> %shift
1928 }
1930 ;
1931 ; Uniform Constant Shifts
1932 ;
1934 define <2 x i32> @splatconstant_shift_v2i32(<2 x i32> %a) nounwind {
1935 ; SSE-LABEL: splatconstant_shift_v2i32:
1937 ; SSE-NEXT: psrld $5, %xmm0
1940 ; AVX-LABEL: splatconstant_shift_v2i32:
1942 ; AVX-NEXT: vpsrld $5, %xmm0, %xmm0
1945 ; XOP-LABEL: splatconstant_shift_v2i32:
1947 ; XOP-NEXT: vpsrld $5, %xmm0, %xmm0
1950 ; AVX512-LABEL: splatconstant_shift_v2i32:
1952 ; AVX512-NEXT: vpsrld $5, %xmm0, %xmm0
1955 ; AVX512VL-LABEL: splatconstant_shift_v2i32:
1956 ; AVX512VL: # %bb.0:
1957 ; AVX512VL-NEXT: vpsrld $5, %xmm0, %xmm0
1958 ; AVX512VL-NEXT: retq
1960 ; X32-SSE-LABEL: splatconstant_shift_v2i32:
1962 ; X32-SSE-NEXT: psrld $5, %xmm0
1963 ; X32-SSE-NEXT: retl
1964 %shift = lshr <2 x i32> %a, <i32 5, i32 5>
1965 ret <2 x i32> %shift
1966 }
1968 define <4 x i16> @splatconstant_shift_v4i16(<4 x i16> %a) nounwind {
1969 ; SSE-LABEL: splatconstant_shift_v4i16:
1971 ; SSE-NEXT: psrlw $3, %xmm0
1974 ; AVX-LABEL: splatconstant_shift_v4i16:
1976 ; AVX-NEXT: vpsrlw $3, %xmm0, %xmm0
1979 ; XOP-LABEL: splatconstant_shift_v4i16:
1981 ; XOP-NEXT: vpsrlw $3, %xmm0, %xmm0
1984 ; AVX512-LABEL: splatconstant_shift_v4i16:
1986 ; AVX512-NEXT: vpsrlw $3, %xmm0, %xmm0
1989 ; AVX512VL-LABEL: splatconstant_shift_v4i16:
1990 ; AVX512VL: # %bb.0:
1991 ; AVX512VL-NEXT: vpsrlw $3, %xmm0, %xmm0
1992 ; AVX512VL-NEXT: retq
1994 ; X32-SSE-LABEL: splatconstant_shift_v4i16:
1996 ; X32-SSE-NEXT: psrlw $3, %xmm0
1997 ; X32-SSE-NEXT: retl
1998 %shift = lshr <4 x i16> %a, <i16 3, i16 3, i16 3, i16 3>
1999 ret <4 x i16> %shift
2000 }
2002 define <2 x i16> @splatconstant_shift_v2i16(<2 x i16> %a) nounwind {
2003 ; SSE-LABEL: splatconstant_shift_v2i16:
2005 ; SSE-NEXT: psrlw $3, %xmm0
2008 ; AVX-LABEL: splatconstant_shift_v2i16:
2010 ; AVX-NEXT: vpsrlw $3, %xmm0, %xmm0
2013 ; XOP-LABEL: splatconstant_shift_v2i16:
2015 ; XOP-NEXT: vpsrlw $3, %xmm0, %xmm0
2018 ; AVX512-LABEL: splatconstant_shift_v2i16:
2020 ; AVX512-NEXT: vpsrlw $3, %xmm0, %xmm0
2023 ; AVX512VL-LABEL: splatconstant_shift_v2i16:
2024 ; AVX512VL: # %bb.0:
2025 ; AVX512VL-NEXT: vpsrlw $3, %xmm0, %xmm0
2026 ; AVX512VL-NEXT: retq
2028 ; X32-SSE-LABEL: splatconstant_shift_v2i16:
2030 ; X32-SSE-NEXT: psrlw $3, %xmm0
2031 ; X32-SSE-NEXT: retl
2032 %shift = lshr <2 x i16> %a, <i16 3, i16 3>
2033 ret <2 x i16> %shift
2034 }
2036 define <8 x i8> @splatconstant_shift_v8i8(<8 x i8> %a) nounwind {
2037 ; SSE-LABEL: splatconstant_shift_v8i8:
2039 ; SSE-NEXT: psrlw $3, %xmm0
2040 ; SSE-NEXT: pand {{.*}}(%rip), %xmm0
2043 ; AVX-LABEL: splatconstant_shift_v8i8:
2045 ; AVX-NEXT: vpsrlw $3, %xmm0, %xmm0
2046 ; AVX-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
2049 ; XOP-LABEL: splatconstant_shift_v8i8:
2051 ; XOP-NEXT: vpshlb {{.*}}(%rip), %xmm0, %xmm0
2054 ; AVX512-LABEL: splatconstant_shift_v8i8:
2056 ; AVX512-NEXT: vpsrlw $3, %xmm0, %xmm0
2057 ; AVX512-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
2060 ; AVX512VL-LABEL: splatconstant_shift_v8i8:
2061 ; AVX512VL: # %bb.0:
2062 ; AVX512VL-NEXT: vpsrlw $3, %xmm0, %xmm0
2063 ; AVX512VL-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
2064 ; AVX512VL-NEXT: retq
2066 ; X32-SSE-LABEL: splatconstant_shift_v8i8:
2068 ; X32-SSE-NEXT: psrlw $3, %xmm0
2069 ; X32-SSE-NEXT: pand {{\.LCPI.*}}, %xmm0
2070 ; X32-SSE-NEXT: retl
2071 %shift = lshr <8 x i8> %a, <i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3, i8 3>
2072 ret <8 x i8> %shift
2073 }
2075 define <4 x i8> @splatconstant_shift_v4i8(<4 x i8> %a) nounwind {
2076 ; SSE-LABEL: splatconstant_shift_v4i8:
2078 ; SSE-NEXT: psrlw $3, %xmm0
2079 ; SSE-NEXT: pand {{.*}}(%rip), %xmm0
2082 ; AVX-LABEL: splatconstant_shift_v4i8:
2084 ; AVX-NEXT: vpsrlw $3, %xmm0, %xmm0
2085 ; AVX-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
2088 ; XOP-LABEL: splatconstant_shift_v4i8:
2090 ; XOP-NEXT: vpshlb {{.*}}(%rip), %xmm0, %xmm0
2093 ; AVX512-LABEL: splatconstant_shift_v4i8:
2095 ; AVX512-NEXT: vpsrlw $3, %xmm0, %xmm0
2096 ; AVX512-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
2099 ; AVX512VL-LABEL: splatconstant_shift_v4i8:
2100 ; AVX512VL: # %bb.0:
2101 ; AVX512VL-NEXT: vpsrlw $3, %xmm0, %xmm0
2102 ; AVX512VL-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
2103 ; AVX512VL-NEXT: retq
2105 ; X32-SSE-LABEL: splatconstant_shift_v4i8:
2107 ; X32-SSE-NEXT: psrlw $3, %xmm0
2108 ; X32-SSE-NEXT: pand {{\.LCPI.*}}, %xmm0
2109 ; X32-SSE-NEXT: retl
2110 %shift = lshr <4 x i8> %a, <i8 3, i8 3, i8 3, i8 3>
2111 ret <4 x i8> %shift
2112 }
2114 define <2 x i8> @splatconstant_shift_v2i8(<2 x i8> %a) nounwind {
2115 ; SSE-LABEL: splatconstant_shift_v2i8:
2117 ; SSE-NEXT: psrlw $3, %xmm0
2118 ; SSE-NEXT: pand {{.*}}(%rip), %xmm0
2121 ; AVX-LABEL: splatconstant_shift_v2i8:
2123 ; AVX-NEXT: vpsrlw $3, %xmm0, %xmm0
2124 ; AVX-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
2127 ; XOP-LABEL: splatconstant_shift_v2i8:
2129 ; XOP-NEXT: vpshlb {{.*}}(%rip), %xmm0, %xmm0
2132 ; AVX512-LABEL: splatconstant_shift_v2i8:
2134 ; AVX512-NEXT: vpsrlw $3, %xmm0, %xmm0
2135 ; AVX512-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
2138 ; AVX512VL-LABEL: splatconstant_shift_v2i8:
2139 ; AVX512VL: # %bb.0:
2140 ; AVX512VL-NEXT: vpsrlw $3, %xmm0, %xmm0
2141 ; AVX512VL-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
2142 ; AVX512VL-NEXT: retq
2144 ; X32-SSE-LABEL: splatconstant_shift_v2i8:
2146 ; X32-SSE-NEXT: psrlw $3, %xmm0
2147 ; X32-SSE-NEXT: pand {{\.LCPI.*}}, %xmm0
2148 ; X32-SSE-NEXT: retl
2149 %shift = lshr <2 x i8> %a, <i8 3, i8 3>
2150 ret <2 x i8> %shift
2151 }