1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown | FileCheck %s --check-prefixes=SSE,SSE2
3 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+sse4.1 | FileCheck %s --check-prefixes=SSE,SSE41
4 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx2 | FileCheck %s --check-prefixes=AVX,AVX2
5 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512f | FileCheck %s --check-prefixes=AVX,AVX512,AVX512F
6 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512bw | FileCheck %s --check-prefixes=AVX,AVX512,AVX512BW
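; These tests build the multiply-high idiom in IR: widen both i16 operands,
; multiply, shift the product right by 16 and truncate back to i16. The
; zero-extended forms are expected to select PMULHUW (unsigned multiply high)
; and the sign-extended forms PMULHW (signed multiply high), as the
; autogenerated checks below show.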
8 define <4 x i16> @zext_mulhuw_v4i16(<4 x i16> %a, <4 x i16> %b) {
9 ; SSE-LABEL: zext_mulhuw_v4i16:
11 ; SSE-NEXT: pmulhuw %xmm1, %xmm0
14 ; AVX-LABEL: zext_mulhuw_v4i16:
16 ; AVX-NEXT: vpmulhuw %xmm1, %xmm0, %xmm0
18 %a1 = zext <4 x i16> %a to <4 x i32>
19 %b1 = zext <4 x i16> %b to <4 x i32>
20 %c = mul <4 x i32> %a1, %b1
21 %d = lshr <4 x i32> %c, <i32 16, i32 16, i32 16, i32 16>
22 %e = trunc <4 x i32> %d to <4 x i16>
23 ret <4 x i16> %e
24 }
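; Masking <4 x i64> operands with 65535 keeps only the low 16 bits, so once
; the lanes are narrowed the high half of the product should still come from
; PMULHUW.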
26 define <4 x i16> @and_mulhuw_v4i16(<4 x i64> %a, <4 x i64> %b) {
27 ; SSE2-LABEL: and_mulhuw_v4i16:
29 ; SSE2-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,2],xmm3[0,2]
30 ; SSE2-NEXT: pslld $16, %xmm2
31 ; SSE2-NEXT: psrad $16, %xmm2
32 ; SSE2-NEXT: xorps %xmm3, %xmm3
33 ; SSE2-NEXT: packssdw %xmm3, %xmm2
34 ; SSE2-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,2],xmm1[0,2]
35 ; SSE2-NEXT: pslld $16, %xmm0
36 ; SSE2-NEXT: psrad $16, %xmm0
37 ; SSE2-NEXT: packssdw %xmm3, %xmm0
38 ; SSE2-NEXT: pmulhuw %xmm2, %xmm0
41 ; SSE41-LABEL: and_mulhuw_v4i16:
43 ; SSE41-NEXT: pxor %xmm4, %xmm4
44 ; SSE41-NEXT: pblendw {{.*#+}} xmm3 = xmm3[0],xmm4[1,2,3],xmm3[4],xmm4[5,6,7]
45 ; SSE41-NEXT: pblendw {{.*#+}} xmm2 = xmm2[0],xmm4[1,2,3],xmm2[4],xmm4[5,6,7]
46 ; SSE41-NEXT: packusdw %xmm3, %xmm2
47 ; SSE41-NEXT: packusdw %xmm4, %xmm2
48 ; SSE41-NEXT: pblendw {{.*#+}} xmm1 = xmm1[0],xmm4[1,2,3],xmm1[4],xmm4[5,6,7]
49 ; SSE41-NEXT: pblendw {{.*#+}} xmm0 = xmm0[0],xmm4[1,2,3],xmm0[4],xmm4[5,6,7]
50 ; SSE41-NEXT: packusdw %xmm1, %xmm0
51 ; SSE41-NEXT: packusdw %xmm4, %xmm0
52 ; SSE41-NEXT: pmulhuw %xmm2, %xmm0
55 ; AVX2-LABEL: and_mulhuw_v4i16:
57 ; AVX2-NEXT: vpmulhuw %ymm1, %ymm0, %ymm0
58 ; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
59 ; AVX2-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0],ymm1[1,2,3],ymm0[4],ymm1[5,6,7],ymm0[8],ymm1[9,10,11],ymm0[12],ymm1[13,14,15]
60 ; AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
61 ; AVX2-NEXT: vpackusdw %xmm1, %xmm0, %xmm0
62 ; AVX2-NEXT: vpackusdw %xmm0, %xmm0, %xmm0
63 ; AVX2-NEXT: vzeroupper
66 ; AVX512-LABEL: and_mulhuw_v4i16:
68 ; AVX512-NEXT: vpmulhuw %ymm1, %ymm0, %ymm0
69 ; AVX512-NEXT: vpmovqw %zmm0, %xmm0
70 ; AVX512-NEXT: vzeroupper
72 %a1 = and <4 x i64> %a, <i64 65535, i64 65535, i64 65535, i64 65535>
73 %b1 = and <4 x i64> %b, <i64 65535, i64 65535, i64 65535, i64 65535>
74 %c = mul <4 x i64> %a1, %b1
75 %d = lshr <4 x i64> %c, <i64 16, i64 16, i64 16, i64 16>
76 %e = trunc <4 x i64> %d to <4 x i16>
77 ret <4 x i16> %e
78 }
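; Signed version of the pattern above: sign-extended operands are expected to
; select PMULHW.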
80 define <4 x i16> @sext_mulhw_v4i16(<4 x i16> %a, <4 x i16> %b) {
81 ; SSE-LABEL: sext_mulhw_v4i16:
83 ; SSE-NEXT: pmulhw %xmm1, %xmm0
86 ; AVX-LABEL: sext_mulhw_v4i16:
88 ; AVX-NEXT: vpmulhw %xmm1, %xmm0, %xmm0
90 %a1 = sext <4 x i16> %a to <4 x i32>
91 %b1 = sext <4 x i16> %b to <4 x i32>
92 %c = mul <4 x i32> %a1, %b1
93 %d = lshr <4 x i32> %c, <i32 16, i32 16, i32 16, i32 16>
94 %e = trunc <4 x i32> %d to <4 x i16>
95 ret <4 x i16> %e
96 }
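; An arithmetic shift right by 16 leaves sign-extended 16-bit values in each
; i32 lane, so PMULHW should apply once the vectors are packed down to i16.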
98 define <4 x i16> @ashr_mulhw_v4i16(<4 x i32> %a, <4 x i32> %b) {
99 ; SSE2-LABEL: ashr_mulhw_v4i16:
101 ; SSE2-NEXT: psrad $16, %xmm1
102 ; SSE2-NEXT: packssdw %xmm1, %xmm1
103 ; SSE2-NEXT: psrad $16, %xmm0
104 ; SSE2-NEXT: packssdw %xmm0, %xmm0
105 ; SSE2-NEXT: pmulhw %xmm1, %xmm0
108 ; SSE41-LABEL: ashr_mulhw_v4i16:
110 ; SSE41-NEXT: psrld $16, %xmm0
111 ; SSE41-NEXT: psrld $16, %xmm1
112 ; SSE41-NEXT: packusdw %xmm1, %xmm1
113 ; SSE41-NEXT: packusdw %xmm0, %xmm0
114 ; SSE41-NEXT: pmulhw %xmm1, %xmm0
117 ; AVX-LABEL: ashr_mulhw_v4i16:
119 ; AVX-NEXT: vpsrld $16, %xmm0, %xmm0
120 ; AVX-NEXT: vpsrld $16, %xmm1, %xmm1
121 ; AVX-NEXT: vpackusdw %xmm1, %xmm1, %xmm1
122 ; AVX-NEXT: vpackusdw %xmm0, %xmm0, %xmm0
123 ; AVX-NEXT: vpmulhw %xmm1, %xmm0, %xmm0
125 %a1 = ashr <4 x i32> %a, <i32 16, i32 16, i32 16, i32 16>
126 %b1 = ashr <4 x i32> %b, <i32 16, i32 16, i32 16, i32 16>
127 %c = mul <4 x i32> %a1, %b1
128 %d = lshr <4 x i32> %c, <i32 16, i32 16, i32 16, i32 16>
129 %e = trunc <4 x i32> %d to <4 x i16>
130 ret <4 x i16> %e
131 }
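; <8 x i16> is the native PMULHUW width, so the whole pattern should fold to a
; single instruction.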
133 define <8 x i16> @zext_mulhuw_v8i16(<8 x i16> %a, <8 x i16> %b) {
134 ; SSE-LABEL: zext_mulhuw_v8i16:
136 ; SSE-NEXT: pmulhuw %xmm1, %xmm0
139 ; AVX-LABEL: zext_mulhuw_v8i16:
141 ; AVX-NEXT: vpmulhuw %xmm1, %xmm0, %xmm0
143 %a1 = zext <8 x i16> %a to <8 x i32>
144 %b1 = zext <8 x i16> %b to <8 x i32>
145 %c = mul <8 x i32> %a1, %b1
146 %d = lshr <8 x i32> %c, <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
147 %e = trunc <8 x i32> %d to <8 x i16>
148 ret <8 x i16> %e
149 }
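; A logical shift right by 16 leaves zero-extended 16-bit values in each i32
; lane, so PMULHUW should apply after the vectors are packed down to i16.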
151 define <8 x i16> @lshr_mulhuw_v8i16(<8 x i32> %a, <8 x i32> %b) {
152 ; SSE2-LABEL: lshr_mulhuw_v8i16:
154 ; SSE2-NEXT: psrad $16, %xmm3
155 ; SSE2-NEXT: psrad $16, %xmm2
156 ; SSE2-NEXT: packssdw %xmm3, %xmm2
157 ; SSE2-NEXT: psrad $16, %xmm1
158 ; SSE2-NEXT: psrad $16, %xmm0
159 ; SSE2-NEXT: packssdw %xmm1, %xmm0
160 ; SSE2-NEXT: pmulhuw %xmm2, %xmm0
163 ; SSE41-LABEL: lshr_mulhuw_v8i16:
165 ; SSE41-NEXT: psrld $16, %xmm1
166 ; SSE41-NEXT: psrld $16, %xmm0
167 ; SSE41-NEXT: packusdw %xmm1, %xmm0
168 ; SSE41-NEXT: psrld $16, %xmm3
169 ; SSE41-NEXT: psrld $16, %xmm2
170 ; SSE41-NEXT: packusdw %xmm3, %xmm2
171 ; SSE41-NEXT: pmulhuw %xmm2, %xmm0
174 ; AVX2-LABEL: lshr_mulhuw_v8i16:
176 ; AVX2-NEXT: vpsrld $16, %ymm0, %ymm0
177 ; AVX2-NEXT: vpsrld $16, %ymm1, %ymm1
178 ; AVX2-NEXT: vpmulhuw %ymm1, %ymm0, %ymm0
179 ; AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
180 ; AVX2-NEXT: vpackusdw %xmm1, %xmm0, %xmm0
181 ; AVX2-NEXT: vzeroupper
184 ; AVX512-LABEL: lshr_mulhuw_v8i16:
186 ; AVX512-NEXT: vpsrld $16, %ymm0, %ymm0
187 ; AVX512-NEXT: vpsrld $16, %ymm1, %ymm1
188 ; AVX512-NEXT: vpmulhuw %ymm1, %ymm0, %ymm0
189 ; AVX512-NEXT: vpmovdw %zmm0, %ymm0
190 ; AVX512-NEXT: # kill: def $xmm0 killed $xmm0 killed $ymm0
191 ; AVX512-NEXT: vzeroupper
193 %a1 = lshr <8 x i32> %a, <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
194 %b1 = lshr <8 x i32> %b, <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
195 %c = mul <8 x i32> %a1, %b1
196 %d = lshr <8 x i32> %c, <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
197 %e = trunc <8 x i32> %d to <8 x i16>
198 ret <8 x i16> %e
199 }
201 define <8 x i16> @sext_mulhw_v8i16(<8 x i16> %a, <8 x i16> %b) {
202 ; SSE-LABEL: sext_mulhw_v8i16:
204 ; SSE-NEXT: pmulhw %xmm1, %xmm0
207 ; AVX-LABEL: sext_mulhw_v8i16:
209 ; AVX-NEXT: vpmulhw %xmm1, %xmm0, %xmm0
211 %a1 = sext <8 x i16> %a to <8 x i32>
212 %b1 = sext <8 x i16> %b to <8 x i32>
213 %c = mul <8 x i32> %a1, %b1
214 %d = lshr <8 x i32> %c, <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
215 %e = trunc <8 x i32> %d to <8 x i16>
216 ret <8 x i16> %e
217 }
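; Sign-extend-in-register via shl+ashr: the operands are effectively 8-bit and
; 7-bit signed values, well within i16, so PMULHW is still expected after
; narrowing.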
219 define <8 x i16> @sextinreg_mulhw_v8i16(<8 x i32> %a, <8 x i32> %b) {
220 ; SSE-LABEL: sextinreg_mulhw_v8i16:
222 ; SSE-NEXT: pslld $24, %xmm1
223 ; SSE-NEXT: psrad $24, %xmm1
224 ; SSE-NEXT: pslld $24, %xmm0
225 ; SSE-NEXT: psrad $24, %xmm0
226 ; SSE-NEXT: packssdw %xmm1, %xmm0
227 ; SSE-NEXT: pslld $25, %xmm3
228 ; SSE-NEXT: psrad $25, %xmm3
229 ; SSE-NEXT: pslld $25, %xmm2
230 ; SSE-NEXT: psrad $25, %xmm2
231 ; SSE-NEXT: packssdw %xmm3, %xmm2
232 ; SSE-NEXT: pmulhw %xmm2, %xmm0
235 ; AVX2-LABEL: sextinreg_mulhw_v8i16:
237 ; AVX2-NEXT: vpslld $24, %ymm0, %ymm0
238 ; AVX2-NEXT: vpsrad $24, %ymm0, %ymm0
239 ; AVX2-NEXT: vpslld $25, %ymm1, %ymm1
240 ; AVX2-NEXT: vpsrad $25, %ymm1, %ymm1
241 ; AVX2-NEXT: vextracti128 $1, %ymm1, %xmm2
242 ; AVX2-NEXT: vpackssdw %xmm2, %xmm1, %xmm1
243 ; AVX2-NEXT: vextracti128 $1, %ymm0, %xmm2
244 ; AVX2-NEXT: vpackssdw %xmm2, %xmm0, %xmm0
245 ; AVX2-NEXT: vpmulhw %xmm1, %xmm0, %xmm0
246 ; AVX2-NEXT: vzeroupper
249 ; AVX512-LABEL: sextinreg_mulhw_v8i16:
251 ; AVX512-NEXT: # kill: def $ymm1 killed $ymm1 def $zmm1
252 ; AVX512-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
253 ; AVX512-NEXT: vpmovdw %zmm1, %ymm1
254 ; AVX512-NEXT: vpsllw $9, %xmm1, %xmm1
255 ; AVX512-NEXT: vpsraw $9, %xmm1, %xmm1
256 ; AVX512-NEXT: vpmovdw %zmm0, %ymm0
257 ; AVX512-NEXT: vpsllw $8, %xmm0, %xmm0
258 ; AVX512-NEXT: vpsraw $8, %xmm0, %xmm0
259 ; AVX512-NEXT: vpmulhw %xmm1, %xmm0, %xmm0
260 ; AVX512-NEXT: vzeroupper
262 %a1 = shl <8 x i32> %a, <i32 24, i32 24, i32 24, i32 24, i32 24, i32 24, i32 24, i32 24>
263 %b1 = shl <8 x i32> %b, <i32 25, i32 25, i32 25, i32 25, i32 25, i32 25, i32 25, i32 25>
264 %a2 = ashr <8 x i32> %a1, <i32 24, i32 24, i32 24, i32 24, i32 24, i32 24, i32 24, i32 24>
265 %b2 = ashr <8 x i32> %b1, <i32 25, i32 25, i32 25, i32 25, i32 25, i32 25, i32 25, i32 25>
266 %c = mul <8 x i32> %a2, %b2
267 %d = lshr <8 x i32> %c, <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
268 %e = trunc <8 x i32> %d to <8 x i16>
269 ret <8 x i16> %e
270 }
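; Widening through the illegal type i33 still leaves room for the full 32-bit
; product, so the PMULHUW combine is expected to apply here too.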
272 define <8 x i16> @zext_mulhuw_v8i16_v8i33(<8 x i16> %a, <8 x i16> %b) {
273 ; SSE-LABEL: zext_mulhuw_v8i16_v8i33:
275 ; SSE-NEXT: pmulhuw %xmm1, %xmm0
278 ; AVX-LABEL: zext_mulhuw_v8i16_v8i33:
280 ; AVX-NEXT: vpmulhuw %xmm1, %xmm0, %xmm0
282 %a1 = zext <8 x i16> %a to <8 x i33>
283 %b1 = zext <8 x i16> %b to <8 x i33>
284 %c = mul <8 x i33> %a1, %b1
285 %d = lshr <8 x i33> %c, <i33 16, i33 16, i33 16, i33 16, i33 16, i33 16, i33 16, i33 16>
286 %e = trunc <8 x i33> %d to <8 x i16>
287 ret <8 x i16> %e
288 }
290 define <16 x i16> @zext_mulhuw_v16i16(<16 x i16> %a, <16 x i16> %b) {
291 ; SSE-LABEL: zext_mulhuw_v16i16:
293 ; SSE-NEXT: pmulhuw %xmm2, %xmm0
294 ; SSE-NEXT: pmulhuw %xmm3, %xmm1
297 ; AVX-LABEL: zext_mulhuw_v16i16:
299 ; AVX-NEXT: vpmulhuw %ymm1, %ymm0, %ymm0
301 %a1 = zext <16 x i16> %a to <16 x i32>
302 %b1 = zext <16 x i16> %b to <16 x i32>
303 %c = mul <16 x i32> %a1, %b1
304 %d = lshr <16 x i32> %c, <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
305 %e = trunc <16 x i32> %d to <16 x i16>
306 ret <16 x i16> %e
307 }
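; Masking with 32767 keeps only 15 bits per lane, so the signed and unsigned
; multiply-high results agree and subtargets are free to pick either PMULHW or
; PMULHUW.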
309 define <16 x i16> @and_mulhuw_v16i16(<16 x i32> %a, <16 x i32> %b) {
310 ; SSE2-LABEL: and_mulhuw_v16i16:
312 ; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [32767,32767,32767,32767]
313 ; SSE2-NEXT: pand %xmm8, %xmm3
314 ; SSE2-NEXT: pand %xmm8, %xmm2
315 ; SSE2-NEXT: packssdw %xmm3, %xmm2
316 ; SSE2-NEXT: pand %xmm8, %xmm1
317 ; SSE2-NEXT: pand %xmm8, %xmm0
318 ; SSE2-NEXT: packssdw %xmm1, %xmm0
319 ; SSE2-NEXT: pand %xmm8, %xmm7
320 ; SSE2-NEXT: pand %xmm8, %xmm6
321 ; SSE2-NEXT: packssdw %xmm7, %xmm6
322 ; SSE2-NEXT: pmulhw %xmm2, %xmm6
323 ; SSE2-NEXT: pand %xmm8, %xmm5
324 ; SSE2-NEXT: pand %xmm4, %xmm8
325 ; SSE2-NEXT: packssdw %xmm5, %xmm8
326 ; SSE2-NEXT: pmulhw %xmm8, %xmm0
327 ; SSE2-NEXT: movdqa %xmm6, %xmm1
330 ; SSE41-LABEL: and_mulhuw_v16i16:
332 ; SSE41-NEXT: pmovsxwd {{.*#+}} xmm8 = [32767,32767,32767,32767]
333 ; SSE41-NEXT: pand %xmm8, %xmm3
334 ; SSE41-NEXT: pand %xmm8, %xmm2
335 ; SSE41-NEXT: packusdw %xmm3, %xmm2
336 ; SSE41-NEXT: pand %xmm8, %xmm1
337 ; SSE41-NEXT: pand %xmm8, %xmm0
338 ; SSE41-NEXT: packusdw %xmm1, %xmm0
339 ; SSE41-NEXT: pand %xmm8, %xmm7
340 ; SSE41-NEXT: pand %xmm8, %xmm6
341 ; SSE41-NEXT: packusdw %xmm7, %xmm6
342 ; SSE41-NEXT: pmulhw %xmm2, %xmm6
343 ; SSE41-NEXT: pand %xmm8, %xmm5
344 ; SSE41-NEXT: pand %xmm4, %xmm8
345 ; SSE41-NEXT: packusdw %xmm5, %xmm8
346 ; SSE41-NEXT: pmulhw %xmm8, %xmm0
347 ; SSE41-NEXT: movdqa %xmm6, %xmm1
350 ; AVX2-LABEL: and_mulhuw_v16i16:
352 ; AVX2-NEXT: vpbroadcastd {{.*#+}} ymm4 = [32767,32767,32767,32767,32767,32767,32767,32767]
353 ; AVX2-NEXT: vpand %ymm4, %ymm0, %ymm0
354 ; AVX2-NEXT: vpand %ymm4, %ymm1, %ymm1
355 ; AVX2-NEXT: vpand %ymm4, %ymm2, %ymm2
356 ; AVX2-NEXT: vpmulhuw %ymm2, %ymm0, %ymm0
357 ; AVX2-NEXT: vpand %ymm4, %ymm3, %ymm2
358 ; AVX2-NEXT: vpmulhuw %ymm2, %ymm1, %ymm1
359 ; AVX2-NEXT: vpackusdw %ymm1, %ymm0, %ymm0
360 ; AVX2-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,1,3]
363 ; AVX512F-LABEL: and_mulhuw_v16i16:
365 ; AVX512F-NEXT: vpmovdw %zmm1, %ymm1
366 ; AVX512F-NEXT: vpbroadcastw {{.*#+}} ymm2 = [32767,32767,32767,32767,32767,32767,32767,32767,32767,32767,32767,32767,32767,32767,32767,32767]
367 ; AVX512F-NEXT: vpand %ymm2, %ymm1, %ymm1
368 ; AVX512F-NEXT: vpmovdw %zmm0, %ymm0
369 ; AVX512F-NEXT: vpand %ymm2, %ymm0, %ymm0
370 ; AVX512F-NEXT: vpmulhw %ymm1, %ymm0, %ymm0
373 ; AVX512BW-LABEL: and_mulhuw_v16i16:
375 ; AVX512BW-NEXT: vpbroadcastd {{.*#+}} zmm2 = [32767,32767,32767,32767,32767,32767,32767,32767,32767,32767,32767,32767,32767,32767,32767,32767]
376 ; AVX512BW-NEXT: vpandd %zmm2, %zmm0, %zmm0
377 ; AVX512BW-NEXT: vpandd %zmm2, %zmm1, %zmm1
378 ; AVX512BW-NEXT: vpmulhuw %zmm1, %zmm0, %zmm0
379 ; AVX512BW-NEXT: vpmovdw %zmm0, %ymm0
380 ; AVX512BW-NEXT: retq
381 %a1 = and <16 x i32> %a, <i32 32767, i32 32767, i32 32767, i32 32767, i32 32767, i32 32767, i32 32767, i32 32767, i32 32767, i32 32767, i32 32767, i32 32767, i32 32767, i32 32767, i32 32767, i32 32767>
382 %b1 = and <16 x i32> %b, <i32 32767, i32 32767, i32 32767, i32 32767, i32 32767, i32 32767, i32 32767, i32 32767, i32 32767, i32 32767, i32 32767, i32 32767, i32 32767, i32 32767, i32 32767, i32 32767>
383 %c = mul <16 x i32> %a1, %b1
384 %d = lshr <16 x i32> %c, <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
385 %e = trunc <16 x i32> %d to <16 x i16>
386 ret <16 x i16> %e
387 }
389 define <16 x i16> @sext_mulhuw_v16i16(<16 x i16> %a, <16 x i16> %b) {
390 ; SSE-LABEL: sext_mulhuw_v16i16:
392 ; SSE-NEXT: pmulhw %xmm2, %xmm0
393 ; SSE-NEXT: pmulhw %xmm3, %xmm1
396 ; AVX-LABEL: sext_mulhuw_v16i16:
398 ; AVX-NEXT: vpmulhw %ymm1, %ymm0, %ymm0
400 %a1 = sext <16 x i16> %a to <16 x i32>
401 %b1 = sext <16 x i16> %b to <16 x i32>
402 %c = mul <16 x i32> %a1, %b1
403 %d = lshr <16 x i32> %c, <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
404 %e = trunc <16 x i32> %d to <16 x i16>
405 ret <16 x i16> %e
406 }
408 define <16 x i16> @ashr_mulhuw_v16i16(<16 x i32> %a, <16 x i32> %b) {
409 ; SSE2-LABEL: ashr_mulhuw_v16i16:
411 ; SSE2-NEXT: psrad $16, %xmm5
412 ; SSE2-NEXT: psrad $16, %xmm4
413 ; SSE2-NEXT: packssdw %xmm5, %xmm4
414 ; SSE2-NEXT: psrad $16, %xmm1
415 ; SSE2-NEXT: psrad $16, %xmm0
416 ; SSE2-NEXT: packssdw %xmm1, %xmm0
417 ; SSE2-NEXT: pmulhw %xmm4, %xmm0
418 ; SSE2-NEXT: psrad $16, %xmm7
419 ; SSE2-NEXT: psrad $16, %xmm6
420 ; SSE2-NEXT: packssdw %xmm7, %xmm6
421 ; SSE2-NEXT: psrad $16, %xmm3
422 ; SSE2-NEXT: psrad $16, %xmm2
423 ; SSE2-NEXT: packssdw %xmm3, %xmm2
424 ; SSE2-NEXT: pmulhw %xmm6, %xmm2
425 ; SSE2-NEXT: movdqa %xmm2, %xmm1
428 ; SSE41-LABEL: ashr_mulhuw_v16i16:
430 ; SSE41-NEXT: psrld $16, %xmm3
431 ; SSE41-NEXT: psrld $16, %xmm2
432 ; SSE41-NEXT: packusdw %xmm3, %xmm2
433 ; SSE41-NEXT: psrld $16, %xmm1
434 ; SSE41-NEXT: psrld $16, %xmm0
435 ; SSE41-NEXT: packusdw %xmm1, %xmm0
436 ; SSE41-NEXT: psrld $16, %xmm7
437 ; SSE41-NEXT: psrld $16, %xmm6
438 ; SSE41-NEXT: packusdw %xmm7, %xmm6
439 ; SSE41-NEXT: pmulhw %xmm2, %xmm6
440 ; SSE41-NEXT: psrld $16, %xmm5
441 ; SSE41-NEXT: psrld $16, %xmm4
442 ; SSE41-NEXT: packusdw %xmm5, %xmm4
443 ; SSE41-NEXT: pmulhw %xmm4, %xmm0
444 ; SSE41-NEXT: movdqa %xmm6, %xmm1
447 ; AVX2-LABEL: ashr_mulhuw_v16i16:
449 ; AVX2-NEXT: vpsrld $16, %ymm1, %ymm1
450 ; AVX2-NEXT: vpsrld $16, %ymm0, %ymm0
451 ; AVX2-NEXT: vpackusdw %ymm1, %ymm0, %ymm0
452 ; AVX2-NEXT: vpsrld $16, %ymm3, %ymm1
453 ; AVX2-NEXT: vpsrld $16, %ymm2, %ymm2
454 ; AVX2-NEXT: vpackusdw %ymm1, %ymm2, %ymm1
455 ; AVX2-NEXT: vpmulhw %ymm1, %ymm0, %ymm0
456 ; AVX2-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,1,3]
459 ; AVX512-LABEL: ashr_mulhuw_v16i16:
461 ; AVX512-NEXT: vpsrld $16, %zmm0, %zmm0
462 ; AVX512-NEXT: vpsrld $16, %zmm1, %zmm1
463 ; AVX512-NEXT: vpmovdw %zmm1, %ymm1
464 ; AVX512-NEXT: vpmovdw %zmm0, %ymm0
465 ; AVX512-NEXT: vpmulhw %ymm1, %ymm0, %ymm0
467 %a1 = ashr <16 x i32> %a, <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
468 %b1 = ashr <16 x i32> %b, <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
469 %c = mul <16 x i32> %a1, %b1
470 %d = lshr <16 x i32> %c, <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
471 %e = trunc <16 x i32> %d to <16 x i16>
472 ret <16 x i16> %e
473 }
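; Same pattern through the illegal intermediate type i48, which is still wide
; enough for the 32-bit product, so PMULHUW is expected.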
475 define <16 x i16> @zext_mulhuw_v16i16_v16i48(<16 x i16> %a, <16 x i16> %b) {
476 ; SSE-LABEL: zext_mulhuw_v16i16_v16i48:
478 ; SSE-NEXT: pmulhuw %xmm2, %xmm0
479 ; SSE-NEXT: pmulhuw %xmm3, %xmm1
482 ; AVX-LABEL: zext_mulhuw_v16i16_v16i48:
484 ; AVX-NEXT: vpmulhuw %ymm1, %ymm0, %ymm0
486 %a1 = zext <16 x i16> %a to <16 x i48>
487 %b1 = zext <16 x i16> %b to <16 x i48>
488 %c = mul <16 x i48> %a1, %b1
489 %d = lshr <16 x i48> %c, <i48 16, i48 16, i48 16, i48 16, i48 16, i48 16, i48 16, i48 16, i48 16, i48 16, i48 16, i48 16, i48 16, i48 16, i48 16, i48 16>
490 %e = trunc <16 x i48> %d to <16 x i16>
491 ret <16 x i16> %e
492 }
494 define <32 x i16> @zext_mulhuw_v32i16(<32 x i16> %a, <32 x i16> %b) {
495 ; SSE-LABEL: zext_mulhuw_v32i16:
497 ; SSE-NEXT: pmulhuw %xmm4, %xmm0
498 ; SSE-NEXT: pmulhuw %xmm5, %xmm1
499 ; SSE-NEXT: pmulhuw %xmm6, %xmm2
500 ; SSE-NEXT: pmulhuw %xmm7, %xmm3
503 ; AVX2-LABEL: zext_mulhuw_v32i16:
505 ; AVX2-NEXT: vpmulhuw %ymm2, %ymm0, %ymm0
506 ; AVX2-NEXT: vpmulhuw %ymm3, %ymm1, %ymm1
509 ; AVX512F-LABEL: zext_mulhuw_v32i16:
511 ; AVX512F-NEXT: vextracti64x4 $1, %zmm1, %ymm2
512 ; AVX512F-NEXT: vextracti64x4 $1, %zmm0, %ymm3
513 ; AVX512F-NEXT: vpmulhuw %ymm2, %ymm3, %ymm2
514 ; AVX512F-NEXT: vpmulhuw %ymm1, %ymm0, %ymm0
515 ; AVX512F-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
518 ; AVX512BW-LABEL: zext_mulhuw_v32i16:
520 ; AVX512BW-NEXT: vpmulhuw %zmm1, %zmm0, %zmm0
521 ; AVX512BW-NEXT: retq
522 %a1 = zext <32 x i16> %a to <32 x i32>
523 %b1 = zext <32 x i16> %b to <32 x i32>
524 %c = mul <32 x i32> %a1, %b1
525 %d = lshr <32 x i32> %c, <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
526 %e = trunc <32 x i32> %d to <32 x i16>
527 ret <32 x i16> %e
528 }
530 define <32 x i16> @sext_mulhuw_v32i16(<32 x i16> %a, <32 x i16> %b) {
531 ; SSE-LABEL: sext_mulhuw_v32i16:
533 ; SSE-NEXT: pmulhw %xmm4, %xmm0
534 ; SSE-NEXT: pmulhw %xmm5, %xmm1
535 ; SSE-NEXT: pmulhw %xmm6, %xmm2
536 ; SSE-NEXT: pmulhw %xmm7, %xmm3
539 ; AVX2-LABEL: sext_mulhuw_v32i16:
541 ; AVX2-NEXT: vpmulhw %ymm2, %ymm0, %ymm0
542 ; AVX2-NEXT: vpmulhw %ymm3, %ymm1, %ymm1
545 ; AVX512F-LABEL: sext_mulhuw_v32i16:
547 ; AVX512F-NEXT: vextracti64x4 $1, %zmm1, %ymm2
548 ; AVX512F-NEXT: vextracti64x4 $1, %zmm0, %ymm3
549 ; AVX512F-NEXT: vpmulhw %ymm2, %ymm3, %ymm2
550 ; AVX512F-NEXT: vpmulhw %ymm1, %ymm0, %ymm0
551 ; AVX512F-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
554 ; AVX512BW-LABEL: sext_mulhuw_v32i16:
556 ; AVX512BW-NEXT: vpmulhw %zmm1, %zmm0, %zmm0
557 ; AVX512BW-NEXT: retq
558 %a1 = sext <32 x i16> %a to <32 x i32>
559 %b1 = sext <32 x i16> %b to <32 x i32>
560 %c = mul <32 x i32> %a1, %b1
561 %d = lshr <32 x i32> %c, <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
562 %e = trunc <32 x i32> %d to <32 x i16>
563 ret <32 x i16> %e
564 }
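; <64 x i16> is wider than any legal vector type, so the PMULHUW pattern has to
; survive splitting: SSE works on eight xmm halves returned through memory,
; AVX512BW on two zmm registers.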
566 define <64 x i16> @zext_mulhuw_v64i16(<64 x i16> %a, <64 x i16> %b) {
567 ; SSE-LABEL: zext_mulhuw_v64i16:
569 ; SSE-NEXT: movq %rdi, %rax
570 ; SSE-NEXT: pmulhuw {{[0-9]+}}(%rsp), %xmm0
571 ; SSE-NEXT: pmulhuw {{[0-9]+}}(%rsp), %xmm1
572 ; SSE-NEXT: pmulhuw {{[0-9]+}}(%rsp), %xmm2
573 ; SSE-NEXT: pmulhuw {{[0-9]+}}(%rsp), %xmm3
574 ; SSE-NEXT: pmulhuw {{[0-9]+}}(%rsp), %xmm4
575 ; SSE-NEXT: pmulhuw {{[0-9]+}}(%rsp), %xmm5
576 ; SSE-NEXT: pmulhuw {{[0-9]+}}(%rsp), %xmm6
577 ; SSE-NEXT: pmulhuw {{[0-9]+}}(%rsp), %xmm7
578 ; SSE-NEXT: movdqa %xmm7, 112(%rdi)
579 ; SSE-NEXT: movdqa %xmm6, 96(%rdi)
580 ; SSE-NEXT: movdqa %xmm5, 80(%rdi)
581 ; SSE-NEXT: movdqa %xmm4, 64(%rdi)
582 ; SSE-NEXT: movdqa %xmm3, 48(%rdi)
583 ; SSE-NEXT: movdqa %xmm2, 32(%rdi)
584 ; SSE-NEXT: movdqa %xmm1, 16(%rdi)
585 ; SSE-NEXT: movdqa %xmm0, (%rdi)
588 ; AVX2-LABEL: zext_mulhuw_v64i16:
590 ; AVX2-NEXT: vpmulhuw %ymm4, %ymm0, %ymm0
591 ; AVX2-NEXT: vpmulhuw %ymm5, %ymm1, %ymm1
592 ; AVX2-NEXT: vpmulhuw %ymm6, %ymm2, %ymm2
593 ; AVX2-NEXT: vpmulhuw %ymm7, %ymm3, %ymm3
596 ; AVX512F-LABEL: zext_mulhuw_v64i16:
598 ; AVX512F-NEXT: vextracti64x4 $1, %zmm2, %ymm4
599 ; AVX512F-NEXT: vextracti64x4 $1, %zmm0, %ymm5
600 ; AVX512F-NEXT: vpmulhuw %ymm4, %ymm5, %ymm4
601 ; AVX512F-NEXT: vpmulhuw %ymm2, %ymm0, %ymm0
602 ; AVX512F-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm0
603 ; AVX512F-NEXT: vextracti64x4 $1, %zmm3, %ymm2
604 ; AVX512F-NEXT: vextracti64x4 $1, %zmm1, %ymm4
605 ; AVX512F-NEXT: vpmulhuw %ymm2, %ymm4, %ymm2
606 ; AVX512F-NEXT: vpmulhuw %ymm3, %ymm1, %ymm1
607 ; AVX512F-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1
610 ; AVX512BW-LABEL: zext_mulhuw_v64i16:
612 ; AVX512BW-NEXT: vpmulhuw %zmm2, %zmm0, %zmm0
613 ; AVX512BW-NEXT: vpmulhuw %zmm3, %zmm1, %zmm1
614 ; AVX512BW-NEXT: retq
615 %a1 = zext <64 x i16> %a to <64 x i32>
616 %b1 = zext <64 x i16> %b to <64 x i32>
617 %c = mul <64 x i32> %a1, %b1
618 %d = lshr <64 x i32> %c, <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
619 %e = trunc <64 x i32> %d to <64 x i16>
620 ret <64 x i16> %e
621 }
623 define <64 x i16> @sext_mulhuw_v64i16(<64 x i16> %a, <64 x i16> %b) {
624 ; SSE-LABEL: sext_mulhuw_v64i16:
626 ; SSE-NEXT: movq %rdi, %rax
627 ; SSE-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm0
628 ; SSE-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm1
629 ; SSE-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm2
630 ; SSE-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm3
631 ; SSE-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm4
632 ; SSE-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm5
633 ; SSE-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm6
634 ; SSE-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm7
635 ; SSE-NEXT: movdqa %xmm7, 112(%rdi)
636 ; SSE-NEXT: movdqa %xmm6, 96(%rdi)
637 ; SSE-NEXT: movdqa %xmm5, 80(%rdi)
638 ; SSE-NEXT: movdqa %xmm4, 64(%rdi)
639 ; SSE-NEXT: movdqa %xmm3, 48(%rdi)
640 ; SSE-NEXT: movdqa %xmm2, 32(%rdi)
641 ; SSE-NEXT: movdqa %xmm1, 16(%rdi)
642 ; SSE-NEXT: movdqa %xmm0, (%rdi)
645 ; AVX2-LABEL: sext_mulhuw_v64i16:
647 ; AVX2-NEXT: vpmulhw %ymm4, %ymm0, %ymm0
648 ; AVX2-NEXT: vpmulhw %ymm5, %ymm1, %ymm1
649 ; AVX2-NEXT: vpmulhw %ymm6, %ymm2, %ymm2
650 ; AVX2-NEXT: vpmulhw %ymm7, %ymm3, %ymm3
653 ; AVX512F-LABEL: sext_mulhuw_v64i16:
655 ; AVX512F-NEXT: vextracti64x4 $1, %zmm2, %ymm4
656 ; AVX512F-NEXT: vextracti64x4 $1, %zmm0, %ymm5
657 ; AVX512F-NEXT: vpmulhw %ymm4, %ymm5, %ymm4
658 ; AVX512F-NEXT: vpmulhw %ymm2, %ymm0, %ymm0
659 ; AVX512F-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm0
660 ; AVX512F-NEXT: vextracti64x4 $1, %zmm3, %ymm2
661 ; AVX512F-NEXT: vextracti64x4 $1, %zmm1, %ymm4
662 ; AVX512F-NEXT: vpmulhw %ymm2, %ymm4, %ymm2
663 ; AVX512F-NEXT: vpmulhw %ymm3, %ymm1, %ymm1
664 ; AVX512F-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1
667 ; AVX512BW-LABEL: sext_mulhuw_v64i16:
669 ; AVX512BW-NEXT: vpmulhw %zmm2, %zmm0, %zmm0
670 ; AVX512BW-NEXT: vpmulhw %zmm3, %zmm1, %zmm1
671 ; AVX512BW-NEXT: retq
672 %a1 = sext <64 x i16> %a to <64 x i32>
673 %b1 = sext <64 x i16> %b to <64 x i32>
674 %c = mul <64 x i32> %a1, %b1
675 %d = lshr <64 x i32> %c, <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
676 %e = trunc <64 x i32> %d to <64 x i16>
677 ret <64 x i16> %e
678 }
680 define <8 x i16> @zext_mulhuw_v8i16_i64(<8 x i16> %a, <8 x i16> %b) {
681 ; SSE-LABEL: zext_mulhuw_v8i16_i64:
683 ; SSE-NEXT: pmulhuw %xmm1, %xmm0
686 ; AVX-LABEL: zext_mulhuw_v8i16_i64:
688 ; AVX-NEXT: vpmulhuw %xmm1, %xmm0, %xmm0
690 %a1 = zext <8 x i16> %a to <8 x i64>
691 %b1 = zext <8 x i16> %b to <8 x i64>
692 %c = mul <8 x i64> %a1, %b1
693 %d = lshr <8 x i64> %c, <i64 16, i64 16, i64 16, i64 16, i64 16, i64 16, i64 16, i64 16>
694 %e = trunc <8 x i64> %d to <8 x i16>
695 ret <8 x i16> %e
696 }
698 define <8 x i16> @sext_mulhuw_v8i16_i64(<8 x i16> %a, <8 x i16> %b) {
699 ; SSE-LABEL: sext_mulhuw_v8i16_i64:
701 ; SSE-NEXT: pmulhw %xmm1, %xmm0
704 ; AVX-LABEL: sext_mulhuw_v8i16_i64:
706 ; AVX-NEXT: vpmulhw %xmm1, %xmm0, %xmm0
708 %a1 = sext <8 x i16> %a to <8 x i64>
709 %b1 = sext <8 x i16> %b to <8 x i64>
710 %c = mul <8 x i64> %a1, %b1
711 %d = lshr <8 x i64> %c, <i64 16, i64 16, i64 16, i64 16, i64 16, i64 16, i64 16, i64 16>
712 %e = trunc <8 x i64> %d to <8 x i16>
713 ret <8 x i16> %e
714 }
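; The remaining tests keep the widened i32 result instead of truncating back to
; i16, so the expected lowering is a 16-bit multiply-high followed by a
; zero-extension (lshr cases) or sign-extension (ashr cases) of the result.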
716 define <4 x i32> @zext_mulhuw_v4i16_lshr(<4 x i16> %a, <4 x i16> %b) {
717 ; SSE2-LABEL: zext_mulhuw_v4i16_lshr:
719 ; SSE2-NEXT: pmulhuw %xmm1, %xmm0
720 ; SSE2-NEXT: pxor %xmm1, %xmm1
721 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
724 ; SSE41-LABEL: zext_mulhuw_v4i16_lshr:
726 ; SSE41-NEXT: pmulhuw %xmm1, %xmm0
727 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero
730 ; AVX-LABEL: zext_mulhuw_v4i16_lshr:
732 ; AVX-NEXT: vpmulhuw %xmm1, %xmm0, %xmm0
733 ; AVX-NEXT: vpmovzxwd {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero
735 %a1 = zext <4 x i16> %a to <4 x i32>
736 %b1 = zext <4 x i16> %b to <4 x i32>
737 %c = mul <4 x i32> %a1, %b1
738 %d = lshr <4 x i32> %c, <i32 16, i32 16, i32 16, i32 16>
739 ret <4 x i32> %d
740 }
742 define <4 x i32> @mulhsw_v4i16_lshr(<4 x i16> %a, <4 x i16> %b) {
743 ; SSE2-LABEL: mulhsw_v4i16_lshr:
745 ; SSE2-NEXT: pmulhw %xmm1, %xmm0
746 ; SSE2-NEXT: pxor %xmm1, %xmm1
747 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
750 ; SSE41-LABEL: mulhsw_v4i16_lshr:
752 ; SSE41-NEXT: pmulhw %xmm1, %xmm0
753 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero
756 ; AVX-LABEL: mulhsw_v4i16_lshr:
758 ; AVX-NEXT: vpmulhw %xmm1, %xmm0, %xmm0
759 ; AVX-NEXT: vpmovzxwd {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero
761 %a1 = sext <4 x i16> %a to <4 x i32>
762 %b1 = sext <4 x i16> %b to <4 x i32>
763 %c = mul <4 x i32> %a1, %b1
764 %d = lshr <4 x i32> %c, <i32 16, i32 16, i32 16, i32 16>
765 ret <4 x i32> %d
766 }
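; Arithmetic shift of the product: the i32 result is the sign-extension of the
; high half, hence PMULHW followed by PMOVSXWD (or an unpack+PSRAD on SSE2).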
768 define <4 x i32> @mulhsw_v4i16_ashr(<4 x i16> %a, <4 x i16> %b) {
769 ; SSE2-LABEL: mulhsw_v4i16_ashr:
771 ; SSE2-NEXT: pmulhw %xmm1, %xmm0
772 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0,0,1,1,2,2,3,3]
773 ; SSE2-NEXT: psrad $16, %xmm0
776 ; SSE41-LABEL: mulhsw_v4i16_ashr:
778 ; SSE41-NEXT: pmulhw %xmm1, %xmm0
779 ; SSE41-NEXT: pmovsxwd %xmm0, %xmm0
782 ; AVX-LABEL: mulhsw_v4i16_ashr:
784 ; AVX-NEXT: vpmulhw %xmm1, %xmm0, %xmm0
785 ; AVX-NEXT: vpmovsxwd %xmm0, %xmm0
787 %a1 = sext <4 x i16> %a to <4 x i32>
788 %b1 = sext <4 x i16> %b to <4 x i32>
789 %c = mul <4 x i32> %a1, %b1
790 %d = ashr <4 x i32> %c, <i32 16, i32 16, i32 16, i32 16>
791 ret <4 x i32> %d
792 }
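; Wider forms of the extended-result tests; on AVX512 the extension can be a
; single VPMOVZXWD/VPMOVSXWD of the multiply-high result.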
794 define <8 x i32> @zext_mulhuw_v8i16_lshr(<8 x i16> %a, <8 x i16> %b) {
795 ; SSE2-LABEL: zext_mulhuw_v8i16_lshr:
797 ; SSE2-NEXT: pmulhuw %xmm0, %xmm1
798 ; SSE2-NEXT: pxor %xmm2, %xmm2
799 ; SSE2-NEXT: movdqa %xmm1, %xmm0
800 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1],xmm0[2],xmm2[2],xmm0[3],xmm2[3]
801 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
804 ; SSE41-LABEL: zext_mulhuw_v8i16_lshr:
806 ; SSE41-NEXT: movdqa %xmm0, %xmm2
807 ; SSE41-NEXT: pmulhuw %xmm1, %xmm2
808 ; SSE41-NEXT: pxor %xmm1, %xmm1
809 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm0 = xmm2[0],zero,xmm2[1],zero,xmm2[2],zero,xmm2[3],zero
810 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
811 ; SSE41-NEXT: movdqa %xmm2, %xmm1
814 ; AVX-LABEL: zext_mulhuw_v8i16_lshr:
816 ; AVX-NEXT: vpmulhuw %xmm1, %xmm0, %xmm0
817 ; AVX-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
819 %a1 = zext <8 x i16> %a to <8 x i32>
820 %b1 = zext <8 x i16> %b to <8 x i32>
821 %c = mul <8 x i32> %a1, %b1
822 %d = lshr <8 x i32> %c, <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
823 ret <8 x i32> %d
824 }
826 define <8 x i32> @mulhsw_v8i16_lshr(<8 x i16> %a, <8 x i16> %b) {
827 ; SSE2-LABEL: mulhsw_v8i16_lshr:
829 ; SSE2-NEXT: pmulhw %xmm0, %xmm1
830 ; SSE2-NEXT: pxor %xmm2, %xmm2
831 ; SSE2-NEXT: movdqa %xmm1, %xmm0
832 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1],xmm0[2],xmm2[2],xmm0[3],xmm2[3]
833 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
836 ; SSE41-LABEL: mulhsw_v8i16_lshr:
838 ; SSE41-NEXT: movdqa %xmm0, %xmm2
839 ; SSE41-NEXT: pmulhw %xmm1, %xmm2
840 ; SSE41-NEXT: pxor %xmm1, %xmm1
841 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm0 = xmm2[0],zero,xmm2[1],zero,xmm2[2],zero,xmm2[3],zero
842 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
843 ; SSE41-NEXT: movdqa %xmm2, %xmm1
846 ; AVX-LABEL: mulhsw_v8i16_lshr:
848 ; AVX-NEXT: vpmulhw %xmm1, %xmm0, %xmm0
849 ; AVX-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
851 %a1 = sext <8 x i16> %a to <8 x i32>
852 %b1 = sext <8 x i16> %b to <8 x i32>
853 %c = mul <8 x i32> %a1, %b1
854 %d = lshr <8 x i32> %c, <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
855 ret <8 x i32> %d
856 }
858 define <8 x i32> @mulhsw_v8i16_ashr(<8 x i16> %a, <8 x i16> %b) {
859 ; SSE2-LABEL: mulhsw_v8i16_ashr:
861 ; SSE2-NEXT: pmulhw %xmm1, %xmm0
862 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
863 ; SSE2-NEXT: psrad $16, %xmm2
864 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
865 ; SSE2-NEXT: psrad $16, %xmm1
866 ; SSE2-NEXT: movdqa %xmm2, %xmm0
869 ; SSE41-LABEL: mulhsw_v8i16_ashr:
871 ; SSE41-NEXT: pmulhw %xmm1, %xmm0
872 ; SSE41-NEXT: pmovsxwd %xmm0, %xmm2
873 ; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,3,2,3]
874 ; SSE41-NEXT: pmovsxwd %xmm0, %xmm1
875 ; SSE41-NEXT: movdqa %xmm2, %xmm0
878 ; AVX-LABEL: mulhsw_v8i16_ashr:
880 ; AVX-NEXT: vpmulhw %xmm1, %xmm0, %xmm0
881 ; AVX-NEXT: vpmovsxwd %xmm0, %ymm0
883 %a1 = sext <8 x i16> %a to <8 x i32>
884 %b1 = sext <8 x i16> %b to <8 x i32>
885 %c = mul <8 x i32> %a1, %b1
886 %d = ashr <8 x i32> %c, <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
887 ret <8 x i32> %d
888 }
890 define <16 x i32> @zext_mulhuw_v16i16_lshr(<16 x i16> %a, <16 x i16> %b) {
891 ; SSE2-LABEL: zext_mulhuw_v16i16_lshr:
893 ; SSE2-NEXT: movdqa %xmm0, %xmm4
894 ; SSE2-NEXT: pmulhuw %xmm2, %xmm4
895 ; SSE2-NEXT: pxor %xmm5, %xmm5
896 ; SSE2-NEXT: movdqa %xmm4, %xmm0
897 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm5[0],xmm0[1],xmm5[1],xmm0[2],xmm5[2],xmm0[3],xmm5[3]
898 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm4 = xmm4[4],xmm5[4],xmm4[5],xmm5[5],xmm4[6],xmm5[6],xmm4[7],xmm5[7]
899 ; SSE2-NEXT: pmulhuw %xmm1, %xmm3
900 ; SSE2-NEXT: movdqa %xmm3, %xmm2
901 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1],xmm2[2],xmm5[2],xmm2[3],xmm5[3]
902 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm5[4],xmm3[5],xmm5[5],xmm3[6],xmm5[6],xmm3[7],xmm5[7]
903 ; SSE2-NEXT: movdqa %xmm4, %xmm1
906 ; SSE41-LABEL: zext_mulhuw_v16i16_lshr:
908 ; SSE41-NEXT: movdqa %xmm1, %xmm4
909 ; SSE41-NEXT: movdqa %xmm0, %xmm1
910 ; SSE41-NEXT: pmulhuw %xmm2, %xmm1
911 ; SSE41-NEXT: pxor %xmm5, %xmm5
912 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm0 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero
913 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm5[4],xmm1[5],xmm5[5],xmm1[6],xmm5[6],xmm1[7],xmm5[7]
914 ; SSE41-NEXT: pmulhuw %xmm3, %xmm4
915 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm2 = xmm4[0],zero,xmm4[1],zero,xmm4[2],zero,xmm4[3],zero
916 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm4 = xmm4[4],xmm5[4],xmm4[5],xmm5[5],xmm4[6],xmm5[6],xmm4[7],xmm5[7]
917 ; SSE41-NEXT: movdqa %xmm4, %xmm3
920 ; AVX2-LABEL: zext_mulhuw_v16i16_lshr:
922 ; AVX2-NEXT: vpmulhuw %ymm1, %ymm0, %ymm1
923 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
924 ; AVX2-NEXT: vextracti128 $1, %ymm1, %xmm1
925 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
928 ; AVX512-LABEL: zext_mulhuw_v16i16_lshr:
930 ; AVX512-NEXT: vpmulhuw %ymm1, %ymm0, %ymm0
931 ; AVX512-NEXT: vpmovzxwd {{.*#+}} zmm0 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero
933 %a1 = zext <16 x i16> %a to <16 x i32>
934 %b1 = zext <16 x i16> %b to <16 x i32>
935 %c = mul <16 x i32> %a1, %b1
936 %d = lshr <16 x i32> %c, <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
937 ret <16 x i32> %d
938 }
940 define <16 x i32> @mulhsw_v16i16_lshr(<16 x i16> %a, <16 x i16> %b) {
941 ; SSE2-LABEL: mulhsw_v16i16_lshr:
943 ; SSE2-NEXT: movdqa %xmm0, %xmm4
944 ; SSE2-NEXT: pmulhw %xmm2, %xmm4
945 ; SSE2-NEXT: pxor %xmm5, %xmm5
946 ; SSE2-NEXT: movdqa %xmm4, %xmm0
947 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm5[0],xmm0[1],xmm5[1],xmm0[2],xmm5[2],xmm0[3],xmm5[3]
948 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm4 = xmm4[4],xmm5[4],xmm4[5],xmm5[5],xmm4[6],xmm5[6],xmm4[7],xmm5[7]
949 ; SSE2-NEXT: pmulhw %xmm1, %xmm3
950 ; SSE2-NEXT: movdqa %xmm3, %xmm2
951 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1],xmm2[2],xmm5[2],xmm2[3],xmm5[3]
952 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm5[4],xmm3[5],xmm5[5],xmm3[6],xmm5[6],xmm3[7],xmm5[7]
953 ; SSE2-NEXT: movdqa %xmm4, %xmm1
956 ; SSE41-LABEL: mulhsw_v16i16_lshr:
958 ; SSE41-NEXT: movdqa %xmm1, %xmm4
959 ; SSE41-NEXT: movdqa %xmm0, %xmm1
960 ; SSE41-NEXT: pmulhw %xmm2, %xmm1
961 ; SSE41-NEXT: pxor %xmm5, %xmm5
962 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm0 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero
963 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm5[4],xmm1[5],xmm5[5],xmm1[6],xmm5[6],xmm1[7],xmm5[7]
964 ; SSE41-NEXT: pmulhw %xmm3, %xmm4
965 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm2 = xmm4[0],zero,xmm4[1],zero,xmm4[2],zero,xmm4[3],zero
966 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm4 = xmm4[4],xmm5[4],xmm4[5],xmm5[5],xmm4[6],xmm5[6],xmm4[7],xmm5[7]
967 ; SSE41-NEXT: movdqa %xmm4, %xmm3
970 ; AVX2-LABEL: mulhsw_v16i16_lshr:
972 ; AVX2-NEXT: vpmulhw %ymm1, %ymm0, %ymm1
973 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
974 ; AVX2-NEXT: vextracti128 $1, %ymm1, %xmm1
975 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
978 ; AVX512-LABEL: mulhsw_v16i16_lshr:
980 ; AVX512-NEXT: vpmulhw %ymm1, %ymm0, %ymm0
981 ; AVX512-NEXT: vpmovzxwd {{.*#+}} zmm0 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero
983 %a1 = sext <16 x i16> %a to <16 x i32>
984 %b1 = sext <16 x i16> %b to <16 x i32>
985 %c = mul <16 x i32> %a1, %b1
986 %d = lshr <16 x i32> %c, <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
987 ret <16 x i32> %d
988 }
990 define <16 x i32> @mulhsw_v16i16_ashr(<16 x i16> %a, <16 x i16> %b) {
991 ; SSE2-LABEL: mulhsw_v16i16_ashr:
993 ; SSE2-NEXT: pmulhw %xmm2, %xmm0
994 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm0[0],xmm5[1],xmm0[1],xmm5[2],xmm0[2],xmm5[3],xmm0[3]
995 ; SSE2-NEXT: psrad $16, %xmm5
996 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm4 = xmm4[4],xmm0[4],xmm4[5],xmm0[5],xmm4[6],xmm0[6],xmm4[7],xmm0[7]
997 ; SSE2-NEXT: psrad $16, %xmm4
998 ; SSE2-NEXT: pmulhw %xmm3, %xmm1
999 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
1000 ; SSE2-NEXT: psrad $16, %xmm2
1001 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm1[4],xmm3[5],xmm1[5],xmm3[6],xmm1[6],xmm3[7],xmm1[7]
1002 ; SSE2-NEXT: psrad $16, %xmm3
1003 ; SSE2-NEXT: movdqa %xmm5, %xmm0
1004 ; SSE2-NEXT: movdqa %xmm4, %xmm1
1007 ; SSE41-LABEL: mulhsw_v16i16_ashr:
1009 ; SSE41-NEXT: pmulhw %xmm2, %xmm0
1010 ; SSE41-NEXT: pmovsxwd %xmm0, %xmm4
1011 ; SSE41-NEXT: pmulhw %xmm3, %xmm1
1012 ; SSE41-NEXT: pmovsxwd %xmm1, %xmm2
1013 ; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,3,2,3]
1014 ; SSE41-NEXT: pmovsxwd %xmm0, %xmm5
1015 ; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm1[2,3,2,3]
1016 ; SSE41-NEXT: pmovsxwd %xmm0, %xmm3
1017 ; SSE41-NEXT: movdqa %xmm4, %xmm0
1018 ; SSE41-NEXT: movdqa %xmm5, %xmm1
1021 ; AVX2-LABEL: mulhsw_v16i16_ashr:
1023 ; AVX2-NEXT: vpmulhw %ymm1, %ymm0, %ymm1
1024 ; AVX2-NEXT: vpmovsxwd %xmm1, %ymm0
1025 ; AVX2-NEXT: vextracti128 $1, %ymm1, %xmm1
1026 ; AVX2-NEXT: vpmovsxwd %xmm1, %ymm1
1029 ; AVX512-LABEL: mulhsw_v16i16_ashr:
1031 ; AVX512-NEXT: vpmulhw %ymm1, %ymm0, %ymm0
1032 ; AVX512-NEXT: vpmovsxwd %ymm0, %zmm0
1034 %a1 = sext <16 x i16> %a to <16 x i32>
1035 %b1 = sext <16 x i16> %b to <16 x i32>
1036 %c = mul <16 x i32> %a1, %b1
1037 %d = ashr <16 x i32> %c, <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
1038 ret <16 x i32> %d
1039 }
1041 define <32 x i32> @zext_mulhuw_v32i16_lshr(<32 x i16> %a, <32 x i16> %b) {
1042 ; SSE2-LABEL: zext_mulhuw_v32i16_lshr:
1044 ; SSE2-NEXT: movq %rdi, %rax
1045 ; SSE2-NEXT: pmulhuw %xmm4, %xmm0
1046 ; SSE2-NEXT: pxor %xmm4, %xmm4
1047 ; SSE2-NEXT: movdqa %xmm0, %xmm8
1048 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm8 = xmm8[0],xmm4[0],xmm8[1],xmm4[1],xmm8[2],xmm4[2],xmm8[3],xmm4[3]
1049 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm4[4],xmm0[5],xmm4[5],xmm0[6],xmm4[6],xmm0[7],xmm4[7]
1050 ; SSE2-NEXT: pmulhuw %xmm5, %xmm1
1051 ; SSE2-NEXT: movdqa %xmm1, %xmm5
1052 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
1053 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm4[4],xmm1[5],xmm4[5],xmm1[6],xmm4[6],xmm1[7],xmm4[7]
1054 ; SSE2-NEXT: pmulhuw %xmm6, %xmm2
1055 ; SSE2-NEXT: movdqa %xmm2, %xmm6
1056 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm6 = xmm6[0],xmm4[0],xmm6[1],xmm4[1],xmm6[2],xmm4[2],xmm6[3],xmm4[3]
1057 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm4[4],xmm2[5],xmm4[5],xmm2[6],xmm4[6],xmm2[7],xmm4[7]
1058 ; SSE2-NEXT: pmulhuw %xmm7, %xmm3
1059 ; SSE2-NEXT: movdqa %xmm3, %xmm7
1060 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm4[0],xmm7[1],xmm4[1],xmm7[2],xmm4[2],xmm7[3],xmm4[3]
1061 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm4[4],xmm3[5],xmm4[5],xmm3[6],xmm4[6],xmm3[7],xmm4[7]
1062 ; SSE2-NEXT: movdqa %xmm3, 112(%rdi)
1063 ; SSE2-NEXT: movdqa %xmm7, 96(%rdi)
1064 ; SSE2-NEXT: movdqa %xmm2, 80(%rdi)
1065 ; SSE2-NEXT: movdqa %xmm6, 64(%rdi)
1066 ; SSE2-NEXT: movdqa %xmm1, 48(%rdi)
1067 ; SSE2-NEXT: movdqa %xmm5, 32(%rdi)
1068 ; SSE2-NEXT: movdqa %xmm0, 16(%rdi)
1069 ; SSE2-NEXT: movdqa %xmm8, (%rdi)
1072 ; SSE41-LABEL: zext_mulhuw_v32i16_lshr:
1074 ; SSE41-NEXT: movq %rdi, %rax
1075 ; SSE41-NEXT: pmulhuw %xmm4, %xmm0
1076 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm4 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero
1077 ; SSE41-NEXT: pxor %xmm8, %xmm8
1078 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm8[4],xmm0[5],xmm8[5],xmm0[6],xmm8[6],xmm0[7],xmm8[7]
1079 ; SSE41-NEXT: pmulhuw %xmm5, %xmm1
1080 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm5 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero
1081 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm8[4],xmm1[5],xmm8[5],xmm1[6],xmm8[6],xmm1[7],xmm8[7]
1082 ; SSE41-NEXT: pmulhuw %xmm6, %xmm2
1083 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm6 = xmm2[0],zero,xmm2[1],zero,xmm2[2],zero,xmm2[3],zero
1084 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm8[4],xmm2[5],xmm8[5],xmm2[6],xmm8[6],xmm2[7],xmm8[7]
1085 ; SSE41-NEXT: pmulhuw %xmm7, %xmm3
1086 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm7 = xmm3[0],zero,xmm3[1],zero,xmm3[2],zero,xmm3[3],zero
1087 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm8[4],xmm3[5],xmm8[5],xmm3[6],xmm8[6],xmm3[7],xmm8[7]
1088 ; SSE41-NEXT: movdqa %xmm3, 112(%rdi)
1089 ; SSE41-NEXT: movdqa %xmm7, 96(%rdi)
1090 ; SSE41-NEXT: movdqa %xmm2, 80(%rdi)
1091 ; SSE41-NEXT: movdqa %xmm6, 64(%rdi)
1092 ; SSE41-NEXT: movdqa %xmm1, 48(%rdi)
1093 ; SSE41-NEXT: movdqa %xmm5, 32(%rdi)
1094 ; SSE41-NEXT: movdqa %xmm0, 16(%rdi)
1095 ; SSE41-NEXT: movdqa %xmm4, (%rdi)
1098 ; AVX2-LABEL: zext_mulhuw_v32i16_lshr:
1100 ; AVX2-NEXT: vpmulhuw %ymm2, %ymm0, %ymm2
1101 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm2[0],zero,xmm2[1],zero,xmm2[2],zero,xmm2[3],zero,xmm2[4],zero,xmm2[5],zero,xmm2[6],zero,xmm2[7],zero
1102 ; AVX2-NEXT: vextracti128 $1, %ymm2, %xmm2
1103 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm4 = xmm2[0],zero,xmm2[1],zero,xmm2[2],zero,xmm2[3],zero,xmm2[4],zero,xmm2[5],zero,xmm2[6],zero,xmm2[7],zero
1104 ; AVX2-NEXT: vpmulhuw %ymm3, %ymm1, %ymm1
1105 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm2 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
1106 ; AVX2-NEXT: vextracti128 $1, %ymm1, %xmm1
1107 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm3 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
1108 ; AVX2-NEXT: vmovdqa %ymm4, %ymm1
1111 ; AVX512F-LABEL: zext_mulhuw_v32i16_lshr:
1113 ; AVX512F-NEXT: vpmulhuw %ymm1, %ymm0, %ymm2
1114 ; AVX512F-NEXT: vpmovzxwd {{.*#+}} zmm2 = ymm2[0],zero,ymm2[1],zero,ymm2[2],zero,ymm2[3],zero,ymm2[4],zero,ymm2[5],zero,ymm2[6],zero,ymm2[7],zero,ymm2[8],zero,ymm2[9],zero,ymm2[10],zero,ymm2[11],zero,ymm2[12],zero,ymm2[13],zero,ymm2[14],zero,ymm2[15],zero
1115 ; AVX512F-NEXT: vextracti64x4 $1, %zmm1, %ymm1
1116 ; AVX512F-NEXT: vextracti64x4 $1, %zmm0, %ymm0
1117 ; AVX512F-NEXT: vpmulhuw %ymm1, %ymm0, %ymm0
1118 ; AVX512F-NEXT: vpmovzxwd {{.*#+}} zmm1 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero
1119 ; AVX512F-NEXT: vmovdqa64 %zmm2, %zmm0
1120 ; AVX512F-NEXT: retq
1122 ; AVX512BW-LABEL: zext_mulhuw_v32i16_lshr:
1123 ; AVX512BW: # %bb.0:
1124 ; AVX512BW-NEXT: vpmulhuw %zmm1, %zmm0, %zmm1
1125 ; AVX512BW-NEXT: vpmovzxwd {{.*#+}} zmm0 = ymm1[0],zero,ymm1[1],zero,ymm1[2],zero,ymm1[3],zero,ymm1[4],zero,ymm1[5],zero,ymm1[6],zero,ymm1[7],zero,ymm1[8],zero,ymm1[9],zero,ymm1[10],zero,ymm1[11],zero,ymm1[12],zero,ymm1[13],zero,ymm1[14],zero,ymm1[15],zero
1126 ; AVX512BW-NEXT: vextracti64x4 $1, %zmm1, %ymm1
1127 ; AVX512BW-NEXT: vpmovzxwd {{.*#+}} zmm1 = ymm1[0],zero,ymm1[1],zero,ymm1[2],zero,ymm1[3],zero,ymm1[4],zero,ymm1[5],zero,ymm1[6],zero,ymm1[7],zero,ymm1[8],zero,ymm1[9],zero,ymm1[10],zero,ymm1[11],zero,ymm1[12],zero,ymm1[13],zero,ymm1[14],zero,ymm1[15],zero
1128 ; AVX512BW-NEXT: retq
1129 %a1 = zext <32 x i16> %a to <32 x i32>
1130 %b1 = zext <32 x i16> %b to <32 x i32>
1131 %c = mul <32 x i32> %a1, %b1
1132 %d = lshr <32 x i32> %c, <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
1133 ret <32 x i32> %d
1134 }
1136 define <32 x i32> @mulhsw_v32i16_lshr(<32 x i16> %a, <32 x i16> %b) {
1137 ; SSE2-LABEL: mulhsw_v32i16_lshr:
1139 ; SSE2-NEXT: movq %rdi, %rax
1140 ; SSE2-NEXT: pmulhw %xmm4, %xmm0
1141 ; SSE2-NEXT: pxor %xmm4, %xmm4
1142 ; SSE2-NEXT: movdqa %xmm0, %xmm8
1143 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm8 = xmm8[0],xmm4[0],xmm8[1],xmm4[1],xmm8[2],xmm4[2],xmm8[3],xmm4[3]
1144 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm4[4],xmm0[5],xmm4[5],xmm0[6],xmm4[6],xmm0[7],xmm4[7]
1145 ; SSE2-NEXT: pmulhw %xmm5, %xmm1
1146 ; SSE2-NEXT: movdqa %xmm1, %xmm5
1147 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
1148 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm4[4],xmm1[5],xmm4[5],xmm1[6],xmm4[6],xmm1[7],xmm4[7]
1149 ; SSE2-NEXT: pmulhw %xmm6, %xmm2
1150 ; SSE2-NEXT: movdqa %xmm2, %xmm6
1151 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm6 = xmm6[0],xmm4[0],xmm6[1],xmm4[1],xmm6[2],xmm4[2],xmm6[3],xmm4[3]
1152 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm4[4],xmm2[5],xmm4[5],xmm2[6],xmm4[6],xmm2[7],xmm4[7]
1153 ; SSE2-NEXT: pmulhw %xmm7, %xmm3
1154 ; SSE2-NEXT: movdqa %xmm3, %xmm7
1155 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm4[0],xmm7[1],xmm4[1],xmm7[2],xmm4[2],xmm7[3],xmm4[3]
1156 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm4[4],xmm3[5],xmm4[5],xmm3[6],xmm4[6],xmm3[7],xmm4[7]
1157 ; SSE2-NEXT: movdqa %xmm3, 112(%rdi)
1158 ; SSE2-NEXT: movdqa %xmm7, 96(%rdi)
1159 ; SSE2-NEXT: movdqa %xmm2, 80(%rdi)
1160 ; SSE2-NEXT: movdqa %xmm6, 64(%rdi)
1161 ; SSE2-NEXT: movdqa %xmm1, 48(%rdi)
1162 ; SSE2-NEXT: movdqa %xmm5, 32(%rdi)
1163 ; SSE2-NEXT: movdqa %xmm0, 16(%rdi)
1164 ; SSE2-NEXT: movdqa %xmm8, (%rdi)
1167 ; SSE41-LABEL: mulhsw_v32i16_lshr:
1169 ; SSE41-NEXT: movq %rdi, %rax
1170 ; SSE41-NEXT: pmulhw %xmm4, %xmm0
1171 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm4 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero
1172 ; SSE41-NEXT: pxor %xmm8, %xmm8
1173 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm8[4],xmm0[5],xmm8[5],xmm0[6],xmm8[6],xmm0[7],xmm8[7]
1174 ; SSE41-NEXT: pmulhw %xmm5, %xmm1
1175 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm5 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero
1176 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm8[4],xmm1[5],xmm8[5],xmm1[6],xmm8[6],xmm1[7],xmm8[7]
1177 ; SSE41-NEXT: pmulhw %xmm6, %xmm2
1178 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm6 = xmm2[0],zero,xmm2[1],zero,xmm2[2],zero,xmm2[3],zero
1179 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm8[4],xmm2[5],xmm8[5],xmm2[6],xmm8[6],xmm2[7],xmm8[7]
1180 ; SSE41-NEXT: pmulhw %xmm7, %xmm3
1181 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm7 = xmm3[0],zero,xmm3[1],zero,xmm3[2],zero,xmm3[3],zero
1182 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm8[4],xmm3[5],xmm8[5],xmm3[6],xmm8[6],xmm3[7],xmm8[7]
1183 ; SSE41-NEXT: movdqa %xmm3, 112(%rdi)
1184 ; SSE41-NEXT: movdqa %xmm7, 96(%rdi)
1185 ; SSE41-NEXT: movdqa %xmm2, 80(%rdi)
1186 ; SSE41-NEXT: movdqa %xmm6, 64(%rdi)
1187 ; SSE41-NEXT: movdqa %xmm1, 48(%rdi)
1188 ; SSE41-NEXT: movdqa %xmm5, 32(%rdi)
1189 ; SSE41-NEXT: movdqa %xmm0, 16(%rdi)
1190 ; SSE41-NEXT: movdqa %xmm4, (%rdi)
1193 ; AVX2-LABEL: mulhsw_v32i16_lshr:
1195 ; AVX2-NEXT: vpmulhw %ymm2, %ymm0, %ymm2
1196 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm2[0],zero,xmm2[1],zero,xmm2[2],zero,xmm2[3],zero,xmm2[4],zero,xmm2[5],zero,xmm2[6],zero,xmm2[7],zero
1197 ; AVX2-NEXT: vextracti128 $1, %ymm2, %xmm2
1198 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm4 = xmm2[0],zero,xmm2[1],zero,xmm2[2],zero,xmm2[3],zero,xmm2[4],zero,xmm2[5],zero,xmm2[6],zero,xmm2[7],zero
1199 ; AVX2-NEXT: vpmulhw %ymm3, %ymm1, %ymm1
1200 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm2 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
1201 ; AVX2-NEXT: vextracti128 $1, %ymm1, %xmm1
1202 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm3 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
1203 ; AVX2-NEXT: vmovdqa %ymm4, %ymm1
1206 ; AVX512F-LABEL: mulhsw_v32i16_lshr:
1208 ; AVX512F-NEXT: vpmulhw %ymm1, %ymm0, %ymm2
1209 ; AVX512F-NEXT: vpmovzxwd {{.*#+}} zmm2 = ymm2[0],zero,ymm2[1],zero,ymm2[2],zero,ymm2[3],zero,ymm2[4],zero,ymm2[5],zero,ymm2[6],zero,ymm2[7],zero,ymm2[8],zero,ymm2[9],zero,ymm2[10],zero,ymm2[11],zero,ymm2[12],zero,ymm2[13],zero,ymm2[14],zero,ymm2[15],zero
1210 ; AVX512F-NEXT: vextracti64x4 $1, %zmm1, %ymm1
1211 ; AVX512F-NEXT: vextracti64x4 $1, %zmm0, %ymm0
1212 ; AVX512F-NEXT: vpmulhw %ymm1, %ymm0, %ymm0
1213 ; AVX512F-NEXT: vpmovzxwd {{.*#+}} zmm1 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero
1214 ; AVX512F-NEXT: vmovdqa64 %zmm2, %zmm0
1215 ; AVX512F-NEXT: retq
1217 ; AVX512BW-LABEL: mulhsw_v32i16_lshr:
1218 ; AVX512BW: # %bb.0:
1219 ; AVX512BW-NEXT: vpmulhw %zmm1, %zmm0, %zmm1
1220 ; AVX512BW-NEXT: vpmovzxwd {{.*#+}} zmm0 = ymm1[0],zero,ymm1[1],zero,ymm1[2],zero,ymm1[3],zero,ymm1[4],zero,ymm1[5],zero,ymm1[6],zero,ymm1[7],zero,ymm1[8],zero,ymm1[9],zero,ymm1[10],zero,ymm1[11],zero,ymm1[12],zero,ymm1[13],zero,ymm1[14],zero,ymm1[15],zero
1221 ; AVX512BW-NEXT: vextracti64x4 $1, %zmm1, %ymm1
1222 ; AVX512BW-NEXT: vpmovzxwd {{.*#+}} zmm1 = ymm1[0],zero,ymm1[1],zero,ymm1[2],zero,ymm1[3],zero,ymm1[4],zero,ymm1[5],zero,ymm1[6],zero,ymm1[7],zero,ymm1[8],zero,ymm1[9],zero,ymm1[10],zero,ymm1[11],zero,ymm1[12],zero,ymm1[13],zero,ymm1[14],zero,ymm1[15],zero
1223 ; AVX512BW-NEXT: retq
1224 %a1 = sext <32 x i16> %a to <32 x i32>
1225 %b1 = sext <32 x i16> %b to <32 x i32>
1226 %c = mul <32 x i32> %a1, %b1
1227 %d = lshr <32 x i32> %c, <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
1228 ret <32 x i32> %d
1229 }
1231 define <32 x i32> @mulhsw_v32i16_ashr(<32 x i16> %a, <32 x i16> %b) {
1232 ; SSE2-LABEL: mulhsw_v32i16_ashr:
1234 ; SSE2-NEXT: movq %rdi, %rax
1235 ; SSE2-NEXT: pmulhw %xmm4, %xmm0
1236 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm4 = xmm4[0],xmm0[0],xmm4[1],xmm0[1],xmm4[2],xmm0[2],xmm4[3],xmm0[3]
1237 ; SSE2-NEXT: psrad $16, %xmm4
1238 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4,4,5,5,6,6,7,7]
1239 ; SSE2-NEXT: psrad $16, %xmm0
1240 ; SSE2-NEXT: pmulhw %xmm5, %xmm1
1241 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
1242 ; SSE2-NEXT: psrad $16, %xmm5
1243 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4,4,5,5,6,6,7,7]
1244 ; SSE2-NEXT: psrad $16, %xmm1
1245 ; SSE2-NEXT: pmulhw %xmm6, %xmm2
1246 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm6 = xmm6[0],xmm2[0],xmm6[1],xmm2[1],xmm6[2],xmm2[2],xmm6[3],xmm2[3]
1247 ; SSE2-NEXT: psrad $16, %xmm6
1248 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4,4,5,5,6,6,7,7]
1249 ; SSE2-NEXT: psrad $16, %xmm2
1250 ; SSE2-NEXT: pmulhw %xmm7, %xmm3
1251 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm3[0],xmm7[1],xmm3[1],xmm7[2],xmm3[2],xmm7[3],xmm3[3]
1252 ; SSE2-NEXT: psrad $16, %xmm7
1253 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4,4,5,5,6,6,7,7]
1254 ; SSE2-NEXT: psrad $16, %xmm3
1255 ; SSE2-NEXT: movdqa %xmm3, 112(%rdi)
1256 ; SSE2-NEXT: movdqa %xmm7, 96(%rdi)
1257 ; SSE2-NEXT: movdqa %xmm2, 80(%rdi)
1258 ; SSE2-NEXT: movdqa %xmm6, 64(%rdi)
1259 ; SSE2-NEXT: movdqa %xmm1, 48(%rdi)
1260 ; SSE2-NEXT: movdqa %xmm5, 32(%rdi)
1261 ; SSE2-NEXT: movdqa %xmm0, 16(%rdi)
1262 ; SSE2-NEXT: movdqa %xmm4, (%rdi)
1265 ; SSE41-LABEL: mulhsw_v32i16_ashr:
1267 ; SSE41-NEXT: movq %rdi, %rax
1268 ; SSE41-NEXT: pmulhw %xmm4, %xmm0
1269 ; SSE41-NEXT: pmovsxwd %xmm0, %xmm4
1270 ; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,3,2,3]
1271 ; SSE41-NEXT: pmovsxwd %xmm0, %xmm0
1272 ; SSE41-NEXT: pmulhw %xmm5, %xmm1
1273 ; SSE41-NEXT: pmovsxwd %xmm1, %xmm5
1274 ; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,3,2,3]
1275 ; SSE41-NEXT: pmovsxwd %xmm1, %xmm1
1276 ; SSE41-NEXT: pmulhw %xmm6, %xmm2
1277 ; SSE41-NEXT: pmovsxwd %xmm2, %xmm6
1278 ; SSE41-NEXT: pshufd {{.*#+}} xmm2 = xmm2[2,3,2,3]
1279 ; SSE41-NEXT: pmovsxwd %xmm2, %xmm2
1280 ; SSE41-NEXT: pmulhw %xmm7, %xmm3
1281 ; SSE41-NEXT: pmovsxwd %xmm3, %xmm7
1282 ; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm3[2,3,2,3]
1283 ; SSE41-NEXT: pmovsxwd %xmm3, %xmm3
1284 ; SSE41-NEXT: movdqa %xmm3, 112(%rdi)
1285 ; SSE41-NEXT: movdqa %xmm7, 96(%rdi)
1286 ; SSE41-NEXT: movdqa %xmm2, 80(%rdi)
1287 ; SSE41-NEXT: movdqa %xmm6, 64(%rdi)
1288 ; SSE41-NEXT: movdqa %xmm1, 48(%rdi)
1289 ; SSE41-NEXT: movdqa %xmm5, 32(%rdi)
1290 ; SSE41-NEXT: movdqa %xmm0, 16(%rdi)
1291 ; SSE41-NEXT: movdqa %xmm4, (%rdi)
1294 ; AVX2-LABEL: mulhsw_v32i16_ashr:
1296 ; AVX2-NEXT: vpmulhw %ymm2, %ymm0, %ymm2
1297 ; AVX2-NEXT: vpmovsxwd %xmm2, %ymm0
1298 ; AVX2-NEXT: vextracti128 $1, %ymm2, %xmm2
1299 ; AVX2-NEXT: vpmovsxwd %xmm2, %ymm4
1300 ; AVX2-NEXT: vpmulhw %ymm3, %ymm1, %ymm1
1301 ; AVX2-NEXT: vpmovsxwd %xmm1, %ymm2
1302 ; AVX2-NEXT: vextracti128 $1, %ymm1, %xmm1
1303 ; AVX2-NEXT: vpmovsxwd %xmm1, %ymm3
1304 ; AVX2-NEXT: vmovdqa %ymm4, %ymm1
1307 ; AVX512F-LABEL: mulhsw_v32i16_ashr:
1309 ; AVX512F-NEXT: vpmulhw %ymm1, %ymm0, %ymm2
1310 ; AVX512F-NEXT: vpmovsxwd %ymm2, %zmm2
1311 ; AVX512F-NEXT: vextracti64x4 $1, %zmm1, %ymm1
1312 ; AVX512F-NEXT: vextracti64x4 $1, %zmm0, %ymm0
1313 ; AVX512F-NEXT: vpmulhw %ymm1, %ymm0, %ymm0
1314 ; AVX512F-NEXT: vpmovsxwd %ymm0, %zmm1
1315 ; AVX512F-NEXT: vmovdqa64 %zmm2, %zmm0
1316 ; AVX512F-NEXT: retq
1318 ; AVX512BW-LABEL: mulhsw_v32i16_ashr:
1319 ; AVX512BW: # %bb.0:
1320 ; AVX512BW-NEXT: vpmulhw %zmm1, %zmm0, %zmm1
1321 ; AVX512BW-NEXT: vpmovsxwd %ymm1, %zmm0
1322 ; AVX512BW-NEXT: vextracti64x4 $1, %zmm1, %ymm1
1323 ; AVX512BW-NEXT: vpmovsxwd %ymm1, %zmm1
1324 ; AVX512BW-NEXT: retq
1325 %a1 = sext <32 x i16> %a to <32 x i32>
1326 %b1 = sext <32 x i16> %b to <32 x i32>
1327 %c = mul <32 x i32> %a1, %b1
1328 %d = ashr <32 x i32> %c, <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
1329 ret <32 x i32> %d
1330 }
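; <64 x i16> inputs with a <64 x i32> result: the unsigned multiply-high still
; selects PMULHUW, and on SSE targets the sixteen result vectors are returned
; through memory.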
1332 define <64 x i32> @zext_mulhuw_v64i16_lshr(<64 x i16> %a, <64 x i16> %b) {
1333 ; SSE2-LABEL: zext_mulhuw_v64i16_lshr:
1335 ; SSE2-NEXT: movdqa %xmm7, %xmm8
1336 ; SSE2-NEXT: movq %rdi, %rax
1337 ; SSE2-NEXT: pmulhuw {{[0-9]+}}(%rsp), %xmm0
1338 ; SSE2-NEXT: pxor %xmm10, %xmm10
1339 ; SSE2-NEXT: movdqa %xmm0, %xmm7
1340 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm10[0],xmm7[1],xmm10[1],xmm7[2],xmm10[2],xmm7[3],xmm10[3]
1341 ; SSE2-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1342 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm10[4],xmm0[5],xmm10[5],xmm0[6],xmm10[6],xmm0[7],xmm10[7]
1343 ; SSE2-NEXT: pmulhuw {{[0-9]+}}(%rsp), %xmm1
1344 ; SSE2-NEXT: movdqa %xmm1, %xmm9
1345 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm9 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3]
1346 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm10[4],xmm1[5],xmm10[5],xmm1[6],xmm10[6],xmm1[7],xmm10[7]
1347 ; SSE2-NEXT: pmulhuw {{[0-9]+}}(%rsp), %xmm2
1348 ; SSE2-NEXT: movdqa %xmm2, %xmm11
1349 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm11 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
1350 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm10[4],xmm2[5],xmm10[5],xmm2[6],xmm10[6],xmm2[7],xmm10[7]
1351 ; SSE2-NEXT: pmulhuw {{[0-9]+}}(%rsp), %xmm3
1352 ; SSE2-NEXT: movdqa %xmm3, %xmm12
1353 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm12 = xmm12[0],xmm10[0],xmm12[1],xmm10[1],xmm12[2],xmm10[2],xmm12[3],xmm10[3]
1354 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm10[4],xmm3[5],xmm10[5],xmm3[6],xmm10[6],xmm3[7],xmm10[7]
1355 ; SSE2-NEXT: pmulhuw {{[0-9]+}}(%rsp), %xmm4
1356 ; SSE2-NEXT: movdqa %xmm4, %xmm13
1357 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm13 = xmm13[0],xmm10[0],xmm13[1],xmm10[1],xmm13[2],xmm10[2],xmm13[3],xmm10[3]
1358 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm4 = xmm4[4],xmm10[4],xmm4[5],xmm10[5],xmm4[6],xmm10[6],xmm4[7],xmm10[7]
1359 ; SSE2-NEXT: pmulhuw {{[0-9]+}}(%rsp), %xmm5
1360 ; SSE2-NEXT: movdqa %xmm5, %xmm14
1361 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm14 = xmm14[0],xmm10[0],xmm14[1],xmm10[1],xmm14[2],xmm10[2],xmm14[3],xmm10[3]
1362 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm5 = xmm5[4],xmm10[4],xmm5[5],xmm10[5],xmm5[6],xmm10[6],xmm5[7],xmm10[7]
1363 ; SSE2-NEXT: pmulhuw {{[0-9]+}}(%rsp), %xmm6
1364 ; SSE2-NEXT: movdqa %xmm6, %xmm15
1365 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm15 = xmm15[0],xmm10[0],xmm15[1],xmm10[1],xmm15[2],xmm10[2],xmm15[3],xmm10[3]
1366 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm6 = xmm6[4],xmm10[4],xmm6[5],xmm10[5],xmm6[6],xmm10[6],xmm6[7],xmm10[7]
1367 ; SSE2-NEXT: pmulhuw {{[0-9]+}}(%rsp), %xmm8
1368 ; SSE2-NEXT: movdqa %xmm8, %xmm7
1369 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm10[0],xmm7[1],xmm10[1],xmm7[2],xmm10[2],xmm7[3],xmm10[3]
1370 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm8 = xmm8[4],xmm10[4],xmm8[5],xmm10[5],xmm8[6],xmm10[6],xmm8[7],xmm10[7]
1371 ; SSE2-NEXT: movdqa %xmm8, 240(%rdi)
1372 ; SSE2-NEXT: movdqa %xmm7, 224(%rdi)
1373 ; SSE2-NEXT: movdqa %xmm6, 208(%rdi)
1374 ; SSE2-NEXT: movdqa %xmm15, 192(%rdi)
1375 ; SSE2-NEXT: movdqa %xmm5, 176(%rdi)
1376 ; SSE2-NEXT: movdqa %xmm14, 160(%rdi)
1377 ; SSE2-NEXT: movdqa %xmm4, 144(%rdi)
1378 ; SSE2-NEXT: movdqa %xmm13, 128(%rdi)
1379 ; SSE2-NEXT: movdqa %xmm3, 112(%rdi)
1380 ; SSE2-NEXT: movdqa %xmm12, 96(%rdi)
1381 ; SSE2-NEXT: movdqa %xmm2, 80(%rdi)
1382 ; SSE2-NEXT: movdqa %xmm11, 64(%rdi)
1383 ; SSE2-NEXT: movdqa %xmm1, 48(%rdi)
1384 ; SSE2-NEXT: movdqa %xmm9, 32(%rdi)
1385 ; SSE2-NEXT: movdqa %xmm0, 16(%rdi)
1386 ; SSE2-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1387 ; SSE2-NEXT: movaps %xmm0, (%rdi)
1390 ; SSE41-LABEL: zext_mulhuw_v64i16_lshr:
1392 ; SSE41-NEXT: movdqa %xmm0, %xmm8
1393 ; SSE41-NEXT: movq %rdi, %rax
1394 ; SSE41-NEXT: pmulhuw {{[0-9]+}}(%rsp), %xmm8
1395 ; SSE41-NEXT: pxor %xmm11, %xmm11
1396 ; SSE41-NEXT: movdqa %xmm8, %xmm0
1397 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm11[4],xmm0[5],xmm11[5],xmm0[6],xmm11[6],xmm0[7],xmm11[7]
1398 ; SSE41-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1399 ; SSE41-NEXT: pmulhuw {{[0-9]+}}(%rsp), %xmm1
1400 ; SSE41-NEXT: movdqa %xmm1, %xmm9
1401 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm9 = xmm9[4],xmm11[4],xmm9[5],xmm11[5],xmm9[6],xmm11[6],xmm9[7],xmm11[7]
1402 ; SSE41-NEXT: pmulhuw {{[0-9]+}}(%rsp), %xmm2
1403 ; SSE41-NEXT: movdqa %xmm2, %xmm10
1404 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm10 = xmm10[4],xmm11[4],xmm10[5],xmm11[5],xmm10[6],xmm11[6],xmm10[7],xmm11[7]
1405 ; SSE41-NEXT: pmulhuw {{[0-9]+}}(%rsp), %xmm3
1406 ; SSE41-NEXT: movdqa %xmm3, %xmm12
1407 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm12 = xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
1408 ; SSE41-NEXT: pmulhuw {{[0-9]+}}(%rsp), %xmm4
1409 ; SSE41-NEXT: movdqa %xmm4, %xmm13
1410 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm13 = xmm13[4],xmm11[4],xmm13[5],xmm11[5],xmm13[6],xmm11[6],xmm13[7],xmm11[7]
1411 ; SSE41-NEXT: pmulhuw {{[0-9]+}}(%rsp), %xmm5
1412 ; SSE41-NEXT: movdqa %xmm5, %xmm14
1413 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm14 = xmm14[4],xmm11[4],xmm14[5],xmm11[5],xmm14[6],xmm11[6],xmm14[7],xmm11[7]
1414 ; SSE41-NEXT: pmulhuw {{[0-9]+}}(%rsp), %xmm6
1415 ; SSE41-NEXT: movdqa %xmm6, %xmm15
1416 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm15 = xmm15[4],xmm11[4],xmm15[5],xmm11[5],xmm15[6],xmm11[6],xmm15[7],xmm11[7]
1417 ; SSE41-NEXT: pmulhuw {{[0-9]+}}(%rsp), %xmm7
1418 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm0 = xmm7[0],zero,xmm7[1],zero,xmm7[2],zero,xmm7[3],zero
1419 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm7 = xmm7[4],xmm11[4],xmm7[5],xmm11[5],xmm7[6],xmm11[6],xmm7[7],xmm11[7]
1420 ; SSE41-NEXT: movdqa %xmm7, 240(%rdi)
1421 ; SSE41-NEXT: movdqa %xmm0, 224(%rdi)
1422 ; SSE41-NEXT: movdqa %xmm15, 208(%rdi)
1423 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm0 = xmm6[0],zero,xmm6[1],zero,xmm6[2],zero,xmm6[3],zero
1424 ; SSE41-NEXT: movdqa %xmm0, 192(%rdi)
1425 ; SSE41-NEXT: movdqa %xmm14, 176(%rdi)
1426 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm0 = xmm5[0],zero,xmm5[1],zero,xmm5[2],zero,xmm5[3],zero
1427 ; SSE41-NEXT: movdqa %xmm0, 160(%rdi)
1428 ; SSE41-NEXT: movdqa %xmm13, 144(%rdi)
1429 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm0 = xmm4[0],zero,xmm4[1],zero,xmm4[2],zero,xmm4[3],zero
1430 ; SSE41-NEXT: movdqa %xmm0, 128(%rdi)
1431 ; SSE41-NEXT: movdqa %xmm12, 112(%rdi)
1432 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero,xmm3[2],zero,xmm3[3],zero
1433 ; SSE41-NEXT: movdqa %xmm0, 96(%rdi)
1434 ; SSE41-NEXT: movdqa %xmm10, 80(%rdi)
1435 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm0 = xmm2[0],zero,xmm2[1],zero,xmm2[2],zero,xmm2[3],zero
1436 ; SSE41-NEXT: movdqa %xmm0, 64(%rdi)
1437 ; SSE41-NEXT: movdqa %xmm9, 48(%rdi)
1438 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm0 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero
1439 ; SSE41-NEXT: movdqa %xmm0, 32(%rdi)
1440 ; SSE41-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1441 ; SSE41-NEXT: movaps %xmm0, 16(%rdi)
1442 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm0 = xmm8[0],zero,xmm8[1],zero,xmm8[2],zero,xmm8[3],zero
1443 ; SSE41-NEXT: movdqa %xmm0, (%rdi)
1446 ; AVX2-LABEL: zext_mulhuw_v64i16_lshr:
1448 ; AVX2-NEXT: movq %rdi, %rax
1449 ; AVX2-NEXT: vpmulhuw %ymm4, %ymm0, %ymm0
1450 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm4 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
1451 ; AVX2-NEXT: vextracti128 $1, %ymm0, %xmm0
1452 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
1453 ; AVX2-NEXT: vpmulhuw %ymm5, %ymm1, %ymm1
1454 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm5 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
1455 ; AVX2-NEXT: vextracti128 $1, %ymm1, %xmm1
1456 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
1457 ; AVX2-NEXT: vpmulhuw %ymm6, %ymm2, %ymm2
1458 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm6 = xmm2[0],zero,xmm2[1],zero,xmm2[2],zero,xmm2[3],zero,xmm2[4],zero,xmm2[5],zero,xmm2[6],zero,xmm2[7],zero
1459 ; AVX2-NEXT: vextracti128 $1, %ymm2, %xmm2
1460 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm2 = xmm2[0],zero,xmm2[1],zero,xmm2[2],zero,xmm2[3],zero,xmm2[4],zero,xmm2[5],zero,xmm2[6],zero,xmm2[7],zero
1461 ; AVX2-NEXT: vpmulhuw %ymm7, %ymm3, %ymm3
1462 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm7 = xmm3[0],zero,xmm3[1],zero,xmm3[2],zero,xmm3[3],zero,xmm3[4],zero,xmm3[5],zero,xmm3[6],zero,xmm3[7],zero
1463 ; AVX2-NEXT: vextracti128 $1, %ymm3, %xmm3
1464 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm3 = xmm3[0],zero,xmm3[1],zero,xmm3[2],zero,xmm3[3],zero,xmm3[4],zero,xmm3[5],zero,xmm3[6],zero,xmm3[7],zero
1465 ; AVX2-NEXT: vmovdqa %ymm3, 224(%rdi)
1466 ; AVX2-NEXT: vmovdqa %ymm7, 192(%rdi)
1467 ; AVX2-NEXT: vmovdqa %ymm2, 160(%rdi)
1468 ; AVX2-NEXT: vmovdqa %ymm6, 128(%rdi)
1469 ; AVX2-NEXT: vmovdqa %ymm1, 96(%rdi)
1470 ; AVX2-NEXT: vmovdqa %ymm5, 64(%rdi)
1471 ; AVX2-NEXT: vmovdqa %ymm0, 32(%rdi)
1472 ; AVX2-NEXT: vmovdqa %ymm4, (%rdi)
1473 ; AVX2-NEXT: vzeroupper
1476 ; AVX512F-LABEL: zext_mulhuw_v64i16_lshr:
1478 ; AVX512F-NEXT: vpmulhuw %ymm2, %ymm0, %ymm4
1479 ; AVX512F-NEXT: vpmovzxwd {{.*#+}} zmm4 = ymm4[0],zero,ymm4[1],zero,ymm4[2],zero,ymm4[3],zero,ymm4[4],zero,ymm4[5],zero,ymm4[6],zero,ymm4[7],zero,ymm4[8],zero,ymm4[9],zero,ymm4[10],zero,ymm4[11],zero,ymm4[12],zero,ymm4[13],zero,ymm4[14],zero,ymm4[15],zero
1480 ; AVX512F-NEXT: vextracti64x4 $1, %zmm2, %ymm2
1481 ; AVX512F-NEXT: vextracti64x4 $1, %zmm0, %ymm0
1482 ; AVX512F-NEXT: vpmulhuw %ymm2, %ymm0, %ymm0
1483 ; AVX512F-NEXT: vpmovzxwd {{.*#+}} zmm5 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero
1484 ; AVX512F-NEXT: vpmulhuw %ymm3, %ymm1, %ymm0
1485 ; AVX512F-NEXT: vpmovzxwd {{.*#+}} zmm2 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero
1486 ; AVX512F-NEXT: vextracti64x4 $1, %zmm3, %ymm0
1487 ; AVX512F-NEXT: vextracti64x4 $1, %zmm1, %ymm1
1488 ; AVX512F-NEXT: vpmulhuw %ymm0, %ymm1, %ymm0
1489 ; AVX512F-NEXT: vpmovzxwd {{.*#+}} zmm3 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero
1490 ; AVX512F-NEXT: vmovdqa64 %zmm4, %zmm0
1491 ; AVX512F-NEXT: vmovdqa64 %zmm5, %zmm1
1492 ; AVX512F-NEXT: retq
1494 ; AVX512BW-LABEL: zext_mulhuw_v64i16_lshr:
1495 ; AVX512BW: # %bb.0:
1496 ; AVX512BW-NEXT: vpmulhuw %zmm2, %zmm0, %zmm2
1497 ; AVX512BW-NEXT: vpmovzxwd {{.*#+}} zmm0 = ymm2[0],zero,ymm2[1],zero,ymm2[2],zero,ymm2[3],zero,ymm2[4],zero,ymm2[5],zero,ymm2[6],zero,ymm2[7],zero,ymm2[8],zero,ymm2[9],zero,ymm2[10],zero,ymm2[11],zero,ymm2[12],zero,ymm2[13],zero,ymm2[14],zero,ymm2[15],zero
1498 ; AVX512BW-NEXT: vextracti64x4 $1, %zmm2, %ymm2
1499 ; AVX512BW-NEXT: vpmovzxwd {{.*#+}} zmm4 = ymm2[0],zero,ymm2[1],zero,ymm2[2],zero,ymm2[3],zero,ymm2[4],zero,ymm2[5],zero,ymm2[6],zero,ymm2[7],zero,ymm2[8],zero,ymm2[9],zero,ymm2[10],zero,ymm2[11],zero,ymm2[12],zero,ymm2[13],zero,ymm2[14],zero,ymm2[15],zero
1500 ; AVX512BW-NEXT: vpmulhuw %zmm3, %zmm1, %zmm1
1501 ; AVX512BW-NEXT: vpmovzxwd {{.*#+}} zmm2 = ymm1[0],zero,ymm1[1],zero,ymm1[2],zero,ymm1[3],zero,ymm1[4],zero,ymm1[5],zero,ymm1[6],zero,ymm1[7],zero,ymm1[8],zero,ymm1[9],zero,ymm1[10],zero,ymm1[11],zero,ymm1[12],zero,ymm1[13],zero,ymm1[14],zero,ymm1[15],zero
1502 ; AVX512BW-NEXT: vextracti64x4 $1, %zmm1, %ymm1
1503 ; AVX512BW-NEXT: vpmovzxwd {{.*#+}} zmm3 = ymm1[0],zero,ymm1[1],zero,ymm1[2],zero,ymm1[3],zero,ymm1[4],zero,ymm1[5],zero,ymm1[6],zero,ymm1[7],zero,ymm1[8],zero,ymm1[9],zero,ymm1[10],zero,ymm1[11],zero,ymm1[12],zero,ymm1[13],zero,ymm1[14],zero,ymm1[15],zero
1504 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm1
1505 ; AVX512BW-NEXT: retq
1506 %a1 = zext <64 x i16> %a to <64 x i32>
1507 %b1 = zext <64 x i16> %b to <64 x i32>
1508 %c = mul <64 x i32> %a1, %b1
1509 %d = lshr <64 x i32> %c, <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
  ret <64 x i32> %d
}
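; Same as above with sign-extended operands: the multiply-high becomes pmulhw/vpmulhw, while the logical shift still zero-extends the high words to i32.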
1513 define <64 x i32> @mulhsw_v64i16_lshr(<64 x i16> %a, <64 x i16> %b) {
1514 ; SSE2-LABEL: mulhsw_v64i16_lshr:
1516 ; SSE2-NEXT: movdqa %xmm7, %xmm8
1517 ; SSE2-NEXT: movq %rdi, %rax
1518 ; SSE2-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm0
1519 ; SSE2-NEXT: pxor %xmm10, %xmm10
1520 ; SSE2-NEXT: movdqa %xmm0, %xmm7
1521 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm10[0],xmm7[1],xmm10[1],xmm7[2],xmm10[2],xmm7[3],xmm10[3]
1522 ; SSE2-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1523 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm10[4],xmm0[5],xmm10[5],xmm0[6],xmm10[6],xmm0[7],xmm10[7]
1524 ; SSE2-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm1
1525 ; SSE2-NEXT: movdqa %xmm1, %xmm9
1526 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm9 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3]
1527 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm10[4],xmm1[5],xmm10[5],xmm1[6],xmm10[6],xmm1[7],xmm10[7]
1528 ; SSE2-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm2
1529 ; SSE2-NEXT: movdqa %xmm2, %xmm11
1530 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm11 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
1531 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm10[4],xmm2[5],xmm10[5],xmm2[6],xmm10[6],xmm2[7],xmm10[7]
1532 ; SSE2-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm3
1533 ; SSE2-NEXT: movdqa %xmm3, %xmm12
1534 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm12 = xmm12[0],xmm10[0],xmm12[1],xmm10[1],xmm12[2],xmm10[2],xmm12[3],xmm10[3]
1535 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm10[4],xmm3[5],xmm10[5],xmm3[6],xmm10[6],xmm3[7],xmm10[7]
1536 ; SSE2-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm4
1537 ; SSE2-NEXT: movdqa %xmm4, %xmm13
1538 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm13 = xmm13[0],xmm10[0],xmm13[1],xmm10[1],xmm13[2],xmm10[2],xmm13[3],xmm10[3]
1539 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm4 = xmm4[4],xmm10[4],xmm4[5],xmm10[5],xmm4[6],xmm10[6],xmm4[7],xmm10[7]
1540 ; SSE2-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm5
1541 ; SSE2-NEXT: movdqa %xmm5, %xmm14
1542 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm14 = xmm14[0],xmm10[0],xmm14[1],xmm10[1],xmm14[2],xmm10[2],xmm14[3],xmm10[3]
1543 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm5 = xmm5[4],xmm10[4],xmm5[5],xmm10[5],xmm5[6],xmm10[6],xmm5[7],xmm10[7]
1544 ; SSE2-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm6
1545 ; SSE2-NEXT: movdqa %xmm6, %xmm15
1546 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm15 = xmm15[0],xmm10[0],xmm15[1],xmm10[1],xmm15[2],xmm10[2],xmm15[3],xmm10[3]
1547 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm6 = xmm6[4],xmm10[4],xmm6[5],xmm10[5],xmm6[6],xmm10[6],xmm6[7],xmm10[7]
1548 ; SSE2-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm8
1549 ; SSE2-NEXT: movdqa %xmm8, %xmm7
1550 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm10[0],xmm7[1],xmm10[1],xmm7[2],xmm10[2],xmm7[3],xmm10[3]
1551 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm8 = xmm8[4],xmm10[4],xmm8[5],xmm10[5],xmm8[6],xmm10[6],xmm8[7],xmm10[7]
1552 ; SSE2-NEXT: movdqa %xmm8, 240(%rdi)
1553 ; SSE2-NEXT: movdqa %xmm7, 224(%rdi)
1554 ; SSE2-NEXT: movdqa %xmm6, 208(%rdi)
1555 ; SSE2-NEXT: movdqa %xmm15, 192(%rdi)
1556 ; SSE2-NEXT: movdqa %xmm5, 176(%rdi)
1557 ; SSE2-NEXT: movdqa %xmm14, 160(%rdi)
1558 ; SSE2-NEXT: movdqa %xmm4, 144(%rdi)
1559 ; SSE2-NEXT: movdqa %xmm13, 128(%rdi)
1560 ; SSE2-NEXT: movdqa %xmm3, 112(%rdi)
1561 ; SSE2-NEXT: movdqa %xmm12, 96(%rdi)
1562 ; SSE2-NEXT: movdqa %xmm2, 80(%rdi)
1563 ; SSE2-NEXT: movdqa %xmm11, 64(%rdi)
1564 ; SSE2-NEXT: movdqa %xmm1, 48(%rdi)
1565 ; SSE2-NEXT: movdqa %xmm9, 32(%rdi)
1566 ; SSE2-NEXT: movdqa %xmm0, 16(%rdi)
1567 ; SSE2-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1568 ; SSE2-NEXT: movaps %xmm0, (%rdi)
1571 ; SSE41-LABEL: mulhsw_v64i16_lshr:
1573 ; SSE41-NEXT: movdqa %xmm0, %xmm8
1574 ; SSE41-NEXT: movq %rdi, %rax
1575 ; SSE41-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm8
1576 ; SSE41-NEXT: pxor %xmm11, %xmm11
1577 ; SSE41-NEXT: movdqa %xmm8, %xmm0
1578 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm11[4],xmm0[5],xmm11[5],xmm0[6],xmm11[6],xmm0[7],xmm11[7]
1579 ; SSE41-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1580 ; SSE41-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm1
1581 ; SSE41-NEXT: movdqa %xmm1, %xmm9
1582 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm9 = xmm9[4],xmm11[4],xmm9[5],xmm11[5],xmm9[6],xmm11[6],xmm9[7],xmm11[7]
1583 ; SSE41-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm2
1584 ; SSE41-NEXT: movdqa %xmm2, %xmm10
1585 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm10 = xmm10[4],xmm11[4],xmm10[5],xmm11[5],xmm10[6],xmm11[6],xmm10[7],xmm11[7]
1586 ; SSE41-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm3
1587 ; SSE41-NEXT: movdqa %xmm3, %xmm12
1588 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm12 = xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
1589 ; SSE41-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm4
1590 ; SSE41-NEXT: movdqa %xmm4, %xmm13
1591 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm13 = xmm13[4],xmm11[4],xmm13[5],xmm11[5],xmm13[6],xmm11[6],xmm13[7],xmm11[7]
1592 ; SSE41-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm5
1593 ; SSE41-NEXT: movdqa %xmm5, %xmm14
1594 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm14 = xmm14[4],xmm11[4],xmm14[5],xmm11[5],xmm14[6],xmm11[6],xmm14[7],xmm11[7]
1595 ; SSE41-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm6
1596 ; SSE41-NEXT: movdqa %xmm6, %xmm15
1597 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm15 = xmm15[4],xmm11[4],xmm15[5],xmm11[5],xmm15[6],xmm11[6],xmm15[7],xmm11[7]
1598 ; SSE41-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm7
1599 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm0 = xmm7[0],zero,xmm7[1],zero,xmm7[2],zero,xmm7[3],zero
1600 ; SSE41-NEXT: punpckhwd {{.*#+}} xmm7 = xmm7[4],xmm11[4],xmm7[5],xmm11[5],xmm7[6],xmm11[6],xmm7[7],xmm11[7]
1601 ; SSE41-NEXT: movdqa %xmm7, 240(%rdi)
1602 ; SSE41-NEXT: movdqa %xmm0, 224(%rdi)
1603 ; SSE41-NEXT: movdqa %xmm15, 208(%rdi)
1604 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm0 = xmm6[0],zero,xmm6[1],zero,xmm6[2],zero,xmm6[3],zero
1605 ; SSE41-NEXT: movdqa %xmm0, 192(%rdi)
1606 ; SSE41-NEXT: movdqa %xmm14, 176(%rdi)
1607 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm0 = xmm5[0],zero,xmm5[1],zero,xmm5[2],zero,xmm5[3],zero
1608 ; SSE41-NEXT: movdqa %xmm0, 160(%rdi)
1609 ; SSE41-NEXT: movdqa %xmm13, 144(%rdi)
1610 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm0 = xmm4[0],zero,xmm4[1],zero,xmm4[2],zero,xmm4[3],zero
1611 ; SSE41-NEXT: movdqa %xmm0, 128(%rdi)
1612 ; SSE41-NEXT: movdqa %xmm12, 112(%rdi)
1613 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero,xmm3[2],zero,xmm3[3],zero
1614 ; SSE41-NEXT: movdqa %xmm0, 96(%rdi)
1615 ; SSE41-NEXT: movdqa %xmm10, 80(%rdi)
1616 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm0 = xmm2[0],zero,xmm2[1],zero,xmm2[2],zero,xmm2[3],zero
1617 ; SSE41-NEXT: movdqa %xmm0, 64(%rdi)
1618 ; SSE41-NEXT: movdqa %xmm9, 48(%rdi)
1619 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm0 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero
1620 ; SSE41-NEXT: movdqa %xmm0, 32(%rdi)
1621 ; SSE41-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1622 ; SSE41-NEXT: movaps %xmm0, 16(%rdi)
1623 ; SSE41-NEXT: pmovzxwd {{.*#+}} xmm0 = xmm8[0],zero,xmm8[1],zero,xmm8[2],zero,xmm8[3],zero
1624 ; SSE41-NEXT: movdqa %xmm0, (%rdi)
1627 ; AVX2-LABEL: mulhsw_v64i16_lshr:
1629 ; AVX2-NEXT: movq %rdi, %rax
1630 ; AVX2-NEXT: vpmulhw %ymm4, %ymm0, %ymm0
1631 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm4 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
1632 ; AVX2-NEXT: vextracti128 $1, %ymm0, %xmm0
1633 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
1634 ; AVX2-NEXT: vpmulhw %ymm5, %ymm1, %ymm1
1635 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm5 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
1636 ; AVX2-NEXT: vextracti128 $1, %ymm1, %xmm1
1637 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
1638 ; AVX2-NEXT: vpmulhw %ymm6, %ymm2, %ymm2
1639 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm6 = xmm2[0],zero,xmm2[1],zero,xmm2[2],zero,xmm2[3],zero,xmm2[4],zero,xmm2[5],zero,xmm2[6],zero,xmm2[7],zero
1640 ; AVX2-NEXT: vextracti128 $1, %ymm2, %xmm2
1641 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm2 = xmm2[0],zero,xmm2[1],zero,xmm2[2],zero,xmm2[3],zero,xmm2[4],zero,xmm2[5],zero,xmm2[6],zero,xmm2[7],zero
1642 ; AVX2-NEXT: vpmulhw %ymm7, %ymm3, %ymm3
1643 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm7 = xmm3[0],zero,xmm3[1],zero,xmm3[2],zero,xmm3[3],zero,xmm3[4],zero,xmm3[5],zero,xmm3[6],zero,xmm3[7],zero
1644 ; AVX2-NEXT: vextracti128 $1, %ymm3, %xmm3
1645 ; AVX2-NEXT: vpmovzxwd {{.*#+}} ymm3 = xmm3[0],zero,xmm3[1],zero,xmm3[2],zero,xmm3[3],zero,xmm3[4],zero,xmm3[5],zero,xmm3[6],zero,xmm3[7],zero
1646 ; AVX2-NEXT: vmovdqa %ymm3, 224(%rdi)
1647 ; AVX2-NEXT: vmovdqa %ymm7, 192(%rdi)
1648 ; AVX2-NEXT: vmovdqa %ymm2, 160(%rdi)
1649 ; AVX2-NEXT: vmovdqa %ymm6, 128(%rdi)
1650 ; AVX2-NEXT: vmovdqa %ymm1, 96(%rdi)
1651 ; AVX2-NEXT: vmovdqa %ymm5, 64(%rdi)
1652 ; AVX2-NEXT: vmovdqa %ymm0, 32(%rdi)
1653 ; AVX2-NEXT: vmovdqa %ymm4, (%rdi)
1654 ; AVX2-NEXT: vzeroupper
1657 ; AVX512F-LABEL: mulhsw_v64i16_lshr:
1659 ; AVX512F-NEXT: vpmulhw %ymm2, %ymm0, %ymm4
1660 ; AVX512F-NEXT: vpmovzxwd {{.*#+}} zmm4 = ymm4[0],zero,ymm4[1],zero,ymm4[2],zero,ymm4[3],zero,ymm4[4],zero,ymm4[5],zero,ymm4[6],zero,ymm4[7],zero,ymm4[8],zero,ymm4[9],zero,ymm4[10],zero,ymm4[11],zero,ymm4[12],zero,ymm4[13],zero,ymm4[14],zero,ymm4[15],zero
1661 ; AVX512F-NEXT: vextracti64x4 $1, %zmm2, %ymm2
1662 ; AVX512F-NEXT: vextracti64x4 $1, %zmm0, %ymm0
1663 ; AVX512F-NEXT: vpmulhw %ymm2, %ymm0, %ymm0
1664 ; AVX512F-NEXT: vpmovzxwd {{.*#+}} zmm5 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero
1665 ; AVX512F-NEXT: vpmulhw %ymm3, %ymm1, %ymm0
1666 ; AVX512F-NEXT: vpmovzxwd {{.*#+}} zmm2 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero
1667 ; AVX512F-NEXT: vextracti64x4 $1, %zmm3, %ymm0
1668 ; AVX512F-NEXT: vextracti64x4 $1, %zmm1, %ymm1
1669 ; AVX512F-NEXT: vpmulhw %ymm0, %ymm1, %ymm0
1670 ; AVX512F-NEXT: vpmovzxwd {{.*#+}} zmm3 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero
1671 ; AVX512F-NEXT: vmovdqa64 %zmm4, %zmm0
1672 ; AVX512F-NEXT: vmovdqa64 %zmm5, %zmm1
1673 ; AVX512F-NEXT: retq
1675 ; AVX512BW-LABEL: mulhsw_v64i16_lshr:
1676 ; AVX512BW: # %bb.0:
1677 ; AVX512BW-NEXT: vpmulhw %zmm2, %zmm0, %zmm2
1678 ; AVX512BW-NEXT: vpmovzxwd {{.*#+}} zmm0 = ymm2[0],zero,ymm2[1],zero,ymm2[2],zero,ymm2[3],zero,ymm2[4],zero,ymm2[5],zero,ymm2[6],zero,ymm2[7],zero,ymm2[8],zero,ymm2[9],zero,ymm2[10],zero,ymm2[11],zero,ymm2[12],zero,ymm2[13],zero,ymm2[14],zero,ymm2[15],zero
1679 ; AVX512BW-NEXT: vextracti64x4 $1, %zmm2, %ymm2
1680 ; AVX512BW-NEXT: vpmovzxwd {{.*#+}} zmm4 = ymm2[0],zero,ymm2[1],zero,ymm2[2],zero,ymm2[3],zero,ymm2[4],zero,ymm2[5],zero,ymm2[6],zero,ymm2[7],zero,ymm2[8],zero,ymm2[9],zero,ymm2[10],zero,ymm2[11],zero,ymm2[12],zero,ymm2[13],zero,ymm2[14],zero,ymm2[15],zero
1681 ; AVX512BW-NEXT: vpmulhw %zmm3, %zmm1, %zmm1
1682 ; AVX512BW-NEXT: vpmovzxwd {{.*#+}} zmm2 = ymm1[0],zero,ymm1[1],zero,ymm1[2],zero,ymm1[3],zero,ymm1[4],zero,ymm1[5],zero,ymm1[6],zero,ymm1[7],zero,ymm1[8],zero,ymm1[9],zero,ymm1[10],zero,ymm1[11],zero,ymm1[12],zero,ymm1[13],zero,ymm1[14],zero,ymm1[15],zero
1683 ; AVX512BW-NEXT: vextracti64x4 $1, %zmm1, %ymm1
1684 ; AVX512BW-NEXT: vpmovzxwd {{.*#+}} zmm3 = ymm1[0],zero,ymm1[1],zero,ymm1[2],zero,ymm1[3],zero,ymm1[4],zero,ymm1[5],zero,ymm1[6],zero,ymm1[7],zero,ymm1[8],zero,ymm1[9],zero,ymm1[10],zero,ymm1[11],zero,ymm1[12],zero,ymm1[13],zero,ymm1[14],zero,ymm1[15],zero
1685 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm1
1686 ; AVX512BW-NEXT: retq
1687 %a1 = sext <64 x i16> %a to <64 x i32>
1688 %b1 = sext <64 x i16> %b to <64 x i32>
1689 %c = mul <64 x i32> %a1, %b1
1690 %d = lshr <64 x i32> %c, <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
  ret <64 x i32> %d
}
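; The ashr variant instead sign-extends the high words, via psrad/pmovsxwd/vpmovsxwd after the signed multiply-high.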
1694 define <64 x i32> @mulhsw_v64i16_ashr(<64 x i16> %a, <64 x i16> %b) {
1695 ; SSE2-LABEL: mulhsw_v64i16_ashr:
1697 ; SSE2-NEXT: movq %rdi, %rax
1698 ; SSE2-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm0
1699 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm8 = xmm8[0],xmm0[0],xmm8[1],xmm0[1],xmm8[2],xmm0[2],xmm8[3],xmm0[3]
1700 ; SSE2-NEXT: psrad $16, %xmm8
1701 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4,4,5,5,6,6,7,7]
1702 ; SSE2-NEXT: psrad $16, %xmm0
1703 ; SSE2-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm1
1704 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm9 = xmm9[0],xmm1[0],xmm9[1],xmm1[1],xmm9[2],xmm1[2],xmm9[3],xmm1[3]
1705 ; SSE2-NEXT: psrad $16, %xmm9
1706 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4,4,5,5,6,6,7,7]
1707 ; SSE2-NEXT: psrad $16, %xmm1
1708 ; SSE2-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm2
1709 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm10 = xmm10[0],xmm2[0],xmm10[1],xmm2[1],xmm10[2],xmm2[2],xmm10[3],xmm2[3]
1710 ; SSE2-NEXT: psrad $16, %xmm10
1711 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4,4,5,5,6,6,7,7]
1712 ; SSE2-NEXT: psrad $16, %xmm2
1713 ; SSE2-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm3
1714 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm11 = xmm11[0],xmm3[0],xmm11[1],xmm3[1],xmm11[2],xmm3[2],xmm11[3],xmm3[3]
1715 ; SSE2-NEXT: psrad $16, %xmm11
1716 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4,4,5,5,6,6,7,7]
1717 ; SSE2-NEXT: psrad $16, %xmm3
1718 ; SSE2-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm4
1719 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm12 = xmm12[0],xmm4[0],xmm12[1],xmm4[1],xmm12[2],xmm4[2],xmm12[3],xmm4[3]
1720 ; SSE2-NEXT: psrad $16, %xmm12
1721 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm4 = xmm4[4,4,5,5,6,6,7,7]
1722 ; SSE2-NEXT: psrad $16, %xmm4
1723 ; SSE2-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm5
1724 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm13 = xmm13[0],xmm5[0],xmm13[1],xmm5[1],xmm13[2],xmm5[2],xmm13[3],xmm5[3]
1725 ; SSE2-NEXT: psrad $16, %xmm13
1726 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm5 = xmm5[4,4,5,5,6,6,7,7]
1727 ; SSE2-NEXT: psrad $16, %xmm5
1728 ; SSE2-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm6
1729 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm14 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
1730 ; SSE2-NEXT: psrad $16, %xmm14
1731 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm6 = xmm6[4,4,5,5,6,6,7,7]
1732 ; SSE2-NEXT: psrad $16, %xmm6
1733 ; SSE2-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm7
1734 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm15 = xmm15[0],xmm7[0],xmm15[1],xmm7[1],xmm15[2],xmm7[2],xmm15[3],xmm7[3]
1735 ; SSE2-NEXT: psrad $16, %xmm15
1736 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm7 = xmm7[4,4,5,5,6,6,7,7]
1737 ; SSE2-NEXT: psrad $16, %xmm7
1738 ; SSE2-NEXT: movdqa %xmm7, 240(%rdi)
1739 ; SSE2-NEXT: movdqa %xmm15, 224(%rdi)
1740 ; SSE2-NEXT: movdqa %xmm6, 208(%rdi)
1741 ; SSE2-NEXT: movdqa %xmm14, 192(%rdi)
1742 ; SSE2-NEXT: movdqa %xmm5, 176(%rdi)
1743 ; SSE2-NEXT: movdqa %xmm13, 160(%rdi)
1744 ; SSE2-NEXT: movdqa %xmm4, 144(%rdi)
1745 ; SSE2-NEXT: movdqa %xmm12, 128(%rdi)
1746 ; SSE2-NEXT: movdqa %xmm3, 112(%rdi)
1747 ; SSE2-NEXT: movdqa %xmm11, 96(%rdi)
1748 ; SSE2-NEXT: movdqa %xmm2, 80(%rdi)
1749 ; SSE2-NEXT: movdqa %xmm10, 64(%rdi)
1750 ; SSE2-NEXT: movdqa %xmm1, 48(%rdi)
1751 ; SSE2-NEXT: movdqa %xmm9, 32(%rdi)
1752 ; SSE2-NEXT: movdqa %xmm0, 16(%rdi)
1753 ; SSE2-NEXT: movdqa %xmm8, (%rdi)
1756 ; SSE41-LABEL: mulhsw_v64i16_ashr:
1758 ; SSE41-NEXT: movq %rdi, %rax
1759 ; SSE41-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm0
1760 ; SSE41-NEXT: pmovsxwd %xmm0, %xmm8
1761 ; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,3,2,3]
1762 ; SSE41-NEXT: pmovsxwd %xmm0, %xmm0
1763 ; SSE41-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm1
1764 ; SSE41-NEXT: pmovsxwd %xmm1, %xmm9
1765 ; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,3,2,3]
1766 ; SSE41-NEXT: pmovsxwd %xmm1, %xmm1
1767 ; SSE41-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm2
1768 ; SSE41-NEXT: pmovsxwd %xmm2, %xmm10
1769 ; SSE41-NEXT: pshufd {{.*#+}} xmm2 = xmm2[2,3,2,3]
1770 ; SSE41-NEXT: pmovsxwd %xmm2, %xmm2
1771 ; SSE41-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm3
1772 ; SSE41-NEXT: pmovsxwd %xmm3, %xmm11
1773 ; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm3[2,3,2,3]
1774 ; SSE41-NEXT: pmovsxwd %xmm3, %xmm3
1775 ; SSE41-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm4
1776 ; SSE41-NEXT: pmovsxwd %xmm4, %xmm12
1777 ; SSE41-NEXT: pshufd {{.*#+}} xmm4 = xmm4[2,3,2,3]
1778 ; SSE41-NEXT: pmovsxwd %xmm4, %xmm4
1779 ; SSE41-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm5
1780 ; SSE41-NEXT: pmovsxwd %xmm5, %xmm13
1781 ; SSE41-NEXT: pshufd {{.*#+}} xmm5 = xmm5[2,3,2,3]
1782 ; SSE41-NEXT: pmovsxwd %xmm5, %xmm5
1783 ; SSE41-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm6
1784 ; SSE41-NEXT: pmovsxwd %xmm6, %xmm14
1785 ; SSE41-NEXT: pshufd {{.*#+}} xmm6 = xmm6[2,3,2,3]
1786 ; SSE41-NEXT: pmovsxwd %xmm6, %xmm6
1787 ; SSE41-NEXT: pmulhw {{[0-9]+}}(%rsp), %xmm7
1788 ; SSE41-NEXT: pmovsxwd %xmm7, %xmm15
1789 ; SSE41-NEXT: pshufd {{.*#+}} xmm7 = xmm7[2,3,2,3]
1790 ; SSE41-NEXT: pmovsxwd %xmm7, %xmm7
1791 ; SSE41-NEXT: movdqa %xmm7, 240(%rdi)
1792 ; SSE41-NEXT: movdqa %xmm15, 224(%rdi)
1793 ; SSE41-NEXT: movdqa %xmm6, 208(%rdi)
1794 ; SSE41-NEXT: movdqa %xmm14, 192(%rdi)
1795 ; SSE41-NEXT: movdqa %xmm5, 176(%rdi)
1796 ; SSE41-NEXT: movdqa %xmm13, 160(%rdi)
1797 ; SSE41-NEXT: movdqa %xmm4, 144(%rdi)
1798 ; SSE41-NEXT: movdqa %xmm12, 128(%rdi)
1799 ; SSE41-NEXT: movdqa %xmm3, 112(%rdi)
1800 ; SSE41-NEXT: movdqa %xmm11, 96(%rdi)
1801 ; SSE41-NEXT: movdqa %xmm2, 80(%rdi)
1802 ; SSE41-NEXT: movdqa %xmm10, 64(%rdi)
1803 ; SSE41-NEXT: movdqa %xmm1, 48(%rdi)
1804 ; SSE41-NEXT: movdqa %xmm9, 32(%rdi)
1805 ; SSE41-NEXT: movdqa %xmm0, 16(%rdi)
1806 ; SSE41-NEXT: movdqa %xmm8, (%rdi)
1809 ; AVX2-LABEL: mulhsw_v64i16_ashr:
1811 ; AVX2-NEXT: movq %rdi, %rax
1812 ; AVX2-NEXT: vpmulhw %ymm4, %ymm0, %ymm0
1813 ; AVX2-NEXT: vpmovsxwd %xmm0, %ymm4
1814 ; AVX2-NEXT: vextracti128 $1, %ymm0, %xmm0
1815 ; AVX2-NEXT: vpmovsxwd %xmm0, %ymm0
1816 ; AVX2-NEXT: vpmulhw %ymm5, %ymm1, %ymm1
1817 ; AVX2-NEXT: vpmovsxwd %xmm1, %ymm5
1818 ; AVX2-NEXT: vextracti128 $1, %ymm1, %xmm1
1819 ; AVX2-NEXT: vpmovsxwd %xmm1, %ymm1
1820 ; AVX2-NEXT: vpmulhw %ymm6, %ymm2, %ymm2
1821 ; AVX2-NEXT: vpmovsxwd %xmm2, %ymm6
1822 ; AVX2-NEXT: vextracti128 $1, %ymm2, %xmm2
1823 ; AVX2-NEXT: vpmovsxwd %xmm2, %ymm2
1824 ; AVX2-NEXT: vpmulhw %ymm7, %ymm3, %ymm3
1825 ; AVX2-NEXT: vpmovsxwd %xmm3, %ymm7
1826 ; AVX2-NEXT: vextracti128 $1, %ymm3, %xmm3
1827 ; AVX2-NEXT: vpmovsxwd %xmm3, %ymm3
1828 ; AVX2-NEXT: vmovdqa %ymm3, 224(%rdi)
1829 ; AVX2-NEXT: vmovdqa %ymm7, 192(%rdi)
1830 ; AVX2-NEXT: vmovdqa %ymm2, 160(%rdi)
1831 ; AVX2-NEXT: vmovdqa %ymm6, 128(%rdi)
1832 ; AVX2-NEXT: vmovdqa %ymm1, 96(%rdi)
1833 ; AVX2-NEXT: vmovdqa %ymm5, 64(%rdi)
1834 ; AVX2-NEXT: vmovdqa %ymm0, 32(%rdi)
1835 ; AVX2-NEXT: vmovdqa %ymm4, (%rdi)
1836 ; AVX2-NEXT: vzeroupper
1839 ; AVX512F-LABEL: mulhsw_v64i16_ashr:
1841 ; AVX512F-NEXT: vpmulhw %ymm2, %ymm0, %ymm4
1842 ; AVX512F-NEXT: vpmovsxwd %ymm4, %zmm4
1843 ; AVX512F-NEXT: vextracti64x4 $1, %zmm2, %ymm2
1844 ; AVX512F-NEXT: vextracti64x4 $1, %zmm0, %ymm0
1845 ; AVX512F-NEXT: vpmulhw %ymm2, %ymm0, %ymm0
1846 ; AVX512F-NEXT: vpmovsxwd %ymm0, %zmm5
1847 ; AVX512F-NEXT: vpmulhw %ymm3, %ymm1, %ymm0
1848 ; AVX512F-NEXT: vpmovsxwd %ymm0, %zmm2
1849 ; AVX512F-NEXT: vextracti64x4 $1, %zmm3, %ymm0
1850 ; AVX512F-NEXT: vextracti64x4 $1, %zmm1, %ymm1
1851 ; AVX512F-NEXT: vpmulhw %ymm0, %ymm1, %ymm0
1852 ; AVX512F-NEXT: vpmovsxwd %ymm0, %zmm3
1853 ; AVX512F-NEXT: vmovdqa64 %zmm4, %zmm0
1854 ; AVX512F-NEXT: vmovdqa64 %zmm5, %zmm1
1855 ; AVX512F-NEXT: retq
1857 ; AVX512BW-LABEL: mulhsw_v64i16_ashr:
1858 ; AVX512BW: # %bb.0:
1859 ; AVX512BW-NEXT: vpmulhw %zmm2, %zmm0, %zmm2
1860 ; AVX512BW-NEXT: vpmovsxwd %ymm2, %zmm0
1861 ; AVX512BW-NEXT: vextracti64x4 $1, %zmm2, %ymm2
1862 ; AVX512BW-NEXT: vpmovsxwd %ymm2, %zmm4
1863 ; AVX512BW-NEXT: vpmulhw %zmm3, %zmm1, %zmm1
1864 ; AVX512BW-NEXT: vpmovsxwd %ymm1, %zmm2
1865 ; AVX512BW-NEXT: vextracti64x4 $1, %zmm1, %ymm1
1866 ; AVX512BW-NEXT: vpmovsxwd %ymm1, %zmm3
1867 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm1
1868 ; AVX512BW-NEXT: retq
1869 %a1 = sext <64 x i16> %a to <64 x i32>
1870 %b1 = sext <64 x i16> %b to <64 x i32>
1871 %c = mul <64 x i32> %a1, %b1
1872 %d = ashr <64 x i32> %c, <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
  ret <64 x i32> %d
}
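; Widening to i64 still yields a single pmulhuw; the targets differ only in how the 16-bit high words are zero-extended to 64 bits (punpck with zero, pmovzxwq, vpmovzxwq).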
1876 define <8 x i64> @zext_mulhuw_v8i16_lshr_i64(<8 x i16> %a, <8 x i16> %b) {
1877 ; SSE2-LABEL: zext_mulhuw_v8i16_lshr_i64:
1879 ; SSE2-NEXT: movdqa %xmm0, %xmm3
1880 ; SSE2-NEXT: pmulhuw %xmm1, %xmm3
1881 ; SSE2-NEXT: pxor %xmm4, %xmm4
1882 ; SSE2-NEXT: movdqa %xmm3, %xmm1
1883 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1],xmm1[2],xmm4[2],xmm1[3],xmm4[3]
1884 ; SSE2-NEXT: movdqa %xmm1, %xmm0
1885 ; SSE2-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1]
1886 ; SSE2-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm4[2],xmm1[3],xmm4[3]
1887 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm4[4],xmm3[5],xmm4[5],xmm3[6],xmm4[6],xmm3[7],xmm4[7]
1888 ; SSE2-NEXT: movdqa %xmm3, %xmm2
1889 ; SSE2-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1]
1890 ; SSE2-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm4[2],xmm3[3],xmm4[3]
1893 ; SSE41-LABEL: zext_mulhuw_v8i16_lshr_i64:
1895 ; SSE41-NEXT: pmulhuw %xmm1, %xmm0
1896 ; SSE41-NEXT: pmovzxwq {{.*#+}} xmm4 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
1897 ; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,1,1]
1898 ; SSE41-NEXT: pmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
1899 ; SSE41-NEXT: pshufd {{.*#+}} xmm2 = xmm0[2,3,2,3]
1900 ; SSE41-NEXT: pmovzxwq {{.*#+}} xmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
1901 ; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm0[3,3,3,3]
1902 ; SSE41-NEXT: pmovzxwq {{.*#+}} xmm3 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
1903 ; SSE41-NEXT: movdqa %xmm4, %xmm0
1906 ; AVX2-LABEL: zext_mulhuw_v8i16_lshr_i64:
1908 ; AVX2-NEXT: vpmulhuw %xmm1, %xmm0, %xmm1
1909 ; AVX2-NEXT: vpmovzxwq {{.*#+}} ymm0 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero
1910 ; AVX2-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,3,2,3]
1911 ; AVX2-NEXT: vpmovzxwq {{.*#+}} ymm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero
1914 ; AVX512-LABEL: zext_mulhuw_v8i16_lshr_i64:
1916 ; AVX512-NEXT: vpmulhuw %xmm1, %xmm0, %xmm0
1917 ; AVX512-NEXT: vpmovzxwq {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero
1919 %a1 = zext <8 x i16> %a to <8 x i64>
1920 %b1 = zext <8 x i16> %b to <8 x i64>
1921 %c = mul <8 x i64> %a1, %b1
1922 %d = lshr <8 x i64> %c, <i64 16, i64 16, i64 16, i64 16, i64 16, i64 16, i64 16, i64 16>
  ret <8 x i64> %d
}
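; Signed multiply-high whose product is logically shifted and kept as i64: the checks show pmulhw followed by zero-extension of the high words to 64 bits.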
1926 define <8 x i64> @sext_mulhsw_v8i16_lshr_i64(<8 x i16> %a, <8 x i16> %b) {
1927 ; SSE2-LABEL: sext_mulhsw_v8i16_lshr_i64:
1929 ; SSE2-NEXT: movdqa %xmm0, %xmm3
1930 ; SSE2-NEXT: pmulhw %xmm1, %xmm3
1931 ; SSE2-NEXT: pxor %xmm4, %xmm4
1932 ; SSE2-NEXT: movdqa %xmm3, %xmm1
1933 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1],xmm1[2],xmm4[2],xmm1[3],xmm4[3]
1934 ; SSE2-NEXT: movdqa %xmm1, %xmm0
1935 ; SSE2-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1]
1936 ; SSE2-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm4[2],xmm1[3],xmm4[3]
1937 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm4[4],xmm3[5],xmm4[5],xmm3[6],xmm4[6],xmm3[7],xmm4[7]
1938 ; SSE2-NEXT: movdqa %xmm3, %xmm2
1939 ; SSE2-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1]
1940 ; SSE2-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm4[2],xmm3[3],xmm4[3]
1943 ; SSE41-LABEL: sext_mulhsw_v8i16_lshr_i64:
1945 ; SSE41-NEXT: pmulhw %xmm1, %xmm0
1946 ; SSE41-NEXT: pmovzxwq {{.*#+}} xmm4 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
1947 ; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,1,1]
1948 ; SSE41-NEXT: pmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
1949 ; SSE41-NEXT: pshufd {{.*#+}} xmm2 = xmm0[2,3,2,3]
1950 ; SSE41-NEXT: pmovzxwq {{.*#+}} xmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
1951 ; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm0[3,3,3,3]
1952 ; SSE41-NEXT: pmovzxwq {{.*#+}} xmm3 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
1953 ; SSE41-NEXT: movdqa %xmm4, %xmm0
1956 ; AVX2-LABEL: sext_mulhsw_v8i16_lshr_i64:
1958 ; AVX2-NEXT: vpmulhw %xmm1, %xmm0, %xmm1
1959 ; AVX2-NEXT: vpmovzxwq {{.*#+}} ymm0 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero
1960 ; AVX2-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,3,2,3]
1961 ; AVX2-NEXT: vpmovzxwq {{.*#+}} ymm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero
1964 ; AVX512-LABEL: sext_mulhsw_v8i16_lshr_i64:
1966 ; AVX512-NEXT: vpmulhw %xmm1, %xmm0, %xmm0
1967 ; AVX512-NEXT: vpmovzxwq {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero
1969 %a1 = sext <8 x i16> %a to <8 x i64>
1970 %b1 = sext <8 x i16> %b to <8 x i64>
1971 %c = mul <8 x i64> %a1, %b1
1972 %d = lshr <8 x i64> %c, <i64 16, i64 16, i64 16, i64 16, i64 16, i64 16, i64 16, i64 16>
  ret <8 x i64> %d
}
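; The arithmetic-shift i64 variant sign-extends instead: pmovsxwq (or pcmpgtd-based widening on SSE2) after pmulhw.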
1976 define <8 x i64> @sext_mulhsw_v8i16_ashr_i64(<8 x i16> %a, <8 x i16> %b) {
1977 ; SSE2-LABEL: sext_mulhsw_v8i16_ashr_i64:
1979 ; SSE2-NEXT: pmulhw %xmm1, %xmm0
1980 ; SSE2-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
1981 ; SSE2-NEXT: psrad $16, %xmm1
1982 ; SSE2-NEXT: pxor %xmm5, %xmm5
1983 ; SSE2-NEXT: pxor %xmm2, %xmm2
1984 ; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
1985 ; SSE2-NEXT: movdqa %xmm1, %xmm4
1986 ; SSE2-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm2[0],xmm4[1],xmm2[1]
1987 ; SSE2-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm2[2],xmm1[3],xmm2[3]
1988 ; SSE2-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm0[4],xmm3[5],xmm0[5],xmm3[6],xmm0[6],xmm3[7],xmm0[7]
1989 ; SSE2-NEXT: psrad $16, %xmm3
1990 ; SSE2-NEXT: pcmpgtd %xmm3, %xmm5
1991 ; SSE2-NEXT: movdqa %xmm3, %xmm2
1992 ; SSE2-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1]
1993 ; SSE2-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm5[2],xmm3[3],xmm5[3]
1994 ; SSE2-NEXT: movdqa %xmm4, %xmm0
1997 ; SSE41-LABEL: sext_mulhsw_v8i16_ashr_i64:
1999 ; SSE41-NEXT: pmulhw %xmm1, %xmm0
2000 ; SSE41-NEXT: pmovsxwq %xmm0, %xmm4
2001 ; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,1,1]
2002 ; SSE41-NEXT: pmovsxwq %xmm1, %xmm1
2003 ; SSE41-NEXT: pshufd {{.*#+}} xmm2 = xmm0[2,3,2,3]
2004 ; SSE41-NEXT: pmovsxwq %xmm2, %xmm2
2005 ; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm0[3,3,3,3]
2006 ; SSE41-NEXT: pmovsxwq %xmm0, %xmm3
2007 ; SSE41-NEXT: movdqa %xmm4, %xmm0
2010 ; AVX2-LABEL: sext_mulhsw_v8i16_ashr_i64:
2012 ; AVX2-NEXT: vpmulhw %xmm1, %xmm0, %xmm1
2013 ; AVX2-NEXT: vpmovsxwq %xmm1, %ymm0
2014 ; AVX2-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,3,2,3]
2015 ; AVX2-NEXT: vpmovsxwq %xmm1, %ymm1
2018 ; AVX512-LABEL: sext_mulhsw_v8i16_ashr_i64:
2020 ; AVX512-NEXT: vpmulhw %xmm1, %xmm0, %xmm0
2021 ; AVX512-NEXT: vpmovsxwq %xmm0, %zmm0
2023 %a1 = sext <8 x i16> %a to <8 x i64>
2024 %b1 = sext <8 x i16> %b to <8 x i64>
2025 %c = mul <8 x i64> %a1, %b1
2026 %d = ashr <8 x i64> %c, <i64 16, i64 16, i64 16, i64 16, i64 16, i64 16, i64 16, i64 16>
  ret <8 x i64> %d
}
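; The pmulh.w intrinsic with constant arguments is folded at compile time; only a load of the resulting vector constant remains.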
2030 define <8 x i16> @sse2_pmulh_w_const(<8 x i16> %a0, <8 x i16> %a1) {
2031 ; SSE-LABEL: sse2_pmulh_w_const:
2033 ; SSE-NEXT: movaps {{.*#+}} xmm0 = [0,65535,65535,65535,65535,65535,65535,0]
2036 ; AVX-LABEL: sse2_pmulh_w_const:
2038 ; AVX-NEXT: vmovaps {{.*#+}} xmm0 = [0,65535,65535,65535,65535,65535,65535,0]
2040 %res = call <8 x i16> @llvm.x86.sse2.pmulh.w(<8 x i16> <i16 -1, i16 -2, i16 -3, i16 -4, i16 -5, i16 -6, i16 -7, i16 0>, <8 x i16> <i16 0, i16 1, i16 2, i16 3, i16 4, i16 5, i16 6, i16 7>)
  ret <8 x i16> %res
}
2043 declare <8 x i16> @llvm.x86.sse2.pmulh.w(<8 x i16>, <8 x i16>)
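; Likewise for the unsigned pmulhu.w intrinsic.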
2045 define <8 x i16> @sse2_pmulhu_w_const(<8 x i16> %a0, <8 x i16> %a1) {
2046 ; SSE-LABEL: sse2_pmulhu_w_const:
2048 ; SSE-NEXT: movaps {{.*#+}} xmm0 = [0,0,1,2,3,4,5,0]
2051 ; AVX-LABEL: sse2_pmulhu_w_const:
2053 ; AVX-NEXT: vmovaps {{.*#+}} xmm0 = [0,0,1,2,3,4,5,0]
2055 %res = call <8 x i16> @llvm.x86.sse2.pmulhu.w(<8 x i16> <i16 -1, i16 -2, i16 -3, i16 -4, i16 -5, i16 -6, i16 -7, i16 0>, <8 x i16> <i16 0, i16 1, i16 2, i16 3, i16 4, i16 5, i16 6, i16 7>)
  ret <8 x i16> %res
}
2058 declare <8 x i16> @llvm.x86.sse2.pmulhu.w(<8 x i16>, <8 x i16>)