1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+avx | FileCheck %s --check-prefixes=X86,X86-AVX,X86-AVX1
3 ; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+avx2 | FileCheck %s --check-prefixes=X86,X86-AVX,X86-AVX2
4 ; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+avx512vl | FileCheck %s --check-prefixes=X86,X86-AVX512,X86-AVX512VL
5 ; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+avx512fp16 | FileCheck %s --check-prefixes=X86,X86-AVX512,X86-AVX512FP16
6 ; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+avx512dq,+avx512vl | FileCheck %s --check-prefixes=X86,X86-AVX512,X86-AVX512VLDQ
7 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx | FileCheck %s --check-prefixes=X64,X64-AVX,X64-AVX1
8 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx2 | FileCheck %s --check-prefixes=X64,X64-AVX,X64-AVX2
9 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512vl | FileCheck %s --check-prefixes=X64,X64-AVX512,X64-AVX512VL
10 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512fp16 | FileCheck %s --check-prefixes=X64,X64-AVX512,X64-AVX512FP16
11 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512dq,+avx512vl | FileCheck %s --check-prefixes=X64,X64-AVX512,X64-AVX512VLDQ
17 define <2 x double> @fabs_v2f64(<2 x double> %p) {
18 ; X86-AVX-LABEL: fabs_v2f64:
19 ; X86-AVX: # %bb.0:
20 ; X86-AVX-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
21 ; X86-AVX-NEXT: retl
23 ; X86-AVX512VL-LABEL: fabs_v2f64:
24 ; X86-AVX512VL: # %bb.0:
25 ; X86-AVX512VL-NEXT: vpandq {{\.?LCPI[0-9]+_[0-9]+}}{1to2}, %xmm0, %xmm0
26 ; X86-AVX512VL-NEXT: retl
28 ; X86-AVX512FP16-LABEL: fabs_v2f64:
29 ; X86-AVX512FP16: # %bb.0:
30 ; X86-AVX512FP16-NEXT: vandpd {{\.?LCPI[0-9]+_[0-9]+}}{1to2}, %xmm0, %xmm0
31 ; X86-AVX512FP16-NEXT: retl
33 ; X86-AVX512VLDQ-LABEL: fabs_v2f64:
34 ; X86-AVX512VLDQ: # %bb.0:
35 ; X86-AVX512VLDQ-NEXT: vandpd {{\.?LCPI[0-9]+_[0-9]+}}{1to2}, %xmm0, %xmm0
36 ; X86-AVX512VLDQ-NEXT: retl
38 ; X64-AVX-LABEL: fabs_v2f64:
39 ; X64-AVX: # %bb.0:
40 ; X64-AVX-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
41 ; X64-AVX-NEXT: retq
43 ; X64-AVX512VL-LABEL: fabs_v2f64:
44 ; X64-AVX512VL: # %bb.0:
45 ; X64-AVX512VL-NEXT: vpandq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %xmm0
46 ; X64-AVX512VL-NEXT: retq
48 ; X64-AVX512FP16-LABEL: fabs_v2f64:
49 ; X64-AVX512FP16: # %bb.0:
50 ; X64-AVX512FP16-NEXT: vandpd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %xmm0
51 ; X64-AVX512FP16-NEXT: retq
53 ; X64-AVX512VLDQ-LABEL: fabs_v2f64:
54 ; X64-AVX512VLDQ: # %bb.0:
55 ; X64-AVX512VLDQ-NEXT: vandpd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %xmm0
56 ; X64-AVX512VLDQ-NEXT: retq
57 %t = call <2 x double> @llvm.fabs.v2f64(<2 x double> %p)
58 ret <2 x double> %t
59 }
60 declare <2 x double> @llvm.fabs.v2f64(<2 x double> %p)
62 define <4 x float> @fabs_v4f32(<4 x float> %p) {
63 ; X86-AVX1-LABEL: fabs_v4f32:
64 ; X86-AVX1: # %bb.0:
65 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
66 ; X86-AVX1-NEXT: retl
68 ; X86-AVX2-LABEL: fabs_v4f32:
69 ; X86-AVX2: # %bb.0:
70 ; X86-AVX2-NEXT: vbroadcastss {{.*#+}} xmm1 = [NaN,NaN,NaN,NaN]
71 ; X86-AVX2-NEXT: vandps %xmm1, %xmm0, %xmm0
72 ; X86-AVX2-NEXT: retl
74 ; X86-AVX512VL-LABEL: fabs_v4f32:
75 ; X86-AVX512VL: # %bb.0:
76 ; X86-AVX512VL-NEXT: vpandd {{\.?LCPI[0-9]+_[0-9]+}}{1to4}, %xmm0, %xmm0
77 ; X86-AVX512VL-NEXT: retl
79 ; X86-AVX512FP16-LABEL: fabs_v4f32:
80 ; X86-AVX512FP16: # %bb.0:
81 ; X86-AVX512FP16-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}{1to4}, %xmm0, %xmm0
82 ; X86-AVX512FP16-NEXT: retl
84 ; X86-AVX512VLDQ-LABEL: fabs_v4f32:
85 ; X86-AVX512VLDQ: # %bb.0:
86 ; X86-AVX512VLDQ-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}{1to4}, %xmm0, %xmm0
87 ; X86-AVX512VLDQ-NEXT: retl
89 ; X64-AVX1-LABEL: fabs_v4f32:
90 ; X64-AVX1: # %bb.0:
91 ; X64-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
92 ; X64-AVX1-NEXT: retq
94 ; X64-AVX2-LABEL: fabs_v4f32:
95 ; X64-AVX2: # %bb.0:
96 ; X64-AVX2-NEXT: vbroadcastss {{.*#+}} xmm1 = [NaN,NaN,NaN,NaN]
97 ; X64-AVX2-NEXT: vandps %xmm1, %xmm0, %xmm0
98 ; X64-AVX2-NEXT: retq
100 ; X64-AVX512VL-LABEL: fabs_v4f32:
101 ; X64-AVX512VL: # %bb.0:
102 ; X64-AVX512VL-NEXT: vpandd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %xmm0
103 ; X64-AVX512VL-NEXT: retq
105 ; X64-AVX512FP16-LABEL: fabs_v4f32:
106 ; X64-AVX512FP16: # %bb.0:
107 ; X64-AVX512FP16-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %xmm0
108 ; X64-AVX512FP16-NEXT: retq
110 ; X64-AVX512VLDQ-LABEL: fabs_v4f32:
111 ; X64-AVX512VLDQ: # %bb.0:
112 ; X64-AVX512VLDQ-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %xmm0
113 ; X64-AVX512VLDQ-NEXT: retq
114 %t = call <4 x float> @llvm.fabs.v4f32(<4 x float> %p)
115 ret <4 x float> %t
116 }
117 declare <4 x float> @llvm.fabs.v4f32(<4 x float> %p)
119 define <8 x half> @fabs_v8f16(ptr %p) {
120 ; X86-AVX1-LABEL: fabs_v8f16:
121 ; X86-AVX1: # %bb.0:
122 ; X86-AVX1-NEXT: movl {{[0-9]+}}(%esp), %eax
123 ; X86-AVX1-NEXT: vmovaps (%eax), %xmm0
124 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
125 ; X86-AVX1-NEXT: retl
127 ; X86-AVX2-LABEL: fabs_v8f16:
128 ; X86-AVX2: # %bb.0:
129 ; X86-AVX2-NEXT: movl {{[0-9]+}}(%esp), %eax
130 ; X86-AVX2-NEXT: vpbroadcastw {{.*#+}} xmm0 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
131 ; X86-AVX2-NEXT: vpand (%eax), %xmm0, %xmm0
132 ; X86-AVX2-NEXT: retl
134 ; X86-AVX512-LABEL: fabs_v8f16:
135 ; X86-AVX512: # %bb.0:
136 ; X86-AVX512-NEXT: movl {{[0-9]+}}(%esp), %eax
137 ; X86-AVX512-NEXT: vpbroadcastw {{.*#+}} xmm0 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
138 ; X86-AVX512-NEXT: vpand (%eax), %xmm0, %xmm0
139 ; X86-AVX512-NEXT: retl
141 ; X64-AVX1-LABEL: fabs_v8f16:
142 ; X64-AVX1: # %bb.0:
143 ; X64-AVX1-NEXT: vmovaps (%rdi), %xmm0
144 ; X64-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
145 ; X64-AVX1-NEXT: retq
147 ; X64-AVX2-LABEL: fabs_v8f16:
148 ; X64-AVX2: # %bb.0:
149 ; X64-AVX2-NEXT: vpbroadcastw {{.*#+}} xmm0 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
150 ; X64-AVX2-NEXT: vpand (%rdi), %xmm0, %xmm0
151 ; X64-AVX2-NEXT: retq
153 ; X64-AVX512-LABEL: fabs_v8f16:
154 ; X64-AVX512: # %bb.0:
155 ; X64-AVX512-NEXT: vpbroadcastw {{.*#+}} xmm0 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
156 ; X64-AVX512-NEXT: vpand (%rdi), %xmm0, %xmm0
157 ; X64-AVX512-NEXT: retq
158 %v = load <8 x half>, ptr %p, align 16
159 %nnv = call <8 x half> @llvm.fabs.v8f16(<8 x half> %v)
160 ret <8 x half> %nnv
161 }
162 declare <8 x half> @llvm.fabs.v8f16(<8 x half> %p)
168 define <4 x double> @fabs_v4f64(<4 x double> %p) {
169 ; X86-AVX1-LABEL: fabs_v4f64:
170 ; X86-AVX1: # %bb.0:
171 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %ymm0, %ymm0
172 ; X86-AVX1-NEXT: retl
174 ; X86-AVX2-LABEL: fabs_v4f64:
175 ; X86-AVX2: # %bb.0:
176 ; X86-AVX2-NEXT: vbroadcastsd {{.*#+}} ymm1 = [NaN,NaN,NaN,NaN]
177 ; X86-AVX2-NEXT: vandps %ymm1, %ymm0, %ymm0
178 ; X86-AVX2-NEXT: retl
180 ; X86-AVX512VL-LABEL: fabs_v4f64:
181 ; X86-AVX512VL: # %bb.0:
182 ; X86-AVX512VL-NEXT: vpandq {{\.?LCPI[0-9]+_[0-9]+}}{1to4}, %ymm0, %ymm0
183 ; X86-AVX512VL-NEXT: retl
185 ; X86-AVX512FP16-LABEL: fabs_v4f64:
186 ; X86-AVX512FP16: # %bb.0:
187 ; X86-AVX512FP16-NEXT: vandpd {{\.?LCPI[0-9]+_[0-9]+}}{1to4}, %ymm0, %ymm0
188 ; X86-AVX512FP16-NEXT: retl
190 ; X86-AVX512VLDQ-LABEL: fabs_v4f64:
191 ; X86-AVX512VLDQ: # %bb.0:
192 ; X86-AVX512VLDQ-NEXT: vandpd {{\.?LCPI[0-9]+_[0-9]+}}{1to4}, %ymm0, %ymm0
193 ; X86-AVX512VLDQ-NEXT: retl
195 ; X64-AVX1-LABEL: fabs_v4f64:
196 ; X64-AVX1: # %bb.0:
197 ; X64-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
198 ; X64-AVX1-NEXT: retq
200 ; X64-AVX2-LABEL: fabs_v4f64:
201 ; X64-AVX2: # %bb.0:
202 ; X64-AVX2-NEXT: vbroadcastsd {{.*#+}} ymm1 = [NaN,NaN,NaN,NaN]
203 ; X64-AVX2-NEXT: vandps %ymm1, %ymm0, %ymm0
204 ; X64-AVX2-NEXT: retq
206 ; X64-AVX512VL-LABEL: fabs_v4f64:
207 ; X64-AVX512VL: # %bb.0:
208 ; X64-AVX512VL-NEXT: vpandq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %ymm0
209 ; X64-AVX512VL-NEXT: retq
211 ; X64-AVX512FP16-LABEL: fabs_v4f64:
212 ; X64-AVX512FP16: # %bb.0:
213 ; X64-AVX512FP16-NEXT: vandpd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %ymm0
214 ; X64-AVX512FP16-NEXT: retq
216 ; X64-AVX512VLDQ-LABEL: fabs_v4f64:
217 ; X64-AVX512VLDQ: # %bb.0:
218 ; X64-AVX512VLDQ-NEXT: vandpd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %ymm0
219 ; X64-AVX512VLDQ-NEXT: retq
220 %t = call <4 x double> @llvm.fabs.v4f64(<4 x double> %p)
221 ret <4 x double> %t
222 }
223 declare <4 x double> @llvm.fabs.v4f64(<4 x double> %p)
225 define <8 x float> @fabs_v8f32(<8 x float> %p) {
226 ; X86-AVX1-LABEL: fabs_v8f32:
227 ; X86-AVX1: # %bb.0:
228 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %ymm0, %ymm0
229 ; X86-AVX1-NEXT: retl
231 ; X86-AVX2-LABEL: fabs_v8f32:
232 ; X86-AVX2: # %bb.0:
233 ; X86-AVX2-NEXT: vbroadcastss {{.*#+}} ymm1 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
234 ; X86-AVX2-NEXT: vandps %ymm1, %ymm0, %ymm0
235 ; X86-AVX2-NEXT: retl
237 ; X86-AVX512VL-LABEL: fabs_v8f32:
238 ; X86-AVX512VL: # %bb.0:
239 ; X86-AVX512VL-NEXT: vpandd {{\.?LCPI[0-9]+_[0-9]+}}{1to8}, %ymm0, %ymm0
240 ; X86-AVX512VL-NEXT: retl
242 ; X86-AVX512FP16-LABEL: fabs_v8f32:
243 ; X86-AVX512FP16: # %bb.0:
244 ; X86-AVX512FP16-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}{1to8}, %ymm0, %ymm0
245 ; X86-AVX512FP16-NEXT: retl
247 ; X86-AVX512VLDQ-LABEL: fabs_v8f32:
248 ; X86-AVX512VLDQ: # %bb.0:
249 ; X86-AVX512VLDQ-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}{1to8}, %ymm0, %ymm0
250 ; X86-AVX512VLDQ-NEXT: retl
252 ; X64-AVX1-LABEL: fabs_v8f32:
253 ; X64-AVX1: # %bb.0:
254 ; X64-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
255 ; X64-AVX1-NEXT: retq
257 ; X64-AVX2-LABEL: fabs_v8f32:
258 ; X64-AVX2: # %bb.0:
259 ; X64-AVX2-NEXT: vbroadcastss {{.*#+}} ymm1 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
260 ; X64-AVX2-NEXT: vandps %ymm1, %ymm0, %ymm0
261 ; X64-AVX2-NEXT: retq
263 ; X64-AVX512VL-LABEL: fabs_v8f32:
264 ; X64-AVX512VL: # %bb.0:
265 ; X64-AVX512VL-NEXT: vpandd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %ymm0
266 ; X64-AVX512VL-NEXT: retq
268 ; X64-AVX512FP16-LABEL: fabs_v8f32:
269 ; X64-AVX512FP16: # %bb.0:
270 ; X64-AVX512FP16-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %ymm0
271 ; X64-AVX512FP16-NEXT: retq
273 ; X64-AVX512VLDQ-LABEL: fabs_v8f32:
274 ; X64-AVX512VLDQ: # %bb.0:
275 ; X64-AVX512VLDQ-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %ymm0
276 ; X64-AVX512VLDQ-NEXT: retq
277 %t = call <8 x float> @llvm.fabs.v8f32(<8 x float> %p)
278 ret <8 x float> %t
279 }
280 declare <8 x float> @llvm.fabs.v8f32(<8 x float> %p)
282 define <16 x half> @fabs_v16f16(ptr %p) {
283 ; X86-AVX1-LABEL: fabs_v16f16:
284 ; X86-AVX1: # %bb.0:
285 ; X86-AVX1-NEXT: pushl %esi
286 ; X86-AVX1-NEXT: .cfi_def_cfa_offset 8
287 ; X86-AVX1-NEXT: subl $308, %esp # imm = 0x134
288 ; X86-AVX1-NEXT: .cfi_def_cfa_offset 316
289 ; X86-AVX1-NEXT: .cfi_offset %esi, -8
290 ; X86-AVX1-NEXT: movl {{[0-9]+}}(%esp), %esi
291 ; X86-AVX1-NEXT: vmovdqa (%esi), %xmm0
292 ; X86-AVX1-NEXT: vmovdqu %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
293 ; X86-AVX1-NEXT: vmovaps 16(%esi), %xmm1
294 ; X86-AVX1-NEXT: vmovups %xmm1, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
295 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
296 ; X86-AVX1-NEXT: calll __extendhfsf2
297 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
298 ; X86-AVX1-NEXT: vpsrlq $48, %xmm0, %xmm0
299 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
300 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
301 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
302 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
303 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
304 ; X86-AVX1-NEXT: calll __extendhfsf2
305 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
306 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
307 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
308 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
309 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
310 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
311 ; X86-AVX1-NEXT: calll __truncsfhf2
312 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
313 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
314 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
315 ; X86-AVX1-NEXT: calll __truncsfhf2
316 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
317 ; X86-AVX1-NEXT: vbroadcastss 4(%esi), %xmm0
318 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
319 ; X86-AVX1-NEXT: calll __extendhfsf2
320 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
321 ; X86-AVX1-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
322 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
323 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
324 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
325 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
326 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
327 ; X86-AVX1-NEXT: calll __extendhfsf2
328 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
329 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
330 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
331 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
332 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
333 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
334 ; X86-AVX1-NEXT: calll __truncsfhf2
335 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
336 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
337 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
338 ; X86-AVX1-NEXT: calll __truncsfhf2
339 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
340 ; X86-AVX1-NEXT: vbroadcastss 8(%esi), %xmm0
341 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
342 ; X86-AVX1-NEXT: calll __extendhfsf2
343 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
344 ; X86-AVX1-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
345 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
346 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
347 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
348 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
349 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
350 ; X86-AVX1-NEXT: calll __extendhfsf2
351 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
352 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
353 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
354 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
355 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
356 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
357 ; X86-AVX1-NEXT: calll __truncsfhf2
358 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
359 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
360 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
361 ; X86-AVX1-NEXT: calll __truncsfhf2
362 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
363 ; X86-AVX1-NEXT: vbroadcastss 12(%esi), %xmm0
364 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
365 ; X86-AVX1-NEXT: calll __extendhfsf2
366 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
367 ; X86-AVX1-NEXT: vpsrld $16, %xmm0, %xmm0
368 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
369 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
370 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
371 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
372 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
373 ; X86-AVX1-NEXT: calll __extendhfsf2
374 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
375 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
376 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
377 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
378 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
379 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
380 ; X86-AVX1-NEXT: calll __truncsfhf2
381 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
382 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
383 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
384 ; X86-AVX1-NEXT: calll __truncsfhf2
385 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
386 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
387 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
388 ; X86-AVX1-NEXT: calll __extendhfsf2
389 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
390 ; X86-AVX1-NEXT: vpsrlq $48, %xmm0, %xmm0
391 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
392 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
393 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
394 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
395 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
396 ; X86-AVX1-NEXT: calll __extendhfsf2
397 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
398 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
399 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
400 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
401 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
402 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
403 ; X86-AVX1-NEXT: calll __truncsfhf2
404 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
405 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
406 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
407 ; X86-AVX1-NEXT: calll __truncsfhf2
408 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
409 ; X86-AVX1-NEXT: vbroadcastss 20(%esi), %xmm0
410 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
411 ; X86-AVX1-NEXT: calll __extendhfsf2
412 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
413 ; X86-AVX1-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
414 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
415 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
416 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
417 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
418 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
419 ; X86-AVX1-NEXT: calll __extendhfsf2
420 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
421 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
422 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
423 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
424 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
425 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
426 ; X86-AVX1-NEXT: calll __truncsfhf2
427 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
428 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
429 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
430 ; X86-AVX1-NEXT: calll __truncsfhf2
431 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
432 ; X86-AVX1-NEXT: vbroadcastss 24(%esi), %xmm0
433 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
434 ; X86-AVX1-NEXT: calll __extendhfsf2
435 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
436 ; X86-AVX1-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
437 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
438 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
439 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
440 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
441 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
442 ; X86-AVX1-NEXT: calll __extendhfsf2
443 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
444 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
445 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
446 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
447 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
448 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
449 ; X86-AVX1-NEXT: calll __truncsfhf2
450 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
451 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
452 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
453 ; X86-AVX1-NEXT: calll __truncsfhf2
454 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
455 ; X86-AVX1-NEXT: vbroadcastss 28(%esi), %xmm0
456 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
457 ; X86-AVX1-NEXT: calll __extendhfsf2
458 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
459 ; X86-AVX1-NEXT: vpsrld $16, %xmm0, %xmm0
460 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
461 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
462 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
463 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
464 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
465 ; X86-AVX1-NEXT: calll __extendhfsf2
466 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
467 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
468 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
469 ; X86-AVX1-NEXT: vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
470 ; X86-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
471 ; X86-AVX1-NEXT: vmovdqu %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
472 ; X86-AVX1-NEXT: calll __truncsfhf2
473 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm1 # 16-byte Reload
474 ; X86-AVX1-NEXT: vmovss %xmm1, (%esp)
475 ; X86-AVX1-NEXT: vpunpcklwd {{[-0-9]+}}(%e{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
476 ; X86-AVX1-NEXT: # xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
477 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm1 # 16-byte Reload
478 ; X86-AVX1-NEXT: vpunpcklwd {{[-0-9]+}}(%e{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
479 ; X86-AVX1-NEXT: # xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
480 ; X86-AVX1-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
481 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm1 # 16-byte Reload
482 ; X86-AVX1-NEXT: vpunpcklwd {{[-0-9]+}}(%e{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
483 ; X86-AVX1-NEXT: # xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
484 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm2 # 16-byte Reload
485 ; X86-AVX1-NEXT: vpunpcklwd {{[-0-9]+}}(%e{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
486 ; X86-AVX1-NEXT: # xmm2 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3]
487 ; X86-AVX1-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
488 ; X86-AVX1-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
489 ; X86-AVX1-NEXT: vmovdqu %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
490 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
491 ; X86-AVX1-NEXT: vpunpcklwd {{[-0-9]+}}(%e{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
492 ; X86-AVX1-NEXT: # xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
493 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm1 # 16-byte Reload
494 ; X86-AVX1-NEXT: vpunpcklwd {{[-0-9]+}}(%e{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
495 ; X86-AVX1-NEXT: # xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
496 ; X86-AVX1-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
497 ; X86-AVX1-NEXT: vmovdqu %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
498 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
499 ; X86-AVX1-NEXT: vpunpcklwd {{[-0-9]+}}(%e{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
500 ; X86-AVX1-NEXT: # xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
501 ; X86-AVX1-NEXT: vmovdqu %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
502 ; X86-AVX1-NEXT: calll __truncsfhf2
503 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm1 # 16-byte Reload
504 ; X86-AVX1-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
505 ; X86-AVX1-NEXT: vpunpckldq {{[-0-9]+}}(%e{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
506 ; X86-AVX1-NEXT: # xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
507 ; X86-AVX1-NEXT: vpunpcklqdq {{[-0-9]+}}(%e{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
508 ; X86-AVX1-NEXT: # xmm0 = xmm0[0],mem[0]
509 ; X86-AVX1-NEXT: vinsertf128 $1, {{[-0-9]+}}(%e{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
510 ; X86-AVX1-NEXT: addl $308, %esp # imm = 0x134
511 ; X86-AVX1-NEXT: .cfi_def_cfa_offset 8
512 ; X86-AVX1-NEXT: popl %esi
513 ; X86-AVX1-NEXT: .cfi_def_cfa_offset 4
514 ; X86-AVX1-NEXT: retl
516 ; X86-AVX2-LABEL: fabs_v16f16:
517 ; X86-AVX2: # %bb.0:
518 ; X86-AVX2-NEXT: movl {{[0-9]+}}(%esp), %eax
519 ; X86-AVX2-NEXT: vpbroadcastw {{.*#+}} ymm0 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
520 ; X86-AVX2-NEXT: vpand (%eax), %ymm0, %ymm0
521 ; X86-AVX2-NEXT: retl
523 ; X86-AVX512-LABEL: fabs_v16f16:
524 ; X86-AVX512: # %bb.0:
525 ; X86-AVX512-NEXT: movl {{[0-9]+}}(%esp), %eax
526 ; X86-AVX512-NEXT: vpbroadcastw {{.*#+}} ymm0 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
527 ; X86-AVX512-NEXT: vpand (%eax), %ymm0, %ymm0
528 ; X86-AVX512-NEXT: retl
530 ; X64-AVX1-LABEL: fabs_v16f16:
531 ; X64-AVX1: # %bb.0:
532 ; X64-AVX1-NEXT: pushq %rbx
533 ; X64-AVX1-NEXT: .cfi_def_cfa_offset 16
534 ; X64-AVX1-NEXT: subq $80, %rsp
535 ; X64-AVX1-NEXT: .cfi_def_cfa_offset 96
536 ; X64-AVX1-NEXT: .cfi_offset %rbx, -16
537 ; X64-AVX1-NEXT: movq %rdi, %rbx
538 ; X64-AVX1-NEXT: vbroadcastss 28(%rdi), %xmm0
539 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
540 ; X64-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
541 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
542 ; X64-AVX1-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
543 ; X64-AVX1-NEXT: vmovaps (%rbx), %xmm0
544 ; X64-AVX1-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
545 ; X64-AVX1-NEXT: vmovdqa 16(%rbx), %xmm0
546 ; X64-AVX1-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
547 ; X64-AVX1-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
548 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
549 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
550 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
551 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
552 ; X64-AVX1-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
553 ; X64-AVX1-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
554 ; X64-AVX1-NEXT: vbroadcastss 24(%rbx), %xmm0
555 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
556 ; X64-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
557 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
558 ; X64-AVX1-NEXT: vmovaps %xmm0, (%rsp) # 16-byte Spill
559 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
560 ; X64-AVX1-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
561 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
562 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
563 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
564 ; X64-AVX1-NEXT: vmovdqa (%rsp), %xmm1 # 16-byte Reload
565 ; X64-AVX1-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
566 ; X64-AVX1-NEXT: vpunpckldq {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
567 ; X64-AVX1-NEXT: # xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
568 ; X64-AVX1-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
569 ; X64-AVX1-NEXT: vbroadcastss 20(%rbx), %xmm0
570 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
571 ; X64-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
572 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
573 ; X64-AVX1-NEXT: vmovaps %xmm0, (%rsp) # 16-byte Spill
574 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
575 ; X64-AVX1-NEXT: vpsrlq $48, %xmm0, %xmm0
576 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
577 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
578 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
579 ; X64-AVX1-NEXT: vmovdqa (%rsp), %xmm1 # 16-byte Reload
580 ; X64-AVX1-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
581 ; X64-AVX1-NEXT: vmovdqa %xmm0, (%rsp) # 16-byte Spill
582 ; X64-AVX1-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
583 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
584 ; X64-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
585 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
586 ; X64-AVX1-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
587 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
588 ; X64-AVX1-NEXT: vpsrld $16, %xmm0, %xmm0
589 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
590 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
591 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
592 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
593 ; X64-AVX1-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
594 ; X64-AVX1-NEXT: vpunpckldq (%rsp), %xmm0, %xmm0 # 16-byte Folded Reload
595 ; X64-AVX1-NEXT: # xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
596 ; X64-AVX1-NEXT: vpunpcklqdq {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
597 ; X64-AVX1-NEXT: # xmm0 = xmm0[0],mem[0]
598 ; X64-AVX1-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
599 ; X64-AVX1-NEXT: vbroadcastss 12(%rbx), %xmm0
600 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
601 ; X64-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
602 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
603 ; X64-AVX1-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
604 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
605 ; X64-AVX1-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
606 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
607 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
608 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
609 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
610 ; X64-AVX1-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
611 ; X64-AVX1-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
612 ; X64-AVX1-NEXT: vbroadcastss 8(%rbx), %xmm0
613 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
614 ; X64-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
615 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
616 ; X64-AVX1-NEXT: vmovaps %xmm0, (%rsp) # 16-byte Spill
617 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
618 ; X64-AVX1-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
619 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
620 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
621 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
622 ; X64-AVX1-NEXT: vmovdqa (%rsp), %xmm1 # 16-byte Reload
623 ; X64-AVX1-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
624 ; X64-AVX1-NEXT: vpunpckldq {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
625 ; X64-AVX1-NEXT: # xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
626 ; X64-AVX1-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
627 ; X64-AVX1-NEXT: vbroadcastss 4(%rbx), %xmm0
628 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
629 ; X64-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
630 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
631 ; X64-AVX1-NEXT: vmovaps %xmm0, (%rsp) # 16-byte Spill
632 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
633 ; X64-AVX1-NEXT: vpsrlq $48, %xmm0, %xmm0
634 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
635 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
636 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
637 ; X64-AVX1-NEXT: vmovdqa (%rsp), %xmm1 # 16-byte Reload
638 ; X64-AVX1-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
639 ; X64-AVX1-NEXT: vmovdqa %xmm0, (%rsp) # 16-byte Spill
640 ; X64-AVX1-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
641 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
642 ; X64-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
643 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
644 ; X64-AVX1-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
645 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
646 ; X64-AVX1-NEXT: vpsrld $16, %xmm0, %xmm0
647 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
648 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
649 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
650 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
651 ; X64-AVX1-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
652 ; X64-AVX1-NEXT: vpunpckldq (%rsp), %xmm0, %xmm0 # 16-byte Folded Reload
653 ; X64-AVX1-NEXT: # xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
654 ; X64-AVX1-NEXT: vpunpcklqdq {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
655 ; X64-AVX1-NEXT: # xmm0 = xmm0[0],mem[0]
656 ; X64-AVX1-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
657 ; X64-AVX1-NEXT: addq $80, %rsp
658 ; X64-AVX1-NEXT: .cfi_def_cfa_offset 16
659 ; X64-AVX1-NEXT: popq %rbx
660 ; X64-AVX1-NEXT: .cfi_def_cfa_offset 8
661 ; X64-AVX1-NEXT: retq
663 ; X64-AVX2-LABEL: fabs_v16f16:
664 ; X64-AVX2: # %bb.0:
665 ; X64-AVX2-NEXT: vpbroadcastw {{.*#+}} ymm0 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
666 ; X64-AVX2-NEXT: vpand (%rdi), %ymm0, %ymm0
667 ; X64-AVX2-NEXT: retq
669 ; X64-AVX512-LABEL: fabs_v16f16:
670 ; X64-AVX512: # %bb.0:
671 ; X64-AVX512-NEXT: vpbroadcastw {{.*#+}} ymm0 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
672 ; X64-AVX512-NEXT: vpand (%rdi), %ymm0, %ymm0
673 ; X64-AVX512-NEXT: retq
674 %v = load <16 x half>, ptr %p, align 32
675 %nnv = call <16 x half> @llvm.fabs.v16f16(<16 x half> %v)
676 ret <16 x half> %nnv
677 }
678 declare <16 x half> @llvm.fabs.v16f16(<16 x half> %p)
684 define <8 x double> @fabs_v8f64(<8 x double> %p) {
685 ; X86-AVX-LABEL: fabs_v8f64:
686 ; X86-AVX: # %bb.0:
687 ; X86-AVX-NEXT: vbroadcastsd {{.*#+}} ymm2 = [NaN,NaN,NaN,NaN]
688 ; X86-AVX-NEXT: vandps %ymm2, %ymm0, %ymm0
689 ; X86-AVX-NEXT: vandps %ymm2, %ymm1, %ymm1
690 ; X86-AVX-NEXT: retl
692 ; X86-AVX512VL-LABEL: fabs_v8f64:
693 ; X86-AVX512VL: # %bb.0:
694 ; X86-AVX512VL-NEXT: vpandq {{\.?LCPI[0-9]+_[0-9]+}}{1to8}, %zmm0, %zmm0
695 ; X86-AVX512VL-NEXT: retl
697 ; X86-AVX512FP16-LABEL: fabs_v8f64:
698 ; X86-AVX512FP16: # %bb.0:
699 ; X86-AVX512FP16-NEXT: vandpd {{\.?LCPI[0-9]+_[0-9]+}}{1to8}, %zmm0, %zmm0
700 ; X86-AVX512FP16-NEXT: retl
702 ; X86-AVX512VLDQ-LABEL: fabs_v8f64:
703 ; X86-AVX512VLDQ: # %bb.0:
704 ; X86-AVX512VLDQ-NEXT: vandpd {{\.?LCPI[0-9]+_[0-9]+}}{1to8}, %zmm0, %zmm0
705 ; X86-AVX512VLDQ-NEXT: retl
707 ; X64-AVX-LABEL: fabs_v8f64:
708 ; X64-AVX: # %bb.0:
709 ; X64-AVX-NEXT: vbroadcastsd {{.*#+}} ymm2 = [NaN,NaN,NaN,NaN]
710 ; X64-AVX-NEXT: vandps %ymm2, %ymm0, %ymm0
711 ; X64-AVX-NEXT: vandps %ymm2, %ymm1, %ymm1
712 ; X64-AVX-NEXT: retq
714 ; X64-AVX512VL-LABEL: fabs_v8f64:
715 ; X64-AVX512VL: # %bb.0:
716 ; X64-AVX512VL-NEXT: vpandq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %zmm0
717 ; X64-AVX512VL-NEXT: retq
719 ; X64-AVX512FP16-LABEL: fabs_v8f64:
720 ; X64-AVX512FP16: # %bb.0:
721 ; X64-AVX512FP16-NEXT: vandpd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %zmm0
722 ; X64-AVX512FP16-NEXT: retq
724 ; X64-AVX512VLDQ-LABEL: fabs_v8f64:
725 ; X64-AVX512VLDQ: # %bb.0:
726 ; X64-AVX512VLDQ-NEXT: vandpd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %zmm0
727 ; X64-AVX512VLDQ-NEXT: retq
728 %t = call <8 x double> @llvm.fabs.v8f64(<8 x double> %p)
729 ret <8 x double> %t
730 }
731 declare <8 x double> @llvm.fabs.v8f64(<8 x double> %p)
733 define <16 x float> @fabs_v16f32(<16 x float> %p) {
734 ; X86-AVX-LABEL: fabs_v16f32:
735 ; X86-AVX: # %bb.0:
736 ; X86-AVX-NEXT: vbroadcastss {{.*#+}} ymm2 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
737 ; X86-AVX-NEXT: vandps %ymm2, %ymm0, %ymm0
738 ; X86-AVX-NEXT: vandps %ymm2, %ymm1, %ymm1
739 ; X86-AVX-NEXT: retl
741 ; X86-AVX512VL-LABEL: fabs_v16f32:
742 ; X86-AVX512VL: # %bb.0:
743 ; X86-AVX512VL-NEXT: vpandd {{\.?LCPI[0-9]+_[0-9]+}}{1to16}, %zmm0, %zmm0
744 ; X86-AVX512VL-NEXT: retl
746 ; X86-AVX512FP16-LABEL: fabs_v16f32:
747 ; X86-AVX512FP16: # %bb.0:
748 ; X86-AVX512FP16-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}{1to16}, %zmm0, %zmm0
749 ; X86-AVX512FP16-NEXT: retl
751 ; X86-AVX512VLDQ-LABEL: fabs_v16f32:
752 ; X86-AVX512VLDQ: # %bb.0:
753 ; X86-AVX512VLDQ-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}{1to16}, %zmm0, %zmm0
754 ; X86-AVX512VLDQ-NEXT: retl
756 ; X64-AVX-LABEL: fabs_v16f32:
757 ; X64-AVX: # %bb.0:
758 ; X64-AVX-NEXT: vbroadcastss {{.*#+}} ymm2 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
759 ; X64-AVX-NEXT: vandps %ymm2, %ymm0, %ymm0
760 ; X64-AVX-NEXT: vandps %ymm2, %ymm1, %ymm1
761 ; X64-AVX-NEXT: retq
763 ; X64-AVX512VL-LABEL: fabs_v16f32:
764 ; X64-AVX512VL: # %bb.0:
765 ; X64-AVX512VL-NEXT: vpandd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %zmm0
766 ; X64-AVX512VL-NEXT: retq
768 ; X64-AVX512FP16-LABEL: fabs_v16f32:
769 ; X64-AVX512FP16: # %bb.0:
770 ; X64-AVX512FP16-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %zmm0
771 ; X64-AVX512FP16-NEXT: retq
773 ; X64-AVX512VLDQ-LABEL: fabs_v16f32:
774 ; X64-AVX512VLDQ: # %bb.0:
775 ; X64-AVX512VLDQ-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %zmm0
776 ; X64-AVX512VLDQ-NEXT: retq
777 %t = call <16 x float> @llvm.fabs.v16f32(<16 x float> %p)
778 ret <16 x float> %t
779 }
780 declare <16 x float> @llvm.fabs.v16f32(<16 x float> %p)
782 define <32 x half> @fabs_v32f16(ptr %p) {
783 ; X86-AVX1-LABEL: fabs_v32f16:
784 ; X86-AVX1: # %bb.0:
785 ; X86-AVX1-NEXT: pushl %esi
786 ; X86-AVX1-NEXT: .cfi_def_cfa_offset 8
787 ; X86-AVX1-NEXT: subl $644, %esp # imm = 0x284
788 ; X86-AVX1-NEXT: .cfi_def_cfa_offset 652
789 ; X86-AVX1-NEXT: .cfi_offset %esi, -8
790 ; X86-AVX1-NEXT: movl {{[0-9]+}}(%esp), %esi
791 ; X86-AVX1-NEXT: vmovdqa 32(%esi), %xmm0
792 ; X86-AVX1-NEXT: vmovdqu %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
793 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
794 ; X86-AVX1-NEXT: calll __extendhfsf2
795 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
796 ; X86-AVX1-NEXT: vpsrlq $48, %xmm0, %xmm0
797 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
798 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
799 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
800 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
801 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
802 ; X86-AVX1-NEXT: calll __extendhfsf2
803 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
804 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
805 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
806 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
807 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
808 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
809 ; X86-AVX1-NEXT: calll __truncsfhf2
810 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
811 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
812 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
813 ; X86-AVX1-NEXT: calll __truncsfhf2
814 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
815 ; X86-AVX1-NEXT: vbroadcastss 36(%esi), %xmm0
816 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
817 ; X86-AVX1-NEXT: calll __extendhfsf2
818 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
819 ; X86-AVX1-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
820 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
821 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
822 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
823 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
824 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
825 ; X86-AVX1-NEXT: calll __extendhfsf2
826 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
827 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
828 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
829 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
830 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
831 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
832 ; X86-AVX1-NEXT: calll __truncsfhf2
833 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
834 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
835 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
836 ; X86-AVX1-NEXT: calll __truncsfhf2
837 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
838 ; X86-AVX1-NEXT: vbroadcastss 40(%esi), %xmm0
839 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
840 ; X86-AVX1-NEXT: calll __extendhfsf2
841 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
842 ; X86-AVX1-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
843 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
844 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
845 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
846 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
847 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
848 ; X86-AVX1-NEXT: calll __extendhfsf2
849 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
850 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
851 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
852 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
853 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
854 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
855 ; X86-AVX1-NEXT: calll __truncsfhf2
856 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
857 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
858 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
859 ; X86-AVX1-NEXT: calll __truncsfhf2
860 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
861 ; X86-AVX1-NEXT: vbroadcastss 44(%esi), %xmm0
862 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
863 ; X86-AVX1-NEXT: calll __extendhfsf2
864 ; X86-AVX1-NEXT: vmovdqa 48(%esi), %xmm0
865 ; X86-AVX1-NEXT: vmovdqu %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
866 ; X86-AVX1-NEXT: vpsrld $16, %xmm0, %xmm0
867 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
868 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
869 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
870 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
871 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
872 ; X86-AVX1-NEXT: calll __extendhfsf2
873 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
874 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
875 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
876 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
877 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
878 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
879 ; X86-AVX1-NEXT: calll __truncsfhf2
880 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
881 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
882 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
883 ; X86-AVX1-NEXT: calll __truncsfhf2
884 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
885 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
886 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
887 ; X86-AVX1-NEXT: calll __extendhfsf2
888 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
889 ; X86-AVX1-NEXT: vpsrlq $48, %xmm0, %xmm0
890 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
891 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
892 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
893 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
894 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
895 ; X86-AVX1-NEXT: calll __extendhfsf2
896 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
897 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
898 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
899 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
900 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
901 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
902 ; X86-AVX1-NEXT: calll __truncsfhf2
903 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
904 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
905 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
906 ; X86-AVX1-NEXT: calll __truncsfhf2
907 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
908 ; X86-AVX1-NEXT: vbroadcastss 52(%esi), %xmm0
909 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
910 ; X86-AVX1-NEXT: calll __extendhfsf2
911 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
912 ; X86-AVX1-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
913 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
914 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
915 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
916 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
917 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
918 ; X86-AVX1-NEXT: calll __extendhfsf2
919 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
920 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
921 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
922 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
923 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
924 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
925 ; X86-AVX1-NEXT: calll __truncsfhf2
926 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
927 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
928 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
929 ; X86-AVX1-NEXT: calll __truncsfhf2
930 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
931 ; X86-AVX1-NEXT: vbroadcastss 56(%esi), %xmm0
932 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
933 ; X86-AVX1-NEXT: calll __extendhfsf2
934 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
935 ; X86-AVX1-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
936 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
937 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
938 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
939 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
940 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
941 ; X86-AVX1-NEXT: calll __extendhfsf2
942 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
943 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
944 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
945 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
946 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
947 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
948 ; X86-AVX1-NEXT: calll __truncsfhf2
949 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
950 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
951 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
952 ; X86-AVX1-NEXT: calll __truncsfhf2
953 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
954 ; X86-AVX1-NEXT: vbroadcastss 60(%esi), %xmm0
955 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
956 ; X86-AVX1-NEXT: calll __extendhfsf2
957 ; X86-AVX1-NEXT: vmovdqa (%esi), %xmm1
958 ; X86-AVX1-NEXT: vmovdqu %xmm1, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
959 ; X86-AVX1-NEXT: vmovaps 16(%esi), %xmm0
960 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
961 ; X86-AVX1-NEXT: vpsrld $16, %xmm1, %xmm0
962 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
963 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
964 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
965 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
966 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
967 ; X86-AVX1-NEXT: calll __extendhfsf2
968 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
969 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
970 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
971 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
972 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
973 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
974 ; X86-AVX1-NEXT: calll __truncsfhf2
975 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
976 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
977 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
978 ; X86-AVX1-NEXT: calll __truncsfhf2
979 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
980 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
981 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
982 ; X86-AVX1-NEXT: calll __extendhfsf2
983 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
984 ; X86-AVX1-NEXT: vpsrlq $48, %xmm0, %xmm0
985 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
986 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
987 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
988 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
989 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
990 ; X86-AVX1-NEXT: calll __extendhfsf2
991 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
992 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
993 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
994 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
995 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
996 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
997 ; X86-AVX1-NEXT: calll __truncsfhf2
998 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
999 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
1000 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
1001 ; X86-AVX1-NEXT: calll __truncsfhf2
1002 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1003 ; X86-AVX1-NEXT: vbroadcastss 4(%esi), %xmm0
1004 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
1005 ; X86-AVX1-NEXT: calll __extendhfsf2
1006 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
1007 ; X86-AVX1-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1008 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
1009 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
1010 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
1011 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1012 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1013 ; X86-AVX1-NEXT: calll __extendhfsf2
1014 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
1015 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
1016 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
1017 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
1018 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1019 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1020 ; X86-AVX1-NEXT: calll __truncsfhf2
1021 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1022 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
1023 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
1024 ; X86-AVX1-NEXT: calll __truncsfhf2
1025 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1026 ; X86-AVX1-NEXT: vbroadcastss 8(%esi), %xmm0
1027 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
1028 ; X86-AVX1-NEXT: calll __extendhfsf2
1029 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
1030 ; X86-AVX1-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1031 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
1032 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
1033 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
1034 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1035 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1036 ; X86-AVX1-NEXT: calll __extendhfsf2
1037 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
1038 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
1039 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
1040 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
1041 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1042 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1043 ; X86-AVX1-NEXT: calll __truncsfhf2
1044 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1045 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
1046 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
1047 ; X86-AVX1-NEXT: calll __truncsfhf2
1048 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1049 ; X86-AVX1-NEXT: vbroadcastss 12(%esi), %xmm0
1050 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
1051 ; X86-AVX1-NEXT: calll __extendhfsf2
1052 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
1053 ; X86-AVX1-NEXT: vpsrld $16, %xmm0, %xmm0
1054 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
1055 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
1056 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
1057 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1058 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1059 ; X86-AVX1-NEXT: calll __extendhfsf2
1060 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
1061 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
1062 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
1063 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
1064 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1065 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1066 ; X86-AVX1-NEXT: calll __truncsfhf2
1067 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1068 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
1069 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
1070 ; X86-AVX1-NEXT: calll __truncsfhf2
1071 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1072 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
1073 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
1074 ; X86-AVX1-NEXT: calll __extendhfsf2
1075 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
1076 ; X86-AVX1-NEXT: vpsrlq $48, %xmm0, %xmm0
1077 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
1078 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
1079 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
1080 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1081 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1082 ; X86-AVX1-NEXT: calll __extendhfsf2
1083 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
1084 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
1085 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
1086 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
1087 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1088 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1089 ; X86-AVX1-NEXT: calll __truncsfhf2
1090 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1091 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
1092 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
1093 ; X86-AVX1-NEXT: calll __truncsfhf2
1094 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1095 ; X86-AVX1-NEXT: vbroadcastss 20(%esi), %xmm0
1096 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
1097 ; X86-AVX1-NEXT: calll __extendhfsf2
1098 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
1099 ; X86-AVX1-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1100 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
1101 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
1102 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
1103 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1104 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1105 ; X86-AVX1-NEXT: calll __extendhfsf2
1106 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
1107 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
1108 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
1109 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
1110 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1111 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1112 ; X86-AVX1-NEXT: calll __truncsfhf2
1113 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1114 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
1115 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
1116 ; X86-AVX1-NEXT: calll __truncsfhf2
1117 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1118 ; X86-AVX1-NEXT: vbroadcastss 24(%esi), %xmm0
1119 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
1120 ; X86-AVX1-NEXT: calll __extendhfsf2
1121 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
1122 ; X86-AVX1-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1123 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
1124 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
1125 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
1126 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1127 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1128 ; X86-AVX1-NEXT: calll __extendhfsf2
1129 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
1130 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
1131 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
1132 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
1133 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1134 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1135 ; X86-AVX1-NEXT: calll __truncsfhf2
1136 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1137 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
1138 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
1139 ; X86-AVX1-NEXT: calll __truncsfhf2
1140 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1141 ; X86-AVX1-NEXT: vbroadcastss 28(%esi), %xmm0
1142 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
1143 ; X86-AVX1-NEXT: calll __extendhfsf2
1144 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
1145 ; X86-AVX1-NEXT: vpsrld $16, %xmm0, %xmm0
1146 ; X86-AVX1-NEXT: vpextrw $0, %xmm0, (%esp)
1147 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
1148 ; X86-AVX1-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
1149 ; X86-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1150 ; X86-AVX1-NEXT: vmovups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1151 ; X86-AVX1-NEXT: calll __extendhfsf2
1152 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
1153 ; X86-AVX1-NEXT: vmovss %xmm0, (%esp)
1154 ; X86-AVX1-NEXT: fstps {{[0-9]+}}(%esp)
1155 ; X86-AVX1-NEXT: vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
1156 ; X86-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
1157 ; X86-AVX1-NEXT: vmovdqu %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1158 ; X86-AVX1-NEXT: calll __truncsfhf2
1159 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %xmm1 # 16-byte Reload
1160 ; X86-AVX1-NEXT: vmovss %xmm1, (%esp)
1161 ; X86-AVX1-NEXT: vpunpcklwd {{[-0-9]+}}(%e{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
1162 ; X86-AVX1-NEXT: # xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
1163 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm1 # 16-byte Reload
1164 ; X86-AVX1-NEXT: vpunpcklwd {{[-0-9]+}}(%e{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
1165 ; X86-AVX1-NEXT: # xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
1166 ; X86-AVX1-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
1167 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm1 # 16-byte Reload
1168 ; X86-AVX1-NEXT: vpunpcklwd {{[-0-9]+}}(%e{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
1169 ; X86-AVX1-NEXT: # xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
1170 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm2 # 16-byte Reload
1171 ; X86-AVX1-NEXT: vpunpcklwd {{[-0-9]+}}(%e{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
1172 ; X86-AVX1-NEXT: # xmm2 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3]
1173 ; X86-AVX1-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
1174 ; X86-AVX1-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
1175 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm1 # 16-byte Reload
1176 ; X86-AVX1-NEXT: vpunpcklwd {{[-0-9]+}}(%e{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
1177 ; X86-AVX1-NEXT: # xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
1178 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm2 # 16-byte Reload
1179 ; X86-AVX1-NEXT: vpunpcklwd {{[-0-9]+}}(%e{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
1180 ; X86-AVX1-NEXT: # xmm2 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3]
1181 ; X86-AVX1-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
1182 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm2 # 16-byte Reload
1183 ; X86-AVX1-NEXT: vpunpcklwd {{[-0-9]+}}(%e{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
1184 ; X86-AVX1-NEXT: # xmm2 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3]
1185 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm3 # 16-byte Reload
1186 ; X86-AVX1-NEXT: vpunpcklwd {{[-0-9]+}}(%e{{[sb]}}p), %xmm3, %xmm3 # 16-byte Folded Reload
1187 ; X86-AVX1-NEXT: # xmm3 = xmm3[0],mem[0],xmm3[1],mem[1],xmm3[2],mem[2],xmm3[3],mem[3]
1188 ; X86-AVX1-NEXT: vpunpckldq {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
1189 ; X86-AVX1-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
1190 ; X86-AVX1-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
1191 ; X86-AVX1-NEXT: vmovups %ymm0, {{[-0-9]+}}(%e{{[sb]}}p) # 32-byte Spill
1192 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
1193 ; X86-AVX1-NEXT: vpunpcklwd {{[-0-9]+}}(%e{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
1194 ; X86-AVX1-NEXT: # xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
1195 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm1 # 16-byte Reload
1196 ; X86-AVX1-NEXT: vpunpcklwd {{[-0-9]+}}(%e{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
1197 ; X86-AVX1-NEXT: # xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
1198 ; X86-AVX1-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
1199 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm1 # 16-byte Reload
1200 ; X86-AVX1-NEXT: vpunpcklwd {{[-0-9]+}}(%e{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
1201 ; X86-AVX1-NEXT: # xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
1202 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm2 # 16-byte Reload
1203 ; X86-AVX1-NEXT: vpunpcklwd {{[-0-9]+}}(%e{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
1204 ; X86-AVX1-NEXT: # xmm2 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3]
1205 ; X86-AVX1-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
1206 ; X86-AVX1-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
1207 ; X86-AVX1-NEXT: vmovdqu %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1208 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
1209 ; X86-AVX1-NEXT: vpunpcklwd {{[-0-9]+}}(%e{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
1210 ; X86-AVX1-NEXT: # xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
1211 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm1 # 16-byte Reload
1212 ; X86-AVX1-NEXT: vpunpcklwd {{[-0-9]+}}(%e{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
1213 ; X86-AVX1-NEXT: # xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
1214 ; X86-AVX1-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
1215 ; X86-AVX1-NEXT: vmovdqu %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1216 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
1217 ; X86-AVX1-NEXT: vpunpcklwd {{[-0-9]+}}(%e{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
1218 ; X86-AVX1-NEXT: # xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
1219 ; X86-AVX1-NEXT: vmovdqu %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1220 ; X86-AVX1-NEXT: vzeroupper
1221 ; X86-AVX1-NEXT: calll __truncsfhf2
1222 ; X86-AVX1-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm1 # 16-byte Reload
1223 ; X86-AVX1-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
1224 ; X86-AVX1-NEXT: vpunpckldq {{[-0-9]+}}(%e{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
1225 ; X86-AVX1-NEXT: # xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
1226 ; X86-AVX1-NEXT: vpunpcklqdq {{[-0-9]+}}(%e{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
1227 ; X86-AVX1-NEXT: # xmm0 = xmm0[0],mem[0]
1228 ; X86-AVX1-NEXT: vinsertf128 $1, {{[-0-9]+}}(%e{{[sb]}}p), %ymm0, %ymm1 # 16-byte Folded Reload
1229 ; X86-AVX1-NEXT: vmovups {{[-0-9]+}}(%e{{[sb]}}p), %ymm0 # 32-byte Reload
1230 ; X86-AVX1-NEXT: addl $644, %esp # imm = 0x284
1231 ; X86-AVX1-NEXT: .cfi_def_cfa_offset 8
1232 ; X86-AVX1-NEXT: popl %esi
1233 ; X86-AVX1-NEXT: .cfi_def_cfa_offset 4
1234 ; X86-AVX1-NEXT: retl
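; The AVX1 sequence above scalarizes the v32f16 fabs through __extendhfsf2/__truncsfhf2
; libcalls, masking each element in single precision; with AVX2 below the whole operation
; is just two 256-bit vpand ops against a broadcast 0x7FFF (NaN-patterned) sign-clear mask.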
1236 ; X86-AVX2-LABEL: fabs_v32f16:
1237 ; X86-AVX2: # %bb.0:
1238 ; X86-AVX2-NEXT: movl {{[0-9]+}}(%esp), %eax
1239 ; X86-AVX2-NEXT: vpbroadcastw {{.*#+}} ymm1 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
1240 ; X86-AVX2-NEXT: vpand (%eax), %ymm1, %ymm0
1241 ; X86-AVX2-NEXT: vpand 32(%eax), %ymm1, %ymm1
1242 ; X86-AVX2-NEXT: retl
1244 ; X86-AVX512VL-LABEL: fabs_v32f16:
1245 ; X86-AVX512VL: # %bb.0:
1246 ; X86-AVX512VL-NEXT: subl $128, %esp
1247 ; X86-AVX512VL-NEXT: .cfi_def_cfa_offset 132
1248 ; X86-AVX512VL-NEXT: movl {{[0-9]+}}(%esp), %eax
1249 ; X86-AVX512VL-NEXT: movzwl 60(%eax), %ecx
1250 ; X86-AVX512VL-NEXT: vmovd %ecx, %xmm0
1251 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm0, %xmm1
1252 ; X86-AVX512VL-NEXT: vpbroadcastd {{.*#+}} xmm0 = [NaN,NaN,NaN,NaN]
1253 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm1, %xmm1
1254 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1255 ; X86-AVX512VL-NEXT: vmovd %xmm1, %ecx
1256 ; X86-AVX512VL-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm1
1257 ; X86-AVX512VL-NEXT: vmovdqa 48(%eax), %xmm3
1258 ; X86-AVX512VL-NEXT: vpsrldq {{.*#+}} xmm2 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1259 ; X86-AVX512VL-NEXT: vmovdqa %xmm3, %xmm4
1260 ; X86-AVX512VL-NEXT: vmovdqu %xmm3, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1261 ; X86-AVX512VL-NEXT: vpextrw $0, %xmm2, %ecx
1262 ; X86-AVX512VL-NEXT: movzwl %cx, %ecx
1263 ; X86-AVX512VL-NEXT: vmovd %ecx, %xmm2
1264 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm2, %xmm2
1265 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm2, %xmm2
1266 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm2, %xmm2
1267 ; X86-AVX512VL-NEXT: vmovd %xmm2, %ecx
1268 ; X86-AVX512VL-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm2
1269 ; X86-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
1270 ; X86-AVX512VL-NEXT: movzwl 44(%eax), %ecx
1271 ; X86-AVX512VL-NEXT: vmovd %ecx, %xmm2
1272 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm2, %xmm2
1273 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm2, %xmm2
1274 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm2, %xmm2
1275 ; X86-AVX512VL-NEXT: vmovd %xmm2, %ecx
1276 ; X86-AVX512VL-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm2
1277 ; X86-AVX512VL-NEXT: vmovdqa 32(%eax), %xmm3
1278 ; X86-AVX512VL-NEXT: vpsrldq {{.*#+}} xmm5 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1279 ; X86-AVX512VL-NEXT: vpextrw $0, %xmm5, %ecx
1280 ; X86-AVX512VL-NEXT: movzwl %cx, %ecx
1281 ; X86-AVX512VL-NEXT: vmovd %ecx, %xmm5
1282 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm5, %xmm5
1283 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm5, %xmm5
1284 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm5, %xmm5
1285 ; X86-AVX512VL-NEXT: vmovd %xmm5, %ecx
1286 ; X86-AVX512VL-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm5
1287 ; X86-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1],xmm2[2],xmm5[2],xmm2[3],xmm5[3]
1288 ; X86-AVX512VL-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm5
1289 ; X86-AVX512VL-NEXT: movzwl 28(%eax), %ecx
1290 ; X86-AVX512VL-NEXT: vmovd %ecx, %xmm1
1291 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm1, %xmm1
1292 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm1, %xmm1
1293 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1294 ; X86-AVX512VL-NEXT: vmovd %xmm1, %ecx
1295 ; X86-AVX512VL-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm6
1296 ; X86-AVX512VL-NEXT: vmovdqa (%eax), %xmm1
1297 ; X86-AVX512VL-NEXT: vmovdqu %xmm1, (%esp) # 16-byte Spill
1298 ; X86-AVX512VL-NEXT: vmovdqa 16(%eax), %xmm2
1299 ; X86-AVX512VL-NEXT: vpsrldq {{.*#+}} xmm7 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1300 ; X86-AVX512VL-NEXT: vmovdqu %xmm2, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1301 ; X86-AVX512VL-NEXT: vpextrw $0, %xmm7, %ecx
1302 ; X86-AVX512VL-NEXT: movzwl %cx, %ecx
1303 ; X86-AVX512VL-NEXT: vmovd %ecx, %xmm7
1304 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm7, %xmm7
1305 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm7, %xmm7
1306 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm7, %xmm7
1307 ; X86-AVX512VL-NEXT: vmovd %xmm7, %ecx
1308 ; X86-AVX512VL-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm7
1309 ; X86-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
1310 ; X86-AVX512VL-NEXT: movzwl 12(%eax), %ecx
1311 ; X86-AVX512VL-NEXT: vmovd %ecx, %xmm7
1312 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm7, %xmm7
1313 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm7, %xmm7
1314 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm7, %xmm7
1315 ; X86-AVX512VL-NEXT: vmovd %xmm7, %ecx
1316 ; X86-AVX512VL-NEXT: vpsrldq {{.*#+}} xmm7 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1317 ; X86-AVX512VL-NEXT: vpextrw $0, %xmm7, %edx
1318 ; X86-AVX512VL-NEXT: movzwl %dx, %edx
1319 ; X86-AVX512VL-NEXT: vmovd %edx, %xmm7
1320 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm7, %xmm7
1321 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm7, %xmm7
1322 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm7, %xmm7
1323 ; X86-AVX512VL-NEXT: vmovd %xmm7, %edx
1324 ; X86-AVX512VL-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm7
1325 ; X86-AVX512VL-NEXT: vpinsrw $0, %edx, %xmm0, %xmm1
1326 ; X86-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm7[0],xmm1[0],xmm7[1],xmm1[1],xmm7[2],xmm1[2],xmm7[3],xmm1[3]
1327 ; X86-AVX512VL-NEXT: vinserti128 $1, %xmm6, %ymm1, %ymm1
1328 ; X86-AVX512VL-NEXT: vinserti64x4 $1, %ymm5, %zmm1, %zmm1
1329 ; X86-AVX512VL-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%e{{[sb]}}p) # 64-byte Spill
1330 ; X86-AVX512VL-NEXT: movzwl 56(%eax), %ecx
1331 ; X86-AVX512VL-NEXT: vmovd %ecx, %xmm1
1332 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm1, %xmm1
1333 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm1, %xmm1
1334 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1335 ; X86-AVX512VL-NEXT: vmovd %xmm1, %ecx
1336 ; X86-AVX512VL-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm1
1337 ; X86-AVX512VL-NEXT: vpsrldq {{.*#+}} xmm6 = xmm4[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1338 ; X86-AVX512VL-NEXT: vpextrw $0, %xmm6, %ecx
1339 ; X86-AVX512VL-NEXT: movzwl %cx, %ecx
1340 ; X86-AVX512VL-NEXT: vmovd %ecx, %xmm6
1341 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm6, %xmm6
1342 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm6, %xmm6
1343 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm6, %xmm6
1344 ; X86-AVX512VL-NEXT: vmovd %xmm6, %ecx
1345 ; X86-AVX512VL-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm6
1346 ; X86-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm1[0],xmm6[0],xmm1[1],xmm6[1],xmm1[2],xmm6[2],xmm1[3],xmm6[3]
1347 ; X86-AVX512VL-NEXT: movzwl 40(%eax), %ecx
1348 ; X86-AVX512VL-NEXT: vmovd %ecx, %xmm1
1349 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm1, %xmm1
1350 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm1, %xmm1
1351 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1352 ; X86-AVX512VL-NEXT: vmovd %xmm1, %ecx
1353 ; X86-AVX512VL-NEXT: vpsrldq {{.*#+}} xmm1 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1354 ; X86-AVX512VL-NEXT: vmovdqa %xmm3, %xmm4
1355 ; X86-AVX512VL-NEXT: vmovdqu %xmm3, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1356 ; X86-AVX512VL-NEXT: vpextrw $0, %xmm1, %edx
1357 ; X86-AVX512VL-NEXT: movzwl %dx, %edx
1358 ; X86-AVX512VL-NEXT: vmovd %edx, %xmm1
1359 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm1, %xmm1
1360 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm1, %xmm1
1361 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1362 ; X86-AVX512VL-NEXT: vmovd %xmm1, %edx
1363 ; X86-AVX512VL-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm1
1364 ; X86-AVX512VL-NEXT: vpinsrw $0, %edx, %xmm0, %xmm7
1365 ; X86-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm7[0],xmm1[1],xmm7[1],xmm1[2],xmm7[2],xmm1[3],xmm7[3]
1366 ; X86-AVX512VL-NEXT: vinserti128 $1, %xmm6, %ymm1, %ymm6
1367 ; X86-AVX512VL-NEXT: movzwl 24(%eax), %ecx
1368 ; X86-AVX512VL-NEXT: vmovd %ecx, %xmm1
1369 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm1, %xmm1
1370 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm1, %xmm1
1371 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1372 ; X86-AVX512VL-NEXT: vmovd %xmm1, %ecx
1373 ; X86-AVX512VL-NEXT: vpsrldq {{.*#+}} xmm1 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1374 ; X86-AVX512VL-NEXT: vpextrw $0, %xmm1, %edx
1375 ; X86-AVX512VL-NEXT: movzwl %dx, %edx
1376 ; X86-AVX512VL-NEXT: vmovd %edx, %xmm1
1377 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm1, %xmm1
1378 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm1, %xmm1
1379 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1380 ; X86-AVX512VL-NEXT: vmovd %xmm1, %edx
1381 ; X86-AVX512VL-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm1
1382 ; X86-AVX512VL-NEXT: vpinsrw $0, %edx, %xmm0, %xmm7
1383 ; X86-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm1[0],xmm7[0],xmm1[1],xmm7[1],xmm1[2],xmm7[2],xmm1[3],xmm7[3]
1384 ; X86-AVX512VL-NEXT: movzwl 8(%eax), %ecx
1385 ; X86-AVX512VL-NEXT: vmovd %ecx, %xmm1
1386 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm1, %xmm1
1387 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm1, %xmm1
1388 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1389 ; X86-AVX512VL-NEXT: vmovd %xmm1, %ecx
1390 ; X86-AVX512VL-NEXT: vmovdqu (%esp), %xmm2 # 16-byte Reload
1391 ; X86-AVX512VL-NEXT: vpsrldq {{.*#+}} xmm1 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1392 ; X86-AVX512VL-NEXT: vpextrw $0, %xmm1, %edx
1393 ; X86-AVX512VL-NEXT: movzwl %dx, %edx
1394 ; X86-AVX512VL-NEXT: vmovd %edx, %xmm1
1395 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm1, %xmm1
1396 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm1, %xmm1
1397 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1398 ; X86-AVX512VL-NEXT: vmovd %xmm1, %edx
1399 ; X86-AVX512VL-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm1
1400 ; X86-AVX512VL-NEXT: vpinsrw $0, %edx, %xmm0, %xmm5
1401 ; X86-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm5[0],xmm1[1],xmm5[1],xmm1[2],xmm5[2],xmm1[3],xmm5[3]
1402 ; X86-AVX512VL-NEXT: vinserti128 $1, %xmm7, %ymm1, %ymm1
1403 ; X86-AVX512VL-NEXT: vinserti64x4 $1, %ymm6, %zmm1, %zmm1
1404 ; X86-AVX512VL-NEXT: vpunpckldq {{[-0-9]+}}(%e{{[sb]}}p), %zmm1, %zmm1 # 64-byte Folded Reload
1405 ; X86-AVX512VL-NEXT: # zmm1 = zmm1[0],mem[0],zmm1[1],mem[1],zmm1[4],mem[4],zmm1[5],mem[5],zmm1[8],mem[8],zmm1[9],mem[9],zmm1[12],mem[12],zmm1[13],mem[13]
1406 ; X86-AVX512VL-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%e{{[sb]}}p) # 64-byte Spill
1407 ; X86-AVX512VL-NEXT: movzwl 52(%eax), %ecx
1408 ; X86-AVX512VL-NEXT: vmovd %ecx, %xmm1
1409 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm1, %xmm1
1410 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm1, %xmm1
1411 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1412 ; X86-AVX512VL-NEXT: vmovd %xmm1, %ecx
1413 ; X86-AVX512VL-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm1
1414 ; X86-AVX512VL-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm3 # 16-byte Reload
1415 ; X86-AVX512VL-NEXT: vpsrlq $48, %xmm3, %xmm6
1416 ; X86-AVX512VL-NEXT: vpextrw $0, %xmm6, %ecx
1417 ; X86-AVX512VL-NEXT: movzwl %cx, %ecx
1418 ; X86-AVX512VL-NEXT: vmovd %ecx, %xmm6
1419 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm6, %xmm6
1420 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm6, %xmm6
1421 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm6, %xmm6
1422 ; X86-AVX512VL-NEXT: vmovd %xmm6, %ecx
1423 ; X86-AVX512VL-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm6
1424 ; X86-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm1[0],xmm6[0],xmm1[1],xmm6[1],xmm1[2],xmm6[2],xmm1[3],xmm6[3]
1425 ; X86-AVX512VL-NEXT: movzwl 36(%eax), %ecx
1426 ; X86-AVX512VL-NEXT: vmovd %ecx, %xmm1
1427 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm1, %xmm1
1428 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm1, %xmm1
1429 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1430 ; X86-AVX512VL-NEXT: vmovd %xmm1, %ecx
1431 ; X86-AVX512VL-NEXT: vpsrlq $48, %xmm4, %xmm1
1432 ; X86-AVX512VL-NEXT: vpextrw $0, %xmm1, %edx
1433 ; X86-AVX512VL-NEXT: movzwl %dx, %edx
1434 ; X86-AVX512VL-NEXT: vmovd %edx, %xmm1
1435 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm1, %xmm1
1436 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm1, %xmm1
1437 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1438 ; X86-AVX512VL-NEXT: vmovd %xmm1, %edx
1439 ; X86-AVX512VL-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm1
1440 ; X86-AVX512VL-NEXT: vpinsrw $0, %edx, %xmm0, %xmm7
1441 ; X86-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm7[0],xmm1[1],xmm7[1],xmm1[2],xmm7[2],xmm1[3],xmm7[3]
1442 ; X86-AVX512VL-NEXT: vinserti128 $1, %xmm6, %ymm1, %ymm6
1443 ; X86-AVX512VL-NEXT: movzwl 20(%eax), %ecx
1444 ; X86-AVX512VL-NEXT: vmovd %ecx, %xmm1
1445 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm1, %xmm1
1446 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm1, %xmm1
1447 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1448 ; X86-AVX512VL-NEXT: vmovd %xmm1, %ecx
1449 ; X86-AVX512VL-NEXT: vpsrlq $48, {{[-0-9]+}}(%e{{[sb]}}p), %xmm1 # 16-byte Folded Reload
1450 ; X86-AVX512VL-NEXT: vpextrw $0, %xmm1, %edx
1451 ; X86-AVX512VL-NEXT: movzwl %dx, %edx
1452 ; X86-AVX512VL-NEXT: vmovd %edx, %xmm1
1453 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm1, %xmm1
1454 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm1, %xmm1
1455 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1456 ; X86-AVX512VL-NEXT: vmovd %xmm1, %edx
1457 ; X86-AVX512VL-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm1
1458 ; X86-AVX512VL-NEXT: vpinsrw $0, %edx, %xmm0, %xmm7
1459 ; X86-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm1[0],xmm7[0],xmm1[1],xmm7[1],xmm1[2],xmm7[2],xmm1[3],xmm7[3]
1460 ; X86-AVX512VL-NEXT: movzwl 4(%eax), %eax
1461 ; X86-AVX512VL-NEXT: vmovd %eax, %xmm1
1462 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm1, %xmm1
1463 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm1, %xmm1
1464 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1465 ; X86-AVX512VL-NEXT: vmovd %xmm1, %eax
1466 ; X86-AVX512VL-NEXT: vpsrlq $48, %xmm2, %xmm1
1467 ; X86-AVX512VL-NEXT: vpextrw $0, %xmm1, %ecx
1468 ; X86-AVX512VL-NEXT: movzwl %cx, %ecx
1469 ; X86-AVX512VL-NEXT: vmovd %ecx, %xmm1
1470 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm1, %xmm1
1471 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm1, %xmm1
1472 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1473 ; X86-AVX512VL-NEXT: vmovd %xmm1, %ecx
1474 ; X86-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm1
1475 ; X86-AVX512VL-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm5
1476 ; X86-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm5[0],xmm1[1],xmm5[1],xmm1[2],xmm5[2],xmm1[3],xmm5[3]
1477 ; X86-AVX512VL-NEXT: vinserti128 $1, %xmm7, %ymm1, %ymm1
1478 ; X86-AVX512VL-NEXT: vinserti64x4 $1, %ymm6, %zmm1, %zmm6
1479 ; X86-AVX512VL-NEXT: vpextrw $0, %xmm3, %eax
1480 ; X86-AVX512VL-NEXT: movzwl %ax, %eax
1481 ; X86-AVX512VL-NEXT: vmovd %eax, %xmm1
1482 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm1, %xmm1
1483 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm1, %xmm1
1484 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1485 ; X86-AVX512VL-NEXT: vmovd %xmm1, %eax
1486 ; X86-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm1
1487 ; X86-AVX512VL-NEXT: vpsrld $16, %xmm3, %xmm4
1488 ; X86-AVX512VL-NEXT: vpextrw $0, %xmm4, %eax
1489 ; X86-AVX512VL-NEXT: movzwl %ax, %eax
1490 ; X86-AVX512VL-NEXT: vmovd %eax, %xmm4
1491 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm4, %xmm4
1492 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm4, %xmm4
1493 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm4, %xmm4
1494 ; X86-AVX512VL-NEXT: vmovd %xmm4, %eax
1495 ; X86-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm4
1496 ; X86-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm1[0],xmm4[0],xmm1[1],xmm4[1],xmm1[2],xmm4[2],xmm1[3],xmm4[3]
1497 ; X86-AVX512VL-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm2 # 16-byte Reload
1498 ; X86-AVX512VL-NEXT: vpextrw $0, %xmm2, %eax
1499 ; X86-AVX512VL-NEXT: movzwl %ax, %eax
1500 ; X86-AVX512VL-NEXT: vmovd %eax, %xmm1
1501 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm1, %xmm1
1502 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm1, %xmm1
1503 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1504 ; X86-AVX512VL-NEXT: vmovd %xmm1, %eax
1505 ; X86-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm1
1506 ; X86-AVX512VL-NEXT: vpsrld $16, %xmm2, %xmm3
1507 ; X86-AVX512VL-NEXT: vpextrw $0, %xmm3, %eax
1508 ; X86-AVX512VL-NEXT: movzwl %ax, %eax
1509 ; X86-AVX512VL-NEXT: vmovd %eax, %xmm3
1510 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm3, %xmm3
1511 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm3, %xmm3
1512 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm3, %xmm3
1513 ; X86-AVX512VL-NEXT: vmovd %xmm3, %eax
1514 ; X86-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm3
1515 ; X86-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
1516 ; X86-AVX512VL-NEXT: vinserti128 $1, %xmm4, %ymm1, %ymm3
1517 ; X86-AVX512VL-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm2 # 16-byte Reload
1518 ; X86-AVX512VL-NEXT: vpextrw $0, %xmm2, %eax
1519 ; X86-AVX512VL-NEXT: movzwl %ax, %eax
1520 ; X86-AVX512VL-NEXT: vmovd %eax, %xmm1
1521 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm1, %xmm1
1522 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm1, %xmm1
1523 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1524 ; X86-AVX512VL-NEXT: vmovd %xmm1, %eax
1525 ; X86-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm1
1526 ; X86-AVX512VL-NEXT: vpsrld $16, %xmm2, %xmm2
1527 ; X86-AVX512VL-NEXT: vpextrw $0, %xmm2, %eax
1528 ; X86-AVX512VL-NEXT: movzwl %ax, %eax
1529 ; X86-AVX512VL-NEXT: vmovd %eax, %xmm2
1530 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm2, %xmm2
1531 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm2, %xmm2
1532 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm2, %xmm2
1533 ; X86-AVX512VL-NEXT: vmovd %xmm2, %eax
1534 ; X86-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm2
1535 ; X86-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
1536 ; X86-AVX512VL-NEXT: vmovdqu (%esp), %xmm4 # 16-byte Reload
1537 ; X86-AVX512VL-NEXT: vpextrw $0, %xmm4, %eax
1538 ; X86-AVX512VL-NEXT: movzwl %ax, %eax
1539 ; X86-AVX512VL-NEXT: vmovd %eax, %xmm1
1540 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm1, %xmm1
1541 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm1, %xmm1
1542 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1543 ; X86-AVX512VL-NEXT: vmovd %xmm1, %eax
1544 ; X86-AVX512VL-NEXT: vpsrld $16, %xmm4, %xmm1
1545 ; X86-AVX512VL-NEXT: vpextrw $0, %xmm1, %ecx
1546 ; X86-AVX512VL-NEXT: movzwl %cx, %ecx
1547 ; X86-AVX512VL-NEXT: vmovd %ecx, %xmm1
1548 ; X86-AVX512VL-NEXT: vcvtph2ps %xmm1, %xmm1
1549 ; X86-AVX512VL-NEXT: vpand %xmm0, %xmm1, %xmm0
1550 ; X86-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm1
1551 ; X86-AVX512VL-NEXT: vcvtps2ph $4, %xmm0, %xmm0
1552 ; X86-AVX512VL-NEXT: vmovd %xmm0, %eax
1553 ; X86-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm0
1554 ; X86-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
1555 ; X86-AVX512VL-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm0
1556 ; X86-AVX512VL-NEXT: vinserti64x4 $1, %ymm3, %zmm0, %zmm0
1557 ; X86-AVX512VL-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm6[0],zmm0[1],zmm6[1],zmm0[4],zmm6[4],zmm0[5],zmm6[5],zmm0[8],zmm6[8],zmm0[9],zmm6[9],zmm0[12],zmm6[12],zmm0[13],zmm6[13]
1558 ; X86-AVX512VL-NEXT: vpunpcklqdq {{[-0-9]+}}(%e{{[sb]}}p), %zmm0, %zmm0 # 64-byte Folded Reload
1559 ; X86-AVX512VL-NEXT: # zmm0 = zmm0[0],mem[0],zmm0[2],mem[2],zmm0[4],mem[4],zmm0[6],mem[6]
1560 ; X86-AVX512VL-NEXT: addl $128, %esp
1561 ; X86-AVX512VL-NEXT: .cfi_def_cfa_offset 4
1562 ; X86-AVX512VL-NEXT: retl
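; Unlike the AVX512VL expansion above, which converts each element through
; vcvtph2ps/vpand/vcvtps2ph, AVX512FP16 below folds the entire v32f16 fabs into a single
; 512-bit vpandq with a broadcast mask.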
1564 ; X86-AVX512FP16-LABEL: fabs_v32f16:
1565 ; X86-AVX512FP16: # %bb.0:
1566 ; X86-AVX512FP16-NEXT: movl {{[0-9]+}}(%esp), %eax
1567 ; X86-AVX512FP16-NEXT: vpbroadcastw {{.*#+}} zmm0 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
1568 ; X86-AVX512FP16-NEXT: vpandq (%eax), %zmm0, %zmm0
1569 ; X86-AVX512FP16-NEXT: retl
1571 ; X86-AVX512VLDQ-LABEL: fabs_v32f16:
1572 ; X86-AVX512VLDQ: # %bb.0:
1573 ; X86-AVX512VLDQ-NEXT: subl $128, %esp
1574 ; X86-AVX512VLDQ-NEXT: .cfi_def_cfa_offset 132
1575 ; X86-AVX512VLDQ-NEXT: movl {{[0-9]+}}(%esp), %eax
1576 ; X86-AVX512VLDQ-NEXT: movzwl 60(%eax), %ecx
1577 ; X86-AVX512VLDQ-NEXT: vmovd %ecx, %xmm0
1578 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm0, %xmm1
1579 ; X86-AVX512VLDQ-NEXT: vpbroadcastd {{.*#+}} xmm0 = [NaN,NaN,NaN,NaN]
1580 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm1, %xmm1
1581 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1582 ; X86-AVX512VLDQ-NEXT: vmovd %xmm1, %ecx
1583 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm1
1584 ; X86-AVX512VLDQ-NEXT: vmovdqa 48(%eax), %xmm3
1585 ; X86-AVX512VLDQ-NEXT: vpsrldq {{.*#+}} xmm2 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1586 ; X86-AVX512VLDQ-NEXT: vmovdqa %xmm3, %xmm4
1587 ; X86-AVX512VLDQ-NEXT: vmovdqu %xmm3, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1588 ; X86-AVX512VLDQ-NEXT: vpextrw $0, %xmm2, %ecx
1589 ; X86-AVX512VLDQ-NEXT: movzwl %cx, %ecx
1590 ; X86-AVX512VLDQ-NEXT: vmovd %ecx, %xmm2
1591 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm2, %xmm2
1592 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm2, %xmm2
1593 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm2, %xmm2
1594 ; X86-AVX512VLDQ-NEXT: vmovd %xmm2, %ecx
1595 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm2
1596 ; X86-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
1597 ; X86-AVX512VLDQ-NEXT: movzwl 44(%eax), %ecx
1598 ; X86-AVX512VLDQ-NEXT: vmovd %ecx, %xmm2
1599 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm2, %xmm2
1600 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm2, %xmm2
1601 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm2, %xmm2
1602 ; X86-AVX512VLDQ-NEXT: vmovd %xmm2, %ecx
1603 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm2
1604 ; X86-AVX512VLDQ-NEXT: vmovdqa 32(%eax), %xmm3
1605 ; X86-AVX512VLDQ-NEXT: vpsrldq {{.*#+}} xmm5 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1606 ; X86-AVX512VLDQ-NEXT: vpextrw $0, %xmm5, %ecx
1607 ; X86-AVX512VLDQ-NEXT: movzwl %cx, %ecx
1608 ; X86-AVX512VLDQ-NEXT: vmovd %ecx, %xmm5
1609 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm5, %xmm5
1610 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm5, %xmm5
1611 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm5, %xmm5
1612 ; X86-AVX512VLDQ-NEXT: vmovd %xmm5, %ecx
1613 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm5
1614 ; X86-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1],xmm2[2],xmm5[2],xmm2[3],xmm5[3]
1615 ; X86-AVX512VLDQ-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm5
1616 ; X86-AVX512VLDQ-NEXT: movzwl 28(%eax), %ecx
1617 ; X86-AVX512VLDQ-NEXT: vmovd %ecx, %xmm1
1618 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm1, %xmm1
1619 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm1, %xmm1
1620 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1621 ; X86-AVX512VLDQ-NEXT: vmovd %xmm1, %ecx
1622 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm6
1623 ; X86-AVX512VLDQ-NEXT: vmovdqa (%eax), %xmm1
1624 ; X86-AVX512VLDQ-NEXT: vmovdqu %xmm1, (%esp) # 16-byte Spill
1625 ; X86-AVX512VLDQ-NEXT: vmovdqa 16(%eax), %xmm2
1626 ; X86-AVX512VLDQ-NEXT: vpsrldq {{.*#+}} xmm7 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1627 ; X86-AVX512VLDQ-NEXT: vmovdqu %xmm2, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1628 ; X86-AVX512VLDQ-NEXT: vpextrw $0, %xmm7, %ecx
1629 ; X86-AVX512VLDQ-NEXT: movzwl %cx, %ecx
1630 ; X86-AVX512VLDQ-NEXT: vmovd %ecx, %xmm7
1631 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm7, %xmm7
1632 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm7, %xmm7
1633 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm7, %xmm7
1634 ; X86-AVX512VLDQ-NEXT: vmovd %xmm7, %ecx
1635 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm7
1636 ; X86-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
1637 ; X86-AVX512VLDQ-NEXT: movzwl 12(%eax), %ecx
1638 ; X86-AVX512VLDQ-NEXT: vmovd %ecx, %xmm7
1639 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm7, %xmm7
1640 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm7, %xmm7
1641 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm7, %xmm7
1642 ; X86-AVX512VLDQ-NEXT: vmovd %xmm7, %ecx
1643 ; X86-AVX512VLDQ-NEXT: vpsrldq {{.*#+}} xmm7 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1644 ; X86-AVX512VLDQ-NEXT: vpextrw $0, %xmm7, %edx
1645 ; X86-AVX512VLDQ-NEXT: movzwl %dx, %edx
1646 ; X86-AVX512VLDQ-NEXT: vmovd %edx, %xmm7
1647 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm7, %xmm7
1648 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm7, %xmm7
1649 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm7, %xmm7
1650 ; X86-AVX512VLDQ-NEXT: vmovd %xmm7, %edx
1651 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm7
1652 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %edx, %xmm0, %xmm1
1653 ; X86-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm7[0],xmm1[0],xmm7[1],xmm1[1],xmm7[2],xmm1[2],xmm7[3],xmm1[3]
1654 ; X86-AVX512VLDQ-NEXT: vinserti128 $1, %xmm6, %ymm1, %ymm1
1655 ; X86-AVX512VLDQ-NEXT: vinserti64x4 $1, %ymm5, %zmm1, %zmm1
1656 ; X86-AVX512VLDQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%e{{[sb]}}p) # 64-byte Spill
1657 ; X86-AVX512VLDQ-NEXT: movzwl 56(%eax), %ecx
1658 ; X86-AVX512VLDQ-NEXT: vmovd %ecx, %xmm1
1659 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm1, %xmm1
1660 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm1, %xmm1
1661 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1662 ; X86-AVX512VLDQ-NEXT: vmovd %xmm1, %ecx
1663 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm1
1664 ; X86-AVX512VLDQ-NEXT: vpsrldq {{.*#+}} xmm6 = xmm4[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1665 ; X86-AVX512VLDQ-NEXT: vpextrw $0, %xmm6, %ecx
1666 ; X86-AVX512VLDQ-NEXT: movzwl %cx, %ecx
1667 ; X86-AVX512VLDQ-NEXT: vmovd %ecx, %xmm6
1668 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm6, %xmm6
1669 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm6, %xmm6
1670 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm6, %xmm6
1671 ; X86-AVX512VLDQ-NEXT: vmovd %xmm6, %ecx
1672 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm6
1673 ; X86-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm1[0],xmm6[0],xmm1[1],xmm6[1],xmm1[2],xmm6[2],xmm1[3],xmm6[3]
1674 ; X86-AVX512VLDQ-NEXT: movzwl 40(%eax), %ecx
1675 ; X86-AVX512VLDQ-NEXT: vmovd %ecx, %xmm1
1676 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm1, %xmm1
1677 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm1, %xmm1
1678 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1679 ; X86-AVX512VLDQ-NEXT: vmovd %xmm1, %ecx
1680 ; X86-AVX512VLDQ-NEXT: vpsrldq {{.*#+}} xmm1 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1681 ; X86-AVX512VLDQ-NEXT: vmovdqa %xmm3, %xmm4
1682 ; X86-AVX512VLDQ-NEXT: vmovdqu %xmm3, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
1683 ; X86-AVX512VLDQ-NEXT: vpextrw $0, %xmm1, %edx
1684 ; X86-AVX512VLDQ-NEXT: movzwl %dx, %edx
1685 ; X86-AVX512VLDQ-NEXT: vmovd %edx, %xmm1
1686 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm1, %xmm1
1687 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm1, %xmm1
1688 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1689 ; X86-AVX512VLDQ-NEXT: vmovd %xmm1, %edx
1690 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm1
1691 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %edx, %xmm0, %xmm7
1692 ; X86-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm7[0],xmm1[1],xmm7[1],xmm1[2],xmm7[2],xmm1[3],xmm7[3]
1693 ; X86-AVX512VLDQ-NEXT: vinserti128 $1, %xmm6, %ymm1, %ymm6
1694 ; X86-AVX512VLDQ-NEXT: movzwl 24(%eax), %ecx
1695 ; X86-AVX512VLDQ-NEXT: vmovd %ecx, %xmm1
1696 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm1, %xmm1
1697 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm1, %xmm1
1698 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1699 ; X86-AVX512VLDQ-NEXT: vmovd %xmm1, %ecx
1700 ; X86-AVX512VLDQ-NEXT: vpsrldq {{.*#+}} xmm1 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1701 ; X86-AVX512VLDQ-NEXT: vpextrw $0, %xmm1, %edx
1702 ; X86-AVX512VLDQ-NEXT: movzwl %dx, %edx
1703 ; X86-AVX512VLDQ-NEXT: vmovd %edx, %xmm1
1704 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm1, %xmm1
1705 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm1, %xmm1
1706 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1707 ; X86-AVX512VLDQ-NEXT: vmovd %xmm1, %edx
1708 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm1
1709 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %edx, %xmm0, %xmm7
1710 ; X86-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm1[0],xmm7[0],xmm1[1],xmm7[1],xmm1[2],xmm7[2],xmm1[3],xmm7[3]
1711 ; X86-AVX512VLDQ-NEXT: movzwl 8(%eax), %ecx
1712 ; X86-AVX512VLDQ-NEXT: vmovd %ecx, %xmm1
1713 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm1, %xmm1
1714 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm1, %xmm1
1715 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1716 ; X86-AVX512VLDQ-NEXT: vmovd %xmm1, %ecx
1717 ; X86-AVX512VLDQ-NEXT: vmovdqu (%esp), %xmm2 # 16-byte Reload
1718 ; X86-AVX512VLDQ-NEXT: vpsrldq {{.*#+}} xmm1 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1719 ; X86-AVX512VLDQ-NEXT: vpextrw $0, %xmm1, %edx
1720 ; X86-AVX512VLDQ-NEXT: movzwl %dx, %edx
1721 ; X86-AVX512VLDQ-NEXT: vmovd %edx, %xmm1
1722 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm1, %xmm1
1723 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm1, %xmm1
1724 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1725 ; X86-AVX512VLDQ-NEXT: vmovd %xmm1, %edx
1726 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm1
1727 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %edx, %xmm0, %xmm5
1728 ; X86-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm5[0],xmm1[1],xmm5[1],xmm1[2],xmm5[2],xmm1[3],xmm5[3]
1729 ; X86-AVX512VLDQ-NEXT: vinserti128 $1, %xmm7, %ymm1, %ymm1
1730 ; X86-AVX512VLDQ-NEXT: vinserti64x4 $1, %ymm6, %zmm1, %zmm1
1731 ; X86-AVX512VLDQ-NEXT: vpunpckldq {{[-0-9]+}}(%e{{[sb]}}p), %zmm1, %zmm1 # 64-byte Folded Reload
1732 ; X86-AVX512VLDQ-NEXT: # zmm1 = zmm1[0],mem[0],zmm1[1],mem[1],zmm1[4],mem[4],zmm1[5],mem[5],zmm1[8],mem[8],zmm1[9],mem[9],zmm1[12],mem[12],zmm1[13],mem[13]
1733 ; X86-AVX512VLDQ-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%e{{[sb]}}p) # 64-byte Spill
1734 ; X86-AVX512VLDQ-NEXT: movzwl 52(%eax), %ecx
1735 ; X86-AVX512VLDQ-NEXT: vmovd %ecx, %xmm1
1736 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm1, %xmm1
1737 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm1, %xmm1
1738 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1739 ; X86-AVX512VLDQ-NEXT: vmovd %xmm1, %ecx
1740 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm1
1741 ; X86-AVX512VLDQ-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm3 # 16-byte Reload
1742 ; X86-AVX512VLDQ-NEXT: vpsrlq $48, %xmm3, %xmm6
1743 ; X86-AVX512VLDQ-NEXT: vpextrw $0, %xmm6, %ecx
1744 ; X86-AVX512VLDQ-NEXT: movzwl %cx, %ecx
1745 ; X86-AVX512VLDQ-NEXT: vmovd %ecx, %xmm6
1746 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm6, %xmm6
1747 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm6, %xmm6
1748 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm6, %xmm6
1749 ; X86-AVX512VLDQ-NEXT: vmovd %xmm6, %ecx
1750 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm6
1751 ; X86-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm1[0],xmm6[0],xmm1[1],xmm6[1],xmm1[2],xmm6[2],xmm1[3],xmm6[3]
1752 ; X86-AVX512VLDQ-NEXT: movzwl 36(%eax), %ecx
1753 ; X86-AVX512VLDQ-NEXT: vmovd %ecx, %xmm1
1754 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm1, %xmm1
1755 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm1, %xmm1
1756 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1757 ; X86-AVX512VLDQ-NEXT: vmovd %xmm1, %ecx
1758 ; X86-AVX512VLDQ-NEXT: vpsrlq $48, %xmm4, %xmm1
1759 ; X86-AVX512VLDQ-NEXT: vpextrw $0, %xmm1, %edx
1760 ; X86-AVX512VLDQ-NEXT: movzwl %dx, %edx
1761 ; X86-AVX512VLDQ-NEXT: vmovd %edx, %xmm1
1762 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm1, %xmm1
1763 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm1, %xmm1
1764 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1765 ; X86-AVX512VLDQ-NEXT: vmovd %xmm1, %edx
1766 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm1
1767 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %edx, %xmm0, %xmm7
1768 ; X86-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm7[0],xmm1[1],xmm7[1],xmm1[2],xmm7[2],xmm1[3],xmm7[3]
1769 ; X86-AVX512VLDQ-NEXT: vinserti128 $1, %xmm6, %ymm1, %ymm6
1770 ; X86-AVX512VLDQ-NEXT: movzwl 20(%eax), %ecx
1771 ; X86-AVX512VLDQ-NEXT: vmovd %ecx, %xmm1
1772 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm1, %xmm1
1773 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm1, %xmm1
1774 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1775 ; X86-AVX512VLDQ-NEXT: vmovd %xmm1, %ecx
1776 ; X86-AVX512VLDQ-NEXT: vpsrlq $48, {{[-0-9]+}}(%e{{[sb]}}p), %xmm1 # 16-byte Folded Reload
1777 ; X86-AVX512VLDQ-NEXT: vpextrw $0, %xmm1, %edx
1778 ; X86-AVX512VLDQ-NEXT: movzwl %dx, %edx
1779 ; X86-AVX512VLDQ-NEXT: vmovd %edx, %xmm1
1780 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm1, %xmm1
1781 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm1, %xmm1
1782 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1783 ; X86-AVX512VLDQ-NEXT: vmovd %xmm1, %edx
1784 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm1
1785 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %edx, %xmm0, %xmm7
1786 ; X86-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm1[0],xmm7[0],xmm1[1],xmm7[1],xmm1[2],xmm7[2],xmm1[3],xmm7[3]
1787 ; X86-AVX512VLDQ-NEXT: movzwl 4(%eax), %eax
1788 ; X86-AVX512VLDQ-NEXT: vmovd %eax, %xmm1
1789 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm1, %xmm1
1790 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm1, %xmm1
1791 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1792 ; X86-AVX512VLDQ-NEXT: vmovd %xmm1, %eax
1793 ; X86-AVX512VLDQ-NEXT: vpsrlq $48, %xmm2, %xmm1
1794 ; X86-AVX512VLDQ-NEXT: vpextrw $0, %xmm1, %ecx
1795 ; X86-AVX512VLDQ-NEXT: movzwl %cx, %ecx
1796 ; X86-AVX512VLDQ-NEXT: vmovd %ecx, %xmm1
1797 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm1, %xmm1
1798 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm1, %xmm1
1799 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1800 ; X86-AVX512VLDQ-NEXT: vmovd %xmm1, %ecx
1801 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm1
1802 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %ecx, %xmm0, %xmm5
1803 ; X86-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm5[0],xmm1[1],xmm5[1],xmm1[2],xmm5[2],xmm1[3],xmm5[3]
1804 ; X86-AVX512VLDQ-NEXT: vinserti128 $1, %xmm7, %ymm1, %ymm1
1805 ; X86-AVX512VLDQ-NEXT: vinserti64x4 $1, %ymm6, %zmm1, %zmm6
1806 ; X86-AVX512VLDQ-NEXT: vpextrw $0, %xmm3, %eax
1807 ; X86-AVX512VLDQ-NEXT: movzwl %ax, %eax
1808 ; X86-AVX512VLDQ-NEXT: vmovd %eax, %xmm1
1809 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm1, %xmm1
1810 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm1, %xmm1
1811 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1812 ; X86-AVX512VLDQ-NEXT: vmovd %xmm1, %eax
1813 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm1
1814 ; X86-AVX512VLDQ-NEXT: vpsrld $16, %xmm3, %xmm4
1815 ; X86-AVX512VLDQ-NEXT: vpextrw $0, %xmm4, %eax
1816 ; X86-AVX512VLDQ-NEXT: movzwl %ax, %eax
1817 ; X86-AVX512VLDQ-NEXT: vmovd %eax, %xmm4
1818 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm4, %xmm4
1819 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm4, %xmm4
1820 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm4, %xmm4
1821 ; X86-AVX512VLDQ-NEXT: vmovd %xmm4, %eax
1822 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm4
1823 ; X86-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm1[0],xmm4[0],xmm1[1],xmm4[1],xmm1[2],xmm4[2],xmm1[3],xmm4[3]
1824 ; X86-AVX512VLDQ-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm2 # 16-byte Reload
1825 ; X86-AVX512VLDQ-NEXT: vpextrw $0, %xmm2, %eax
1826 ; X86-AVX512VLDQ-NEXT: movzwl %ax, %eax
1827 ; X86-AVX512VLDQ-NEXT: vmovd %eax, %xmm1
1828 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm1, %xmm1
1829 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm1, %xmm1
1830 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1831 ; X86-AVX512VLDQ-NEXT: vmovd %xmm1, %eax
1832 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm1
1833 ; X86-AVX512VLDQ-NEXT: vpsrld $16, %xmm2, %xmm3
1834 ; X86-AVX512VLDQ-NEXT: vpextrw $0, %xmm3, %eax
1835 ; X86-AVX512VLDQ-NEXT: movzwl %ax, %eax
1836 ; X86-AVX512VLDQ-NEXT: vmovd %eax, %xmm3
1837 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm3, %xmm3
1838 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm3, %xmm3
1839 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm3, %xmm3
1840 ; X86-AVX512VLDQ-NEXT: vmovd %xmm3, %eax
1841 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm3
1842 ; X86-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
1843 ; X86-AVX512VLDQ-NEXT: vinserti128 $1, %xmm4, %ymm1, %ymm3
1844 ; X86-AVX512VLDQ-NEXT: vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm2 # 16-byte Reload
1845 ; X86-AVX512VLDQ-NEXT: vpextrw $0, %xmm2, %eax
1846 ; X86-AVX512VLDQ-NEXT: movzwl %ax, %eax
1847 ; X86-AVX512VLDQ-NEXT: vmovd %eax, %xmm1
1848 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm1, %xmm1
1849 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm1, %xmm1
1850 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1851 ; X86-AVX512VLDQ-NEXT: vmovd %xmm1, %eax
1852 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm1
1853 ; X86-AVX512VLDQ-NEXT: vpsrld $16, %xmm2, %xmm2
1854 ; X86-AVX512VLDQ-NEXT: vpextrw $0, %xmm2, %eax
1855 ; X86-AVX512VLDQ-NEXT: movzwl %ax, %eax
1856 ; X86-AVX512VLDQ-NEXT: vmovd %eax, %xmm2
1857 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm2, %xmm2
1858 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm2, %xmm2
1859 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm2, %xmm2
1860 ; X86-AVX512VLDQ-NEXT: vmovd %xmm2, %eax
1861 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm2
1862 ; X86-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
1863 ; X86-AVX512VLDQ-NEXT: vmovdqu (%esp), %xmm4 # 16-byte Reload
1864 ; X86-AVX512VLDQ-NEXT: vpextrw $0, %xmm4, %eax
1865 ; X86-AVX512VLDQ-NEXT: movzwl %ax, %eax
1866 ; X86-AVX512VLDQ-NEXT: vmovd %eax, %xmm1
1867 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm1, %xmm1
1868 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm1, %xmm1
1869 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm1, %xmm1
1870 ; X86-AVX512VLDQ-NEXT: vmovd %xmm1, %eax
1871 ; X86-AVX512VLDQ-NEXT: vpsrld $16, %xmm4, %xmm1
1872 ; X86-AVX512VLDQ-NEXT: vpextrw $0, %xmm1, %ecx
1873 ; X86-AVX512VLDQ-NEXT: movzwl %cx, %ecx
1874 ; X86-AVX512VLDQ-NEXT: vmovd %ecx, %xmm1
1875 ; X86-AVX512VLDQ-NEXT: vcvtph2ps %xmm1, %xmm1
1876 ; X86-AVX512VLDQ-NEXT: vpand %xmm0, %xmm1, %xmm0
1877 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm1
1878 ; X86-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm0, %xmm0
1879 ; X86-AVX512VLDQ-NEXT: vmovd %xmm0, %eax
1880 ; X86-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm0
1881 ; X86-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
1882 ; X86-AVX512VLDQ-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm0
1883 ; X86-AVX512VLDQ-NEXT: vinserti64x4 $1, %ymm3, %zmm0, %zmm0
1884 ; X86-AVX512VLDQ-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm6[0],zmm0[1],zmm6[1],zmm0[4],zmm6[4],zmm0[5],zmm6[5],zmm0[8],zmm6[8],zmm0[9],zmm6[9],zmm0[12],zmm6[12],zmm0[13],zmm6[13]
1885 ; X86-AVX512VLDQ-NEXT: vpunpcklqdq {{[-0-9]+}}(%e{{[sb]}}p), %zmm0, %zmm0 # 64-byte Folded Reload
1886 ; X86-AVX512VLDQ-NEXT: # zmm0 = zmm0[0],mem[0],zmm0[2],mem[2],zmm0[4],mem[4],zmm0[6],mem[6]
1887 ; X86-AVX512VLDQ-NEXT: addl $128, %esp
1888 ; X86-AVX512VLDQ-NEXT: .cfi_def_cfa_offset 4
1889 ; X86-AVX512VLDQ-NEXT: retl
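; The 64-bit AVX1 lowering below mirrors the 32-bit one: the pointer argument is kept in
; %rbx across calls and every element is widened and re-truncated through
; __extendhfsf2/__truncsfhf2 via the PLT.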
1891 ; X64-AVX1-LABEL: fabs_v32f16:
1892 ; X64-AVX1: # %bb.0:
1893 ; X64-AVX1-NEXT: pushq %rbx
1894 ; X64-AVX1-NEXT: .cfi_def_cfa_offset 16
1895 ; X64-AVX1-NEXT: subq $128, %rsp
1896 ; X64-AVX1-NEXT: .cfi_def_cfa_offset 144
1897 ; X64-AVX1-NEXT: .cfi_offset %rbx, -16
1898 ; X64-AVX1-NEXT: movq %rdi, %rbx
1899 ; X64-AVX1-NEXT: vbroadcastss 28(%rdi), %xmm0
1900 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
1901 ; X64-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1902 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
1903 ; X64-AVX1-NEXT: vmovaps %xmm0, (%rsp) # 16-byte Spill
1904 ; X64-AVX1-NEXT: vmovaps (%rbx), %xmm0
1905 ; X64-AVX1-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1906 ; X64-AVX1-NEXT: vmovdqa 16(%rbx), %xmm1
1907 ; X64-AVX1-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1908 ; X64-AVX1-NEXT: vmovaps 32(%rbx), %xmm0
1909 ; X64-AVX1-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1910 ; X64-AVX1-NEXT: vmovaps 48(%rbx), %xmm0
1911 ; X64-AVX1-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1912 ; X64-AVX1-NEXT: vpsrldq {{.*#+}} xmm0 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1913 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
1914 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1915 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
1916 ; X64-AVX1-NEXT: vmovdqa (%rsp), %xmm1 # 16-byte Reload
1917 ; X64-AVX1-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
1918 ; X64-AVX1-NEXT: vmovdqa %xmm0, (%rsp) # 16-byte Spill
1919 ; X64-AVX1-NEXT: vbroadcastss 24(%rbx), %xmm0
1920 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
1921 ; X64-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1922 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
1923 ; X64-AVX1-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1924 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1925 ; X64-AVX1-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1926 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
1927 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1928 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
1929 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
1930 ; X64-AVX1-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
1931 ; X64-AVX1-NEXT: vpunpckldq (%rsp), %xmm0, %xmm0 # 16-byte Folded Reload
1932 ; X64-AVX1-NEXT: # xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
1933 ; X64-AVX1-NEXT: vmovdqa %xmm0, (%rsp) # 16-byte Spill
1934 ; X64-AVX1-NEXT: vbroadcastss 20(%rbx), %xmm0
1935 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
1936 ; X64-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1937 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
1938 ; X64-AVX1-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1939 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1940 ; X64-AVX1-NEXT: vpsrlq $48, %xmm0, %xmm0
1941 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
1942 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1943 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
1944 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
1945 ; X64-AVX1-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
1946 ; X64-AVX1-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1947 ; X64-AVX1-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1948 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
1949 ; X64-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1950 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
1951 ; X64-AVX1-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1952 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1953 ; X64-AVX1-NEXT: vpsrld $16, %xmm0, %xmm0
1954 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
1955 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1956 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
1957 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
1958 ; X64-AVX1-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
1959 ; X64-AVX1-NEXT: vpunpckldq {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
1960 ; X64-AVX1-NEXT: # xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
1961 ; X64-AVX1-NEXT: vpunpcklqdq (%rsp), %xmm0, %xmm0 # 16-byte Folded Reload
1962 ; X64-AVX1-NEXT: # xmm0 = xmm0[0],mem[0]
1963 ; X64-AVX1-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1964 ; X64-AVX1-NEXT: vbroadcastss 12(%rbx), %xmm0
1965 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
1966 ; X64-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1967 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
1968 ; X64-AVX1-NEXT: vmovaps %xmm0, (%rsp) # 16-byte Spill
1969 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1970 ; X64-AVX1-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1971 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
1972 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1973 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
1974 ; X64-AVX1-NEXT: vmovdqa (%rsp), %xmm1 # 16-byte Reload
1975 ; X64-AVX1-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
1976 ; X64-AVX1-NEXT: vmovdqa %xmm0, (%rsp) # 16-byte Spill
1977 ; X64-AVX1-NEXT: vbroadcastss 8(%rbx), %xmm0
1978 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
1979 ; X64-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1980 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
1981 ; X64-AVX1-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1982 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1983 ; X64-AVX1-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1984 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
1985 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1986 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
1987 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
1988 ; X64-AVX1-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
1989 ; X64-AVX1-NEXT: vpunpckldq (%rsp), %xmm0, %xmm0 # 16-byte Folded Reload
1990 ; X64-AVX1-NEXT: # xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
1991 ; X64-AVX1-NEXT: vmovdqa %xmm0, (%rsp) # 16-byte Spill
1992 ; X64-AVX1-NEXT: vbroadcastss 4(%rbx), %xmm0
1993 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
1994 ; X64-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
1995 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
1996 ; X64-AVX1-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1997 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1998 ; X64-AVX1-NEXT: vpsrlq $48, %xmm0, %xmm0
1999 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
2000 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2001 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
2002 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2003 ; X64-AVX1-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
2004 ; X64-AVX1-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2005 ; X64-AVX1-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2006 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
2007 ; X64-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2008 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
2009 ; X64-AVX1-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2010 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2011 ; X64-AVX1-NEXT: vpsrld $16, %xmm0, %xmm0
2012 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
2013 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2014 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
2015 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2016 ; X64-AVX1-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
2017 ; X64-AVX1-NEXT: vpunpckldq {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
2018 ; X64-AVX1-NEXT: # xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
2019 ; X64-AVX1-NEXT: vpunpcklqdq (%rsp), %xmm0, %xmm0 # 16-byte Folded Reload
2020 ; X64-AVX1-NEXT: # xmm0 = xmm0[0],mem[0]
2021 ; X64-AVX1-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 16-byte Folded Reload
2022 ; X64-AVX1-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2023 ; X64-AVX1-NEXT: vbroadcastss 60(%rbx), %xmm0
2024 ; X64-AVX1-NEXT: vzeroupper
2025 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
2026 ; X64-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2027 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
2028 ; X64-AVX1-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2029 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2030 ; X64-AVX1-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
2031 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
2032 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2033 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
2034 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2035 ; X64-AVX1-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
2036 ; X64-AVX1-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2037 ; X64-AVX1-NEXT: vbroadcastss 56(%rbx), %xmm0
2038 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
2039 ; X64-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2040 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
2041 ; X64-AVX1-NEXT: vmovaps %xmm0, (%rsp) # 16-byte Spill
2042 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2043 ; X64-AVX1-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
2044 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
2045 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2046 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
2047 ; X64-AVX1-NEXT: vmovdqa (%rsp), %xmm1 # 16-byte Reload
2048 ; X64-AVX1-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
2049 ; X64-AVX1-NEXT: vpunpckldq {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
2050 ; X64-AVX1-NEXT: # xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
2051 ; X64-AVX1-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2052 ; X64-AVX1-NEXT: vbroadcastss 52(%rbx), %xmm0
2053 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
2054 ; X64-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2055 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
2056 ; X64-AVX1-NEXT: vmovaps %xmm0, (%rsp) # 16-byte Spill
2057 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2058 ; X64-AVX1-NEXT: vpsrlq $48, %xmm0, %xmm0
2059 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
2060 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2061 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
2062 ; X64-AVX1-NEXT: vmovdqa (%rsp), %xmm1 # 16-byte Reload
2063 ; X64-AVX1-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
2064 ; X64-AVX1-NEXT: vmovdqa %xmm0, (%rsp) # 16-byte Spill
2065 ; X64-AVX1-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2066 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
2067 ; X64-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2068 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
2069 ; X64-AVX1-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2070 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2071 ; X64-AVX1-NEXT: vpsrld $16, %xmm0, %xmm0
2072 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
2073 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2074 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
2075 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2076 ; X64-AVX1-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
2077 ; X64-AVX1-NEXT: vpunpckldq (%rsp), %xmm0, %xmm0 # 16-byte Folded Reload
2078 ; X64-AVX1-NEXT: # xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
2079 ; X64-AVX1-NEXT: vpunpcklqdq {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
2080 ; X64-AVX1-NEXT: # xmm0 = xmm0[0],mem[0]
2081 ; X64-AVX1-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2082 ; X64-AVX1-NEXT: vbroadcastss 44(%rbx), %xmm0
2083 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
2084 ; X64-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2085 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
2086 ; X64-AVX1-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2087 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2088 ; X64-AVX1-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
2089 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
2090 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2091 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
2092 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2093 ; X64-AVX1-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
2094 ; X64-AVX1-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2095 ; X64-AVX1-NEXT: vbroadcastss 40(%rbx), %xmm0
2096 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
2097 ; X64-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2098 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
2099 ; X64-AVX1-NEXT: vmovaps %xmm0, (%rsp) # 16-byte Spill
2100 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2101 ; X64-AVX1-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
2102 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
2103 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2104 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
2105 ; X64-AVX1-NEXT: vmovdqa (%rsp), %xmm1 # 16-byte Reload
2106 ; X64-AVX1-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
2107 ; X64-AVX1-NEXT: vpunpckldq {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
2108 ; X64-AVX1-NEXT: # xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
2109 ; X64-AVX1-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2110 ; X64-AVX1-NEXT: vbroadcastss 36(%rbx), %xmm0
2111 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
2112 ; X64-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2113 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
2114 ; X64-AVX1-NEXT: vmovaps %xmm0, (%rsp) # 16-byte Spill
2115 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2116 ; X64-AVX1-NEXT: vpsrlq $48, %xmm0, %xmm0
2117 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
2118 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2119 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
2120 ; X64-AVX1-NEXT: vmovdqa (%rsp), %xmm1 # 16-byte Reload
2121 ; X64-AVX1-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
2122 ; X64-AVX1-NEXT: vmovdqa %xmm0, (%rsp) # 16-byte Spill
2123 ; X64-AVX1-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2124 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
2125 ; X64-AVX1-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2126 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
2127 ; X64-AVX1-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2128 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2129 ; X64-AVX1-NEXT: vpsrld $16, %xmm0, %xmm0
2130 ; X64-AVX1-NEXT: callq __extendhfsf2@PLT
2131 ; X64-AVX1-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
2132 ; X64-AVX1-NEXT: callq __truncsfhf2@PLT
2133 ; X64-AVX1-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2134 ; X64-AVX1-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
2135 ; X64-AVX1-NEXT: vpunpckldq (%rsp), %xmm0, %xmm0 # 16-byte Folded Reload
2136 ; X64-AVX1-NEXT: # xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
2137 ; X64-AVX1-NEXT: vpunpcklqdq {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
2138 ; X64-AVX1-NEXT: # xmm0 = xmm0[0],mem[0]
2139 ; X64-AVX1-NEXT: vinsertf128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm1 # 16-byte Folded Reload
2140 ; X64-AVX1-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2141 ; X64-AVX1-NEXT: addq $128, %rsp
2142 ; X64-AVX1-NEXT: .cfi_def_cfa_offset 16
2143 ; X64-AVX1-NEXT: popq %rbx
2144 ; X64-AVX1-NEXT: .cfi_def_cfa_offset 8
2145 ; X64-AVX1-NEXT: retq
2147 ; X64-AVX2-LABEL: fabs_v32f16:
2148 ; X64-AVX2: # %bb.0:
2149 ; X64-AVX2-NEXT: vpbroadcastw {{.*#+}} ymm1 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
2150 ; X64-AVX2-NEXT: vpand (%rdi), %ymm1, %ymm0
2151 ; X64-AVX2-NEXT: vpand 32(%rdi), %ymm1, %ymm1
2152 ; X64-AVX2-NEXT: retq
2154 ; X64-AVX512VL-LABEL: fabs_v32f16:
2155 ; X64-AVX512VL: # %bb.0:
2156 ; X64-AVX512VL-NEXT: movzwl 60(%rdi), %eax
2157 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm0
2158 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm0, %xmm1
2159 ; X64-AVX512VL-NEXT: vpbroadcastd {{.*#+}} xmm0 = [NaN,NaN,NaN,NaN]
2160 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm1, %xmm1
2161 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm1, %xmm1
2162 ; X64-AVX512VL-NEXT: vmovd %xmm1, %eax
2163 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm5
2164 ; X64-AVX512VL-NEXT: vmovdqa (%rdi), %xmm1
2165 ; X64-AVX512VL-NEXT: vmovdqa 16(%rdi), %xmm2
2166 ; X64-AVX512VL-NEXT: vmovdqa 32(%rdi), %xmm3
2167 ; X64-AVX512VL-NEXT: vmovdqa 48(%rdi), %xmm4
2168 ; X64-AVX512VL-NEXT: vpsrldq {{.*#+}} xmm6 = xmm4[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
2169 ; X64-AVX512VL-NEXT: vpextrw $0, %xmm6, %eax
2170 ; X64-AVX512VL-NEXT: movzwl %ax, %eax
2171 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm6
2172 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm6, %xmm6
2173 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm6, %xmm6
2174 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm6, %xmm6
2175 ; X64-AVX512VL-NEXT: vmovd %xmm6, %eax
2176 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm6
2177 ; X64-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
2178 ; X64-AVX512VL-NEXT: movzwl 44(%rdi), %eax
2179 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm6
2180 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm6, %xmm6
2181 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm6, %xmm6
2182 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm6, %xmm6
2183 ; X64-AVX512VL-NEXT: vmovd %xmm6, %eax
2184 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm6
2185 ; X64-AVX512VL-NEXT: vpsrldq {{.*#+}} xmm7 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
2186 ; X64-AVX512VL-NEXT: vpextrw $0, %xmm7, %eax
2187 ; X64-AVX512VL-NEXT: movzwl %ax, %eax
2188 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm7
2189 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm7, %xmm7
2190 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm7, %xmm7
2191 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm7, %xmm7
2192 ; X64-AVX512VL-NEXT: vmovd %xmm7, %eax
2193 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm7
2194 ; X64-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
2195 ; X64-AVX512VL-NEXT: vinserti128 $1, %xmm5, %ymm6, %ymm5
2196 ; X64-AVX512VL-NEXT: movzwl 28(%rdi), %eax
2197 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm6
2198 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm6, %xmm6
2199 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm6, %xmm6
2200 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm6, %xmm6
2201 ; X64-AVX512VL-NEXT: vmovd %xmm6, %eax
2202 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm6
2203 ; X64-AVX512VL-NEXT: vpsrldq {{.*#+}} xmm7 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
2204 ; X64-AVX512VL-NEXT: vpextrw $0, %xmm7, %eax
2205 ; X64-AVX512VL-NEXT: movzwl %ax, %eax
2206 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm7
2207 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm7, %xmm7
2208 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm7, %xmm7
2209 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm7, %xmm7
2210 ; X64-AVX512VL-NEXT: vmovd %xmm7, %eax
2211 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm7
2212 ; X64-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
2213 ; X64-AVX512VL-NEXT: movzwl 12(%rdi), %eax
2214 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm7
2215 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm7, %xmm7
2216 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm7, %xmm7
2217 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm7, %xmm7
2218 ; X64-AVX512VL-NEXT: vmovd %xmm7, %eax
2219 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm7
2220 ; X64-AVX512VL-NEXT: vpsrldq {{.*#+}} xmm8 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
2221 ; X64-AVX512VL-NEXT: vpextrw $0, %xmm8, %eax
2222 ; X64-AVX512VL-NEXT: movzwl %ax, %eax
2223 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm8
2224 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm8, %xmm8
2225 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm8, %xmm8
2226 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm8, %xmm8
2227 ; X64-AVX512VL-NEXT: vmovd %xmm8, %eax
2228 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm8
2229 ; X64-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
2230 ; X64-AVX512VL-NEXT: vinserti128 $1, %xmm6, %ymm7, %ymm6
2231 ; X64-AVX512VL-NEXT: vinserti64x4 $1, %ymm5, %zmm6, %zmm5
2232 ; X64-AVX512VL-NEXT: movzwl 56(%rdi), %eax
2233 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm6
2234 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm6, %xmm6
2235 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm6, %xmm6
2236 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm6, %xmm6
2237 ; X64-AVX512VL-NEXT: vmovd %xmm6, %eax
2238 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm6
2239 ; X64-AVX512VL-NEXT: vpsrldq {{.*#+}} xmm7 = xmm4[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
2240 ; X64-AVX512VL-NEXT: vpextrw $0, %xmm7, %eax
2241 ; X64-AVX512VL-NEXT: movzwl %ax, %eax
2242 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm7
2243 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm7, %xmm7
2244 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm7, %xmm7
2245 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm7, %xmm7
2246 ; X64-AVX512VL-NEXT: vmovd %xmm7, %eax
2247 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm7
2248 ; X64-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
2249 ; X64-AVX512VL-NEXT: movzwl 40(%rdi), %eax
2250 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm7
2251 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm7, %xmm7
2252 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm7, %xmm7
2253 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm7, %xmm7
2254 ; X64-AVX512VL-NEXT: vmovd %xmm7, %eax
2255 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm7
2256 ; X64-AVX512VL-NEXT: vpsrldq {{.*#+}} xmm8 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
2257 ; X64-AVX512VL-NEXT: vpextrw $0, %xmm8, %eax
2258 ; X64-AVX512VL-NEXT: movzwl %ax, %eax
2259 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm8
2260 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm8, %xmm8
2261 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm8, %xmm8
2262 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm8, %xmm8
2263 ; X64-AVX512VL-NEXT: vmovd %xmm8, %eax
2264 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm8
2265 ; X64-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
2266 ; X64-AVX512VL-NEXT: vinserti128 $1, %xmm6, %ymm7, %ymm6
2267 ; X64-AVX512VL-NEXT: movzwl 24(%rdi), %eax
2268 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm7
2269 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm7, %xmm7
2270 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm7, %xmm7
2271 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm7, %xmm7
2272 ; X64-AVX512VL-NEXT: vmovd %xmm7, %eax
2273 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm7
2274 ; X64-AVX512VL-NEXT: vpsrldq {{.*#+}} xmm8 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
2275 ; X64-AVX512VL-NEXT: vpextrw $0, %xmm8, %eax
2276 ; X64-AVX512VL-NEXT: movzwl %ax, %eax
2277 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm8
2278 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm8, %xmm8
2279 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm8, %xmm8
2280 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm8, %xmm8
2281 ; X64-AVX512VL-NEXT: vmovd %xmm8, %eax
2282 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm8
2283 ; X64-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
2284 ; X64-AVX512VL-NEXT: movzwl 8(%rdi), %eax
2285 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm8
2286 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm8, %xmm8
2287 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm8, %xmm8
2288 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm8, %xmm8
2289 ; X64-AVX512VL-NEXT: vmovd %xmm8, %eax
2290 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm8
2291 ; X64-AVX512VL-NEXT: vpsrldq {{.*#+}} xmm9 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
2292 ; X64-AVX512VL-NEXT: vpextrw $0, %xmm9, %eax
2293 ; X64-AVX512VL-NEXT: movzwl %ax, %eax
2294 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm9
2295 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm9, %xmm9
2296 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm9, %xmm9
2297 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm9, %xmm9
2298 ; X64-AVX512VL-NEXT: vmovd %xmm9, %eax
2299 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm9
2300 ; X64-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3]
2301 ; X64-AVX512VL-NEXT: vinserti128 $1, %xmm7, %ymm8, %ymm7
2302 ; X64-AVX512VL-NEXT: vinserti64x4 $1, %ymm6, %zmm7, %zmm6
2303 ; X64-AVX512VL-NEXT: vpunpckldq {{.*#+}} zmm5 = zmm6[0],zmm5[0],zmm6[1],zmm5[1],zmm6[4],zmm5[4],zmm6[5],zmm5[5],zmm6[8],zmm5[8],zmm6[9],zmm5[9],zmm6[12],zmm5[12],zmm6[13],zmm5[13]
2304 ; X64-AVX512VL-NEXT: movzwl 52(%rdi), %eax
2305 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm6
2306 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm6, %xmm6
2307 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm6, %xmm6
2308 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm6, %xmm6
2309 ; X64-AVX512VL-NEXT: vmovd %xmm6, %eax
2310 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm6
2311 ; X64-AVX512VL-NEXT: vpsrlq $48, %xmm4, %xmm7
2312 ; X64-AVX512VL-NEXT: vpextrw $0, %xmm7, %eax
2313 ; X64-AVX512VL-NEXT: movzwl %ax, %eax
2314 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm7
2315 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm7, %xmm7
2316 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm7, %xmm7
2317 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm7, %xmm7
2318 ; X64-AVX512VL-NEXT: vmovd %xmm7, %eax
2319 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm7
2320 ; X64-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
2321 ; X64-AVX512VL-NEXT: movzwl 36(%rdi), %eax
2322 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm7
2323 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm7, %xmm7
2324 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm7, %xmm7
2325 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm7, %xmm7
2326 ; X64-AVX512VL-NEXT: vmovd %xmm7, %eax
2327 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm7
2328 ; X64-AVX512VL-NEXT: vpsrlq $48, %xmm3, %xmm8
2329 ; X64-AVX512VL-NEXT: vpextrw $0, %xmm8, %eax
2330 ; X64-AVX512VL-NEXT: movzwl %ax, %eax
2331 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm8
2332 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm8, %xmm8
2333 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm8, %xmm8
2334 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm8, %xmm8
2335 ; X64-AVX512VL-NEXT: vmovd %xmm8, %eax
2336 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm8
2337 ; X64-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
2338 ; X64-AVX512VL-NEXT: vinserti128 $1, %xmm6, %ymm7, %ymm6
2339 ; X64-AVX512VL-NEXT: movzwl 20(%rdi), %eax
2340 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm7
2341 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm7, %xmm7
2342 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm7, %xmm7
2343 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm7, %xmm7
2344 ; X64-AVX512VL-NEXT: vmovd %xmm7, %eax
2345 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm7
2346 ; X64-AVX512VL-NEXT: vpsrlq $48, %xmm2, %xmm8
2347 ; X64-AVX512VL-NEXT: vpextrw $0, %xmm8, %eax
2348 ; X64-AVX512VL-NEXT: movzwl %ax, %eax
2349 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm8
2350 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm8, %xmm8
2351 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm8, %xmm8
2352 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm8, %xmm8
2353 ; X64-AVX512VL-NEXT: vmovd %xmm8, %eax
2354 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm8
2355 ; X64-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
2356 ; X64-AVX512VL-NEXT: movzwl 4(%rdi), %eax
2357 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm8
2358 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm8, %xmm8
2359 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm8, %xmm8
2360 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm8, %xmm8
2361 ; X64-AVX512VL-NEXT: vmovd %xmm8, %eax
2362 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm8
2363 ; X64-AVX512VL-NEXT: vpsrlq $48, %xmm1, %xmm9
2364 ; X64-AVX512VL-NEXT: vpextrw $0, %xmm9, %eax
2365 ; X64-AVX512VL-NEXT: movzwl %ax, %eax
2366 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm9
2367 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm9, %xmm9
2368 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm9, %xmm9
2369 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm9, %xmm9
2370 ; X64-AVX512VL-NEXT: vmovd %xmm9, %eax
2371 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm9
2372 ; X64-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3]
2373 ; X64-AVX512VL-NEXT: vinserti128 $1, %xmm7, %ymm8, %ymm7
2374 ; X64-AVX512VL-NEXT: vinserti64x4 $1, %ymm6, %zmm7, %zmm6
2375 ; X64-AVX512VL-NEXT: vpextrw $0, %xmm4, %eax
2376 ; X64-AVX512VL-NEXT: movzwl %ax, %eax
2377 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm7
2378 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm7, %xmm7
2379 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm7, %xmm7
2380 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm7, %xmm7
2381 ; X64-AVX512VL-NEXT: vmovd %xmm7, %eax
2382 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm7
2383 ; X64-AVX512VL-NEXT: vpsrld $16, %xmm4, %xmm4
2384 ; X64-AVX512VL-NEXT: vpextrw $0, %xmm4, %eax
2385 ; X64-AVX512VL-NEXT: movzwl %ax, %eax
2386 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm4
2387 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm4, %xmm4
2388 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm4, %xmm4
2389 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm4, %xmm4
2390 ; X64-AVX512VL-NEXT: vmovd %xmm4, %eax
2391 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm4
2392 ; X64-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm7[0],xmm4[0],xmm7[1],xmm4[1],xmm7[2],xmm4[2],xmm7[3],xmm4[3]
2393 ; X64-AVX512VL-NEXT: vpextrw $0, %xmm3, %eax
2394 ; X64-AVX512VL-NEXT: movzwl %ax, %eax
2395 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm7
2396 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm7, %xmm7
2397 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm7, %xmm7
2398 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm7, %xmm7
2399 ; X64-AVX512VL-NEXT: vmovd %xmm7, %eax
2400 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm7
2401 ; X64-AVX512VL-NEXT: vpsrld $16, %xmm3, %xmm3
2402 ; X64-AVX512VL-NEXT: vpextrw $0, %xmm3, %eax
2403 ; X64-AVX512VL-NEXT: movzwl %ax, %eax
2404 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm3
2405 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm3, %xmm3
2406 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm3, %xmm3
2407 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm3, %xmm3
2408 ; X64-AVX512VL-NEXT: vmovd %xmm3, %eax
2409 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm3
2410 ; X64-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm7[0],xmm3[0],xmm7[1],xmm3[1],xmm7[2],xmm3[2],xmm7[3],xmm3[3]
2411 ; X64-AVX512VL-NEXT: vinserti128 $1, %xmm4, %ymm3, %ymm3
2412 ; X64-AVX512VL-NEXT: vpextrw $0, %xmm2, %eax
2413 ; X64-AVX512VL-NEXT: movzwl %ax, %eax
2414 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm4
2415 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm4, %xmm4
2416 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm4, %xmm4
2417 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm4, %xmm4
2418 ; X64-AVX512VL-NEXT: vmovd %xmm4, %eax
2419 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm4
2420 ; X64-AVX512VL-NEXT: vpsrld $16, %xmm2, %xmm2
2421 ; X64-AVX512VL-NEXT: vpextrw $0, %xmm2, %eax
2422 ; X64-AVX512VL-NEXT: movzwl %ax, %eax
2423 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm2
2424 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm2, %xmm2
2425 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm2, %xmm2
2426 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm2, %xmm2
2427 ; X64-AVX512VL-NEXT: vmovd %xmm2, %eax
2428 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm2
2429 ; X64-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
2430 ; X64-AVX512VL-NEXT: vpextrw $0, %xmm1, %eax
2431 ; X64-AVX512VL-NEXT: movzwl %ax, %eax
2432 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm4
2433 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm4, %xmm4
2434 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm4, %xmm4
2435 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm4, %xmm4
2436 ; X64-AVX512VL-NEXT: vmovd %xmm4, %eax
2437 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm4
2438 ; X64-AVX512VL-NEXT: vpsrld $16, %xmm1, %xmm1
2439 ; X64-AVX512VL-NEXT: vpextrw $0, %xmm1, %eax
2440 ; X64-AVX512VL-NEXT: movzwl %ax, %eax
2441 ; X64-AVX512VL-NEXT: vmovd %eax, %xmm1
2442 ; X64-AVX512VL-NEXT: vcvtph2ps %xmm1, %xmm1
2443 ; X64-AVX512VL-NEXT: vpand %xmm0, %xmm1, %xmm0
2444 ; X64-AVX512VL-NEXT: vcvtps2ph $4, %xmm0, %xmm0
2445 ; X64-AVX512VL-NEXT: vmovd %xmm0, %eax
2446 ; X64-AVX512VL-NEXT: vpinsrw $0, %eax, %xmm0, %xmm0
2447 ; X64-AVX512VL-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm4[0],xmm0[0],xmm4[1],xmm0[1],xmm4[2],xmm0[2],xmm4[3],xmm0[3]
2448 ; X64-AVX512VL-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm0
2449 ; X64-AVX512VL-NEXT: vinserti64x4 $1, %ymm3, %zmm0, %zmm0
2450 ; X64-AVX512VL-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm6[0],zmm0[1],zmm6[1],zmm0[4],zmm6[4],zmm0[5],zmm6[5],zmm0[8],zmm6[8],zmm0[9],zmm6[9],zmm0[12],zmm6[12],zmm0[13],zmm6[13]
2451 ; X64-AVX512VL-NEXT: vpunpcklqdq {{.*#+}} zmm0 = zmm0[0],zmm5[0],zmm0[2],zmm5[2],zmm0[4],zmm5[4],zmm0[6],zmm5[6]
2452 ; X64-AVX512VL-NEXT: retq
2454 ; X64-AVX512FP16-LABEL: fabs_v32f16:
2455 ; X64-AVX512FP16: # %bb.0:
2456 ; X64-AVX512FP16-NEXT: vpbroadcastw {{.*#+}} zmm0 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
2457 ; X64-AVX512FP16-NEXT: vpandq (%rdi), %zmm0, %zmm0
2458 ; X64-AVX512FP16-NEXT: retq
2460 ; X64-AVX512VLDQ-LABEL: fabs_v32f16:
2461 ; X64-AVX512VLDQ: # %bb.0:
2462 ; X64-AVX512VLDQ-NEXT: movzwl 60(%rdi), %eax
2463 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm0
2464 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm0, %xmm1
2465 ; X64-AVX512VLDQ-NEXT: vpbroadcastd {{.*#+}} xmm0 = [NaN,NaN,NaN,NaN]
2466 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm1, %xmm1
2467 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm1, %xmm1
2468 ; X64-AVX512VLDQ-NEXT: vmovd %xmm1, %eax
2469 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm5
2470 ; X64-AVX512VLDQ-NEXT: vmovdqa (%rdi), %xmm1
2471 ; X64-AVX512VLDQ-NEXT: vmovdqa 16(%rdi), %xmm2
2472 ; X64-AVX512VLDQ-NEXT: vmovdqa 32(%rdi), %xmm3
2473 ; X64-AVX512VLDQ-NEXT: vmovdqa 48(%rdi), %xmm4
2474 ; X64-AVX512VLDQ-NEXT: vpsrldq {{.*#+}} xmm6 = xmm4[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
2475 ; X64-AVX512VLDQ-NEXT: vpextrw $0, %xmm6, %eax
2476 ; X64-AVX512VLDQ-NEXT: movzwl %ax, %eax
2477 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm6
2478 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm6, %xmm6
2479 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm6, %xmm6
2480 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm6, %xmm6
2481 ; X64-AVX512VLDQ-NEXT: vmovd %xmm6, %eax
2482 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm6
2483 ; X64-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
2484 ; X64-AVX512VLDQ-NEXT: movzwl 44(%rdi), %eax
2485 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm6
2486 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm6, %xmm6
2487 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm6, %xmm6
2488 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm6, %xmm6
2489 ; X64-AVX512VLDQ-NEXT: vmovd %xmm6, %eax
2490 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm6
2491 ; X64-AVX512VLDQ-NEXT: vpsrldq {{.*#+}} xmm7 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
2492 ; X64-AVX512VLDQ-NEXT: vpextrw $0, %xmm7, %eax
2493 ; X64-AVX512VLDQ-NEXT: movzwl %ax, %eax
2494 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm7
2495 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm7, %xmm7
2496 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm7, %xmm7
2497 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm7, %xmm7
2498 ; X64-AVX512VLDQ-NEXT: vmovd %xmm7, %eax
2499 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm7
2500 ; X64-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
2501 ; X64-AVX512VLDQ-NEXT: vinserti128 $1, %xmm5, %ymm6, %ymm5
2502 ; X64-AVX512VLDQ-NEXT: movzwl 28(%rdi), %eax
2503 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm6
2504 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm6, %xmm6
2505 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm6, %xmm6
2506 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm6, %xmm6
2507 ; X64-AVX512VLDQ-NEXT: vmovd %xmm6, %eax
2508 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm6
2509 ; X64-AVX512VLDQ-NEXT: vpsrldq {{.*#+}} xmm7 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
2510 ; X64-AVX512VLDQ-NEXT: vpextrw $0, %xmm7, %eax
2511 ; X64-AVX512VLDQ-NEXT: movzwl %ax, %eax
2512 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm7
2513 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm7, %xmm7
2514 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm7, %xmm7
2515 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm7, %xmm7
2516 ; X64-AVX512VLDQ-NEXT: vmovd %xmm7, %eax
2517 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm7
2518 ; X64-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
2519 ; X64-AVX512VLDQ-NEXT: movzwl 12(%rdi), %eax
2520 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm7
2521 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm7, %xmm7
2522 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm7, %xmm7
2523 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm7, %xmm7
2524 ; X64-AVX512VLDQ-NEXT: vmovd %xmm7, %eax
2525 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm7
2526 ; X64-AVX512VLDQ-NEXT: vpsrldq {{.*#+}} xmm8 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
2527 ; X64-AVX512VLDQ-NEXT: vpextrw $0, %xmm8, %eax
2528 ; X64-AVX512VLDQ-NEXT: movzwl %ax, %eax
2529 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm8
2530 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm8, %xmm8
2531 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm8, %xmm8
2532 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm8, %xmm8
2533 ; X64-AVX512VLDQ-NEXT: vmovd %xmm8, %eax
2534 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm8
2535 ; X64-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
2536 ; X64-AVX512VLDQ-NEXT: vinserti128 $1, %xmm6, %ymm7, %ymm6
2537 ; X64-AVX512VLDQ-NEXT: vinserti64x4 $1, %ymm5, %zmm6, %zmm5
2538 ; X64-AVX512VLDQ-NEXT: movzwl 56(%rdi), %eax
2539 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm6
2540 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm6, %xmm6
2541 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm6, %xmm6
2542 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm6, %xmm6
2543 ; X64-AVX512VLDQ-NEXT: vmovd %xmm6, %eax
2544 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm6
2545 ; X64-AVX512VLDQ-NEXT: vpsrldq {{.*#+}} xmm7 = xmm4[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
2546 ; X64-AVX512VLDQ-NEXT: vpextrw $0, %xmm7, %eax
2547 ; X64-AVX512VLDQ-NEXT: movzwl %ax, %eax
2548 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm7
2549 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm7, %xmm7
2550 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm7, %xmm7
2551 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm7, %xmm7
2552 ; X64-AVX512VLDQ-NEXT: vmovd %xmm7, %eax
2553 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm7
2554 ; X64-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
2555 ; X64-AVX512VLDQ-NEXT: movzwl 40(%rdi), %eax
2556 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm7
2557 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm7, %xmm7
2558 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm7, %xmm7
2559 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm7, %xmm7
2560 ; X64-AVX512VLDQ-NEXT: vmovd %xmm7, %eax
2561 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm7
2562 ; X64-AVX512VLDQ-NEXT: vpsrldq {{.*#+}} xmm8 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
2563 ; X64-AVX512VLDQ-NEXT: vpextrw $0, %xmm8, %eax
2564 ; X64-AVX512VLDQ-NEXT: movzwl %ax, %eax
2565 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm8
2566 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm8, %xmm8
2567 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm8, %xmm8
2568 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm8, %xmm8
2569 ; X64-AVX512VLDQ-NEXT: vmovd %xmm8, %eax
2570 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm8
2571 ; X64-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
2572 ; X64-AVX512VLDQ-NEXT: vinserti128 $1, %xmm6, %ymm7, %ymm6
2573 ; X64-AVX512VLDQ-NEXT: movzwl 24(%rdi), %eax
2574 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm7
2575 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm7, %xmm7
2576 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm7, %xmm7
2577 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm7, %xmm7
2578 ; X64-AVX512VLDQ-NEXT: vmovd %xmm7, %eax
2579 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm7
2580 ; X64-AVX512VLDQ-NEXT: vpsrldq {{.*#+}} xmm8 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
2581 ; X64-AVX512VLDQ-NEXT: vpextrw $0, %xmm8, %eax
2582 ; X64-AVX512VLDQ-NEXT: movzwl %ax, %eax
2583 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm8
2584 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm8, %xmm8
2585 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm8, %xmm8
2586 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm8, %xmm8
2587 ; X64-AVX512VLDQ-NEXT: vmovd %xmm8, %eax
2588 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm8
2589 ; X64-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
2590 ; X64-AVX512VLDQ-NEXT: movzwl 8(%rdi), %eax
2591 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm8
2592 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm8, %xmm8
2593 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm8, %xmm8
2594 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm8, %xmm8
2595 ; X64-AVX512VLDQ-NEXT: vmovd %xmm8, %eax
2596 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm8
2597 ; X64-AVX512VLDQ-NEXT: vpsrldq {{.*#+}} xmm9 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
2598 ; X64-AVX512VLDQ-NEXT: vpextrw $0, %xmm9, %eax
2599 ; X64-AVX512VLDQ-NEXT: movzwl %ax, %eax
2600 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm9
2601 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm9, %xmm9
2602 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm9, %xmm9
2603 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm9, %xmm9
2604 ; X64-AVX512VLDQ-NEXT: vmovd %xmm9, %eax
2605 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm9
2606 ; X64-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3]
2607 ; X64-AVX512VLDQ-NEXT: vinserti128 $1, %xmm7, %ymm8, %ymm7
2608 ; X64-AVX512VLDQ-NEXT: vinserti64x4 $1, %ymm6, %zmm7, %zmm6
2609 ; X64-AVX512VLDQ-NEXT: vpunpckldq {{.*#+}} zmm5 = zmm6[0],zmm5[0],zmm6[1],zmm5[1],zmm6[4],zmm5[4],zmm6[5],zmm5[5],zmm6[8],zmm5[8],zmm6[9],zmm5[9],zmm6[12],zmm5[12],zmm6[13],zmm5[13]
2610 ; X64-AVX512VLDQ-NEXT: movzwl 52(%rdi), %eax
2611 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm6
2612 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm6, %xmm6
2613 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm6, %xmm6
2614 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm6, %xmm6
2615 ; X64-AVX512VLDQ-NEXT: vmovd %xmm6, %eax
2616 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm6
2617 ; X64-AVX512VLDQ-NEXT: vpsrlq $48, %xmm4, %xmm7
2618 ; X64-AVX512VLDQ-NEXT: vpextrw $0, %xmm7, %eax
2619 ; X64-AVX512VLDQ-NEXT: movzwl %ax, %eax
2620 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm7
2621 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm7, %xmm7
2622 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm7, %xmm7
2623 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm7, %xmm7
2624 ; X64-AVX512VLDQ-NEXT: vmovd %xmm7, %eax
2625 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm7
2626 ; X64-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
2627 ; X64-AVX512VLDQ-NEXT: movzwl 36(%rdi), %eax
2628 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm7
2629 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm7, %xmm7
2630 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm7, %xmm7
2631 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm7, %xmm7
2632 ; X64-AVX512VLDQ-NEXT: vmovd %xmm7, %eax
2633 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm7
2634 ; X64-AVX512VLDQ-NEXT: vpsrlq $48, %xmm3, %xmm8
2635 ; X64-AVX512VLDQ-NEXT: vpextrw $0, %xmm8, %eax
2636 ; X64-AVX512VLDQ-NEXT: movzwl %ax, %eax
2637 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm8
2638 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm8, %xmm8
2639 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm8, %xmm8
2640 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm8, %xmm8
2641 ; X64-AVX512VLDQ-NEXT: vmovd %xmm8, %eax
2642 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm8
2643 ; X64-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
2644 ; X64-AVX512VLDQ-NEXT: vinserti128 $1, %xmm6, %ymm7, %ymm6
2645 ; X64-AVX512VLDQ-NEXT: movzwl 20(%rdi), %eax
2646 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm7
2647 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm7, %xmm7
2648 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm7, %xmm7
2649 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm7, %xmm7
2650 ; X64-AVX512VLDQ-NEXT: vmovd %xmm7, %eax
2651 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm7
2652 ; X64-AVX512VLDQ-NEXT: vpsrlq $48, %xmm2, %xmm8
2653 ; X64-AVX512VLDQ-NEXT: vpextrw $0, %xmm8, %eax
2654 ; X64-AVX512VLDQ-NEXT: movzwl %ax, %eax
2655 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm8
2656 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm8, %xmm8
2657 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm8, %xmm8
2658 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm8, %xmm8
2659 ; X64-AVX512VLDQ-NEXT: vmovd %xmm8, %eax
2660 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm8
2661 ; X64-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
2662 ; X64-AVX512VLDQ-NEXT: movzwl 4(%rdi), %eax
2663 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm8
2664 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm8, %xmm8
2665 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm8, %xmm8
2666 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm8, %xmm8
2667 ; X64-AVX512VLDQ-NEXT: vmovd %xmm8, %eax
2668 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm8
2669 ; X64-AVX512VLDQ-NEXT: vpsrlq $48, %xmm1, %xmm9
2670 ; X64-AVX512VLDQ-NEXT: vpextrw $0, %xmm9, %eax
2671 ; X64-AVX512VLDQ-NEXT: movzwl %ax, %eax
2672 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm9
2673 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm9, %xmm9
2674 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm9, %xmm9
2675 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm9, %xmm9
2676 ; X64-AVX512VLDQ-NEXT: vmovd %xmm9, %eax
2677 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm9
2678 ; X64-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3]
2679 ; X64-AVX512VLDQ-NEXT: vinserti128 $1, %xmm7, %ymm8, %ymm7
2680 ; X64-AVX512VLDQ-NEXT: vinserti64x4 $1, %ymm6, %zmm7, %zmm6
2681 ; X64-AVX512VLDQ-NEXT: vpextrw $0, %xmm4, %eax
2682 ; X64-AVX512VLDQ-NEXT: movzwl %ax, %eax
2683 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm7
2684 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm7, %xmm7
2685 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm7, %xmm7
2686 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm7, %xmm7
2687 ; X64-AVX512VLDQ-NEXT: vmovd %xmm7, %eax
2688 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm7
2689 ; X64-AVX512VLDQ-NEXT: vpsrld $16, %xmm4, %xmm4
2690 ; X64-AVX512VLDQ-NEXT: vpextrw $0, %xmm4, %eax
2691 ; X64-AVX512VLDQ-NEXT: movzwl %ax, %eax
2692 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm4
2693 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm4, %xmm4
2694 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm4, %xmm4
2695 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm4, %xmm4
2696 ; X64-AVX512VLDQ-NEXT: vmovd %xmm4, %eax
2697 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm4
2698 ; X64-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm7[0],xmm4[0],xmm7[1],xmm4[1],xmm7[2],xmm4[2],xmm7[3],xmm4[3]
2699 ; X64-AVX512VLDQ-NEXT: vpextrw $0, %xmm3, %eax
2700 ; X64-AVX512VLDQ-NEXT: movzwl %ax, %eax
2701 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm7
2702 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm7, %xmm7
2703 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm7, %xmm7
2704 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm7, %xmm7
2705 ; X64-AVX512VLDQ-NEXT: vmovd %xmm7, %eax
2706 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm7
2707 ; X64-AVX512VLDQ-NEXT: vpsrld $16, %xmm3, %xmm3
2708 ; X64-AVX512VLDQ-NEXT: vpextrw $0, %xmm3, %eax
2709 ; X64-AVX512VLDQ-NEXT: movzwl %ax, %eax
2710 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm3
2711 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm3, %xmm3
2712 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm3, %xmm3
2713 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm3, %xmm3
2714 ; X64-AVX512VLDQ-NEXT: vmovd %xmm3, %eax
2715 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm3
2716 ; X64-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm7[0],xmm3[0],xmm7[1],xmm3[1],xmm7[2],xmm3[2],xmm7[3],xmm3[3]
2717 ; X64-AVX512VLDQ-NEXT: vinserti128 $1, %xmm4, %ymm3, %ymm3
2718 ; X64-AVX512VLDQ-NEXT: vpextrw $0, %xmm2, %eax
2719 ; X64-AVX512VLDQ-NEXT: movzwl %ax, %eax
2720 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm4
2721 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm4, %xmm4
2722 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm4, %xmm4
2723 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm4, %xmm4
2724 ; X64-AVX512VLDQ-NEXT: vmovd %xmm4, %eax
2725 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm4
2726 ; X64-AVX512VLDQ-NEXT: vpsrld $16, %xmm2, %xmm2
2727 ; X64-AVX512VLDQ-NEXT: vpextrw $0, %xmm2, %eax
2728 ; X64-AVX512VLDQ-NEXT: movzwl %ax, %eax
2729 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm2
2730 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm2, %xmm2
2731 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm2, %xmm2
2732 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm2, %xmm2
2733 ; X64-AVX512VLDQ-NEXT: vmovd %xmm2, %eax
2734 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm2
2735 ; X64-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
2736 ; X64-AVX512VLDQ-NEXT: vpextrw $0, %xmm1, %eax
2737 ; X64-AVX512VLDQ-NEXT: movzwl %ax, %eax
2738 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm4
2739 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm4, %xmm4
2740 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm4, %xmm4
2741 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm4, %xmm4
2742 ; X64-AVX512VLDQ-NEXT: vmovd %xmm4, %eax
2743 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm4
2744 ; X64-AVX512VLDQ-NEXT: vpsrld $16, %xmm1, %xmm1
2745 ; X64-AVX512VLDQ-NEXT: vpextrw $0, %xmm1, %eax
2746 ; X64-AVX512VLDQ-NEXT: movzwl %ax, %eax
2747 ; X64-AVX512VLDQ-NEXT: vmovd %eax, %xmm1
2748 ; X64-AVX512VLDQ-NEXT: vcvtph2ps %xmm1, %xmm1
2749 ; X64-AVX512VLDQ-NEXT: vpand %xmm0, %xmm1, %xmm0
2750 ; X64-AVX512VLDQ-NEXT: vcvtps2ph $4, %xmm0, %xmm0
2751 ; X64-AVX512VLDQ-NEXT: vmovd %xmm0, %eax
2752 ; X64-AVX512VLDQ-NEXT: vpinsrw $0, %eax, %xmm0, %xmm0
2753 ; X64-AVX512VLDQ-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm4[0],xmm0[0],xmm4[1],xmm0[1],xmm4[2],xmm0[2],xmm4[3],xmm0[3]
2754 ; X64-AVX512VLDQ-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm0
2755 ; X64-AVX512VLDQ-NEXT: vinserti64x4 $1, %ymm3, %zmm0, %zmm0
2756 ; X64-AVX512VLDQ-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm6[0],zmm0[1],zmm6[1],zmm0[4],zmm6[4],zmm0[5],zmm6[5],zmm0[8],zmm6[8],zmm0[9],zmm6[9],zmm0[12],zmm6[12],zmm0[13],zmm6[13]
2757 ; X64-AVX512VLDQ-NEXT: vpunpcklqdq {{.*#+}} zmm0 = zmm0[0],zmm5[0],zmm0[2],zmm5[2],zmm0[4],zmm5[4],zmm0[6],zmm5[6]
2758 ; X64-AVX512VLDQ-NEXT: retq
2759 %v = load <32 x half>, ptr %p, align 64
2760 %nnv = call <32 x half> @llvm.fabs.v32f16(<32 x half> %v)
2761 ret <32 x half> %nnv
2762 }
2763 declare <32 x half> @llvm.fabs.v32f16(<32 x half> %p)
2765 ; PR20354: when generating code for a vector fabs op,
2766 ; make sure that we're only turning off the sign bit of each float value.
2767 ; No constant pool loads or vector ops are needed for the fabs of a
2768 ; bitcasted integer constant; we should just return an integer constant
2769 ; that has the sign bits turned off.
2771 ; So instead of something like this:
2772 ; movabsq (constant pool load of mask for sign bits)
2773 ; vmovq (move from integer register to vector/fp register)
2774 ; vandps (mask off sign bits)
2775 ; vmovq (move vector/fp register back to integer return register)
2777 ; We should generate:
2778 ; mov (put constant value in return register)
2780 define i64 @fabs_v2f32_1() {
2781 ; X86-LABEL: fabs_v2f32_1:
2782 ; X86: # %bb.0:
2783 ; X86-NEXT: xorl %eax, %eax
2784 ; X86-NEXT: movl $2147483647, %edx # imm = 0x7FFFFFFF
2785 ; X86-NEXT: retl
2787 ; X64-LABEL: fabs_v2f32_1:
2788 ; X64: # %bb.0:
2789 ; X64-NEXT: movabsq $9223372032559808512, %rax # imm = 0x7FFFFFFF00000000
2790 ; X64-NEXT: retq
2791 %bitcast = bitcast i64 18446744069414584320 to <2 x float> ; 0xFFFF_FFFF_0000_0000
2792 %fabs = call <2 x float> @llvm.fabs.v2f32(<2 x float> %bitcast)
2793 %ret = bitcast <2 x float> %fabs to i64
2794 ret i64 %ret
2795 }
2797 define i64 @fabs_v2f32_2() {
2798 ; X86-LABEL: fabs_v2f32_2:
2799 ; X86: # %bb.0:
2800 ; X86-NEXT: movl $2147483647, %eax # imm = 0x7FFFFFFF
2801 ; X86-NEXT: xorl %edx, %edx
2802 ; X86-NEXT: retl
2804 ; X64-LABEL: fabs_v2f32_2:
2805 ; X64: # %bb.0:
2806 ; X64-NEXT: movl $2147483647, %eax # imm = 0x7FFFFFFF
2807 ; X64-NEXT: retq
2808 %bitcast = bitcast i64 4294967295 to <2 x float> ; 0x0000_0000_FFFF_FFFF
2809 %fabs = call <2 x float> @llvm.fabs.v2f32(<2 x float> %bitcast)
2810 %ret = bitcast <2 x float> %fabs to i64
2811 ret i64 %ret
2812 }
2814 declare <2 x float> @llvm.fabs.v2f32(<2 x float> %p)
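; Editor's note (a hedged sketch, not FileCheck-verified): the two tests above fold fabs on a
; bitcasted integer constant to a plain AND with the per-lane f32 sign mask 0x7FFF_FFFF_7FFF_FFFF:
;   0xFFFF_FFFF_0000_0000 & 0x7FFF_FFFF_7FFF_FFFF = 0x7FFF_FFFF_0000_0000 (fabs_v2f32_1)
;   0x0000_0000_FFFF_FFFF & 0x7FFF_FFFF_7FFF_FFFF = 0x0000_0000_7FFF_FFFF (fabs_v2f32_2)
; which matches the movabsq/movl immediates checked in those functions.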
2816 ; PR70947 - remove duplicate xmm/ymm constant loads
2817 define void @PR70947(ptr %src, ptr %dst) {
2818 ; X86-LABEL: PR70947:
2819 ; X86: # %bb.0:
2820 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
2821 ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
2822 ; X86-NEXT: vbroadcastsd {{.*#+}} ymm0 = [NaN,NaN,NaN,NaN]
2823 ; X86-NEXT: vandps (%ecx), %ymm0, %ymm1
2824 ; X86-NEXT: vandps 32(%ecx), %xmm0, %xmm0
2825 ; X86-NEXT: vmovups %ymm1, (%eax)
2826 ; X86-NEXT: vmovups %xmm0, 16(%eax)
2827 ; X86-NEXT: vzeroupper
2828 ; X86-NEXT: retl
2830 ; X64-LABEL: PR70947:
2831 ; X64: # %bb.0:
2832 ; X64-NEXT: vbroadcastsd {{.*#+}} ymm0 = [NaN,NaN,NaN,NaN]
2833 ; X64-NEXT: vandps (%rdi), %ymm0, %ymm1
2834 ; X64-NEXT: vandps 32(%rdi), %xmm0, %xmm0
2835 ; X64-NEXT: vmovups %ymm1, (%rsi)
2836 ; X64-NEXT: vmovups %xmm0, 16(%rsi)
2837 ; X64-NEXT: vzeroupper
2838 ; X64-NEXT: retq
2839 %src4 = getelementptr inbounds double, ptr %src, i64 4
2840 %dst4 = getelementptr inbounds i32, ptr %dst, i64 4
2841 %ld0 = load <4 x double>, ptr %src, align 8
2842 %ld4 = load <2 x double>, ptr %src4, align 8
2843 %fabs0 = tail call <4 x double> @llvm.fabs.v4f64(<4 x double> %ld0)
2844 %fabs4 = tail call <2 x double> @llvm.fabs.v2f64(<2 x double> %ld4)
2845 store <4 x double> %fabs0, ptr %dst, align 4
2846 store <2 x double> %fabs4, ptr %dst4, align 4