1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc < %s -mtriple=i686-unknown -mattr=+sse2 | FileCheck %s --check-prefix=X86-SSE
3 ; RUN: llc < %s -mtriple=i686-unknown -mattr=+avx | FileCheck %s --check-prefix=X86-AVX
4 ; RUN: llc < %s -mtriple=i686-unknown -mattr=+avx512f,+avx512vl | FileCheck %s --check-prefix=X86-AVX512F
5 ; RUN: llc < %s -mtriple=i686-unknown -mattr=+avx512f,+avx512dq,+avx512vl | FileCheck %s --check-prefix=X86-AVX512DQ
6 ; RUN: llc < %s -mtriple=x86_64-unknown -mattr=+sse2 | FileCheck %s --check-prefix=X64-SSE
7 ; RUN: llc < %s -mtriple=x86_64-unknown -mattr=+avx | FileCheck %s --check-prefix=X64-AVX
8 ; RUN: llc < %s -mtriple=x86_64-unknown -mattr=+avx512f,+avx512vl | FileCheck %s --check-prefix=X64-AVX512F
9 ; RUN: llc < %s -mtriple=x86_64-unknown -mattr=+avx512f,+avx512dq,+avx512vl | FileCheck %s --check-prefix=X64-AVX512DQ
; sitofp of <2 x i64> whose lanes are pre-masked with 255 / 65535, so every
; value provably fits in a signed 32-bit range. Non-DQ targets therefore
; narrow the qwords to dwords (pshufd on SSE, vpshufb on AVX/AVX512F) and
; convert with cvtdq2pd; AVX512DQ targets keep the i64 lanes and convert
; directly with vcvtqq2pd after the mask.
13 define <2 x double> @mask_sitofp_2i64_2f64(<2 x i64> %a) nounwind {
14 ; X86-SSE-LABEL: mask_sitofp_2i64_2f64:
16 ; X86-SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
17 ; X86-SSE-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
18 ; X86-SSE-NEXT: cvtdq2pd %xmm0, %xmm0
21 ; X86-AVX-LABEL: mask_sitofp_2i64_2f64:
23 ; X86-AVX-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[8,9],zero,zero,xmm0[u,u,u,u,u,u,u,u]
24 ; X86-AVX-NEXT: vcvtdq2pd %xmm0, %xmm0
27 ; X86-AVX512F-LABEL: mask_sitofp_2i64_2f64:
28 ; X86-AVX512F: # %bb.0:
29 ; X86-AVX512F-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[8,9],zero,zero,xmm0[u,u,u,u,u,u,u,u]
30 ; X86-AVX512F-NEXT: vcvtdq2pd %xmm0, %xmm0
31 ; X86-AVX512F-NEXT: retl
33 ; X86-AVX512DQ-LABEL: mask_sitofp_2i64_2f64:
34 ; X86-AVX512DQ: # %bb.0:
35 ; X86-AVX512DQ-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
36 ; X86-AVX512DQ-NEXT: vcvtqq2pd %xmm0, %xmm0
37 ; X86-AVX512DQ-NEXT: retl
39 ; X64-SSE-LABEL: mask_sitofp_2i64_2f64:
41 ; X64-SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
42 ; X64-SSE-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
43 ; X64-SSE-NEXT: cvtdq2pd %xmm0, %xmm0
46 ; X64-AVX-LABEL: mask_sitofp_2i64_2f64:
48 ; X64-AVX-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[8,9],zero,zero,xmm0[u,u,u,u,u,u,u,u]
49 ; X64-AVX-NEXT: vcvtdq2pd %xmm0, %xmm0
52 ; X64-AVX512F-LABEL: mask_sitofp_2i64_2f64:
53 ; X64-AVX512F: # %bb.0:
54 ; X64-AVX512F-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[8,9],zero,zero,xmm0[u,u,u,u,u,u,u,u]
55 ; X64-AVX512F-NEXT: vcvtdq2pd %xmm0, %xmm0
56 ; X64-AVX512F-NEXT: retq
58 ; X64-AVX512DQ-LABEL: mask_sitofp_2i64_2f64:
59 ; X64-AVX512DQ: # %bb.0:
60 ; X64-AVX512DQ-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
61 ; X64-AVX512DQ-NEXT: vcvtqq2pd %xmm0, %xmm0
62 ; X64-AVX512DQ-NEXT: retq
63 %and = and <2 x i64> %a, <i64 255, i64 65535>
64 %cvt = sitofp <2 x i64> %and to <2 x double>
; uitofp variant of the test above: the same 255 / 65535 mask makes the lanes
; small and non-negative, so the generated code is identical to the signed
; case — signed cvtdq2pd is safe after narrowing to i32 on non-DQ targets,
; and AVX512DQ can use the signed vcvtqq2pd even for this unsigned convert.
68 define <2 x double> @mask_uitofp_2i64_2f64(<2 x i64> %a) nounwind {
69 ; X86-SSE-LABEL: mask_uitofp_2i64_2f64:
71 ; X86-SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
72 ; X86-SSE-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
73 ; X86-SSE-NEXT: cvtdq2pd %xmm0, %xmm0
76 ; X86-AVX-LABEL: mask_uitofp_2i64_2f64:
78 ; X86-AVX-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[8,9],zero,zero,xmm0[u,u,u,u,u,u,u,u]
79 ; X86-AVX-NEXT: vcvtdq2pd %xmm0, %xmm0
82 ; X86-AVX512F-LABEL: mask_uitofp_2i64_2f64:
83 ; X86-AVX512F: # %bb.0:
84 ; X86-AVX512F-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[8,9],zero,zero,xmm0[u,u,u,u,u,u,u,u]
85 ; X86-AVX512F-NEXT: vcvtdq2pd %xmm0, %xmm0
86 ; X86-AVX512F-NEXT: retl
88 ; X86-AVX512DQ-LABEL: mask_uitofp_2i64_2f64:
89 ; X86-AVX512DQ: # %bb.0:
90 ; X86-AVX512DQ-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
91 ; X86-AVX512DQ-NEXT: vcvtqq2pd %xmm0, %xmm0
92 ; X86-AVX512DQ-NEXT: retl
94 ; X64-SSE-LABEL: mask_uitofp_2i64_2f64:
96 ; X64-SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
97 ; X64-SSE-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
98 ; X64-SSE-NEXT: cvtdq2pd %xmm0, %xmm0
101 ; X64-AVX-LABEL: mask_uitofp_2i64_2f64:
103 ; X64-AVX-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[8,9],zero,zero,xmm0[u,u,u,u,u,u,u,u]
104 ; X64-AVX-NEXT: vcvtdq2pd %xmm0, %xmm0
107 ; X64-AVX512F-LABEL: mask_uitofp_2i64_2f64:
108 ; X64-AVX512F: # %bb.0:
109 ; X64-AVX512F-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[8,9],zero,zero,xmm0[u,u,u,u,u,u,u,u]
110 ; X64-AVX512F-NEXT: vcvtdq2pd %xmm0, %xmm0
111 ; X64-AVX512F-NEXT: retq
113 ; X64-AVX512DQ-LABEL: mask_uitofp_2i64_2f64:
114 ; X64-AVX512DQ: # %bb.0:
115 ; X64-AVX512DQ-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
116 ; X64-AVX512DQ-NEXT: vcvtqq2pd %xmm0, %xmm0
117 ; X64-AVX512DQ-NEXT: retq
118 %and = and <2 x i64> %a, <i64 255, i64 65535>
119 %cvt = uitofp <2 x i64> %and to <2 x double>
120 ret <2 x double> %cvt
; sitofp of <4 x i64> pre-masked with 127/255/4095/65535 (all within i32
; range) down to <4 x float>. Non-DQ targets truncate the four qwords to
; dwords first — shufps lane-packing on SSE/AVX, vpmovqd on AVX512F — then
; use cvtdq2ps. AVX512DQ masks the full ymm and converts the i64 lanes
; directly with vcvtqq2ps.
123 define <4 x float> @mask_sitofp_4i64_4f32(<4 x i64> %a) nounwind {
124 ; X86-SSE-LABEL: mask_sitofp_4i64_4f32:
126 ; X86-SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,2],xmm1[0,2]
127 ; X86-SSE-NEXT: andps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
128 ; X86-SSE-NEXT: cvtdq2ps %xmm0, %xmm0
131 ; X86-AVX-LABEL: mask_sitofp_4i64_4f32:
133 ; X86-AVX-NEXT: vextractf128 $1, %ymm0, %xmm1
134 ; X86-AVX-NEXT: vshufps {{.*#+}} xmm0 = xmm0[0,2],xmm1[0,2]
135 ; X86-AVX-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
136 ; X86-AVX-NEXT: vcvtdq2ps %xmm0, %xmm0
137 ; X86-AVX-NEXT: vzeroupper
140 ; X86-AVX512F-LABEL: mask_sitofp_4i64_4f32:
141 ; X86-AVX512F: # %bb.0:
142 ; X86-AVX512F-NEXT: vpmovqd %ymm0, %xmm0
143 ; X86-AVX512F-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
144 ; X86-AVX512F-NEXT: vcvtdq2ps %xmm0, %xmm0
145 ; X86-AVX512F-NEXT: vzeroupper
146 ; X86-AVX512F-NEXT: retl
148 ; X86-AVX512DQ-LABEL: mask_sitofp_4i64_4f32:
149 ; X86-AVX512DQ: # %bb.0:
150 ; X86-AVX512DQ-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %ymm0, %ymm0
151 ; X86-AVX512DQ-NEXT: vcvtqq2ps %ymm0, %xmm0
152 ; X86-AVX512DQ-NEXT: vzeroupper
153 ; X86-AVX512DQ-NEXT: retl
155 ; X64-SSE-LABEL: mask_sitofp_4i64_4f32:
157 ; X64-SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,2],xmm1[0,2]
158 ; X64-SSE-NEXT: andps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
159 ; X64-SSE-NEXT: cvtdq2ps %xmm0, %xmm0
162 ; X64-AVX-LABEL: mask_sitofp_4i64_4f32:
164 ; X64-AVX-NEXT: vextractf128 $1, %ymm0, %xmm1
165 ; X64-AVX-NEXT: vshufps {{.*#+}} xmm0 = xmm0[0,2],xmm1[0,2]
166 ; X64-AVX-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
167 ; X64-AVX-NEXT: vcvtdq2ps %xmm0, %xmm0
168 ; X64-AVX-NEXT: vzeroupper
171 ; X64-AVX512F-LABEL: mask_sitofp_4i64_4f32:
172 ; X64-AVX512F: # %bb.0:
173 ; X64-AVX512F-NEXT: vpmovqd %ymm0, %xmm0
174 ; X64-AVX512F-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
175 ; X64-AVX512F-NEXT: vcvtdq2ps %xmm0, %xmm0
176 ; X64-AVX512F-NEXT: vzeroupper
177 ; X64-AVX512F-NEXT: retq
179 ; X64-AVX512DQ-LABEL: mask_sitofp_4i64_4f32:
180 ; X64-AVX512DQ: # %bb.0:
181 ; X64-AVX512DQ-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
182 ; X64-AVX512DQ-NEXT: vcvtqq2ps %ymm0, %xmm0
183 ; X64-AVX512DQ-NEXT: vzeroupper
184 ; X64-AVX512DQ-NEXT: retq
185 %and = and <4 x i64> %a, <i64 127, i64 255, i64 4095, i64 65535>
186 %cvt = sitofp <4 x i64> %and to <4 x float>
; uitofp variant of the 4i64->4f32 test: with the same small non-negative
; masks the unsigned convert lowers to exactly the same instruction
; sequences as the signed one (cvtdq2ps after narrowing, or vcvtqq2ps on
; AVX512DQ), since no masked value can be misinterpreted as negative.
190 define <4 x float> @mask_uitofp_4i64_4f32(<4 x i64> %a) nounwind {
191 ; X86-SSE-LABEL: mask_uitofp_4i64_4f32:
193 ; X86-SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,2],xmm1[0,2]
194 ; X86-SSE-NEXT: andps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
195 ; X86-SSE-NEXT: cvtdq2ps %xmm0, %xmm0
198 ; X86-AVX-LABEL: mask_uitofp_4i64_4f32:
200 ; X86-AVX-NEXT: vextractf128 $1, %ymm0, %xmm1
201 ; X86-AVX-NEXT: vshufps {{.*#+}} xmm0 = xmm0[0,2],xmm1[0,2]
202 ; X86-AVX-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
203 ; X86-AVX-NEXT: vcvtdq2ps %xmm0, %xmm0
204 ; X86-AVX-NEXT: vzeroupper
207 ; X86-AVX512F-LABEL: mask_uitofp_4i64_4f32:
208 ; X86-AVX512F: # %bb.0:
209 ; X86-AVX512F-NEXT: vpmovqd %ymm0, %xmm0
210 ; X86-AVX512F-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
211 ; X86-AVX512F-NEXT: vcvtdq2ps %xmm0, %xmm0
212 ; X86-AVX512F-NEXT: vzeroupper
213 ; X86-AVX512F-NEXT: retl
215 ; X86-AVX512DQ-LABEL: mask_uitofp_4i64_4f32:
216 ; X86-AVX512DQ: # %bb.0:
217 ; X86-AVX512DQ-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}, %ymm0, %ymm0
218 ; X86-AVX512DQ-NEXT: vcvtqq2ps %ymm0, %xmm0
219 ; X86-AVX512DQ-NEXT: vzeroupper
220 ; X86-AVX512DQ-NEXT: retl
222 ; X64-SSE-LABEL: mask_uitofp_4i64_4f32:
224 ; X64-SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,2],xmm1[0,2]
225 ; X64-SSE-NEXT: andps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
226 ; X64-SSE-NEXT: cvtdq2ps %xmm0, %xmm0
229 ; X64-AVX-LABEL: mask_uitofp_4i64_4f32:
231 ; X64-AVX-NEXT: vextractf128 $1, %ymm0, %xmm1
232 ; X64-AVX-NEXT: vshufps {{.*#+}} xmm0 = xmm0[0,2],xmm1[0,2]
233 ; X64-AVX-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
234 ; X64-AVX-NEXT: vcvtdq2ps %xmm0, %xmm0
235 ; X64-AVX-NEXT: vzeroupper
238 ; X64-AVX512F-LABEL: mask_uitofp_4i64_4f32:
239 ; X64-AVX512F: # %bb.0:
240 ; X64-AVX512F-NEXT: vpmovqd %ymm0, %xmm0
241 ; X64-AVX512F-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
242 ; X64-AVX512F-NEXT: vcvtdq2ps %xmm0, %xmm0
243 ; X64-AVX512F-NEXT: vzeroupper
244 ; X64-AVX512F-NEXT: retq
246 ; X64-AVX512DQ-LABEL: mask_uitofp_4i64_4f32:
247 ; X64-AVX512DQ: # %bb.0:
248 ; X64-AVX512DQ-NEXT: vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
249 ; X64-AVX512DQ-NEXT: vcvtqq2ps %ymm0, %xmm0
250 ; X64-AVX512DQ-NEXT: vzeroupper
251 ; X64-AVX512DQ-NEXT: retq
252 %and = and <4 x i64> %a, <i64 127, i64 255, i64 4095, i64 65535>
253 %cvt = uitofp <4 x i64> %and to <4 x float>
; sitofp of <2 x i64> clamped to [-255, 255] via smax/smin-style
; icmp+select pairs. The clamp bounds the values to i32 range, letting
; non-DQ targets narrow to dwords and use cvtdq2pd; SSE has no 64-bit
; compare, hence the long pxor/pcmpgtd/pshufd emulation sequences, while
; AVX uses vpcmpgtq+vblendvpd and AVX512 uses vpmaxsq/vpminsq with an
; embedded broadcast. AVX512DQ converts the clamped i64 lanes directly
; with vcvtqq2pd.
; NOTE(review): this function's closing brace falls outside the visible
; chunk; only the visible lines are annotated here.
257 define <2 x double> @clamp_sitofp_2i64_2f64(<2 x i64> %a) nounwind {
258 ; X86-SSE-LABEL: clamp_sitofp_2i64_2f64:
260 ; X86-SSE-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
261 ; X86-SSE-NEXT: movdqa %xmm0, %xmm1
262 ; X86-SSE-NEXT: pxor %xmm2, %xmm1
263 ; X86-SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm1[1,1,3,3]
264 ; X86-SSE-NEXT: pcmpeqd %xmm4, %xmm4
265 ; X86-SSE-NEXT: pcmpeqd %xmm3, %xmm4
266 ; X86-SSE-NEXT: pcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}, %xmm1
267 ; X86-SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm1[0,0,2,2]
268 ; X86-SSE-NEXT: pand %xmm4, %xmm3
269 ; X86-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
270 ; X86-SSE-NEXT: por %xmm3, %xmm1
271 ; X86-SSE-NEXT: pand %xmm1, %xmm0
272 ; X86-SSE-NEXT: pandn {{\.?LCPI[0-9]+_[0-9]+}}, %xmm1
273 ; X86-SSE-NEXT: por %xmm0, %xmm1
274 ; X86-SSE-NEXT: pxor %xmm1, %xmm2
275 ; X86-SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
276 ; X86-SSE-NEXT: pxor %xmm3, %xmm3
277 ; X86-SSE-NEXT: pcmpeqd %xmm0, %xmm3
278 ; X86-SSE-NEXT: movdqa {{.*#+}} xmm0 = [2147483903,0,2147483903,0]
279 ; X86-SSE-NEXT: pcmpgtd %xmm2, %xmm0
280 ; X86-SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm0[0,0,2,2]
281 ; X86-SSE-NEXT: pand %xmm3, %xmm2
282 ; X86-SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
283 ; X86-SSE-NEXT: por %xmm2, %xmm0
284 ; X86-SSE-NEXT: pand %xmm0, %xmm1
285 ; X86-SSE-NEXT: pandn {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
286 ; X86-SSE-NEXT: por %xmm1, %xmm0
287 ; X86-SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
288 ; X86-SSE-NEXT: cvtdq2pd %xmm0, %xmm0
291 ; X86-AVX-LABEL: clamp_sitofp_2i64_2f64:
293 ; X86-AVX-NEXT: vmovddup {{.*#+}} xmm1 = [4294967041,4294967295,4294967041,4294967295]
294 ; X86-AVX-NEXT: # xmm1 = mem[0,0]
295 ; X86-AVX-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm2
296 ; X86-AVX-NEXT: vblendvpd %xmm2, %xmm0, %xmm1, %xmm0
297 ; X86-AVX-NEXT: vmovddup {{.*#+}} xmm1 = [255,0,255,0]
298 ; X86-AVX-NEXT: # xmm1 = mem[0,0]
299 ; X86-AVX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm2
300 ; X86-AVX-NEXT: vblendvpd %xmm2, %xmm0, %xmm1, %xmm0
301 ; X86-AVX-NEXT: vshufps {{.*#+}} xmm0 = xmm0[0,2,2,3]
302 ; X86-AVX-NEXT: vcvtdq2pd %xmm0, %xmm0
305 ; X86-AVX512F-LABEL: clamp_sitofp_2i64_2f64:
306 ; X86-AVX512F: # %bb.0:
307 ; X86-AVX512F-NEXT: vpmaxsq {{\.?LCPI[0-9]+_[0-9]+}}{1to2}, %xmm0, %xmm0
308 ; X86-AVX512F-NEXT: vpminsq {{\.?LCPI[0-9]+_[0-9]+}}{1to2}, %xmm0, %xmm0
309 ; X86-AVX512F-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
310 ; X86-AVX512F-NEXT: vcvtdq2pd %xmm0, %xmm0
311 ; X86-AVX512F-NEXT: retl
313 ; X86-AVX512DQ-LABEL: clamp_sitofp_2i64_2f64:
314 ; X86-AVX512DQ: # %bb.0:
315 ; X86-AVX512DQ-NEXT: vpmaxsq {{\.?LCPI[0-9]+_[0-9]+}}{1to2}, %xmm0, %xmm0
316 ; X86-AVX512DQ-NEXT: vpminsq {{\.?LCPI[0-9]+_[0-9]+}}{1to2}, %xmm0, %xmm0
317 ; X86-AVX512DQ-NEXT: vcvtqq2pd %xmm0, %xmm0
318 ; X86-AVX512DQ-NEXT: retl
320 ; X64-SSE-LABEL: clamp_sitofp_2i64_2f64:
322 ; X64-SSE-NEXT: movdqa {{.*#+}} xmm1 = [2147483648,2147483648]
323 ; X64-SSE-NEXT: movdqa %xmm0, %xmm2
324 ; X64-SSE-NEXT: pxor %xmm1, %xmm2
325 ; X64-SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm2[1,1,3,3]
326 ; X64-SSE-NEXT: pcmpeqd %xmm4, %xmm4
327 ; X64-SSE-NEXT: pcmpeqd %xmm3, %xmm4
328 ; X64-SSE-NEXT: pcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm2
329 ; X64-SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
330 ; X64-SSE-NEXT: pand %xmm4, %xmm3
331 ; X64-SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
332 ; X64-SSE-NEXT: por %xmm3, %xmm2
333 ; X64-SSE-NEXT: pand %xmm2, %xmm0
334 ; X64-SSE-NEXT: pandn {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm2
335 ; X64-SSE-NEXT: por %xmm0, %xmm2
336 ; X64-SSE-NEXT: pxor %xmm2, %xmm1
337 ; X64-SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,3,3]
338 ; X64-SSE-NEXT: pxor %xmm3, %xmm3
339 ; X64-SSE-NEXT: pcmpeqd %xmm0, %xmm3
340 ; X64-SSE-NEXT: movdqa {{.*#+}} xmm0 = [2147483903,2147483903]
341 ; X64-SSE-NEXT: pcmpgtd %xmm1, %xmm0
342 ; X64-SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm0[0,0,2,2]
343 ; X64-SSE-NEXT: pand %xmm3, %xmm1
344 ; X64-SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
345 ; X64-SSE-NEXT: por %xmm1, %xmm0
346 ; X64-SSE-NEXT: pand %xmm0, %xmm2
347 ; X64-SSE-NEXT: pandn {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
348 ; X64-SSE-NEXT: por %xmm2, %xmm0
349 ; X64-SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
350 ; X64-SSE-NEXT: cvtdq2pd %xmm0, %xmm0
353 ; X64-AVX-LABEL: clamp_sitofp_2i64_2f64:
355 ; X64-AVX-NEXT: vpmovsxwq {{.*#+}} xmm1 = [18446744073709551361,18446744073709551361]
356 ; X64-AVX-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm2
357 ; X64-AVX-NEXT: vblendvpd %xmm2, %xmm0, %xmm1, %xmm0
358 ; X64-AVX-NEXT: vpmovzxbq {{.*#+}} xmm1 = [255,255]
359 ; X64-AVX-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm2
360 ; X64-AVX-NEXT: vblendvpd %xmm2, %xmm0, %xmm1, %xmm0
361 ; X64-AVX-NEXT: vshufps {{.*#+}} xmm0 = xmm0[0,2,2,3]
362 ; X64-AVX-NEXT: vcvtdq2pd %xmm0, %xmm0
365 ; X64-AVX512F-LABEL: clamp_sitofp_2i64_2f64:
366 ; X64-AVX512F: # %bb.0:
367 ; X64-AVX512F-NEXT: vpmaxsq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %xmm0
368 ; X64-AVX512F-NEXT: vpminsq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %xmm0
369 ; X64-AVX512F-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
370 ; X64-AVX512F-NEXT: vcvtdq2pd %xmm0, %xmm0
371 ; X64-AVX512F-NEXT: retq
373 ; X64-AVX512DQ-LABEL: clamp_sitofp_2i64_2f64:
374 ; X64-AVX512DQ: # %bb.0:
375 ; X64-AVX512DQ-NEXT: vpmaxsq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %xmm0
376 ; X64-AVX512DQ-NEXT: vpminsq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %xmm0
377 ; X64-AVX512DQ-NEXT: vcvtqq2pd %xmm0, %xmm0
378 ; X64-AVX512DQ-NEXT: retq
379 %clo = icmp slt <2 x i64> %a, <i64 -255, i64 -255>
380 %lo = select <2 x i1> %clo, <2 x i64> <i64 -255, i64 -255>, <2 x i64> %a
381 %chi = icmp sgt <2 x i64> %lo, <i64 255, i64 255>
382 %hi = select <2 x i1> %chi, <2 x i64> <i64 255, i64 255>, <2 x i64> %lo
383 %cvt = sitofp <2 x i64> %hi to <2 x double>
384 ret <2 x double> %cvt