; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=avx512fp16,avx512vl -O3 | FileCheck %s --check-prefixes=CHECK,X86
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=avx512fp16,avx512vl -O3 | FileCheck %s --check-prefixes=CHECK,X64
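
; Strict (constrained) sitofp/uitofp conversions to half with AVX512FP16 + AVX512VL,
; on both 32-bit (X86) and 64-bit (X64) targets: <16 x i1>, <16 x i8> and <16 x i16>
; convert to <16 x half> in a ymm register, <8 x i32> and <4 x i64> narrow to an xmm
; result, and <8 x i64> is split into two 256-bit conversions.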

declare <16 x half> @llvm.experimental.constrained.sitofp.v16f16.v16i1(<16 x i1>, metadata, metadata)
declare <16 x half> @llvm.experimental.constrained.uitofp.v16f16.v16i1(<16 x i1>, metadata, metadata)
declare <16 x half> @llvm.experimental.constrained.sitofp.v16f16.v16i8(<16 x i8>, metadata, metadata)
declare <16 x half> @llvm.experimental.constrained.uitofp.v16f16.v16i8(<16 x i8>, metadata, metadata)
declare <16 x half> @llvm.experimental.constrained.sitofp.v16f16.v16i16(<16 x i16>, metadata, metadata)
declare <16 x half> @llvm.experimental.constrained.uitofp.v16f16.v16i16(<16 x i16>, metadata, metadata)
declare <8 x half> @llvm.experimental.constrained.sitofp.v8f16.v8i32(<8 x i32>, metadata, metadata)
declare <8 x half> @llvm.experimental.constrained.uitofp.v8f16.v8i32(<8 x i32>, metadata, metadata)
declare <4 x half> @llvm.experimental.constrained.sitofp.v4f16.v4i64(<4 x i64>, metadata, metadata)
declare <4 x half> @llvm.experimental.constrained.uitofp.v4f16.v4i64(<4 x i64>, metadata, metadata)
declare <8 x half> @llvm.experimental.constrained.sitofp.v8f16.v8i64(<8 x i64>, metadata, metadata)
declare <8 x half> @llvm.experimental.constrained.uitofp.v8f16.v8i64(<8 x i64>, metadata, metadata)

define <16 x half> @sitofp_v16i1_v16f16(<16 x i1> %x) #0 {
; CHECK-LABEL: sitofp_v16i1_v16f16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
; CHECK-NEXT:    vpsllw $15, %ymm0, %ymm0
; CHECK-NEXT:    vpsraw $15, %ymm0, %ymm0
; CHECK-NEXT:    vcvtw2ph %ymm0, %ymm0
; CHECK-NEXT:    ret{{[l|q]}}
  %result = call <16 x half> @llvm.experimental.constrained.sitofp.v16f16.v16i1(<16 x i1> %x,
                                      metadata !"round.dynamic",
                                      metadata !"fpexcept.strict") #0
  ret <16 x half> %result
}
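
; For <16 x i1>, sitofp sign-extends the mask bits (vpsllw/vpsraw) while uitofp
; masks them to 0/1 with vpandd; the X86 and X64 runs differ only in how the
; constant-pool operand is addressed (absolute vs. RIP-relative).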

define <16 x half> @uitofp_v16i1_v16f16(<16 x i1> %x) #0 {
; X86-LABEL: uitofp_v16i1_v16f16:
; X86:       # %bb.0:
; X86-NEXT:    vpandd {{\.?LCPI[0-9]+_[0-9]+}}{1to4}, %xmm0, %xmm0
; X86-NEXT:    vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
; X86-NEXT:    vcvtw2ph %ymm0, %ymm0
; X86-NEXT:    retl
;
; X64-LABEL: uitofp_v16i1_v16f16:
; X64:       # %bb.0:
; X64-NEXT:    vpandd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %xmm0
; X64-NEXT:    vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
; X64-NEXT:    vcvtw2ph %ymm0, %ymm0
; X64-NEXT:    retq
  %result = call <16 x half> @llvm.experimental.constrained.uitofp.v16f16.v16i1(<16 x i1> %x,
                                      metadata !"round.dynamic",
                                      metadata !"fpexcept.strict") #0
  ret <16 x half> %result
}

define <16 x half> @sitofp_v16i8_v16f16(<16 x i8> %x) #0 {
; CHECK-LABEL: sitofp_v16i8_v16f16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vpmovsxbw %xmm0, %ymm0
; CHECK-NEXT:    vcvtw2ph %ymm0, %ymm0
; CHECK-NEXT:    ret{{[l|q]}}
  %result = call <16 x half> @llvm.experimental.constrained.sitofp.v16f16.v16i8(<16 x i8> %x,
                                      metadata !"round.dynamic",
                                      metadata !"fpexcept.strict") #0
  ret <16 x half> %result
}

define <16 x half> @uitofp_v16i8_v16f16(<16 x i8> %x) #0 {
; CHECK-LABEL: uitofp_v16i8_v16f16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
; CHECK-NEXT:    vcvtw2ph %ymm0, %ymm0
; CHECK-NEXT:    ret{{[l|q]}}
  %result = call <16 x half> @llvm.experimental.constrained.uitofp.v16f16.v16i8(<16 x i8> %x,
                                      metadata !"round.dynamic",
                                      metadata !"fpexcept.strict") #0
  ret <16 x half> %result
}

define <16 x half> @sitofp_v16i16_v16f16(<16 x i16> %x) #0 {
; CHECK-LABEL: sitofp_v16i16_v16f16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vcvtw2ph %ymm0, %ymm0
; CHECK-NEXT:    ret{{[l|q]}}
  %result = call <16 x half> @llvm.experimental.constrained.sitofp.v16f16.v16i16(<16 x i16> %x,
                                      metadata !"round.dynamic",
                                      metadata !"fpexcept.strict") #0
  ret <16 x half> %result
}

define <16 x half> @uitofp_v16i16_v16f16(<16 x i16> %x) #0 {
; CHECK-LABEL: uitofp_v16i16_v16f16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vcvtuw2ph %ymm0, %ymm0
; CHECK-NEXT:    ret{{[l|q]}}
  %result = call <16 x half> @llvm.experimental.constrained.uitofp.v16f16.v16i16(<16 x i16> %x,
                                      metadata !"round.dynamic",
                                      metadata !"fpexcept.strict") #0
  ret <16 x half> %result
}
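
; The <8 x i32> and <4 x i64> conversions below narrow a 256-bit source to a
; 128-bit result, so codegen emits vzeroupper before returning.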

define <8 x half> @sitofp_v8i32_v8f16(<8 x i32> %x) #0 {
; CHECK-LABEL: sitofp_v8i32_v8f16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vcvtdq2ph %ymm0, %xmm0
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    ret{{[l|q]}}
  %result = call <8 x half> @llvm.experimental.constrained.sitofp.v8f16.v8i32(<8 x i32> %x,
                                      metadata !"round.dynamic",
                                      metadata !"fpexcept.strict") #0
  ret <8 x half> %result
}

define <8 x half> @uitofp_v8i32_v8f16(<8 x i32> %x) #0 {
; CHECK-LABEL: uitofp_v8i32_v8f16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vcvtudq2ph %ymm0, %xmm0
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    ret{{[l|q]}}
  %result = call <8 x half> @llvm.experimental.constrained.uitofp.v8f16.v8i32(<8 x i32> %x,
                                      metadata !"round.dynamic",
                                      metadata !"fpexcept.strict") #0
  ret <8 x half> %result
}

define <4 x half> @sitofp_v4i64_v4f16(<4 x i64> %x) #0 {
; CHECK-LABEL: sitofp_v4i64_v4f16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vcvtqq2ph %ymm0, %xmm0
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    ret{{[l|q]}}
  %result = call <4 x half> @llvm.experimental.constrained.sitofp.v4f16.v4i64(<4 x i64> %x,
                                      metadata !"round.dynamic",
                                      metadata !"fpexcept.strict") #0
  ret <4 x half> %result
}

define <4 x half> @uitofp_v4i64_v4f16(<4 x i64> %x) #0 {
; CHECK-LABEL: uitofp_v4i64_v4f16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vcvtuqq2ph %ymm0, %xmm0
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    ret{{[l|q]}}
  %result = call <4 x half> @llvm.experimental.constrained.uitofp.v4f16.v4i64(<4 x i64> %x,
                                      metadata !"round.dynamic",
                                      metadata !"fpexcept.strict") #0
  ret <4 x half> %result
}
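
; The <8 x i64> tests use attributes #1 ("min-legal-vector-width"="256",
; "prefer-vector-width"="256"), so the 512-bit source stays split across
; ymm0/ymm1, is converted with two vcvt(u)qq2ph, and the halves are
; recombined with vmovlhps.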

define <8 x half> @sitofp_v8i64_v8f16(<8 x i64> %x) #1 {
; CHECK-LABEL: sitofp_v8i64_v8f16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vcvtqq2ph %ymm1, %xmm1
; CHECK-NEXT:    vcvtqq2ph %ymm0, %xmm0
; CHECK-NEXT:    vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    ret{{[l|q]}}
  %result = call <8 x half> @llvm.experimental.constrained.sitofp.v8f16.v8i64(<8 x i64> %x,
                                      metadata !"round.dynamic",
                                      metadata !"fpexcept.strict") #0
  ret <8 x half> %result
}

define <8 x half> @uitofp_v8i64_v8f16(<8 x i64> %x) #1 {
; CHECK-LABEL: uitofp_v8i64_v8f16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vcvtuqq2ph %ymm1, %xmm1
; CHECK-NEXT:    vcvtuqq2ph %ymm0, %xmm0
; CHECK-NEXT:    vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    ret{{[l|q]}}
  %result = call <8 x half> @llvm.experimental.constrained.uitofp.v8f16.v8i64(<8 x i64> %x,
                                      metadata !"round.dynamic",
                                      metadata !"fpexcept.strict") #0
  ret <8 x half> %result
}

attributes #0 = { strictfp }
attributes #1 = { strictfp "min-legal-vector-width"="256" "prefer-vector-width"="256" }