1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+sse2 | FileCheck %s --check-prefixes=SSE2-SSSE3,SSE2
3 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+ssse3 | FileCheck %s --check-prefixes=SSE2-SSSE3,SSSE3
4 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx | FileCheck %s --check-prefixes=AVX12,AVX1
5 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx2 | FileCheck %s --check-prefixes=AVX12,AVX2
6 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512f,+avx512vl,+avx512bw | FileCheck %s --check-prefixes=AVX512
; Bitcast a 2-bit scalar mask into a <2 x i1> vector (one bit per lane).
; Pre-AVX512 lowering: broadcast the scalar, AND with per-lane bit selectors
; [1,2], compare-for-equality to turn each selected bit into an all-ones
; lane, then logical-right-shift so each lane holds 0 or 1.
; On AVX512 the scalar goes straight into mask register k1 and a zero-masked
; move of all-ones materializes the vector.
; NOTE(review): the trailing 'ret' and closing brace of this function are not
; visible in this excerpt (source lines were elided) — do not edit blindly.
8 define <2 x i1> @bitcast_i2_2i1(i2 zeroext %a0) {
9 ; SSE2-SSSE3-LABEL: bitcast_i2_2i1:
10 ; SSE2-SSSE3: # %bb.0:
11 ; SSE2-SSSE3-NEXT: movd %edi, %xmm0
12 ; SSE2-SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm0[0,1,0,1]
13 ; SSE2-SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [1,2]
14 ; SSE2-SSSE3-NEXT: pand %xmm0, %xmm1
15 ; SSE2-SSSE3-NEXT: pcmpeqd %xmm0, %xmm1
16 ; SSE2-SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,0,3,2]
17 ; SSE2-SSSE3-NEXT: pand %xmm1, %xmm0
18 ; SSE2-SSSE3-NEXT: psrlq $63, %xmm0
19 ; SSE2-SSSE3-NEXT: retq
21 ; AVX1-LABEL: bitcast_i2_2i1:
23 ; AVX1-NEXT: vmovd %edi, %xmm0
24 ; AVX1-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,1,0,1]
25 ; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [1,2]
26 ; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
27 ; AVX1-NEXT: vpcmpeqq %xmm1, %xmm0, %xmm0
28 ; AVX1-NEXT: vpsrlq $63, %xmm0, %xmm0
31 ; AVX2-LABEL: bitcast_i2_2i1:
33 ; AVX2-NEXT: vmovd %edi, %xmm0
34 ; AVX2-NEXT: vpbroadcastq %xmm0, %xmm0
35 ; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [1,2]
36 ; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
37 ; AVX2-NEXT: vpcmpeqq %xmm1, %xmm0, %xmm0
38 ; AVX2-NEXT: vpsrlq $63, %xmm0, %xmm0
41 ; AVX512-LABEL: bitcast_i2_2i1:
43 ; AVX512-NEXT: kmovd %edi, %k1
44 ; AVX512-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
45 ; AVX512-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
47 %1 = bitcast i2 %a0 to <2 x i1>
; Bitcast a 4-bit scalar mask into <4 x i1>: same broadcast/AND/compare/shift
; idiom as the i2 case, now over four i32 lanes with selectors [1,2,4,8] and
; psrld $31 to normalize each lane to 0 or 1. AVX512 uses kmovd plus a
; zero-masked dword move of all-ones (vmovdqa32 {%k1}{z}).
; NOTE(review): the trailing 'ret' and closing brace are elided in this view.
51 define <4 x i1> @bitcast_i4_4i1(i4 zeroext %a0) {
52 ; SSE2-SSSE3-LABEL: bitcast_i4_4i1:
53 ; SSE2-SSSE3: # %bb.0:
54 ; SSE2-SSSE3-NEXT: movd %edi, %xmm0
55 ; SSE2-SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,0,0]
56 ; SSE2-SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [1,2,4,8]
57 ; SSE2-SSSE3-NEXT: pand %xmm1, %xmm0
58 ; SSE2-SSSE3-NEXT: pcmpeqd %xmm1, %xmm0
59 ; SSE2-SSSE3-NEXT: psrld $31, %xmm0
60 ; SSE2-SSSE3-NEXT: retq
62 ; AVX1-LABEL: bitcast_i4_4i1:
64 ; AVX1-NEXT: vmovd %edi, %xmm0
65 ; AVX1-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,0,0,0]
66 ; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [1,2,4,8]
67 ; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
68 ; AVX1-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
69 ; AVX1-NEXT: vpsrld $31, %xmm0, %xmm0
72 ; AVX2-LABEL: bitcast_i4_4i1:
74 ; AVX2-NEXT: vmovd %edi, %xmm0
75 ; AVX2-NEXT: vpbroadcastd %xmm0, %xmm0
76 ; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [1,2,4,8]
77 ; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
78 ; AVX2-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
79 ; AVX2-NEXT: vpsrld $31, %xmm0, %xmm0
82 ; AVX512-LABEL: bitcast_i4_4i1:
84 ; AVX512-NEXT: kmovd %edi, %k1
85 ; AVX512-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
86 ; AVX512-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
88 %1 = bitcast i4 %a0 to <4 x i1>
; Bitcast an 8-bit scalar mask into <8 x i1> over eight i16 lanes: broadcast
; the low word, AND with selectors [1..128], pcmpeqw, then psrlw $15.
; AVX512 (with +avx512bw in the RUN line) expands the mask register directly
; via vpmovm2w instead of the masked-move idiom used for the dword cases.
; NOTE(review): the trailing 'ret' and closing brace are elided in this view.
92 define <8 x i1> @bitcast_i8_8i1(i8 zeroext %a0) {
93 ; SSE2-SSSE3-LABEL: bitcast_i8_8i1:
94 ; SSE2-SSSE3: # %bb.0:
95 ; SSE2-SSSE3-NEXT: movd %edi, %xmm0
96 ; SSE2-SSSE3-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[0,0,2,3,4,5,6,7]
97 ; SSE2-SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,0,0]
98 ; SSE2-SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [1,2,4,8,16,32,64,128]
99 ; SSE2-SSSE3-NEXT: pand %xmm1, %xmm0
100 ; SSE2-SSSE3-NEXT: pcmpeqw %xmm1, %xmm0
101 ; SSE2-SSSE3-NEXT: psrlw $15, %xmm0
102 ; SSE2-SSSE3-NEXT: retq
104 ; AVX1-LABEL: bitcast_i8_8i1:
106 ; AVX1-NEXT: vmovd %edi, %xmm0
107 ; AVX1-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,0,2,3,4,5,6,7]
108 ; AVX1-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,0,0,0]
109 ; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [1,2,4,8,16,32,64,128]
110 ; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
111 ; AVX1-NEXT: vpcmpeqw %xmm1, %xmm0, %xmm0
112 ; AVX1-NEXT: vpsrlw $15, %xmm0, %xmm0
115 ; AVX2-LABEL: bitcast_i8_8i1:
117 ; AVX2-NEXT: vmovd %edi, %xmm0
118 ; AVX2-NEXT: vpbroadcastw %xmm0, %xmm0
119 ; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [1,2,4,8,16,32,64,128]
120 ; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
121 ; AVX2-NEXT: vpcmpeqw %xmm1, %xmm0, %xmm0
122 ; AVX2-NEXT: vpsrlw $15, %xmm0, %xmm0
125 ; AVX512-LABEL: bitcast_i8_8i1:
127 ; AVX512-NEXT: kmovd %edi, %k0
128 ; AVX512-NEXT: vpmovm2w %k0, %xmm0
130 %1 = bitcast i8 %a0 to <8 x i1>
; Bitcast a 16-bit scalar mask into <16 x i1> over sixteen i8 lanes. SSE2 and
; SSSE3 diverge here (hence the split prefixes): SSE2 replicates each input
; byte with punpcklbw + two shuffles, while SSSE3 does it in one pshufb.
; Both AND with the repeated byte selectors [1..128], pcmpeqb, then emulate a
; byte shift as psrlw $7 followed by a pand with a constant-pool mask (x86 has
; no per-byte shift). AVX1 loads the selector constant as a splatted double
; (vmovddup) — the FP literal printed is just the 0x8040201008040201 bit
; pattern — and AVX2 broadcasts the equivalent qword 9241421688590303745.
; AVX512 expands via vpmovm2b.
; NOTE(review): the trailing 'ret' and closing brace are elided in this view.
134 define <16 x i1> @bitcast_i16_16i1(i16 zeroext %a0) {
135 ; SSE2-LABEL: bitcast_i16_16i1:
137 ; SSE2-NEXT: movd %edi, %xmm0
138 ; SSE2-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
139 ; SSE2-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[0,0,1,1,4,5,6,7]
140 ; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,1,1]
141 ; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [1,2,4,8,16,32,64,128,1,2,4,8,16,32,64,128]
142 ; SSE2-NEXT: pand %xmm1, %xmm0
143 ; SSE2-NEXT: pcmpeqb %xmm1, %xmm0
144 ; SSE2-NEXT: psrlw $7, %xmm0
145 ; SSE2-NEXT: pand {{.*}}(%rip), %xmm0
148 ; SSSE3-LABEL: bitcast_i16_16i1:
150 ; SSSE3-NEXT: movd %edi, %xmm0
151 ; SSSE3-NEXT: pshufb {{.*#+}} xmm0 = xmm0[0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1]
152 ; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [1,2,4,8,16,32,64,128,1,2,4,8,16,32,64,128]
153 ; SSSE3-NEXT: pand %xmm1, %xmm0
154 ; SSSE3-NEXT: pcmpeqb %xmm1, %xmm0
155 ; SSSE3-NEXT: psrlw $7, %xmm0
156 ; SSSE3-NEXT: pand {{.*}}(%rip), %xmm0
159 ; AVX1-LABEL: bitcast_i16_16i1:
161 ; AVX1-NEXT: vmovd %edi, %xmm0
162 ; AVX1-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1]
163 ; AVX1-NEXT: vmovddup {{.*#+}} xmm1 = [-1.7939930131212661E-307,-1.7939930131212661E-307]
164 ; AVX1-NEXT: # xmm1 = mem[0,0]
165 ; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0
166 ; AVX1-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
167 ; AVX1-NEXT: vpsrlw $7, %xmm0, %xmm0
168 ; AVX1-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
171 ; AVX2-LABEL: bitcast_i16_16i1:
173 ; AVX2-NEXT: vmovd %edi, %xmm0
174 ; AVX2-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1]
175 ; AVX2-NEXT: vpbroadcastq {{.*#+}} xmm1 = [9241421688590303745,9241421688590303745]
176 ; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
177 ; AVX2-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
178 ; AVX2-NEXT: vpsrlw $7, %xmm0, %xmm0
179 ; AVX2-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
182 ; AVX512-LABEL: bitcast_i16_16i1:
184 ; AVX512-NEXT: kmovd %edi, %k0
185 ; AVX512-NEXT: vpmovm2b %k0, %xmm0
187 %1 = bitcast i16 %a0 to <16 x i1>
; Bitcast a 32-bit scalar mask into <32 x i1>. For SSE2/SSSE3 the 32-byte
; result is returned indirectly: the codegen shows rdi holding the sret
; pointer (copied to rax for the return) and the i32 value in esi stored to
; it — i.e. the vector of bits is returned as raw memory. AVX1 builds the
; 256-bit result from two 128-bit halves (vinsertf128/vextractf128) with a
; compare-against-zero plus vpxor-with-ones inversion per half; AVX2 does it
; in one ymm pass with a cross-lane vpshufb; AVX512 uses vpmovm2b on a ymm.
; NOTE(review): the trailing 'ret' and closing brace are elided in this view.
191 define <32 x i1> @bitcast_i32_32i1(i32 %a0) {
192 ; SSE2-SSSE3-LABEL: bitcast_i32_32i1:
193 ; SSE2-SSSE3: # %bb.0:
194 ; SSE2-SSSE3-NEXT: movq %rdi, %rax
195 ; SSE2-SSSE3-NEXT: movl %esi, (%rdi)
196 ; SSE2-SSSE3-NEXT: retq
198 ; AVX1-LABEL: bitcast_i32_32i1:
200 ; AVX1-NEXT: vmovd %edi, %xmm0
201 ; AVX1-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
202 ; AVX1-NEXT: vpshuflw {{.*#+}} xmm1 = xmm0[0,0,1,1,4,5,6,7]
203 ; AVX1-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[2,2,3,3,4,5,6,7]
204 ; AVX1-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
205 ; AVX1-NEXT: vpermilps {{.*#+}} ymm0 = ymm0[0,0,1,1,4,4,5,5]
206 ; AVX1-NEXT: vandps {{.*}}(%rip), %ymm0, %ymm0
207 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm1
208 ; AVX1-NEXT: vpxor %xmm2, %xmm2, %xmm2
209 ; AVX1-NEXT: vpcmpeqb %xmm2, %xmm1, %xmm1
210 ; AVX1-NEXT: vpcmpeqd %xmm3, %xmm3, %xmm3
211 ; AVX1-NEXT: vpxor %xmm3, %xmm1, %xmm1
212 ; AVX1-NEXT: vpsrlw $7, %xmm1, %xmm1
213 ; AVX1-NEXT: vmovdqa {{.*#+}} xmm4 = [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
214 ; AVX1-NEXT: vpand %xmm4, %xmm1, %xmm1
215 ; AVX1-NEXT: vpcmpeqb %xmm2, %xmm0, %xmm0
216 ; AVX1-NEXT: vpxor %xmm3, %xmm0, %xmm0
217 ; AVX1-NEXT: vpsrlw $7, %xmm0, %xmm0
218 ; AVX1-NEXT: vpand %xmm4, %xmm0, %xmm0
219 ; AVX1-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
222 ; AVX2-LABEL: bitcast_i32_32i1:
224 ; AVX2-NEXT: vmovd %edi, %xmm0
225 ; AVX2-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,0,1]
226 ; AVX2-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,18,18,18,18,18,18,18,18,19,19,19,19,19,19,19,19]
227 ; AVX2-NEXT: vpbroadcastq {{.*#+}} ymm1 = [9241421688590303745,9241421688590303745,9241421688590303745,9241421688590303745]
228 ; AVX2-NEXT: vpand %ymm1, %ymm0, %ymm0
229 ; AVX2-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0
230 ; AVX2-NEXT: vpsrlw $7, %ymm0, %ymm0
231 ; AVX2-NEXT: vpand {{.*}}(%rip), %ymm0, %ymm0
234 ; AVX512-LABEL: bitcast_i32_32i1:
236 ; AVX512-NEXT: kmovd %edi, %k0
237 ; AVX512-NEXT: vpmovm2b %k0, %ymm0
239 %1 = bitcast i32 %a0 to <32 x i1>
; Bitcast a 64-bit scalar mask into <64 x i1>. On every pre-AVX512 target
; (SSE and AVX1/AVX2 share the AVX12 prefix here) the 64-byte result is
; returned indirectly: rdi is the sret pointer (copied to rax per the ABI's
; sret-return rule) and the i64 in rsi is stored to it verbatim. Only
; AVX512 with BW materializes it in a register, via kmovq + vpmovm2b zmm.
; NOTE(review): this definition runs past the end of the visible excerpt —
; its 'ret' and closing brace are not shown.
243 define <64 x i1> @bitcast_i64_64i1(i64 %a0) {
244 ; SSE2-SSSE3-LABEL: bitcast_i64_64i1:
245 ; SSE2-SSSE3: # %bb.0:
246 ; SSE2-SSSE3-NEXT: movq %rdi, %rax
247 ; SSE2-SSSE3-NEXT: movq %rsi, (%rdi)
248 ; SSE2-SSSE3-NEXT: retq
250 ; AVX12-LABEL: bitcast_i64_64i1:
252 ; AVX12-NEXT: movq %rdi, %rax
253 ; AVX12-NEXT: movq %rsi, (%rdi)
256 ; AVX512-LABEL: bitcast_i64_64i1:
258 ; AVX512-NEXT: kmovq %rdi, %k0
259 ; AVX512-NEXT: vpmovm2b %k0, %zmm0
261 %1 = bitcast i64 %a0 to <64 x i1>