; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+avx2 | FileCheck %s --check-prefixes=X32,X32-SLOW
; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+avx2,+fast-variable-shuffle | FileCheck %s --check-prefixes=X32,X32-FAST
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx2 | FileCheck %s --check-prefixes=X64,X64-SLOW
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx2,+fast-variable-shuffle | FileCheck %s --check-prefixes=X64,X64-FAST
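
; Tests for AVX2 lowering of vector sign extension, zero extension, and
; truncation. The +fast-variable-shuffle RUN lines cover the variable-shuffle
; based lowerings (e.g. vpermps) preferred when such shuffles are cheap.
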
define <4 x i32> @trunc4(<4 x i64> %A) nounwind {
; X32-SLOW-LABEL: trunc4:
; X32-SLOW:       # %bb.0:
; X32-SLOW-NEXT:    vextractf128 $1, %ymm0, %xmm1
; X32-SLOW-NEXT:    vshufps {{.*#+}} xmm0 = xmm0[0,2],xmm1[0,2]
; X32-SLOW-NEXT:    vzeroupper
; X32-SLOW-NEXT:    retl
;
; X32-FAST-LABEL: trunc4:
; X32-FAST:       # %bb.0:
; X32-FAST-NEXT:    vmovaps {{.*#+}} ymm1 = [0,2,4,6,4,6,6,7]
; X32-FAST-NEXT:    vpermps %ymm0, %ymm1, %ymm0
; X32-FAST-NEXT:    # kill: def $xmm0 killed $xmm0 killed $ymm0
; X32-FAST-NEXT:    vzeroupper
; X32-FAST-NEXT:    retl
;
; X64-SLOW-LABEL: trunc4:
; X64-SLOW:       # %bb.0:
; X64-SLOW-NEXT:    vextractf128 $1, %ymm0, %xmm1
; X64-SLOW-NEXT:    vshufps {{.*#+}} xmm0 = xmm0[0,2],xmm1[0,2]
; X64-SLOW-NEXT:    vzeroupper
; X64-SLOW-NEXT:    retq
;
; X64-FAST-LABEL: trunc4:
; X64-FAST:       # %bb.0:
; X64-FAST-NEXT:    vmovaps {{.*#+}} ymm1 = [0,2,4,6,4,6,6,7]
; X64-FAST-NEXT:    vpermps %ymm0, %ymm1, %ymm0
; X64-FAST-NEXT:    # kill: def $xmm0 killed $xmm0 killed $ymm0
; X64-FAST-NEXT:    vzeroupper
; X64-FAST-NEXT:    retq
  %B = trunc <4 x i64> %A to <4 x i32>
  ret <4 x i32> %B
}

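; There is no single AVX2 instruction that narrows 8 x i32 to 8 x i16, so the
; truncate lowers to an in-lane vpshufb followed by a cross-lane vpermq.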
define <8 x i16> @trunc8(<8 x i32> %A) nounwind {
; X32-LABEL: trunc8:
; X32:       # %bb.0:
; X32-NEXT:    vpshufb {{.*#+}} ymm0 = ymm0[0,1,4,5,8,9,12,13,8,9,12,13,12,13,14,15,16,17,20,21,24,25,28,29,24,25,28,29,28,29,30,31]
; X32-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[0,2,2,3]
; X32-NEXT:    # kill: def $xmm0 killed $xmm0 killed $ymm0
; X32-NEXT:    vzeroupper
; X32-NEXT:    retl
;
; X64-LABEL: trunc8:
; X64:       # %bb.0:
; X64-NEXT:    vpshufb {{.*#+}} ymm0 = ymm0[0,1,4,5,8,9,12,13,8,9,12,13,12,13,14,15,16,17,20,21,24,25,28,29,24,25,28,29,28,29,30,31]
; X64-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[0,2,2,3]
; X64-NEXT:    # kill: def $xmm0 killed $xmm0 killed $ymm0
; X64-NEXT:    vzeroupper
; X64-NEXT:    retq
  %B = trunc <8 x i32> %A to <8 x i16>
  ret <8 x i16> %B
}

define <4 x i64> @sext4(<4 x i32> %A) nounwind {
; X32-LABEL: sext4:
; X32:       # %bb.0:
; X32-NEXT:    vpmovsxdq %xmm0, %ymm0
; X32-NEXT:    retl
;
; X64-LABEL: sext4:
; X64:       # %bb.0:
; X64-NEXT:    vpmovsxdq %xmm0, %ymm0
; X64-NEXT:    retq
  %B = sext <4 x i32> %A to <4 x i64>
  ret <4 x i64> %B
}

define <8 x i32> @sext8(<8 x i16> %A) nounwind {
; X32-LABEL: sext8:
; X32:       # %bb.0:
; X32-NEXT:    vpmovsxwd %xmm0, %ymm0
; X32-NEXT:    retl
;
; X64-LABEL: sext8:
; X64:       # %bb.0:
; X64-NEXT:    vpmovsxwd %xmm0, %ymm0
; X64-NEXT:    retq
  %B = sext <8 x i16> %A to <8 x i32>
  ret <8 x i32> %B
}

define <4 x i64> @zext4(<4 x i32> %A) nounwind {
; X32-LABEL: zext4:
; X32:       # %bb.0:
; X32-NEXT:    vpmovzxdq {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero
; X32-NEXT:    retl
;
; X64-LABEL: zext4:
; X64:       # %bb.0:
; X64-NEXT:    vpmovzxdq {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero
; X64-NEXT:    retq
  %B = zext <4 x i32> %A to <4 x i64>
  ret <4 x i64> %B
}

define <8 x i32> @zext8(<8 x i16> %A) nounwind {
; X32-LABEL: zext8:
; X32:       # %bb.0:
; X32-NEXT:    vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
; X32-NEXT:    retl
;
; X64-LABEL: zext8:
; X64:       # %bb.0:
; X64-NEXT:    vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
; X64-NEXT:    retq
  %B = zext <8 x i16> %A to <8 x i32>
  ret <8 x i32> %B
}

define <8 x i32> @zext_8i8_8i32(<8 x i8> %A) nounwind {
; X32-LABEL: zext_8i8_8i32:
; X32:       # %bb.0:
; X32-NEXT:    vpmovzxbd {{.*#+}} ymm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero
; X32-NEXT:    retl
;
; X64-LABEL: zext_8i8_8i32:
; X64:       # %bb.0:
; X64-NEXT:    vpmovzxbd {{.*#+}} ymm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero
; X64-NEXT:    retq
  %B = zext <8 x i8> %A to <8 x i32>
  ret <8 x i32> %B
}

define <16 x i16> @zext_16i8_16i16(<16 x i8> %z) {
; X32-LABEL: zext_16i8_16i16:
; X32:       # %bb.0:
; X32-NEXT:    vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
; X32-NEXT:    retl
;
; X64-LABEL: zext_16i8_16i16:
; X64:       # %bb.0:
; X64-NEXT:    vpmovzxbw {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero,xmm0[8],zero,xmm0[9],zero,xmm0[10],zero,xmm0[11],zero,xmm0[12],zero,xmm0[13],zero,xmm0[14],zero,xmm0[15],zero
; X64-NEXT:    retq
  %t = zext <16 x i8> %z to <16 x i16>
  ret <16 x i16> %t
}

define <16 x i16> @sext_16i8_16i16(<16 x i8> %z) {
; X32-LABEL: sext_16i8_16i16:
; X32:       # %bb.0:
; X32-NEXT:    vpmovsxbw %xmm0, %ymm0
; X32-NEXT:    retl
;
; X64-LABEL: sext_16i8_16i16:
; X64:       # %bb.0:
; X64-NEXT:    vpmovsxbw %xmm0, %ymm0
; X64-NEXT:    retq
  %t = sext <16 x i8> %z to <16 x i16>
  ret <16 x i16> %t
}

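; 16 x i16 -> 16 x i8 has no dedicated AVX2 truncate either; the lowering masks
; each word down to its low byte with vpand and packs the two halves together
; with vpackuswb.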
define <16 x i8> @trunc_16i16_16i8(<16 x i16> %z) {
; X32-LABEL: trunc_16i16_16i8:
; X32:       # %bb.0:
; X32-NEXT:    vpand {{\.LCPI.*}}, %ymm0, %ymm0
; X32-NEXT:    vextracti128 $1, %ymm0, %xmm1
; X32-NEXT:    vpackuswb %xmm1, %xmm0, %xmm0
; X32-NEXT:    vzeroupper
; X32-NEXT:    retl
;
; X64-LABEL: trunc_16i16_16i8:
; X64:       # %bb.0:
; X64-NEXT:    vpand {{.*}}(%rip), %ymm0, %ymm0
; X64-NEXT:    vextracti128 $1, %ymm0, %xmm1
; X64-NEXT:    vpackuswb %xmm1, %xmm0, %xmm0
; X64-NEXT:    vzeroupper
; X64-NEXT:    retq
  %t = trunc <16 x i16> %z to <16 x i8>
  ret <16 x i8> %t
}

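; The load_sext tests check that a load followed by a sext folds into a single
; memory-operand vpmovsx* instruction.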
define <4 x i64> @load_sext_test1(<4 x i32> *%ptr) {
; X32-LABEL: load_sext_test1:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    vpmovsxdq (%eax), %ymm0
; X32-NEXT:    retl
;
; X64-LABEL: load_sext_test1:
; X64:       # %bb.0:
; X64-NEXT:    vpmovsxdq (%rdi), %ymm0
; X64-NEXT:    retq
  %X = load <4 x i32>, <4 x i32>* %ptr
  %Y = sext <4 x i32> %X to <4 x i64>
  ret <4 x i64> %Y
}

define <4 x i64> @load_sext_test2(<4 x i8> *%ptr) {
; X32-LABEL: load_sext_test2:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    vpmovsxbq (%eax), %ymm0
; X32-NEXT:    retl
;
; X64-LABEL: load_sext_test2:
; X64:       # %bb.0:
; X64-NEXT:    vpmovsxbq (%rdi), %ymm0
; X64-NEXT:    retq
  %X = load <4 x i8>, <4 x i8>* %ptr
  %Y = sext <4 x i8> %X to <4 x i64>
  ret <4 x i64> %Y
}

define <4 x i64> @load_sext_test3(<4 x i16> *%ptr) {
; X32-LABEL: load_sext_test3:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    vpmovsxwq (%eax), %ymm0
; X32-NEXT:    retl
;
; X64-LABEL: load_sext_test3:
; X64:       # %bb.0:
; X64-NEXT:    vpmovsxwq (%rdi), %ymm0
; X64-NEXT:    retq
  %X = load <4 x i16>, <4 x i16>* %ptr
  %Y = sext <4 x i16> %X to <4 x i64>
  ret <4 x i64> %Y
}

define <8 x i32> @load_sext_test4(<8 x i16> *%ptr) {
; X32-LABEL: load_sext_test4:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    vpmovsxwd (%eax), %ymm0
; X32-NEXT:    retl
;
; X64-LABEL: load_sext_test4:
; X64:       # %bb.0:
; X64-NEXT:    vpmovsxwd (%rdi), %ymm0
; X64-NEXT:    retq
  %X = load <8 x i16>, <8 x i16>* %ptr
  %Y = sext <8 x i16> %X to <8 x i32>
  ret <8 x i32> %Y
}

define <8 x i32> @load_sext_test5(<8 x i8> *%ptr) {
; X32-LABEL: load_sext_test5:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    vpmovsxbd (%eax), %ymm0
; X32-NEXT:    retl
;
; X64-LABEL: load_sext_test5:
; X64:       # %bb.0:
; X64-NEXT:    vpmovsxbd (%rdi), %ymm0
; X64-NEXT:    retq
  %X = load <8 x i8>, <8 x i8>* %ptr
  %Y = sext <8 x i8> %X to <8 x i32>
  ret <8 x i32> %Y
}