1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc < %s -mtriple=i686-apple-darwin -mattr=+sse2 | FileCheck %s --check-prefix=X86 --check-prefix=X86-SSE --check-prefix=X86-SSE2
3 ; RUN: llc < %s -mtriple=i686-apple-darwin -mattr=+sse4.2 | FileCheck %s --check-prefix=X86 --check-prefix=X86-SSE --check-prefix=X86-SSE42
4 ; RUN: llc < %s -mtriple=i686-apple-darwin -mattr=+avx | FileCheck %s --check-prefix=X86 --check-prefix=X86-AVX --check-prefix=X86-AVX1
5 ; RUN: llc < %s -mtriple=i686-apple-darwin -mattr=+avx2 | FileCheck %s --check-prefix=X86 --check-prefix=X86-AVX --check-prefix=X86-AVX2
6 ; RUN: llc < %s -mtriple=x86_64-apple-darwin -mattr=+sse2 | FileCheck %s --check-prefix=X64 --check-prefix=X64-SSE --check-prefix=X64-SSE2
7 ; RUN: llc < %s -mtriple=x86_64-apple-darwin -mattr=+sse4.2 | FileCheck %s --check-prefix=X64 --check-prefix=X64-SSE --check-prefix=X64-SSE42
8 ; RUN: llc < %s -mtriple=x86_64-apple-darwin -mattr=+avx | FileCheck %s --check-prefix=X64 --check-prefix=X64-AVX --check-prefix=X64-AVX1
9 ; RUN: llc < %s -mtriple=x86_64-apple-darwin -mattr=+avx2 | FileCheck %s --check-prefix=X64 --check-prefix=X64-AVX --check-prefix=X64-AVX2
10 ; RUN: llc < %s -mtriple=x86_64-apple-darwin -mattr=+avx512f,+avx512bw,+avx512dq,+avx512vl | FileCheck %s --check-prefix=X64 --check-prefix=X64-AVX --check-prefix=X64-AVX512
16 define i64 @test_reduce_v2i64(<2 x i64> %a0) {
17 ; X86-SSE2-LABEL: test_reduce_v2i64:
19 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
20 ; X86-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
21 ; X86-SSE2-NEXT: movdqa %xmm0, %xmm3
22 ; X86-SSE2-NEXT: pxor %xmm2, %xmm3
23 ; X86-SSE2-NEXT: pxor %xmm1, %xmm2
24 ; X86-SSE2-NEXT: movdqa %xmm3, %xmm4
25 ; X86-SSE2-NEXT: pcmpgtd %xmm2, %xmm4
26 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm5 = xmm4[0,0,2,2]
27 ; X86-SSE2-NEXT: pcmpeqd %xmm3, %xmm2
28 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
29 ; X86-SSE2-NEXT: pand %xmm5, %xmm2
30 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm4[1,1,3,3]
31 ; X86-SSE2-NEXT: por %xmm2, %xmm3
32 ; X86-SSE2-NEXT: pand %xmm3, %xmm0
33 ; X86-SSE2-NEXT: pandn %xmm1, %xmm3
34 ; X86-SSE2-NEXT: por %xmm0, %xmm3
35 ; X86-SSE2-NEXT: movd %xmm3, %eax
36 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm3[1,1,2,3]
37 ; X86-SSE2-NEXT: movd %xmm0, %edx
40 ; X86-SSE42-LABEL: test_reduce_v2i64:
41 ; X86-SSE42: ## %bb.0:
42 ; X86-SSE42-NEXT: movdqa %xmm0, %xmm1
43 ; X86-SSE42-NEXT: pshufd {{.*#+}} xmm2 = xmm0[2,3,0,1]
44 ; X86-SSE42-NEXT: movdqa {{.*#+}} xmm3 = [0,2147483648,0,2147483648]
45 ; X86-SSE42-NEXT: pxor %xmm3, %xmm0
46 ; X86-SSE42-NEXT: pxor %xmm2, %xmm3
47 ; X86-SSE42-NEXT: pcmpgtq %xmm3, %xmm0
48 ; X86-SSE42-NEXT: blendvpd %xmm0, %xmm1, %xmm2
49 ; X86-SSE42-NEXT: movd %xmm2, %eax
50 ; X86-SSE42-NEXT: pextrd $1, %xmm2, %edx
51 ; X86-SSE42-NEXT: retl
53 ; X86-AVX1-LABEL: test_reduce_v2i64:
55 ; X86-AVX1-NEXT: vpermilps {{.*#+}} xmm1 = xmm0[2,3,0,1]
56 ; X86-AVX1-NEXT: vmovddup {{.*#+}} xmm2 = [-0.0E+0,-0.0E+0]
57 ; X86-AVX1-NEXT: ## xmm2 = mem[0,0]
58 ; X86-AVX1-NEXT: vxorps %xmm2, %xmm0, %xmm3
59 ; X86-AVX1-NEXT: vxorps %xmm2, %xmm1, %xmm2
60 ; X86-AVX1-NEXT: vpcmpgtq %xmm2, %xmm3, %xmm2
61 ; X86-AVX1-NEXT: vblendvpd %xmm2, %xmm0, %xmm1, %xmm0
62 ; X86-AVX1-NEXT: vmovd %xmm0, %eax
63 ; X86-AVX1-NEXT: vpextrd $1, %xmm0, %edx
66 ; X86-AVX2-LABEL: test_reduce_v2i64:
68 ; X86-AVX2-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
69 ; X86-AVX2-NEXT: vpbroadcastq {{.*#+}} xmm2 = [-0.0E+0,-0.0E+0]
70 ; X86-AVX2-NEXT: vpxor %xmm2, %xmm0, %xmm3
71 ; X86-AVX2-NEXT: vpxor %xmm2, %xmm1, %xmm2
72 ; X86-AVX2-NEXT: vpcmpgtq %xmm2, %xmm3, %xmm2
73 ; X86-AVX2-NEXT: vblendvpd %xmm2, %xmm0, %xmm1, %xmm0
74 ; X86-AVX2-NEXT: vmovd %xmm0, %eax
75 ; X86-AVX2-NEXT: vpextrd $1, %xmm0, %edx
78 ; X64-SSE2-LABEL: test_reduce_v2i64:
80 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
81 ; X64-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
82 ; X64-SSE2-NEXT: movdqa %xmm0, %xmm3
83 ; X64-SSE2-NEXT: pxor %xmm2, %xmm3
84 ; X64-SSE2-NEXT: pxor %xmm1, %xmm2
85 ; X64-SSE2-NEXT: movdqa %xmm3, %xmm4
86 ; X64-SSE2-NEXT: pcmpgtd %xmm2, %xmm4
87 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm5 = xmm4[0,0,2,2]
88 ; X64-SSE2-NEXT: pcmpeqd %xmm3, %xmm2
89 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
90 ; X64-SSE2-NEXT: pand %xmm5, %xmm2
91 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm4[1,1,3,3]
92 ; X64-SSE2-NEXT: por %xmm2, %xmm3
93 ; X64-SSE2-NEXT: pand %xmm3, %xmm0
94 ; X64-SSE2-NEXT: pandn %xmm1, %xmm3
95 ; X64-SSE2-NEXT: por %xmm0, %xmm3
96 ; X64-SSE2-NEXT: movq %xmm3, %rax
99 ; X64-SSE42-LABEL: test_reduce_v2i64:
100 ; X64-SSE42: ## %bb.0:
101 ; X64-SSE42-NEXT: movdqa %xmm0, %xmm1
102 ; X64-SSE42-NEXT: pshufd {{.*#+}} xmm2 = xmm0[2,3,0,1]
103 ; X64-SSE42-NEXT: movdqa {{.*#+}} xmm3 = [9223372036854775808,9223372036854775808]
104 ; X64-SSE42-NEXT: pxor %xmm3, %xmm0
105 ; X64-SSE42-NEXT: pxor %xmm2, %xmm3
106 ; X64-SSE42-NEXT: pcmpgtq %xmm3, %xmm0
107 ; X64-SSE42-NEXT: blendvpd %xmm0, %xmm1, %xmm2
108 ; X64-SSE42-NEXT: movq %xmm2, %rax
109 ; X64-SSE42-NEXT: retq
111 ; X64-AVX1-LABEL: test_reduce_v2i64:
112 ; X64-AVX1: ## %bb.0:
113 ; X64-AVX1-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
114 ; X64-AVX1-NEXT: vmovdqa {{.*#+}} xmm2 = [9223372036854775808,9223372036854775808]
115 ; X64-AVX1-NEXT: vpxor %xmm2, %xmm0, %xmm3
116 ; X64-AVX1-NEXT: vpxor %xmm2, %xmm1, %xmm2
117 ; X64-AVX1-NEXT: vpcmpgtq %xmm2, %xmm3, %xmm2
118 ; X64-AVX1-NEXT: vblendvpd %xmm2, %xmm0, %xmm1, %xmm0
119 ; X64-AVX1-NEXT: vmovq %xmm0, %rax
120 ; X64-AVX1-NEXT: retq
122 ; X64-AVX2-LABEL: test_reduce_v2i64:
123 ; X64-AVX2: ## %bb.0:
124 ; X64-AVX2-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
125 ; X64-AVX2-NEXT: vmovdqa {{.*#+}} xmm2 = [9223372036854775808,9223372036854775808]
126 ; X64-AVX2-NEXT: vpxor %xmm2, %xmm0, %xmm3
127 ; X64-AVX2-NEXT: vpxor %xmm2, %xmm1, %xmm2
128 ; X64-AVX2-NEXT: vpcmpgtq %xmm2, %xmm3, %xmm2
129 ; X64-AVX2-NEXT: vblendvpd %xmm2, %xmm0, %xmm1, %xmm0
130 ; X64-AVX2-NEXT: vmovq %xmm0, %rax
131 ; X64-AVX2-NEXT: retq
133 ; X64-AVX512-LABEL: test_reduce_v2i64:
134 ; X64-AVX512: ## %bb.0:
135 ; X64-AVX512-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
136 ; X64-AVX512-NEXT: vpmaxuq %xmm1, %xmm0, %xmm0
137 ; X64-AVX512-NEXT: vmovq %xmm0, %rax
138 ; X64-AVX512-NEXT: retq
139 %1 = shufflevector <2 x i64> %a0, <2 x i64> undef, <2 x i32> <i32 1, i32 undef>
140 %2 = icmp ugt <2 x i64> %a0, %1
141 %3 = select <2 x i1> %2, <2 x i64> %a0, <2 x i64> %1
142 %4 = extractelement <2 x i64> %3, i32 0
146 define i32 @test_reduce_v4i32(<4 x i32> %a0) {
147 ; X86-SSE2-LABEL: test_reduce_v4i32:
148 ; X86-SSE2: ## %bb.0:
149 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
150 ; X86-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
151 ; X86-SSE2-NEXT: movdqa %xmm0, %xmm3
152 ; X86-SSE2-NEXT: pxor %xmm2, %xmm3
153 ; X86-SSE2-NEXT: movdqa %xmm1, %xmm4
154 ; X86-SSE2-NEXT: pxor %xmm2, %xmm4
155 ; X86-SSE2-NEXT: pcmpgtd %xmm4, %xmm3
156 ; X86-SSE2-NEXT: pand %xmm3, %xmm0
157 ; X86-SSE2-NEXT: pandn %xmm1, %xmm3
158 ; X86-SSE2-NEXT: por %xmm0, %xmm3
159 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm3[1,1,2,3]
160 ; X86-SSE2-NEXT: movdqa %xmm3, %xmm1
161 ; X86-SSE2-NEXT: pxor %xmm2, %xmm1
162 ; X86-SSE2-NEXT: pxor %xmm0, %xmm2
163 ; X86-SSE2-NEXT: pcmpgtd %xmm2, %xmm1
164 ; X86-SSE2-NEXT: pand %xmm1, %xmm3
165 ; X86-SSE2-NEXT: pandn %xmm0, %xmm1
166 ; X86-SSE2-NEXT: por %xmm3, %xmm1
167 ; X86-SSE2-NEXT: movd %xmm1, %eax
168 ; X86-SSE2-NEXT: retl
170 ; X86-SSE42-LABEL: test_reduce_v4i32:
171 ; X86-SSE42: ## %bb.0:
172 ; X86-SSE42-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
173 ; X86-SSE42-NEXT: pmaxud %xmm0, %xmm1
174 ; X86-SSE42-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,2,3]
175 ; X86-SSE42-NEXT: pmaxud %xmm1, %xmm0
176 ; X86-SSE42-NEXT: movd %xmm0, %eax
177 ; X86-SSE42-NEXT: retl
179 ; X86-AVX-LABEL: test_reduce_v4i32:
181 ; X86-AVX-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
182 ; X86-AVX-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
183 ; X86-AVX-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[1,1,2,3]
184 ; X86-AVX-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
185 ; X86-AVX-NEXT: vmovd %xmm0, %eax
188 ; X64-SSE2-LABEL: test_reduce_v4i32:
189 ; X64-SSE2: ## %bb.0:
190 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
191 ; X64-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
192 ; X64-SSE2-NEXT: movdqa %xmm0, %xmm3
193 ; X64-SSE2-NEXT: pxor %xmm2, %xmm3
194 ; X64-SSE2-NEXT: movdqa %xmm1, %xmm4
195 ; X64-SSE2-NEXT: pxor %xmm2, %xmm4
196 ; X64-SSE2-NEXT: pcmpgtd %xmm4, %xmm3
197 ; X64-SSE2-NEXT: pand %xmm3, %xmm0
198 ; X64-SSE2-NEXT: pandn %xmm1, %xmm3
199 ; X64-SSE2-NEXT: por %xmm0, %xmm3
200 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm3[1,1,2,3]
201 ; X64-SSE2-NEXT: movdqa %xmm3, %xmm1
202 ; X64-SSE2-NEXT: pxor %xmm2, %xmm1
203 ; X64-SSE2-NEXT: pxor %xmm0, %xmm2
204 ; X64-SSE2-NEXT: pcmpgtd %xmm2, %xmm1
205 ; X64-SSE2-NEXT: pand %xmm1, %xmm3
206 ; X64-SSE2-NEXT: pandn %xmm0, %xmm1
207 ; X64-SSE2-NEXT: por %xmm3, %xmm1
208 ; X64-SSE2-NEXT: movd %xmm1, %eax
209 ; X64-SSE2-NEXT: retq
211 ; X64-SSE42-LABEL: test_reduce_v4i32:
212 ; X64-SSE42: ## %bb.0:
213 ; X64-SSE42-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
214 ; X64-SSE42-NEXT: pmaxud %xmm0, %xmm1
215 ; X64-SSE42-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,2,3]
216 ; X64-SSE42-NEXT: pmaxud %xmm1, %xmm0
217 ; X64-SSE42-NEXT: movd %xmm0, %eax
218 ; X64-SSE42-NEXT: retq
220 ; X64-AVX-LABEL: test_reduce_v4i32:
222 ; X64-AVX-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
223 ; X64-AVX-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
224 ; X64-AVX-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[1,1,2,3]
225 ; X64-AVX-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
226 ; X64-AVX-NEXT: vmovd %xmm0, %eax
228 %1 = shufflevector <4 x i32> %a0, <4 x i32> undef, <4 x i32> <i32 2, i32 3, i32 undef, i32 undef>
229 %2 = icmp ugt <4 x i32> %a0, %1
230 %3 = select <4 x i1> %2, <4 x i32> %a0, <4 x i32> %1
231 %4 = shufflevector <4 x i32> %3, <4 x i32> undef, <4 x i32> <i32 1, i32 undef, i32 undef, i32 undef>
232 %5 = icmp ugt <4 x i32> %3, %4
233 %6 = select <4 x i1> %5, <4 x i32> %3, <4 x i32> %4
234 %7 = extractelement <4 x i32> %6, i32 0
238 define i16 @test_reduce_v8i16(<8 x i16> %a0) {
239 ; X86-SSE2-LABEL: test_reduce_v8i16:
240 ; X86-SSE2: ## %bb.0:
241 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
242 ; X86-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [32768,32768,32768,32768,32768,32768,32768,32768]
243 ; X86-SSE2-NEXT: pxor %xmm2, %xmm0
244 ; X86-SSE2-NEXT: pxor %xmm2, %xmm1
245 ; X86-SSE2-NEXT: pmaxsw %xmm0, %xmm1
246 ; X86-SSE2-NEXT: movdqa %xmm1, %xmm0
247 ; X86-SSE2-NEXT: pxor %xmm2, %xmm0
248 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,2,3]
249 ; X86-SSE2-NEXT: pxor %xmm2, %xmm0
250 ; X86-SSE2-NEXT: pmaxsw %xmm1, %xmm0
251 ; X86-SSE2-NEXT: movdqa %xmm0, %xmm1
252 ; X86-SSE2-NEXT: pxor %xmm2, %xmm1
253 ; X86-SSE2-NEXT: psrld $16, %xmm1
254 ; X86-SSE2-NEXT: pxor %xmm2, %xmm1
255 ; X86-SSE2-NEXT: pmaxsw %xmm0, %xmm1
256 ; X86-SSE2-NEXT: movd %xmm1, %eax
257 ; X86-SSE2-NEXT: xorl $32768, %eax ## imm = 0x8000
258 ; X86-SSE2-NEXT: ## kill: def $ax killed $ax killed $eax
259 ; X86-SSE2-NEXT: retl
261 ; X86-SSE42-LABEL: test_reduce_v8i16:
262 ; X86-SSE42: ## %bb.0:
263 ; X86-SSE42-NEXT: pcmpeqd %xmm1, %xmm1
264 ; X86-SSE42-NEXT: pxor %xmm0, %xmm1
265 ; X86-SSE42-NEXT: phminposuw %xmm1, %xmm0
266 ; X86-SSE42-NEXT: movd %xmm0, %eax
267 ; X86-SSE42-NEXT: notl %eax
268 ; X86-SSE42-NEXT: ## kill: def $ax killed $ax killed $eax
269 ; X86-SSE42-NEXT: retl
271 ; X86-AVX-LABEL: test_reduce_v8i16:
273 ; X86-AVX-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
274 ; X86-AVX-NEXT: vpxor %xmm1, %xmm0, %xmm0
275 ; X86-AVX-NEXT: vphminposuw %xmm0, %xmm0
276 ; X86-AVX-NEXT: vmovd %xmm0, %eax
277 ; X86-AVX-NEXT: notl %eax
278 ; X86-AVX-NEXT: ## kill: def $ax killed $ax killed $eax
281 ; X64-SSE2-LABEL: test_reduce_v8i16:
282 ; X64-SSE2: ## %bb.0:
283 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
284 ; X64-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [32768,32768,32768,32768,32768,32768,32768,32768]
285 ; X64-SSE2-NEXT: pxor %xmm2, %xmm0
286 ; X64-SSE2-NEXT: pxor %xmm2, %xmm1
287 ; X64-SSE2-NEXT: pmaxsw %xmm0, %xmm1
288 ; X64-SSE2-NEXT: movdqa %xmm1, %xmm0
289 ; X64-SSE2-NEXT: pxor %xmm2, %xmm0
290 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,2,3]
291 ; X64-SSE2-NEXT: pxor %xmm2, %xmm0
292 ; X64-SSE2-NEXT: pmaxsw %xmm1, %xmm0
293 ; X64-SSE2-NEXT: movdqa %xmm0, %xmm1
294 ; X64-SSE2-NEXT: pxor %xmm2, %xmm1
295 ; X64-SSE2-NEXT: psrld $16, %xmm1
296 ; X64-SSE2-NEXT: pxor %xmm2, %xmm1
297 ; X64-SSE2-NEXT: pmaxsw %xmm0, %xmm1
298 ; X64-SSE2-NEXT: movd %xmm1, %eax
299 ; X64-SSE2-NEXT: xorl $32768, %eax ## imm = 0x8000
300 ; X64-SSE2-NEXT: ## kill: def $ax killed $ax killed $eax
301 ; X64-SSE2-NEXT: retq
303 ; X64-SSE42-LABEL: test_reduce_v8i16:
304 ; X64-SSE42: ## %bb.0:
305 ; X64-SSE42-NEXT: pcmpeqd %xmm1, %xmm1
306 ; X64-SSE42-NEXT: pxor %xmm0, %xmm1
307 ; X64-SSE42-NEXT: phminposuw %xmm1, %xmm0
308 ; X64-SSE42-NEXT: movd %xmm0, %eax
309 ; X64-SSE42-NEXT: notl %eax
310 ; X64-SSE42-NEXT: ## kill: def $ax killed $ax killed $eax
311 ; X64-SSE42-NEXT: retq
313 ; X64-AVX1-LABEL: test_reduce_v8i16:
314 ; X64-AVX1: ## %bb.0:
315 ; X64-AVX1-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
316 ; X64-AVX1-NEXT: vpxor %xmm1, %xmm0, %xmm0
317 ; X64-AVX1-NEXT: vphminposuw %xmm0, %xmm0
318 ; X64-AVX1-NEXT: vmovd %xmm0, %eax
319 ; X64-AVX1-NEXT: notl %eax
320 ; X64-AVX1-NEXT: ## kill: def $ax killed $ax killed $eax
321 ; X64-AVX1-NEXT: retq
323 ; X64-AVX2-LABEL: test_reduce_v8i16:
324 ; X64-AVX2: ## %bb.0:
325 ; X64-AVX2-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
326 ; X64-AVX2-NEXT: vpxor %xmm1, %xmm0, %xmm0
327 ; X64-AVX2-NEXT: vphminposuw %xmm0, %xmm0
328 ; X64-AVX2-NEXT: vmovd %xmm0, %eax
329 ; X64-AVX2-NEXT: notl %eax
330 ; X64-AVX2-NEXT: ## kill: def $ax killed $ax killed $eax
331 ; X64-AVX2-NEXT: retq
333 ; X64-AVX512-LABEL: test_reduce_v8i16:
334 ; X64-AVX512: ## %bb.0:
335 ; X64-AVX512-NEXT: vpternlogq $15, %xmm0, %xmm0, %xmm0
336 ; X64-AVX512-NEXT: vphminposuw %xmm0, %xmm0
337 ; X64-AVX512-NEXT: vmovd %xmm0, %eax
338 ; X64-AVX512-NEXT: notl %eax
339 ; X64-AVX512-NEXT: ## kill: def $ax killed $ax killed $eax
340 ; X64-AVX512-NEXT: retq
341 %1 = shufflevector <8 x i16> %a0, <8 x i16> undef, <8 x i32> <i32 4, i32 5, i32 6, i32 7, i32 undef, i32 undef, i32 undef, i32 undef>
342 %2 = icmp ugt <8 x i16> %a0, %1
343 %3 = select <8 x i1> %2, <8 x i16> %a0, <8 x i16> %1
344 %4 = shufflevector <8 x i16> %3, <8 x i16> undef, <8 x i32> <i32 2, i32 3, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
345 %5 = icmp ugt <8 x i16> %3, %4
346 %6 = select <8 x i1> %5, <8 x i16> %3, <8 x i16> %4
347 %7 = shufflevector <8 x i16> %6, <8 x i16> undef, <8 x i32> <i32 1, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
348 %8 = icmp ugt <8 x i16> %6, %7
349 %9 = select <8 x i1> %8, <8 x i16> %6, <8 x i16> %7
350 %10 = extractelement <8 x i16> %9, i32 0
354 define i8 @test_reduce_v16i8(<16 x i8> %a0) {
355 ; X86-SSE2-LABEL: test_reduce_v16i8:
356 ; X86-SSE2: ## %bb.0:
357 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
358 ; X86-SSE2-NEXT: pmaxub %xmm0, %xmm1
359 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,2,3]
360 ; X86-SSE2-NEXT: pmaxub %xmm1, %xmm0
361 ; X86-SSE2-NEXT: movdqa %xmm0, %xmm1
362 ; X86-SSE2-NEXT: psrld $16, %xmm1
363 ; X86-SSE2-NEXT: pmaxub %xmm0, %xmm1
364 ; X86-SSE2-NEXT: movdqa %xmm1, %xmm0
365 ; X86-SSE2-NEXT: psrlw $8, %xmm0
366 ; X86-SSE2-NEXT: pmaxub %xmm1, %xmm0
367 ; X86-SSE2-NEXT: movd %xmm0, %eax
368 ; X86-SSE2-NEXT: ## kill: def $al killed $al killed $eax
369 ; X86-SSE2-NEXT: retl
371 ; X86-SSE42-LABEL: test_reduce_v16i8:
372 ; X86-SSE42: ## %bb.0:
373 ; X86-SSE42-NEXT: pcmpeqd %xmm1, %xmm1
374 ; X86-SSE42-NEXT: pxor %xmm0, %xmm1
375 ; X86-SSE42-NEXT: movdqa %xmm1, %xmm0
376 ; X86-SSE42-NEXT: psrlw $8, %xmm0
377 ; X86-SSE42-NEXT: pminub %xmm1, %xmm0
378 ; X86-SSE42-NEXT: phminposuw %xmm0, %xmm0
379 ; X86-SSE42-NEXT: pextrb $0, %xmm0, %eax
380 ; X86-SSE42-NEXT: notb %al
381 ; X86-SSE42-NEXT: ## kill: def $al killed $al killed $eax
382 ; X86-SSE42-NEXT: retl
384 ; X86-AVX-LABEL: test_reduce_v16i8:
386 ; X86-AVX-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
387 ; X86-AVX-NEXT: vpxor %xmm1, %xmm0, %xmm0
388 ; X86-AVX-NEXT: vpsrlw $8, %xmm0, %xmm1
389 ; X86-AVX-NEXT: vpminub %xmm1, %xmm0, %xmm0
390 ; X86-AVX-NEXT: vphminposuw %xmm0, %xmm0
391 ; X86-AVX-NEXT: vpextrb $0, %xmm0, %eax
392 ; X86-AVX-NEXT: notb %al
393 ; X86-AVX-NEXT: ## kill: def $al killed $al killed $eax
396 ; X64-SSE2-LABEL: test_reduce_v16i8:
397 ; X64-SSE2: ## %bb.0:
398 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
399 ; X64-SSE2-NEXT: pmaxub %xmm0, %xmm1
400 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,2,3]
401 ; X64-SSE2-NEXT: pmaxub %xmm1, %xmm0
402 ; X64-SSE2-NEXT: movdqa %xmm0, %xmm1
403 ; X64-SSE2-NEXT: psrld $16, %xmm1
404 ; X64-SSE2-NEXT: pmaxub %xmm0, %xmm1
405 ; X64-SSE2-NEXT: movdqa %xmm1, %xmm0
406 ; X64-SSE2-NEXT: psrlw $8, %xmm0
407 ; X64-SSE2-NEXT: pmaxub %xmm1, %xmm0
408 ; X64-SSE2-NEXT: movd %xmm0, %eax
409 ; X64-SSE2-NEXT: ## kill: def $al killed $al killed $eax
410 ; X64-SSE2-NEXT: retq
412 ; X64-SSE42-LABEL: test_reduce_v16i8:
413 ; X64-SSE42: ## %bb.0:
414 ; X64-SSE42-NEXT: pcmpeqd %xmm1, %xmm1
415 ; X64-SSE42-NEXT: pxor %xmm0, %xmm1
416 ; X64-SSE42-NEXT: movdqa %xmm1, %xmm0
417 ; X64-SSE42-NEXT: psrlw $8, %xmm0
418 ; X64-SSE42-NEXT: pminub %xmm1, %xmm0
419 ; X64-SSE42-NEXT: phminposuw %xmm0, %xmm0
420 ; X64-SSE42-NEXT: pextrb $0, %xmm0, %eax
421 ; X64-SSE42-NEXT: notb %al
422 ; X64-SSE42-NEXT: ## kill: def $al killed $al killed $eax
423 ; X64-SSE42-NEXT: retq
425 ; X64-AVX1-LABEL: test_reduce_v16i8:
426 ; X64-AVX1: ## %bb.0:
427 ; X64-AVX1-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
428 ; X64-AVX1-NEXT: vpxor %xmm1, %xmm0, %xmm0
429 ; X64-AVX1-NEXT: vpsrlw $8, %xmm0, %xmm1
430 ; X64-AVX1-NEXT: vpminub %xmm1, %xmm0, %xmm0
431 ; X64-AVX1-NEXT: vphminposuw %xmm0, %xmm0
432 ; X64-AVX1-NEXT: vpextrb $0, %xmm0, %eax
433 ; X64-AVX1-NEXT: notb %al
434 ; X64-AVX1-NEXT: ## kill: def $al killed $al killed $eax
435 ; X64-AVX1-NEXT: retq
437 ; X64-AVX2-LABEL: test_reduce_v16i8:
438 ; X64-AVX2: ## %bb.0:
439 ; X64-AVX2-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
440 ; X64-AVX2-NEXT: vpxor %xmm1, %xmm0, %xmm0
441 ; X64-AVX2-NEXT: vpsrlw $8, %xmm0, %xmm1
442 ; X64-AVX2-NEXT: vpminub %xmm1, %xmm0, %xmm0
443 ; X64-AVX2-NEXT: vphminposuw %xmm0, %xmm0
444 ; X64-AVX2-NEXT: vpextrb $0, %xmm0, %eax
445 ; X64-AVX2-NEXT: notb %al
446 ; X64-AVX2-NEXT: ## kill: def $al killed $al killed $eax
447 ; X64-AVX2-NEXT: retq
449 ; X64-AVX512-LABEL: test_reduce_v16i8:
450 ; X64-AVX512: ## %bb.0:
451 ; X64-AVX512-NEXT: vpternlogq $15, %xmm0, %xmm0, %xmm0
452 ; X64-AVX512-NEXT: vpsrlw $8, %xmm0, %xmm1
453 ; X64-AVX512-NEXT: vpminub %xmm1, %xmm0, %xmm0
454 ; X64-AVX512-NEXT: vphminposuw %xmm0, %xmm0
455 ; X64-AVX512-NEXT: vpextrb $0, %xmm0, %eax
456 ; X64-AVX512-NEXT: notb %al
457 ; X64-AVX512-NEXT: ## kill: def $al killed $al killed $eax
458 ; X64-AVX512-NEXT: retq
459 %1 = shufflevector <16 x i8> %a0, <16 x i8> undef, <16 x i32> <i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
460 %2 = icmp ugt <16 x i8> %a0, %1
461 %3 = select <16 x i1> %2, <16 x i8> %a0, <16 x i8> %1
462 %4 = shufflevector <16 x i8> %3, <16 x i8> undef, <16 x i32> <i32 4, i32 5, i32 6, i32 7, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
463 %5 = icmp ugt <16 x i8> %3, %4
464 %6 = select <16 x i1> %5, <16 x i8> %3, <16 x i8> %4
465 %7 = shufflevector <16 x i8> %6, <16 x i8> undef, <16 x i32> <i32 2, i32 3, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
466 %8 = icmp ugt <16 x i8> %6, %7
467 %9 = select <16 x i1> %8, <16 x i8> %6, <16 x i8> %7
468 %10 = shufflevector <16 x i8> %9, <16 x i8> undef, <16 x i32> <i32 1, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
469 %11 = icmp ugt <16 x i8> %9, %10
470 %12 = select <16 x i1> %11, <16 x i8> %9, <16 x i8> %10
471 %13 = extractelement <16 x i8> %12, i32 0
479 define i64 @test_reduce_v4i64(<4 x i64> %a0) {
480 ; X86-SSE2-LABEL: test_reduce_v4i64:
481 ; X86-SSE2: ## %bb.0:
482 ; X86-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
483 ; X86-SSE2-NEXT: movdqa %xmm1, %xmm3
484 ; X86-SSE2-NEXT: pxor %xmm2, %xmm3
485 ; X86-SSE2-NEXT: movdqa %xmm0, %xmm4
486 ; X86-SSE2-NEXT: pxor %xmm2, %xmm4
487 ; X86-SSE2-NEXT: movdqa %xmm4, %xmm5
488 ; X86-SSE2-NEXT: pcmpgtd %xmm3, %xmm5
489 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm6 = xmm5[0,0,2,2]
490 ; X86-SSE2-NEXT: pcmpeqd %xmm3, %xmm4
491 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm4[1,1,3,3]
492 ; X86-SSE2-NEXT: pand %xmm6, %xmm3
493 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm4 = xmm5[1,1,3,3]
494 ; X86-SSE2-NEXT: por %xmm3, %xmm4
495 ; X86-SSE2-NEXT: pand %xmm4, %xmm0
496 ; X86-SSE2-NEXT: pandn %xmm1, %xmm4
497 ; X86-SSE2-NEXT: por %xmm0, %xmm4
498 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm4[2,3,0,1]
499 ; X86-SSE2-NEXT: movdqa %xmm4, %xmm1
500 ; X86-SSE2-NEXT: pxor %xmm2, %xmm1
501 ; X86-SSE2-NEXT: pxor %xmm0, %xmm2
502 ; X86-SSE2-NEXT: movdqa %xmm1, %xmm3
503 ; X86-SSE2-NEXT: pcmpgtd %xmm2, %xmm3
504 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm5 = xmm3[0,0,2,2]
505 ; X86-SSE2-NEXT: pcmpeqd %xmm1, %xmm2
506 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm2[1,1,3,3]
507 ; X86-SSE2-NEXT: pand %xmm5, %xmm1
508 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm3[1,1,3,3]
509 ; X86-SSE2-NEXT: por %xmm1, %xmm2
510 ; X86-SSE2-NEXT: pand %xmm2, %xmm4
511 ; X86-SSE2-NEXT: pandn %xmm0, %xmm2
512 ; X86-SSE2-NEXT: por %xmm4, %xmm2
513 ; X86-SSE2-NEXT: movd %xmm2, %eax
514 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,2,3]
515 ; X86-SSE2-NEXT: movd %xmm0, %edx
516 ; X86-SSE2-NEXT: retl
518 ; X86-SSE42-LABEL: test_reduce_v4i64:
519 ; X86-SSE42: ## %bb.0:
520 ; X86-SSE42-NEXT: movdqa %xmm0, %xmm2
521 ; X86-SSE42-NEXT: movdqa {{.*#+}} xmm3 = [0,2147483648,0,2147483648]
522 ; X86-SSE42-NEXT: movdqa %xmm1, %xmm4
523 ; X86-SSE42-NEXT: pxor %xmm3, %xmm4
524 ; X86-SSE42-NEXT: pxor %xmm3, %xmm0
525 ; X86-SSE42-NEXT: pcmpgtq %xmm4, %xmm0
526 ; X86-SSE42-NEXT: blendvpd %xmm0, %xmm2, %xmm1
527 ; X86-SSE42-NEXT: pshufd {{.*#+}} xmm2 = xmm1[2,3,0,1]
528 ; X86-SSE42-NEXT: movdqa %xmm1, %xmm0
529 ; X86-SSE42-NEXT: pxor %xmm3, %xmm0
530 ; X86-SSE42-NEXT: pxor %xmm2, %xmm3
531 ; X86-SSE42-NEXT: pcmpgtq %xmm3, %xmm0
532 ; X86-SSE42-NEXT: blendvpd %xmm0, %xmm1, %xmm2
533 ; X86-SSE42-NEXT: movd %xmm2, %eax
534 ; X86-SSE42-NEXT: pextrd $1, %xmm2, %edx
535 ; X86-SSE42-NEXT: retl
537 ; X86-AVX1-LABEL: test_reduce_v4i64:
538 ; X86-AVX1: ## %bb.0:
539 ; X86-AVX1-NEXT: vextractf128 $1, %ymm0, %xmm1
540 ; X86-AVX1-NEXT: vmovddup {{.*#+}} xmm2 = [-0.0E+0,-0.0E+0]
541 ; X86-AVX1-NEXT: ## xmm2 = mem[0,0]
542 ; X86-AVX1-NEXT: vxorps %xmm2, %xmm1, %xmm3
543 ; X86-AVX1-NEXT: vxorps %xmm2, %xmm0, %xmm4
544 ; X86-AVX1-NEXT: vpcmpgtq %xmm3, %xmm4, %xmm3
545 ; X86-AVX1-NEXT: vblendvpd %xmm3, %xmm0, %xmm1, %xmm0
546 ; X86-AVX1-NEXT: vpermilps {{.*#+}} xmm1 = xmm0[2,3,0,1]
547 ; X86-AVX1-NEXT: vxorpd %xmm2, %xmm0, %xmm3
548 ; X86-AVX1-NEXT: vxorpd %xmm2, %xmm1, %xmm2
549 ; X86-AVX1-NEXT: vpcmpgtq %xmm2, %xmm3, %xmm2
550 ; X86-AVX1-NEXT: vblendvpd %xmm2, %xmm0, %xmm1, %xmm0
551 ; X86-AVX1-NEXT: vmovd %xmm0, %eax
552 ; X86-AVX1-NEXT: vpextrd $1, %xmm0, %edx
553 ; X86-AVX1-NEXT: vzeroupper
554 ; X86-AVX1-NEXT: retl
556 ; X86-AVX2-LABEL: test_reduce_v4i64:
557 ; X86-AVX2: ## %bb.0:
558 ; X86-AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
559 ; X86-AVX2-NEXT: vpbroadcastq {{.*#+}} xmm2 = [-0.0E+0,-0.0E+0]
560 ; X86-AVX2-NEXT: vpxor %xmm2, %xmm1, %xmm3
561 ; X86-AVX2-NEXT: vpxor %xmm2, %xmm0, %xmm4
562 ; X86-AVX2-NEXT: vpcmpgtq %xmm3, %xmm4, %xmm3
563 ; X86-AVX2-NEXT: vblendvpd %xmm3, %xmm0, %xmm1, %xmm0
564 ; X86-AVX2-NEXT: vpermilps {{.*#+}} xmm1 = xmm0[2,3,0,1]
565 ; X86-AVX2-NEXT: vxorpd %xmm2, %xmm0, %xmm3
566 ; X86-AVX2-NEXT: vxorpd %xmm2, %xmm1, %xmm2
567 ; X86-AVX2-NEXT: vpcmpgtq %xmm2, %xmm3, %xmm2
568 ; X86-AVX2-NEXT: vblendvpd %xmm2, %xmm0, %xmm1, %xmm0
569 ; X86-AVX2-NEXT: vmovd %xmm0, %eax
570 ; X86-AVX2-NEXT: vpextrd $1, %xmm0, %edx
571 ; X86-AVX2-NEXT: vzeroupper
572 ; X86-AVX2-NEXT: retl
574 ; X64-SSE2-LABEL: test_reduce_v4i64:
575 ; X64-SSE2: ## %bb.0:
576 ; X64-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
577 ; X64-SSE2-NEXT: movdqa %xmm1, %xmm3
578 ; X64-SSE2-NEXT: pxor %xmm2, %xmm3
579 ; X64-SSE2-NEXT: movdqa %xmm0, %xmm4
580 ; X64-SSE2-NEXT: pxor %xmm2, %xmm4
581 ; X64-SSE2-NEXT: movdqa %xmm4, %xmm5
582 ; X64-SSE2-NEXT: pcmpgtd %xmm3, %xmm5
583 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm6 = xmm5[0,0,2,2]
584 ; X64-SSE2-NEXT: pcmpeqd %xmm3, %xmm4
585 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm4[1,1,3,3]
586 ; X64-SSE2-NEXT: pand %xmm6, %xmm3
587 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm4 = xmm5[1,1,3,3]
588 ; X64-SSE2-NEXT: por %xmm3, %xmm4
589 ; X64-SSE2-NEXT: pand %xmm4, %xmm0
590 ; X64-SSE2-NEXT: pandn %xmm1, %xmm4
591 ; X64-SSE2-NEXT: por %xmm0, %xmm4
592 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm4[2,3,0,1]
593 ; X64-SSE2-NEXT: movdqa %xmm4, %xmm1
594 ; X64-SSE2-NEXT: pxor %xmm2, %xmm1
595 ; X64-SSE2-NEXT: pxor %xmm0, %xmm2
596 ; X64-SSE2-NEXT: movdqa %xmm1, %xmm3
597 ; X64-SSE2-NEXT: pcmpgtd %xmm2, %xmm3
598 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm5 = xmm3[0,0,2,2]
599 ; X64-SSE2-NEXT: pcmpeqd %xmm1, %xmm2
600 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm2[1,1,3,3]
601 ; X64-SSE2-NEXT: pand %xmm5, %xmm1
602 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm3[1,1,3,3]
603 ; X64-SSE2-NEXT: por %xmm1, %xmm2
604 ; X64-SSE2-NEXT: pand %xmm2, %xmm4
605 ; X64-SSE2-NEXT: pandn %xmm0, %xmm2
606 ; X64-SSE2-NEXT: por %xmm4, %xmm2
607 ; X64-SSE2-NEXT: movq %xmm2, %rax
608 ; X64-SSE2-NEXT: retq
610 ; X64-SSE42-LABEL: test_reduce_v4i64:
611 ; X64-SSE42: ## %bb.0:
612 ; X64-SSE42-NEXT: movdqa %xmm0, %xmm2
613 ; X64-SSE42-NEXT: movdqa {{.*#+}} xmm3 = [9223372036854775808,9223372036854775808]
614 ; X64-SSE42-NEXT: movdqa %xmm1, %xmm4
615 ; X64-SSE42-NEXT: pxor %xmm3, %xmm4
616 ; X64-SSE42-NEXT: pxor %xmm3, %xmm0
617 ; X64-SSE42-NEXT: pcmpgtq %xmm4, %xmm0
618 ; X64-SSE42-NEXT: blendvpd %xmm0, %xmm2, %xmm1
619 ; X64-SSE42-NEXT: pshufd {{.*#+}} xmm2 = xmm1[2,3,0,1]
620 ; X64-SSE42-NEXT: movdqa %xmm1, %xmm0
621 ; X64-SSE42-NEXT: pxor %xmm3, %xmm0
622 ; X64-SSE42-NEXT: pxor %xmm2, %xmm3
623 ; X64-SSE42-NEXT: pcmpgtq %xmm3, %xmm0
624 ; X64-SSE42-NEXT: blendvpd %xmm0, %xmm1, %xmm2
625 ; X64-SSE42-NEXT: movq %xmm2, %rax
626 ; X64-SSE42-NEXT: retq
628 ; X64-AVX1-LABEL: test_reduce_v4i64:
629 ; X64-AVX1: ## %bb.0:
630 ; X64-AVX1-NEXT: vextractf128 $1, %ymm0, %xmm1
631 ; X64-AVX1-NEXT: vmovdqa {{.*#+}} xmm2 = [9223372036854775808,9223372036854775808]
632 ; X64-AVX1-NEXT: vpxor %xmm2, %xmm1, %xmm3
633 ; X64-AVX1-NEXT: vpxor %xmm2, %xmm0, %xmm4
634 ; X64-AVX1-NEXT: vpcmpgtq %xmm3, %xmm4, %xmm3
635 ; X64-AVX1-NEXT: vblendvpd %xmm3, %xmm0, %xmm1, %xmm0
636 ; X64-AVX1-NEXT: vpermilps {{.*#+}} xmm1 = xmm0[2,3,0,1]
637 ; X64-AVX1-NEXT: vxorpd %xmm2, %xmm0, %xmm3
638 ; X64-AVX1-NEXT: vxorpd %xmm2, %xmm1, %xmm2
639 ; X64-AVX1-NEXT: vpcmpgtq %xmm2, %xmm3, %xmm2
640 ; X64-AVX1-NEXT: vblendvpd %xmm2, %xmm0, %xmm1, %xmm0
641 ; X64-AVX1-NEXT: vmovq %xmm0, %rax
642 ; X64-AVX1-NEXT: vzeroupper
643 ; X64-AVX1-NEXT: retq
645 ; X64-AVX2-LABEL: test_reduce_v4i64:
646 ; X64-AVX2: ## %bb.0:
647 ; X64-AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
648 ; X64-AVX2-NEXT: vpbroadcastq {{.*#+}} ymm2 = [9223372036854775808,9223372036854775808,9223372036854775808,9223372036854775808]
649 ; X64-AVX2-NEXT: vpxor %xmm2, %xmm1, %xmm3
650 ; X64-AVX2-NEXT: vpxor %xmm2, %xmm0, %xmm4
651 ; X64-AVX2-NEXT: vpcmpgtq %xmm3, %xmm4, %xmm3
652 ; X64-AVX2-NEXT: vblendvpd %xmm3, %xmm0, %xmm1, %xmm0
653 ; X64-AVX2-NEXT: vpermilps {{.*#+}} xmm1 = xmm0[2,3,0,1]
654 ; X64-AVX2-NEXT: vxorpd %xmm2, %xmm0, %xmm3
655 ; X64-AVX2-NEXT: vxorpd %xmm2, %xmm1, %xmm2
656 ; X64-AVX2-NEXT: vpcmpgtq %xmm2, %xmm3, %xmm2
657 ; X64-AVX2-NEXT: vblendvpd %xmm2, %xmm0, %xmm1, %xmm0
658 ; X64-AVX2-NEXT: vmovq %xmm0, %rax
659 ; X64-AVX2-NEXT: vzeroupper
660 ; X64-AVX2-NEXT: retq
662 ; X64-AVX512-LABEL: test_reduce_v4i64:
663 ; X64-AVX512: ## %bb.0:
664 ; X64-AVX512-NEXT: vextracti128 $1, %ymm0, %xmm1
665 ; X64-AVX512-NEXT: vpmaxuq %xmm1, %xmm0, %xmm0
666 ; X64-AVX512-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
667 ; X64-AVX512-NEXT: vpmaxuq %xmm1, %xmm0, %xmm0
668 ; X64-AVX512-NEXT: vmovq %xmm0, %rax
669 ; X64-AVX512-NEXT: vzeroupper
670 ; X64-AVX512-NEXT: retq
671 %1 = shufflevector <4 x i64> %a0, <4 x i64> undef, <4 x i32> <i32 2, i32 3, i32 undef, i32 undef>
672 %2 = icmp ugt <4 x i64> %a0, %1
673 %3 = select <4 x i1> %2, <4 x i64> %a0, <4 x i64> %1
674 %4 = shufflevector <4 x i64> %3, <4 x i64> undef, <4 x i32> <i32 1, i32 undef, i32 undef, i32 undef>
675 %5 = icmp ugt <4 x i64> %3, %4
676 %6 = select <4 x i1> %5, <4 x i64> %3, <4 x i64> %4
677 %7 = extractelement <4 x i64> %6, i32 0
681 define i32 @test_reduce_v8i32(<8 x i32> %a0) {
682 ; X86-SSE2-LABEL: test_reduce_v8i32:
683 ; X86-SSE2: ## %bb.0:
684 ; X86-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
685 ; X86-SSE2-NEXT: movdqa %xmm1, %xmm3
686 ; X86-SSE2-NEXT: pxor %xmm2, %xmm3
687 ; X86-SSE2-NEXT: movdqa %xmm0, %xmm4
688 ; X86-SSE2-NEXT: pxor %xmm2, %xmm4
689 ; X86-SSE2-NEXT: pcmpgtd %xmm3, %xmm4
690 ; X86-SSE2-NEXT: pand %xmm4, %xmm0
691 ; X86-SSE2-NEXT: pandn %xmm1, %xmm4
692 ; X86-SSE2-NEXT: por %xmm0, %xmm4
693 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm4[2,3,0,1]
694 ; X86-SSE2-NEXT: movdqa %xmm4, %xmm1
695 ; X86-SSE2-NEXT: pxor %xmm2, %xmm1
696 ; X86-SSE2-NEXT: movdqa %xmm0, %xmm3
697 ; X86-SSE2-NEXT: pxor %xmm2, %xmm3
698 ; X86-SSE2-NEXT: pcmpgtd %xmm3, %xmm1
699 ; X86-SSE2-NEXT: pand %xmm1, %xmm4
700 ; X86-SSE2-NEXT: pandn %xmm0, %xmm1
701 ; X86-SSE2-NEXT: por %xmm4, %xmm1
702 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,2,3]
703 ; X86-SSE2-NEXT: movdqa %xmm1, %xmm3
704 ; X86-SSE2-NEXT: pxor %xmm2, %xmm3
705 ; X86-SSE2-NEXT: pxor %xmm0, %xmm2
706 ; X86-SSE2-NEXT: pcmpgtd %xmm2, %xmm3
707 ; X86-SSE2-NEXT: pand %xmm3, %xmm1
708 ; X86-SSE2-NEXT: pandn %xmm0, %xmm3
709 ; X86-SSE2-NEXT: por %xmm1, %xmm3
710 ; X86-SSE2-NEXT: movd %xmm3, %eax
711 ; X86-SSE2-NEXT: retl
713 ; X86-SSE42-LABEL: test_reduce_v8i32:
714 ; X86-SSE42: ## %bb.0:
715 ; X86-SSE42-NEXT: pmaxud %xmm1, %xmm0
716 ; X86-SSE42-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
717 ; X86-SSE42-NEXT: pmaxud %xmm0, %xmm1
718 ; X86-SSE42-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,2,3]
719 ; X86-SSE42-NEXT: pmaxud %xmm1, %xmm0
720 ; X86-SSE42-NEXT: movd %xmm0, %eax
721 ; X86-SSE42-NEXT: retl
723 ; X86-AVX1-LABEL: test_reduce_v8i32:
724 ; X86-AVX1: ## %bb.0:
725 ; X86-AVX1-NEXT: vextractf128 $1, %ymm0, %xmm1
726 ; X86-AVX1-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
727 ; X86-AVX1-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
728 ; X86-AVX1-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
729 ; X86-AVX1-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[1,1,2,3]
730 ; X86-AVX1-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
731 ; X86-AVX1-NEXT: vmovd %xmm0, %eax
732 ; X86-AVX1-NEXT: vzeroupper
733 ; X86-AVX1-NEXT: retl
735 ; X86-AVX2-LABEL: test_reduce_v8i32:
736 ; X86-AVX2: ## %bb.0:
737 ; X86-AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
738 ; X86-AVX2-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
739 ; X86-AVX2-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
740 ; X86-AVX2-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
741 ; X86-AVX2-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[1,1,2,3]
742 ; X86-AVX2-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
743 ; X86-AVX2-NEXT: vmovd %xmm0, %eax
744 ; X86-AVX2-NEXT: vzeroupper
745 ; X86-AVX2-NEXT: retl
747 ; X64-SSE2-LABEL: test_reduce_v8i32:
748 ; X64-SSE2: ## %bb.0:
749 ; X64-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
750 ; X64-SSE2-NEXT: movdqa %xmm1, %xmm3
751 ; X64-SSE2-NEXT: pxor %xmm2, %xmm3
752 ; X64-SSE2-NEXT: movdqa %xmm0, %xmm4
753 ; X64-SSE2-NEXT: pxor %xmm2, %xmm4
754 ; X64-SSE2-NEXT: pcmpgtd %xmm3, %xmm4
755 ; X64-SSE2-NEXT: pand %xmm4, %xmm0
756 ; X64-SSE2-NEXT: pandn %xmm1, %xmm4
757 ; X64-SSE2-NEXT: por %xmm0, %xmm4
758 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm4[2,3,0,1]
759 ; X64-SSE2-NEXT: movdqa %xmm4, %xmm1
760 ; X64-SSE2-NEXT: pxor %xmm2, %xmm1
761 ; X64-SSE2-NEXT: movdqa %xmm0, %xmm3
762 ; X64-SSE2-NEXT: pxor %xmm2, %xmm3
763 ; X64-SSE2-NEXT: pcmpgtd %xmm3, %xmm1
764 ; X64-SSE2-NEXT: pand %xmm1, %xmm4
765 ; X64-SSE2-NEXT: pandn %xmm0, %xmm1
766 ; X64-SSE2-NEXT: por %xmm4, %xmm1
767 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,2,3]
768 ; X64-SSE2-NEXT: movdqa %xmm1, %xmm3
769 ; X64-SSE2-NEXT: pxor %xmm2, %xmm3
770 ; X64-SSE2-NEXT: pxor %xmm0, %xmm2
771 ; X64-SSE2-NEXT: pcmpgtd %xmm2, %xmm3
772 ; X64-SSE2-NEXT: pand %xmm3, %xmm1
773 ; X64-SSE2-NEXT: pandn %xmm0, %xmm3
774 ; X64-SSE2-NEXT: por %xmm1, %xmm3
775 ; X64-SSE2-NEXT: movd %xmm3, %eax
776 ; X64-SSE2-NEXT: retq
778 ; X64-SSE42-LABEL: test_reduce_v8i32:
779 ; X64-SSE42: ## %bb.0:
780 ; X64-SSE42-NEXT: pmaxud %xmm1, %xmm0
781 ; X64-SSE42-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
782 ; X64-SSE42-NEXT: pmaxud %xmm0, %xmm1
783 ; X64-SSE42-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,2,3]
784 ; X64-SSE42-NEXT: pmaxud %xmm1, %xmm0
785 ; X64-SSE42-NEXT: movd %xmm0, %eax
786 ; X64-SSE42-NEXT: retq
788 ; X64-AVX1-LABEL: test_reduce_v8i32:
789 ; X64-AVX1: ## %bb.0:
790 ; X64-AVX1-NEXT: vextractf128 $1, %ymm0, %xmm1
791 ; X64-AVX1-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
792 ; X64-AVX1-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
793 ; X64-AVX1-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
794 ; X64-AVX1-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[1,1,2,3]
795 ; X64-AVX1-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
796 ; X64-AVX1-NEXT: vmovd %xmm0, %eax
797 ; X64-AVX1-NEXT: vzeroupper
798 ; X64-AVX1-NEXT: retq
800 ; X64-AVX2-LABEL: test_reduce_v8i32:
801 ; X64-AVX2: ## %bb.0:
802 ; X64-AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
803 ; X64-AVX2-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
804 ; X64-AVX2-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
805 ; X64-AVX2-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
806 ; X64-AVX2-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[1,1,2,3]
807 ; X64-AVX2-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
808 ; X64-AVX2-NEXT: vmovd %xmm0, %eax
809 ; X64-AVX2-NEXT: vzeroupper
810 ; X64-AVX2-NEXT: retq
812 ; X64-AVX512-LABEL: test_reduce_v8i32:
813 ; X64-AVX512: ## %bb.0:
814 ; X64-AVX512-NEXT: vextracti128 $1, %ymm0, %xmm1
815 ; X64-AVX512-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
816 ; X64-AVX512-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
817 ; X64-AVX512-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
818 ; X64-AVX512-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[1,1,2,3]
819 ; X64-AVX512-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
820 ; X64-AVX512-NEXT: vmovd %xmm0, %eax
821 ; X64-AVX512-NEXT: vzeroupper
822 ; X64-AVX512-NEXT: retq
823 %1 = shufflevector <8 x i32> %a0, <8 x i32> undef, <8 x i32> <i32 4, i32 5, i32 6, i32 7, i32 undef, i32 undef, i32 undef, i32 undef>
824 %2 = icmp ugt <8 x i32> %a0, %1
825 %3 = select <8 x i1> %2, <8 x i32> %a0, <8 x i32> %1
826 %4 = shufflevector <8 x i32> %3, <8 x i32> undef, <8 x i32> <i32 2, i32 3, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
827 %5 = icmp ugt <8 x i32> %3, %4
828 %6 = select <8 x i1> %5, <8 x i32> %3, <8 x i32> %4
829 %7 = shufflevector <8 x i32> %6, <8 x i32> undef, <8 x i32> <i32 1, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
830 %8 = icmp ugt <8 x i32> %6, %7
831 %9 = select <8 x i1> %8, <8 x i32> %6, <8 x i32> %7
832 %10 = extractelement <8 x i32> %9, i32 0
836 define i16 @test_reduce_v16i16(<16 x i16> %a0) {
837 ; X86-SSE2-LABEL: test_reduce_v16i16:
838 ; X86-SSE2: ## %bb.0:
839 ; X86-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [32768,32768,32768,32768,32768,32768,32768,32768]
840 ; X86-SSE2-NEXT: pxor %xmm2, %xmm1
841 ; X86-SSE2-NEXT: pxor %xmm2, %xmm0
842 ; X86-SSE2-NEXT: pmaxsw %xmm1, %xmm0
843 ; X86-SSE2-NEXT: movdqa %xmm0, %xmm1
844 ; X86-SSE2-NEXT: pxor %xmm2, %xmm1
845 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,3,0,1]
846 ; X86-SSE2-NEXT: pxor %xmm2, %xmm1
847 ; X86-SSE2-NEXT: pmaxsw %xmm0, %xmm1
848 ; X86-SSE2-NEXT: movdqa %xmm1, %xmm0
849 ; X86-SSE2-NEXT: pxor %xmm2, %xmm0
850 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,2,3]
851 ; X86-SSE2-NEXT: pxor %xmm2, %xmm0
852 ; X86-SSE2-NEXT: pmaxsw %xmm1, %xmm0
853 ; X86-SSE2-NEXT: movdqa %xmm0, %xmm1
854 ; X86-SSE2-NEXT: pxor %xmm2, %xmm1
855 ; X86-SSE2-NEXT: psrld $16, %xmm1
856 ; X86-SSE2-NEXT: pxor %xmm2, %xmm1
857 ; X86-SSE2-NEXT: pmaxsw %xmm0, %xmm1
858 ; X86-SSE2-NEXT: movd %xmm1, %eax
859 ; X86-SSE2-NEXT: xorl $32768, %eax ## imm = 0x8000
860 ; X86-SSE2-NEXT: ## kill: def $ax killed $ax killed $eax
861 ; X86-SSE2-NEXT: retl
863 ; X86-SSE42-LABEL: test_reduce_v16i16:
864 ; X86-SSE42: ## %bb.0:
865 ; X86-SSE42-NEXT: pmaxuw %xmm1, %xmm0
866 ; X86-SSE42-NEXT: pcmpeqd %xmm1, %xmm1
867 ; X86-SSE42-NEXT: pxor %xmm0, %xmm1
868 ; X86-SSE42-NEXT: phminposuw %xmm1, %xmm0
869 ; X86-SSE42-NEXT: movd %xmm0, %eax
870 ; X86-SSE42-NEXT: notl %eax
871 ; X86-SSE42-NEXT: ## kill: def $ax killed $ax killed $eax
872 ; X86-SSE42-NEXT: retl
874 ; X86-AVX1-LABEL: test_reduce_v16i16:
875 ; X86-AVX1: ## %bb.0:
876 ; X86-AVX1-NEXT: vextractf128 $1, %ymm0, %xmm1
877 ; X86-AVX1-NEXT: vpmaxuw %xmm1, %xmm0, %xmm0
878 ; X86-AVX1-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
879 ; X86-AVX1-NEXT: vpxor %xmm1, %xmm0, %xmm0
880 ; X86-AVX1-NEXT: vphminposuw %xmm0, %xmm0
881 ; X86-AVX1-NEXT: vmovd %xmm0, %eax
882 ; X86-AVX1-NEXT: notl %eax
883 ; X86-AVX1-NEXT: ## kill: def $ax killed $ax killed $eax
884 ; X86-AVX1-NEXT: vzeroupper
885 ; X86-AVX1-NEXT: retl
887 ; X86-AVX2-LABEL: test_reduce_v16i16:
888 ; X86-AVX2: ## %bb.0:
889 ; X86-AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
890 ; X86-AVX2-NEXT: vpmaxuw %xmm1, %xmm0, %xmm0
891 ; X86-AVX2-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
892 ; X86-AVX2-NEXT: vpxor %xmm1, %xmm0, %xmm0
893 ; X86-AVX2-NEXT: vphminposuw %xmm0, %xmm0
894 ; X86-AVX2-NEXT: vmovd %xmm0, %eax
895 ; X86-AVX2-NEXT: notl %eax
896 ; X86-AVX2-NEXT: ## kill: def $ax killed $ax killed $eax
897 ; X86-AVX2-NEXT: vzeroupper
898 ; X86-AVX2-NEXT: retl
900 ; X64-SSE2-LABEL: test_reduce_v16i16:
901 ; X64-SSE2: ## %bb.0:
902 ; X64-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [32768,32768,32768,32768,32768,32768,32768,32768]
903 ; X64-SSE2-NEXT: pxor %xmm2, %xmm1
904 ; X64-SSE2-NEXT: pxor %xmm2, %xmm0
905 ; X64-SSE2-NEXT: pmaxsw %xmm1, %xmm0
906 ; X64-SSE2-NEXT: movdqa %xmm0, %xmm1
907 ; X64-SSE2-NEXT: pxor %xmm2, %xmm1
908 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,3,0,1]
909 ; X64-SSE2-NEXT: pxor %xmm2, %xmm1
910 ; X64-SSE2-NEXT: pmaxsw %xmm0, %xmm1
911 ; X64-SSE2-NEXT: movdqa %xmm1, %xmm0
912 ; X64-SSE2-NEXT: pxor %xmm2, %xmm0
913 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,2,3]
914 ; X64-SSE2-NEXT: pxor %xmm2, %xmm0
915 ; X64-SSE2-NEXT: pmaxsw %xmm1, %xmm0
916 ; X64-SSE2-NEXT: movdqa %xmm0, %xmm1
917 ; X64-SSE2-NEXT: pxor %xmm2, %xmm1
918 ; X64-SSE2-NEXT: psrld $16, %xmm1
919 ; X64-SSE2-NEXT: pxor %xmm2, %xmm1
920 ; X64-SSE2-NEXT: pmaxsw %xmm0, %xmm1
921 ; X64-SSE2-NEXT: movd %xmm1, %eax
922 ; X64-SSE2-NEXT: xorl $32768, %eax ## imm = 0x8000
923 ; X64-SSE2-NEXT: ## kill: def $ax killed $ax killed $eax
924 ; X64-SSE2-NEXT: retq
926 ; X64-SSE42-LABEL: test_reduce_v16i16:
927 ; X64-SSE42: ## %bb.0:
928 ; X64-SSE42-NEXT: pmaxuw %xmm1, %xmm0
929 ; X64-SSE42-NEXT: pcmpeqd %xmm1, %xmm1
930 ; X64-SSE42-NEXT: pxor %xmm0, %xmm1
931 ; X64-SSE42-NEXT: phminposuw %xmm1, %xmm0
932 ; X64-SSE42-NEXT: movd %xmm0, %eax
933 ; X64-SSE42-NEXT: notl %eax
934 ; X64-SSE42-NEXT: ## kill: def $ax killed $ax killed $eax
935 ; X64-SSE42-NEXT: retq
937 ; X64-AVX1-LABEL: test_reduce_v16i16:
938 ; X64-AVX1: ## %bb.0:
939 ; X64-AVX1-NEXT: vextractf128 $1, %ymm0, %xmm1
940 ; X64-AVX1-NEXT: vpmaxuw %xmm1, %xmm0, %xmm0
941 ; X64-AVX1-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
942 ; X64-AVX1-NEXT: vpxor %xmm1, %xmm0, %xmm0
943 ; X64-AVX1-NEXT: vphminposuw %xmm0, %xmm0
944 ; X64-AVX1-NEXT: vmovd %xmm0, %eax
945 ; X64-AVX1-NEXT: notl %eax
946 ; X64-AVX1-NEXT: ## kill: def $ax killed $ax killed $eax
947 ; X64-AVX1-NEXT: vzeroupper
948 ; X64-AVX1-NEXT: retq
950 ; X64-AVX2-LABEL: test_reduce_v16i16:
951 ; X64-AVX2: ## %bb.0:
952 ; X64-AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
953 ; X64-AVX2-NEXT: vpmaxuw %xmm1, %xmm0, %xmm0
954 ; X64-AVX2-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
955 ; X64-AVX2-NEXT: vpxor %xmm1, %xmm0, %xmm0
956 ; X64-AVX2-NEXT: vphminposuw %xmm0, %xmm0
957 ; X64-AVX2-NEXT: vmovd %xmm0, %eax
958 ; X64-AVX2-NEXT: notl %eax
959 ; X64-AVX2-NEXT: ## kill: def $ax killed $ax killed $eax
960 ; X64-AVX2-NEXT: vzeroupper
961 ; X64-AVX2-NEXT: retq
963 ; X64-AVX512-LABEL: test_reduce_v16i16:
964 ; X64-AVX512: ## %bb.0:
965 ; X64-AVX512-NEXT: vextracti128 $1, %ymm0, %xmm1
966 ; X64-AVX512-NEXT: vpmaxuw %xmm1, %xmm0, %xmm0
967 ; X64-AVX512-NEXT: vpternlogq $15, %xmm0, %xmm0, %xmm0
968 ; X64-AVX512-NEXT: vphminposuw %xmm0, %xmm0
969 ; X64-AVX512-NEXT: vmovd %xmm0, %eax
970 ; X64-AVX512-NEXT: notl %eax
971 ; X64-AVX512-NEXT: ## kill: def $ax killed $ax killed $eax
972 ; X64-AVX512-NEXT: vzeroupper
973 ; X64-AVX512-NEXT: retq
974 %1 = shufflevector <16 x i16> %a0, <16 x i16> undef, <16 x i32> <i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
975 %2 = icmp ugt <16 x i16> %a0, %1
976 %3 = select <16 x i1> %2, <16 x i16> %a0, <16 x i16> %1
977 %4 = shufflevector <16 x i16> %3, <16 x i16> undef, <16 x i32> <i32 4, i32 5, i32 6, i32 7, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
978 %5 = icmp ugt <16 x i16> %3, %4
979 %6 = select <16 x i1> %5, <16 x i16> %3, <16 x i16> %4
980 %7 = shufflevector <16 x i16> %6, <16 x i16> undef, <16 x i32> <i32 2, i32 3, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
981 %8 = icmp ugt <16 x i16> %6, %7
982 %9 = select <16 x i1> %8, <16 x i16> %6, <16 x i16> %7
983 %10 = shufflevector <16 x i16> %9, <16 x i16> undef, <16 x i32> <i32 1, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
984 %11 = icmp ugt <16 x i16> %9, %10
985 %12 = select <16 x i1> %11, <16 x i16> %9, <16 x i16> %10
986 %13 = extractelement <16 x i16> %12, i32 0
990 define i8 @test_reduce_v32i8(<32 x i8> %a0) {
991 ; X86-SSE2-LABEL: test_reduce_v32i8:
992 ; X86-SSE2: ## %bb.0:
993 ; X86-SSE2-NEXT: pmaxub %xmm1, %xmm0
994 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
995 ; X86-SSE2-NEXT: pmaxub %xmm0, %xmm1
996 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,2,3]
997 ; X86-SSE2-NEXT: pmaxub %xmm1, %xmm0
998 ; X86-SSE2-NEXT: movdqa %xmm0, %xmm1
999 ; X86-SSE2-NEXT: psrld $16, %xmm1
1000 ; X86-SSE2-NEXT: pmaxub %xmm0, %xmm1
1001 ; X86-SSE2-NEXT: movdqa %xmm1, %xmm0
1002 ; X86-SSE2-NEXT: psrlw $8, %xmm0
1003 ; X86-SSE2-NEXT: pmaxub %xmm1, %xmm0
1004 ; X86-SSE2-NEXT: movd %xmm0, %eax
1005 ; X86-SSE2-NEXT: ## kill: def $al killed $al killed $eax
1006 ; X86-SSE2-NEXT: retl
1008 ; X86-SSE42-LABEL: test_reduce_v32i8:
1009 ; X86-SSE42: ## %bb.0:
1010 ; X86-SSE42-NEXT: pmaxub %xmm1, %xmm0
1011 ; X86-SSE42-NEXT: pcmpeqd %xmm1, %xmm1
1012 ; X86-SSE42-NEXT: pxor %xmm0, %xmm1
1013 ; X86-SSE42-NEXT: movdqa %xmm1, %xmm0
1014 ; X86-SSE42-NEXT: psrlw $8, %xmm0
1015 ; X86-SSE42-NEXT: pminub %xmm1, %xmm0
1016 ; X86-SSE42-NEXT: phminposuw %xmm0, %xmm0
1017 ; X86-SSE42-NEXT: pextrb $0, %xmm0, %eax
1018 ; X86-SSE42-NEXT: notb %al
1019 ; X86-SSE42-NEXT: ## kill: def $al killed $al killed $eax
1020 ; X86-SSE42-NEXT: retl
1022 ; X86-AVX1-LABEL: test_reduce_v32i8:
1023 ; X86-AVX1: ## %bb.0:
1024 ; X86-AVX1-NEXT: vextractf128 $1, %ymm0, %xmm1
1025 ; X86-AVX1-NEXT: vpmaxub %xmm1, %xmm0, %xmm0
1026 ; X86-AVX1-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
1027 ; X86-AVX1-NEXT: vpxor %xmm1, %xmm0, %xmm0
1028 ; X86-AVX1-NEXT: vpsrlw $8, %xmm0, %xmm1
1029 ; X86-AVX1-NEXT: vpminub %xmm1, %xmm0, %xmm0
1030 ; X86-AVX1-NEXT: vphminposuw %xmm0, %xmm0
1031 ; X86-AVX1-NEXT: vpextrb $0, %xmm0, %eax
1032 ; X86-AVX1-NEXT: notb %al
1033 ; X86-AVX1-NEXT: ## kill: def $al killed $al killed $eax
1034 ; X86-AVX1-NEXT: vzeroupper
1035 ; X86-AVX1-NEXT: retl
1037 ; X86-AVX2-LABEL: test_reduce_v32i8:
1038 ; X86-AVX2: ## %bb.0:
1039 ; X86-AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
1040 ; X86-AVX2-NEXT: vpmaxub %xmm1, %xmm0, %xmm0
1041 ; X86-AVX2-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
1042 ; X86-AVX2-NEXT: vpxor %xmm1, %xmm0, %xmm0
1043 ; X86-AVX2-NEXT: vpsrlw $8, %xmm0, %xmm1
1044 ; X86-AVX2-NEXT: vpminub %xmm1, %xmm0, %xmm0
1045 ; X86-AVX2-NEXT: vphminposuw %xmm0, %xmm0
1046 ; X86-AVX2-NEXT: vpextrb $0, %xmm0, %eax
1047 ; X86-AVX2-NEXT: notb %al
1048 ; X86-AVX2-NEXT: ## kill: def $al killed $al killed $eax
1049 ; X86-AVX2-NEXT: vzeroupper
1050 ; X86-AVX2-NEXT: retl
1052 ; X64-SSE2-LABEL: test_reduce_v32i8:
1053 ; X64-SSE2: ## %bb.0:
1054 ; X64-SSE2-NEXT: pmaxub %xmm1, %xmm0
1055 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
1056 ; X64-SSE2-NEXT: pmaxub %xmm0, %xmm1
1057 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,2,3]
1058 ; X64-SSE2-NEXT: pmaxub %xmm1, %xmm0
1059 ; X64-SSE2-NEXT: movdqa %xmm0, %xmm1
1060 ; X64-SSE2-NEXT: psrld $16, %xmm1
1061 ; X64-SSE2-NEXT: pmaxub %xmm0, %xmm1
1062 ; X64-SSE2-NEXT: movdqa %xmm1, %xmm0
1063 ; X64-SSE2-NEXT: psrlw $8, %xmm0
1064 ; X64-SSE2-NEXT: pmaxub %xmm1, %xmm0
1065 ; X64-SSE2-NEXT: movd %xmm0, %eax
1066 ; X64-SSE2-NEXT: ## kill: def $al killed $al killed $eax
1067 ; X64-SSE2-NEXT: retq
1069 ; X64-SSE42-LABEL: test_reduce_v32i8:
1070 ; X64-SSE42: ## %bb.0:
1071 ; X64-SSE42-NEXT: pmaxub %xmm1, %xmm0
1072 ; X64-SSE42-NEXT: pcmpeqd %xmm1, %xmm1
1073 ; X64-SSE42-NEXT: pxor %xmm0, %xmm1
1074 ; X64-SSE42-NEXT: movdqa %xmm1, %xmm0
1075 ; X64-SSE42-NEXT: psrlw $8, %xmm0
1076 ; X64-SSE42-NEXT: pminub %xmm1, %xmm0
1077 ; X64-SSE42-NEXT: phminposuw %xmm0, %xmm0
1078 ; X64-SSE42-NEXT: pextrb $0, %xmm0, %eax
1079 ; X64-SSE42-NEXT: notb %al
1080 ; X64-SSE42-NEXT: ## kill: def $al killed $al killed $eax
1081 ; X64-SSE42-NEXT: retq
1083 ; X64-AVX1-LABEL: test_reduce_v32i8:
1084 ; X64-AVX1: ## %bb.0:
1085 ; X64-AVX1-NEXT: vextractf128 $1, %ymm0, %xmm1
1086 ; X64-AVX1-NEXT: vpmaxub %xmm1, %xmm0, %xmm0
1087 ; X64-AVX1-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
1088 ; X64-AVX1-NEXT: vpxor %xmm1, %xmm0, %xmm0
1089 ; X64-AVX1-NEXT: vpsrlw $8, %xmm0, %xmm1
1090 ; X64-AVX1-NEXT: vpminub %xmm1, %xmm0, %xmm0
1091 ; X64-AVX1-NEXT: vphminposuw %xmm0, %xmm0
1092 ; X64-AVX1-NEXT: vpextrb $0, %xmm0, %eax
1093 ; X64-AVX1-NEXT: notb %al
1094 ; X64-AVX1-NEXT: ## kill: def $al killed $al killed $eax
1095 ; X64-AVX1-NEXT: vzeroupper
1096 ; X64-AVX1-NEXT: retq
1098 ; X64-AVX2-LABEL: test_reduce_v32i8:
1099 ; X64-AVX2: ## %bb.0:
1100 ; X64-AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
1101 ; X64-AVX2-NEXT: vpmaxub %xmm1, %xmm0, %xmm0
1102 ; X64-AVX2-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
1103 ; X64-AVX2-NEXT: vpxor %xmm1, %xmm0, %xmm0
1104 ; X64-AVX2-NEXT: vpsrlw $8, %xmm0, %xmm1
1105 ; X64-AVX2-NEXT: vpminub %xmm1, %xmm0, %xmm0
1106 ; X64-AVX2-NEXT: vphminposuw %xmm0, %xmm0
1107 ; X64-AVX2-NEXT: vpextrb $0, %xmm0, %eax
1108 ; X64-AVX2-NEXT: notb %al
1109 ; X64-AVX2-NEXT: ## kill: def $al killed $al killed $eax
1110 ; X64-AVX2-NEXT: vzeroupper
1111 ; X64-AVX2-NEXT: retq
1113 ; X64-AVX512-LABEL: test_reduce_v32i8:
1114 ; X64-AVX512: ## %bb.0:
1115 ; X64-AVX512-NEXT: vextracti128 $1, %ymm0, %xmm1
1116 ; X64-AVX512-NEXT: vpmaxub %xmm1, %xmm0, %xmm0
1117 ; X64-AVX512-NEXT: vpternlogq $15, %xmm0, %xmm0, %xmm0
1118 ; X64-AVX512-NEXT: vpsrlw $8, %xmm0, %xmm1
1119 ; X64-AVX512-NEXT: vpminub %xmm1, %xmm0, %xmm0
1120 ; X64-AVX512-NEXT: vphminposuw %xmm0, %xmm0
1121 ; X64-AVX512-NEXT: vpextrb $0, %xmm0, %eax
1122 ; X64-AVX512-NEXT: notb %al
1123 ; X64-AVX512-NEXT: ## kill: def $al killed $al killed $eax
1124 ; X64-AVX512-NEXT: vzeroupper
1125 ; X64-AVX512-NEXT: retq
1126 %1 = shufflevector <32 x i8> %a0, <32 x i8> undef, <32 x i32> <i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
1127 %2 = icmp ugt <32 x i8> %a0, %1
1128 %3 = select <32 x i1> %2, <32 x i8> %a0, <32 x i8> %1
1129 %4 = shufflevector <32 x i8> %3, <32 x i8> undef, <32 x i32> <i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
1130 %5 = icmp ugt <32 x i8> %3, %4
1131 %6 = select <32 x i1> %5, <32 x i8> %3, <32 x i8> %4
1132 %7 = shufflevector <32 x i8> %6, <32 x i8> undef, <32 x i32> <i32 4, i32 5, i32 6, i32 7, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
1133 %8 = icmp ugt <32 x i8> %6, %7
1134 %9 = select <32 x i1> %8, <32 x i8> %6, <32 x i8> %7
1135 %10 = shufflevector <32 x i8> %9, <32 x i8> undef, <32 x i32> <i32 2, i32 3, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
1136 %11 = icmp ugt <32 x i8> %9, %10
1137 %12 = select <32 x i1> %11, <32 x i8> %9, <32 x i8> %10
1138 %13 = shufflevector <32 x i8> %12, <32 x i8> undef, <32 x i32> <i32 1, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
1139 %14 = icmp ugt <32 x i8> %12, %13
1140 %15 = select <32 x i1> %14, <32 x i8> %12, <32 x i8> %13
1141 %16 = extractelement <32 x i8> %15, i32 0
1149 define i64 @test_reduce_v8i64(<8 x i64> %a0) {
1150 ; X86-SSE2-LABEL: test_reduce_v8i64:
1151 ; X86-SSE2: ## %bb.0:
1152 ; X86-SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648,2147483648,2147483648]
1153 ; X86-SSE2-NEXT: movdqa %xmm2, %xmm5
1154 ; X86-SSE2-NEXT: pxor %xmm4, %xmm5
1155 ; X86-SSE2-NEXT: movdqa %xmm0, %xmm6
1156 ; X86-SSE2-NEXT: pxor %xmm4, %xmm6
1157 ; X86-SSE2-NEXT: movdqa %xmm6, %xmm7
1158 ; X86-SSE2-NEXT: pcmpgtd %xmm5, %xmm7
1159 ; X86-SSE2-NEXT: pcmpeqd %xmm5, %xmm6
1160 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm5 = xmm7[0,0,2,2]
1161 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm6 = xmm6[1,1,3,3]
1162 ; X86-SSE2-NEXT: pand %xmm5, %xmm6
1163 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm5 = xmm7[1,1,3,3]
1164 ; X86-SSE2-NEXT: por %xmm6, %xmm5
1165 ; X86-SSE2-NEXT: pand %xmm5, %xmm0
1166 ; X86-SSE2-NEXT: pandn %xmm2, %xmm5
1167 ; X86-SSE2-NEXT: por %xmm0, %xmm5
1168 ; X86-SSE2-NEXT: movdqa %xmm3, %xmm0
1169 ; X86-SSE2-NEXT: pxor %xmm4, %xmm0
1170 ; X86-SSE2-NEXT: movdqa %xmm1, %xmm2
1171 ; X86-SSE2-NEXT: pxor %xmm4, %xmm2
1172 ; X86-SSE2-NEXT: movdqa %xmm2, %xmm6
1173 ; X86-SSE2-NEXT: pcmpgtd %xmm0, %xmm6
1174 ; X86-SSE2-NEXT: pcmpeqd %xmm0, %xmm2
1175 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm6[0,0,2,2]
1176 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
1177 ; X86-SSE2-NEXT: pand %xmm0, %xmm2
1178 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm6[1,1,3,3]
1179 ; X86-SSE2-NEXT: por %xmm2, %xmm0
1180 ; X86-SSE2-NEXT: pand %xmm0, %xmm1
1181 ; X86-SSE2-NEXT: pandn %xmm3, %xmm0
1182 ; X86-SSE2-NEXT: por %xmm1, %xmm0
1183 ; X86-SSE2-NEXT: movdqa %xmm0, %xmm1
1184 ; X86-SSE2-NEXT: pxor %xmm4, %xmm1
1185 ; X86-SSE2-NEXT: movdqa %xmm5, %xmm2
1186 ; X86-SSE2-NEXT: pxor %xmm4, %xmm2
1187 ; X86-SSE2-NEXT: movdqa %xmm2, %xmm3
1188 ; X86-SSE2-NEXT: pcmpgtd %xmm1, %xmm3
1189 ; X86-SSE2-NEXT: pcmpeqd %xmm1, %xmm2
1190 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm3[0,0,2,2]
1191 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
1192 ; X86-SSE2-NEXT: pand %xmm1, %xmm2
1193 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm3[1,1,3,3]
1194 ; X86-SSE2-NEXT: por %xmm2, %xmm1
1195 ; X86-SSE2-NEXT: pand %xmm1, %xmm5
1196 ; X86-SSE2-NEXT: pandn %xmm0, %xmm1
1197 ; X86-SSE2-NEXT: por %xmm5, %xmm1
1198 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm1[2,3,0,1]
1199 ; X86-SSE2-NEXT: movdqa %xmm1, %xmm2
1200 ; X86-SSE2-NEXT: pxor %xmm4, %xmm2
1201 ; X86-SSE2-NEXT: pxor %xmm0, %xmm4
1202 ; X86-SSE2-NEXT: movdqa %xmm2, %xmm3
1203 ; X86-SSE2-NEXT: pcmpgtd %xmm4, %xmm3
1204 ; X86-SSE2-NEXT: pcmpeqd %xmm2, %xmm4
1205 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm3[0,0,2,2]
1206 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm4 = xmm4[1,1,3,3]
1207 ; X86-SSE2-NEXT: pand %xmm2, %xmm4
1208 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm3[1,1,3,3]
1209 ; X86-SSE2-NEXT: por %xmm4, %xmm2
1210 ; X86-SSE2-NEXT: pand %xmm2, %xmm1
1211 ; X86-SSE2-NEXT: pandn %xmm0, %xmm2
1212 ; X86-SSE2-NEXT: por %xmm1, %xmm2
1213 ; X86-SSE2-NEXT: movd %xmm2, %eax
1214 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,2,3]
1215 ; X86-SSE2-NEXT: movd %xmm0, %edx
1216 ; X86-SSE2-NEXT: retl
1218 ; X86-SSE42-LABEL: test_reduce_v8i64:
1219 ; X86-SSE42: ## %bb.0:
1220 ; X86-SSE42-NEXT: movdqa %xmm0, %xmm4
1221 ; X86-SSE42-NEXT: movdqa {{.*#+}} xmm5 = [0,2147483648,0,2147483648]
1222 ; X86-SSE42-NEXT: movdqa %xmm2, %xmm6
1223 ; X86-SSE42-NEXT: pxor %xmm5, %xmm6
1224 ; X86-SSE42-NEXT: pxor %xmm5, %xmm0
1225 ; X86-SSE42-NEXT: pcmpgtq %xmm6, %xmm0
1226 ; X86-SSE42-NEXT: blendvpd %xmm0, %xmm4, %xmm2
1227 ; X86-SSE42-NEXT: movdqa %xmm3, %xmm4
1228 ; X86-SSE42-NEXT: pxor %xmm5, %xmm4
1229 ; X86-SSE42-NEXT: movdqa %xmm1, %xmm0
1230 ; X86-SSE42-NEXT: pxor %xmm5, %xmm0
1231 ; X86-SSE42-NEXT: pcmpgtq %xmm4, %xmm0
1232 ; X86-SSE42-NEXT: blendvpd %xmm0, %xmm1, %xmm3
1233 ; X86-SSE42-NEXT: movapd %xmm3, %xmm1
1234 ; X86-SSE42-NEXT: xorpd %xmm5, %xmm1
1235 ; X86-SSE42-NEXT: movapd %xmm2, %xmm0
1236 ; X86-SSE42-NEXT: xorpd %xmm5, %xmm0
1237 ; X86-SSE42-NEXT: pcmpgtq %xmm1, %xmm0
1238 ; X86-SSE42-NEXT: blendvpd %xmm0, %xmm2, %xmm3
1239 ; X86-SSE42-NEXT: pshufd {{.*#+}} xmm1 = xmm3[2,3,0,1]
1240 ; X86-SSE42-NEXT: movdqa %xmm3, %xmm0
1241 ; X86-SSE42-NEXT: pxor %xmm5, %xmm0
1242 ; X86-SSE42-NEXT: pxor %xmm1, %xmm5
1243 ; X86-SSE42-NEXT: pcmpgtq %xmm5, %xmm0
1244 ; X86-SSE42-NEXT: blendvpd %xmm0, %xmm3, %xmm1
1245 ; X86-SSE42-NEXT: movd %xmm1, %eax
1246 ; X86-SSE42-NEXT: pextrd $1, %xmm1, %edx
1247 ; X86-SSE42-NEXT: retl
1249 ; X86-AVX1-LABEL: test_reduce_v8i64:
1250 ; X86-AVX1: ## %bb.0:
1251 ; X86-AVX1-NEXT: vmovddup {{.*#+}} xmm2 = [-0.0E+0,-0.0E+0]
1252 ; X86-AVX1-NEXT: ## xmm2 = mem[0,0]
1253 ; X86-AVX1-NEXT: vxorps %xmm2, %xmm1, %xmm3
1254 ; X86-AVX1-NEXT: vxorps %xmm2, %xmm0, %xmm4
1255 ; X86-AVX1-NEXT: vpcmpgtq %xmm3, %xmm4, %xmm3
1256 ; X86-AVX1-NEXT: vextractf128 $1, %ymm1, %xmm4
1257 ; X86-AVX1-NEXT: vxorps %xmm2, %xmm4, %xmm5
1258 ; X86-AVX1-NEXT: vextractf128 $1, %ymm0, %xmm6
1259 ; X86-AVX1-NEXT: vxorps %xmm2, %xmm6, %xmm7
1260 ; X86-AVX1-NEXT: vpcmpgtq %xmm5, %xmm7, %xmm5
1261 ; X86-AVX1-NEXT: vblendvpd %xmm5, %xmm6, %xmm4, %xmm4
1262 ; X86-AVX1-NEXT: vxorpd %xmm2, %xmm4, %xmm5
1263 ; X86-AVX1-NEXT: vblendvpd %xmm3, %xmm0, %xmm1, %xmm0
1264 ; X86-AVX1-NEXT: vxorpd %xmm2, %xmm0, %xmm1
1265 ; X86-AVX1-NEXT: vpcmpgtq %xmm5, %xmm1, %xmm1
1266 ; X86-AVX1-NEXT: vblendvpd %xmm1, %xmm0, %xmm4, %xmm0
1267 ; X86-AVX1-NEXT: vpermilps {{.*#+}} xmm1 = xmm0[2,3,0,1]
1268 ; X86-AVX1-NEXT: vxorpd %xmm2, %xmm0, %xmm3
1269 ; X86-AVX1-NEXT: vxorpd %xmm2, %xmm1, %xmm2
1270 ; X86-AVX1-NEXT: vpcmpgtq %xmm2, %xmm3, %xmm2
1271 ; X86-AVX1-NEXT: vblendvpd %xmm2, %xmm0, %xmm1, %xmm0
1272 ; X86-AVX1-NEXT: vmovd %xmm0, %eax
1273 ; X86-AVX1-NEXT: vpextrd $1, %xmm0, %edx
1274 ; X86-AVX1-NEXT: vzeroupper
1275 ; X86-AVX1-NEXT: retl
1277 ; X86-AVX2-LABEL: test_reduce_v8i64:
1278 ; X86-AVX2: ## %bb.0:
1279 ; X86-AVX2-NEXT: vpbroadcastq {{.*#+}} ymm2 = [-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0]
1280 ; X86-AVX2-NEXT: vpxor %ymm2, %ymm1, %ymm3
1281 ; X86-AVX2-NEXT: vpxor %ymm2, %ymm0, %ymm4
1282 ; X86-AVX2-NEXT: vpcmpgtq %ymm3, %ymm4, %ymm3
1283 ; X86-AVX2-NEXT: vblendvpd %ymm3, %ymm0, %ymm1, %ymm0
1284 ; X86-AVX2-NEXT: vextractf128 $1, %ymm0, %xmm1
1285 ; X86-AVX2-NEXT: vxorpd %xmm2, %xmm1, %xmm3
1286 ; X86-AVX2-NEXT: vxorpd %xmm2, %xmm0, %xmm4
1287 ; X86-AVX2-NEXT: vpcmpgtq %xmm3, %xmm4, %xmm3
1288 ; X86-AVX2-NEXT: vblendvpd %xmm3, %xmm0, %xmm1, %xmm0
1289 ; X86-AVX2-NEXT: vpermilps {{.*#+}} xmm1 = xmm0[2,3,0,1]
1290 ; X86-AVX2-NEXT: vxorpd %xmm2, %xmm0, %xmm3
1291 ; X86-AVX2-NEXT: vxorpd %xmm2, %xmm1, %xmm2
1292 ; X86-AVX2-NEXT: vpcmpgtq %xmm2, %xmm3, %xmm2
1293 ; X86-AVX2-NEXT: vblendvpd %xmm2, %xmm0, %xmm1, %xmm0
1294 ; X86-AVX2-NEXT: vmovd %xmm0, %eax
1295 ; X86-AVX2-NEXT: vpextrd $1, %xmm0, %edx
1296 ; X86-AVX2-NEXT: vzeroupper
1297 ; X86-AVX2-NEXT: retl
1299 ; X64-SSE2-LABEL: test_reduce_v8i64:
1300 ; X64-SSE2: ## %bb.0:
1301 ; X64-SSE2-NEXT: movdqa {{.*#+}} xmm4 = [9223372039002259456,9223372039002259456]
1302 ; X64-SSE2-NEXT: movdqa %xmm2, %xmm5
1303 ; X64-SSE2-NEXT: pxor %xmm4, %xmm5
1304 ; X64-SSE2-NEXT: movdqa %xmm0, %xmm6
1305 ; X64-SSE2-NEXT: pxor %xmm4, %xmm6
1306 ; X64-SSE2-NEXT: movdqa %xmm6, %xmm7
1307 ; X64-SSE2-NEXT: pcmpgtd %xmm5, %xmm7
1308 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm8 = xmm7[0,0,2,2]
1309 ; X64-SSE2-NEXT: pcmpeqd %xmm5, %xmm6
1310 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm6 = xmm6[1,1,3,3]
1311 ; X64-SSE2-NEXT: pand %xmm8, %xmm6
1312 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm5 = xmm7[1,1,3,3]
1313 ; X64-SSE2-NEXT: por %xmm6, %xmm5
1314 ; X64-SSE2-NEXT: pand %xmm5, %xmm0
1315 ; X64-SSE2-NEXT: pandn %xmm2, %xmm5
1316 ; X64-SSE2-NEXT: por %xmm0, %xmm5
1317 ; X64-SSE2-NEXT: movdqa %xmm3, %xmm0
1318 ; X64-SSE2-NEXT: pxor %xmm4, %xmm0
1319 ; X64-SSE2-NEXT: movdqa %xmm1, %xmm2
1320 ; X64-SSE2-NEXT: pxor %xmm4, %xmm2
1321 ; X64-SSE2-NEXT: movdqa %xmm2, %xmm6
1322 ; X64-SSE2-NEXT: pcmpgtd %xmm0, %xmm6
1323 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm7 = xmm6[0,0,2,2]
1324 ; X64-SSE2-NEXT: pcmpeqd %xmm0, %xmm2
1325 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
1326 ; X64-SSE2-NEXT: pand %xmm7, %xmm0
1327 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm6[1,1,3,3]
1328 ; X64-SSE2-NEXT: por %xmm0, %xmm2
1329 ; X64-SSE2-NEXT: pand %xmm2, %xmm1
1330 ; X64-SSE2-NEXT: pandn %xmm3, %xmm2
1331 ; X64-SSE2-NEXT: por %xmm1, %xmm2
1332 ; X64-SSE2-NEXT: movdqa %xmm2, %xmm0
1333 ; X64-SSE2-NEXT: pxor %xmm4, %xmm0
1334 ; X64-SSE2-NEXT: movdqa %xmm5, %xmm1
1335 ; X64-SSE2-NEXT: pxor %xmm4, %xmm1
1336 ; X64-SSE2-NEXT: movdqa %xmm1, %xmm3
1337 ; X64-SSE2-NEXT: pcmpgtd %xmm0, %xmm3
1338 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm6 = xmm3[0,0,2,2]
1339 ; X64-SSE2-NEXT: pcmpeqd %xmm0, %xmm1
1340 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,3,3]
1341 ; X64-SSE2-NEXT: pand %xmm6, %xmm0
1342 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm3[1,1,3,3]
1343 ; X64-SSE2-NEXT: por %xmm0, %xmm1
1344 ; X64-SSE2-NEXT: pand %xmm1, %xmm5
1345 ; X64-SSE2-NEXT: pandn %xmm2, %xmm1
1346 ; X64-SSE2-NEXT: por %xmm5, %xmm1
1347 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm1[2,3,0,1]
1348 ; X64-SSE2-NEXT: movdqa %xmm1, %xmm2
1349 ; X64-SSE2-NEXT: pxor %xmm4, %xmm2
1350 ; X64-SSE2-NEXT: pxor %xmm0, %xmm4
1351 ; X64-SSE2-NEXT: movdqa %xmm2, %xmm3
1352 ; X64-SSE2-NEXT: pcmpgtd %xmm4, %xmm3
1353 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm5 = xmm3[0,0,2,2]
1354 ; X64-SSE2-NEXT: pcmpeqd %xmm2, %xmm4
1355 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm4[1,1,3,3]
1356 ; X64-SSE2-NEXT: pand %xmm5, %xmm2
1357 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm3[1,1,3,3]
1358 ; X64-SSE2-NEXT: por %xmm2, %xmm3
1359 ; X64-SSE2-NEXT: pand %xmm3, %xmm1
1360 ; X64-SSE2-NEXT: pandn %xmm0, %xmm3
1361 ; X64-SSE2-NEXT: por %xmm1, %xmm3
1362 ; X64-SSE2-NEXT: movq %xmm3, %rax
1363 ; X64-SSE2-NEXT: retq
1365 ; X64-SSE42-LABEL: test_reduce_v8i64:
1366 ; X64-SSE42: ## %bb.0:
1367 ; X64-SSE42-NEXT: movdqa %xmm0, %xmm4
1368 ; X64-SSE42-NEXT: movdqa {{.*#+}} xmm5 = [9223372036854775808,9223372036854775808]
1369 ; X64-SSE42-NEXT: movdqa %xmm2, %xmm6
1370 ; X64-SSE42-NEXT: pxor %xmm5, %xmm6
1371 ; X64-SSE42-NEXT: pxor %xmm5, %xmm0
1372 ; X64-SSE42-NEXT: pcmpgtq %xmm6, %xmm0
1373 ; X64-SSE42-NEXT: blendvpd %xmm0, %xmm4, %xmm2
1374 ; X64-SSE42-NEXT: movdqa %xmm3, %xmm4
1375 ; X64-SSE42-NEXT: pxor %xmm5, %xmm4
1376 ; X64-SSE42-NEXT: movdqa %xmm1, %xmm0
1377 ; X64-SSE42-NEXT: pxor %xmm5, %xmm0
1378 ; X64-SSE42-NEXT: pcmpgtq %xmm4, %xmm0
1379 ; X64-SSE42-NEXT: blendvpd %xmm0, %xmm1, %xmm3
1380 ; X64-SSE42-NEXT: movapd %xmm3, %xmm1
1381 ; X64-SSE42-NEXT: xorpd %xmm5, %xmm1
1382 ; X64-SSE42-NEXT: movapd %xmm2, %xmm0
1383 ; X64-SSE42-NEXT: xorpd %xmm5, %xmm0
1384 ; X64-SSE42-NEXT: pcmpgtq %xmm1, %xmm0
1385 ; X64-SSE42-NEXT: blendvpd %xmm0, %xmm2, %xmm3
1386 ; X64-SSE42-NEXT: pshufd {{.*#+}} xmm1 = xmm3[2,3,0,1]
1387 ; X64-SSE42-NEXT: movdqa %xmm3, %xmm0
1388 ; X64-SSE42-NEXT: pxor %xmm5, %xmm0
1389 ; X64-SSE42-NEXT: pxor %xmm1, %xmm5
1390 ; X64-SSE42-NEXT: pcmpgtq %xmm5, %xmm0
1391 ; X64-SSE42-NEXT: blendvpd %xmm0, %xmm3, %xmm1
1392 ; X64-SSE42-NEXT: movq %xmm1, %rax
1393 ; X64-SSE42-NEXT: retq
1395 ; X64-AVX1-LABEL: test_reduce_v8i64:
1396 ; X64-AVX1: ## %bb.0:
1397 ; X64-AVX1-NEXT: vmovdqa {{.*#+}} xmm2 = [9223372036854775808,9223372036854775808]
1398 ; X64-AVX1-NEXT: vpxor %xmm2, %xmm1, %xmm3
1399 ; X64-AVX1-NEXT: vpxor %xmm2, %xmm0, %xmm4
1400 ; X64-AVX1-NEXT: vpcmpgtq %xmm3, %xmm4, %xmm3
1401 ; X64-AVX1-NEXT: vextractf128 $1, %ymm1, %xmm4
1402 ; X64-AVX1-NEXT: vpxor %xmm2, %xmm4, %xmm5
1403 ; X64-AVX1-NEXT: vextractf128 $1, %ymm0, %xmm6
1404 ; X64-AVX1-NEXT: vpxor %xmm2, %xmm6, %xmm7
1405 ; X64-AVX1-NEXT: vpcmpgtq %xmm5, %xmm7, %xmm5
1406 ; X64-AVX1-NEXT: vblendvpd %xmm5, %xmm6, %xmm4, %xmm4
1407 ; X64-AVX1-NEXT: vxorpd %xmm2, %xmm4, %xmm5
1408 ; X64-AVX1-NEXT: vblendvpd %xmm3, %xmm0, %xmm1, %xmm0
1409 ; X64-AVX1-NEXT: vxorpd %xmm2, %xmm0, %xmm1
1410 ; X64-AVX1-NEXT: vpcmpgtq %xmm5, %xmm1, %xmm1
1411 ; X64-AVX1-NEXT: vblendvpd %xmm1, %xmm0, %xmm4, %xmm0
1412 ; X64-AVX1-NEXT: vpermilps {{.*#+}} xmm1 = xmm0[2,3,0,1]
1413 ; X64-AVX1-NEXT: vxorpd %xmm2, %xmm0, %xmm3
1414 ; X64-AVX1-NEXT: vxorpd %xmm2, %xmm1, %xmm2
1415 ; X64-AVX1-NEXT: vpcmpgtq %xmm2, %xmm3, %xmm2
1416 ; X64-AVX1-NEXT: vblendvpd %xmm2, %xmm0, %xmm1, %xmm0
1417 ; X64-AVX1-NEXT: vmovq %xmm0, %rax
1418 ; X64-AVX1-NEXT: vzeroupper
1419 ; X64-AVX1-NEXT: retq
1421 ; X64-AVX2-LABEL: test_reduce_v8i64:
1422 ; X64-AVX2: ## %bb.0:
1423 ; X64-AVX2-NEXT: vpbroadcastq {{.*#+}} ymm2 = [9223372036854775808,9223372036854775808,9223372036854775808,9223372036854775808]
1424 ; X64-AVX2-NEXT: vpxor %ymm2, %ymm1, %ymm3
1425 ; X64-AVX2-NEXT: vpxor %ymm2, %ymm0, %ymm4
1426 ; X64-AVX2-NEXT: vpcmpgtq %ymm3, %ymm4, %ymm3
1427 ; X64-AVX2-NEXT: vblendvpd %ymm3, %ymm0, %ymm1, %ymm0
1428 ; X64-AVX2-NEXT: vextractf128 $1, %ymm0, %xmm1
1429 ; X64-AVX2-NEXT: vxorpd %xmm2, %xmm1, %xmm3
1430 ; X64-AVX2-NEXT: vxorpd %xmm2, %xmm0, %xmm4
1431 ; X64-AVX2-NEXT: vpcmpgtq %xmm3, %xmm4, %xmm3
1432 ; X64-AVX2-NEXT: vblendvpd %xmm3, %xmm0, %xmm1, %xmm0
1433 ; X64-AVX2-NEXT: vpermilps {{.*#+}} xmm1 = xmm0[2,3,0,1]
1434 ; X64-AVX2-NEXT: vxorpd %xmm2, %xmm0, %xmm3
1435 ; X64-AVX2-NEXT: vxorpd %xmm2, %xmm1, %xmm2
1436 ; X64-AVX2-NEXT: vpcmpgtq %xmm2, %xmm3, %xmm2
1437 ; X64-AVX2-NEXT: vblendvpd %xmm2, %xmm0, %xmm1, %xmm0
1438 ; X64-AVX2-NEXT: vmovq %xmm0, %rax
1439 ; X64-AVX2-NEXT: vzeroupper
1440 ; X64-AVX2-NEXT: retq
1442 ; X64-AVX512-LABEL: test_reduce_v8i64:
1443 ; X64-AVX512: ## %bb.0:
1444 ; X64-AVX512-NEXT: vextracti64x4 $1, %zmm0, %ymm1
1445 ; X64-AVX512-NEXT: vpmaxuq %zmm1, %zmm0, %zmm0
1446 ; X64-AVX512-NEXT: vextracti128 $1, %ymm0, %xmm1
1447 ; X64-AVX512-NEXT: vpmaxuq %xmm1, %xmm0, %xmm0
1448 ; X64-AVX512-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
1449 ; X64-AVX512-NEXT: vpmaxuq %xmm1, %xmm0, %xmm0
1450 ; X64-AVX512-NEXT: vmovq %xmm0, %rax
1451 ; X64-AVX512-NEXT: vzeroupper
1452 ; X64-AVX512-NEXT: retq
1453 %1 = shufflevector <8 x i64> %a0, <8 x i64> undef, <8 x i32> <i32 4, i32 5, i32 6, i32 7, i32 undef, i32 undef, i32 undef, i32 undef>
1454 %2 = icmp ugt <8 x i64> %a0, %1
1455 %3 = select <8 x i1> %2, <8 x i64> %a0, <8 x i64> %1
1456 %4 = shufflevector <8 x i64> %3, <8 x i64> undef, <8 x i32> <i32 2, i32 3, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
1457 %5 = icmp ugt <8 x i64> %3, %4
1458 %6 = select <8 x i1> %5, <8 x i64> %3, <8 x i64> %4
1459 %7 = shufflevector <8 x i64> %6, <8 x i64> undef, <8 x i32> <i32 1, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
1460 %8 = icmp ugt <8 x i64> %6, %7
1461 %9 = select <8 x i1> %8, <8 x i64> %6, <8 x i64> %7
1462 %10 = extractelement <8 x i64> %9, i32 0
1466 define i32 @test_reduce_v16i32(<16 x i32> %a0) {
1467 ; X86-SSE2-LABEL: test_reduce_v16i32:
1468 ; X86-SSE2: ## %bb.0:
1469 ; X86-SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648,2147483648,2147483648]
1470 ; X86-SSE2-NEXT: movdqa %xmm2, %xmm6
1471 ; X86-SSE2-NEXT: pxor %xmm4, %xmm6
1472 ; X86-SSE2-NEXT: movdqa %xmm0, %xmm5
1473 ; X86-SSE2-NEXT: pxor %xmm4, %xmm5
1474 ; X86-SSE2-NEXT: pcmpgtd %xmm6, %xmm5
1475 ; X86-SSE2-NEXT: pand %xmm5, %xmm0
1476 ; X86-SSE2-NEXT: pandn %xmm2, %xmm5
1477 ; X86-SSE2-NEXT: por %xmm0, %xmm5
1478 ; X86-SSE2-NEXT: movdqa %xmm3, %xmm0
1479 ; X86-SSE2-NEXT: pxor %xmm4, %xmm0
1480 ; X86-SSE2-NEXT: movdqa %xmm1, %xmm2
1481 ; X86-SSE2-NEXT: pxor %xmm4, %xmm2
1482 ; X86-SSE2-NEXT: pcmpgtd %xmm0, %xmm2
1483 ; X86-SSE2-NEXT: pand %xmm2, %xmm1
1484 ; X86-SSE2-NEXT: pandn %xmm3, %xmm2
1485 ; X86-SSE2-NEXT: por %xmm1, %xmm2
1486 ; X86-SSE2-NEXT: movdqa %xmm2, %xmm0
1487 ; X86-SSE2-NEXT: pxor %xmm4, %xmm0
1488 ; X86-SSE2-NEXT: movdqa %xmm5, %xmm1
1489 ; X86-SSE2-NEXT: pxor %xmm4, %xmm1
1490 ; X86-SSE2-NEXT: pcmpgtd %xmm0, %xmm1
1491 ; X86-SSE2-NEXT: pand %xmm1, %xmm5
1492 ; X86-SSE2-NEXT: pandn %xmm2, %xmm1
1493 ; X86-SSE2-NEXT: por %xmm5, %xmm1
1494 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm1[2,3,0,1]
1495 ; X86-SSE2-NEXT: movdqa %xmm1, %xmm2
1496 ; X86-SSE2-NEXT: pxor %xmm4, %xmm2
1497 ; X86-SSE2-NEXT: movdqa %xmm0, %xmm3
1498 ; X86-SSE2-NEXT: pxor %xmm4, %xmm3
1499 ; X86-SSE2-NEXT: pcmpgtd %xmm3, %xmm2
1500 ; X86-SSE2-NEXT: pand %xmm2, %xmm1
1501 ; X86-SSE2-NEXT: pandn %xmm0, %xmm2
1502 ; X86-SSE2-NEXT: por %xmm1, %xmm2
1503 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,2,3]
1504 ; X86-SSE2-NEXT: movdqa %xmm2, %xmm1
1505 ; X86-SSE2-NEXT: pxor %xmm4, %xmm1
1506 ; X86-SSE2-NEXT: pxor %xmm0, %xmm4
1507 ; X86-SSE2-NEXT: pcmpgtd %xmm4, %xmm1
1508 ; X86-SSE2-NEXT: pand %xmm1, %xmm2
1509 ; X86-SSE2-NEXT: pandn %xmm0, %xmm1
1510 ; X86-SSE2-NEXT: por %xmm2, %xmm1
1511 ; X86-SSE2-NEXT: movd %xmm1, %eax
1512 ; X86-SSE2-NEXT: retl
1514 ; X86-SSE42-LABEL: test_reduce_v16i32:
1515 ; X86-SSE42: ## %bb.0:
1516 ; X86-SSE42-NEXT: pmaxud %xmm3, %xmm1
1517 ; X86-SSE42-NEXT: pmaxud %xmm2, %xmm1
1518 ; X86-SSE42-NEXT: pmaxud %xmm0, %xmm1
1519 ; X86-SSE42-NEXT: pshufd {{.*#+}} xmm0 = xmm1[2,3,0,1]
1520 ; X86-SSE42-NEXT: pmaxud %xmm1, %xmm0
1521 ; X86-SSE42-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,2,3]
1522 ; X86-SSE42-NEXT: pmaxud %xmm0, %xmm1
1523 ; X86-SSE42-NEXT: movd %xmm1, %eax
1524 ; X86-SSE42-NEXT: retl
1526 ; X86-AVX1-LABEL: test_reduce_v16i32:
1527 ; X86-AVX1: ## %bb.0:
1528 ; X86-AVX1-NEXT: vextractf128 $1, %ymm1, %xmm2
1529 ; X86-AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
1530 ; X86-AVX1-NEXT: vpmaxud %xmm2, %xmm3, %xmm2
1531 ; X86-AVX1-NEXT: vpmaxud %xmm2, %xmm1, %xmm1
1532 ; X86-AVX1-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
1533 ; X86-AVX1-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
1534 ; X86-AVX1-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
1535 ; X86-AVX1-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[1,1,2,3]
1536 ; X86-AVX1-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
1537 ; X86-AVX1-NEXT: vmovd %xmm0, %eax
1538 ; X86-AVX1-NEXT: vzeroupper
1539 ; X86-AVX1-NEXT: retl
1541 ; X86-AVX2-LABEL: test_reduce_v16i32:
1542 ; X86-AVX2: ## %bb.0:
1543 ; X86-AVX2-NEXT: vpmaxud %ymm1, %ymm0, %ymm0
1544 ; X86-AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
1545 ; X86-AVX2-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
1546 ; X86-AVX2-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
1547 ; X86-AVX2-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
1548 ; X86-AVX2-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[1,1,2,3]
1549 ; X86-AVX2-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
1550 ; X86-AVX2-NEXT: vmovd %xmm0, %eax
1551 ; X86-AVX2-NEXT: vzeroupper
1552 ; X86-AVX2-NEXT: retl
1554 ; X64-SSE2-LABEL: test_reduce_v16i32:
1555 ; X64-SSE2: ## %bb.0:
1556 ; X64-SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648,2147483648,2147483648]
1557 ; X64-SSE2-NEXT: movdqa %xmm2, %xmm6
1558 ; X64-SSE2-NEXT: pxor %xmm4, %xmm6
1559 ; X64-SSE2-NEXT: movdqa %xmm0, %xmm5
1560 ; X64-SSE2-NEXT: pxor %xmm4, %xmm5
1561 ; X64-SSE2-NEXT: pcmpgtd %xmm6, %xmm5
1562 ; X64-SSE2-NEXT: pand %xmm5, %xmm0
1563 ; X64-SSE2-NEXT: pandn %xmm2, %xmm5
1564 ; X64-SSE2-NEXT: por %xmm0, %xmm5
1565 ; X64-SSE2-NEXT: movdqa %xmm3, %xmm0
1566 ; X64-SSE2-NEXT: pxor %xmm4, %xmm0
1567 ; X64-SSE2-NEXT: movdqa %xmm1, %xmm2
1568 ; X64-SSE2-NEXT: pxor %xmm4, %xmm2
1569 ; X64-SSE2-NEXT: pcmpgtd %xmm0, %xmm2
1570 ; X64-SSE2-NEXT: pand %xmm2, %xmm1
1571 ; X64-SSE2-NEXT: pandn %xmm3, %xmm2
1572 ; X64-SSE2-NEXT: por %xmm1, %xmm2
1573 ; X64-SSE2-NEXT: movdqa %xmm2, %xmm0
1574 ; X64-SSE2-NEXT: pxor %xmm4, %xmm0
1575 ; X64-SSE2-NEXT: movdqa %xmm5, %xmm1
1576 ; X64-SSE2-NEXT: pxor %xmm4, %xmm1
1577 ; X64-SSE2-NEXT: pcmpgtd %xmm0, %xmm1
1578 ; X64-SSE2-NEXT: pand %xmm1, %xmm5
1579 ; X64-SSE2-NEXT: pandn %xmm2, %xmm1
1580 ; X64-SSE2-NEXT: por %xmm5, %xmm1
1581 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm1[2,3,0,1]
1582 ; X64-SSE2-NEXT: movdqa %xmm1, %xmm2
1583 ; X64-SSE2-NEXT: pxor %xmm4, %xmm2
1584 ; X64-SSE2-NEXT: movdqa %xmm0, %xmm3
1585 ; X64-SSE2-NEXT: pxor %xmm4, %xmm3
1586 ; X64-SSE2-NEXT: pcmpgtd %xmm3, %xmm2
1587 ; X64-SSE2-NEXT: pand %xmm2, %xmm1
1588 ; X64-SSE2-NEXT: pandn %xmm0, %xmm2
1589 ; X64-SSE2-NEXT: por %xmm1, %xmm2
1590 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,2,3]
1591 ; X64-SSE2-NEXT: movdqa %xmm2, %xmm1
1592 ; X64-SSE2-NEXT: pxor %xmm4, %xmm1
1593 ; X64-SSE2-NEXT: pxor %xmm0, %xmm4
1594 ; X64-SSE2-NEXT: pcmpgtd %xmm4, %xmm1
1595 ; X64-SSE2-NEXT: pand %xmm1, %xmm2
1596 ; X64-SSE2-NEXT: pandn %xmm0, %xmm1
1597 ; X64-SSE2-NEXT: por %xmm2, %xmm1
1598 ; X64-SSE2-NEXT: movd %xmm1, %eax
1599 ; X64-SSE2-NEXT: retq
1601 ; X64-SSE42-LABEL: test_reduce_v16i32:
1602 ; X64-SSE42: ## %bb.0:
1603 ; X64-SSE42-NEXT: pmaxud %xmm3, %xmm1
1604 ; X64-SSE42-NEXT: pmaxud %xmm2, %xmm1
1605 ; X64-SSE42-NEXT: pmaxud %xmm0, %xmm1
1606 ; X64-SSE42-NEXT: pshufd {{.*#+}} xmm0 = xmm1[2,3,0,1]
1607 ; X64-SSE42-NEXT: pmaxud %xmm1, %xmm0
1608 ; X64-SSE42-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,2,3]
1609 ; X64-SSE42-NEXT: pmaxud %xmm0, %xmm1
1610 ; X64-SSE42-NEXT: movd %xmm1, %eax
1611 ; X64-SSE42-NEXT: retq
1613 ; X64-AVX1-LABEL: test_reduce_v16i32:
1614 ; X64-AVX1: ## %bb.0:
1615 ; X64-AVX1-NEXT: vextractf128 $1, %ymm1, %xmm2
1616 ; X64-AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
1617 ; X64-AVX1-NEXT: vpmaxud %xmm2, %xmm3, %xmm2
1618 ; X64-AVX1-NEXT: vpmaxud %xmm2, %xmm1, %xmm1
1619 ; X64-AVX1-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
1620 ; X64-AVX1-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
1621 ; X64-AVX1-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
1622 ; X64-AVX1-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[1,1,2,3]
1623 ; X64-AVX1-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
1624 ; X64-AVX1-NEXT: vmovd %xmm0, %eax
1625 ; X64-AVX1-NEXT: vzeroupper
1626 ; X64-AVX1-NEXT: retq
1628 ; X64-AVX2-LABEL: test_reduce_v16i32:
1629 ; X64-AVX2: ## %bb.0:
1630 ; X64-AVX2-NEXT: vpmaxud %ymm1, %ymm0, %ymm0
1631 ; X64-AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
1632 ; X64-AVX2-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
1633 ; X64-AVX2-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
1634 ; X64-AVX2-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
1635 ; X64-AVX2-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[1,1,2,3]
1636 ; X64-AVX2-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
1637 ; X64-AVX2-NEXT: vmovd %xmm0, %eax
1638 ; X64-AVX2-NEXT: vzeroupper
1639 ; X64-AVX2-NEXT: retq
1641 ; X64-AVX512-LABEL: test_reduce_v16i32:
1642 ; X64-AVX512: ## %bb.0:
1643 ; X64-AVX512-NEXT: vextracti64x4 $1, %zmm0, %ymm1
1644 ; X64-AVX512-NEXT: vpmaxud %zmm1, %zmm0, %zmm0
1645 ; X64-AVX512-NEXT: vextracti128 $1, %ymm0, %xmm1
1646 ; X64-AVX512-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
1647 ; X64-AVX512-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
1648 ; X64-AVX512-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
1649 ; X64-AVX512-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[1,1,2,3]
1650 ; X64-AVX512-NEXT: vpmaxud %xmm1, %xmm0, %xmm0
1651 ; X64-AVX512-NEXT: vmovd %xmm0, %eax
1652 ; X64-AVX512-NEXT: vzeroupper
1653 ; X64-AVX512-NEXT: retq
1654 %1 = shufflevector <16 x i32> %a0, <16 x i32> undef, <16 x i32> <i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
1655 %2 = icmp ugt <16 x i32> %a0, %1
1656 %3 = select <16 x i1> %2, <16 x i32> %a0, <16 x i32> %1
1657 %4 = shufflevector <16 x i32> %3, <16 x i32> undef, <16 x i32> <i32 4, i32 5, i32 6, i32 7, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
1658 %5 = icmp ugt <16 x i32> %3, %4
1659 %6 = select <16 x i1> %5, <16 x i32> %3, <16 x i32> %4
1660 %7 = shufflevector <16 x i32> %6, <16 x i32> undef, <16 x i32> <i32 2, i32 3, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
1661 %8 = icmp ugt <16 x i32> %6, %7
1662 %9 = select <16 x i1> %8, <16 x i32> %6, <16 x i32> %7
1663 %10 = shufflevector <16 x i32> %9, <16 x i32> undef, <16 x i32> <i32 1, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
1664 %11 = icmp ugt <16 x i32> %9, %10
1665 %12 = select <16 x i1> %11, <16 x i32> %9, <16 x i32> %10
1666 %13 = extractelement <16 x i32> %12, i32 0
1670 define i16 @test_reduce_v32i16(<32 x i16> %a0) {
1671 ; X86-SSE2-LABEL: test_reduce_v32i16:
1672 ; X86-SSE2: ## %bb.0:
1673 ; X86-SSE2-NEXT: movdqa {{.*#+}} xmm4 = [32768,32768,32768,32768,32768,32768,32768,32768]
1674 ; X86-SSE2-NEXT: pxor %xmm4, %xmm3
1675 ; X86-SSE2-NEXT: pxor %xmm4, %xmm1
1676 ; X86-SSE2-NEXT: pmaxsw %xmm3, %xmm1
1677 ; X86-SSE2-NEXT: pxor %xmm4, %xmm2
1678 ; X86-SSE2-NEXT: pmaxsw %xmm1, %xmm2
1679 ; X86-SSE2-NEXT: pxor %xmm4, %xmm0
1680 ; X86-SSE2-NEXT: pmaxsw %xmm2, %xmm0
1681 ; X86-SSE2-NEXT: movdqa %xmm0, %xmm1
1682 ; X86-SSE2-NEXT: pxor %xmm4, %xmm1
1683 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,3,0,1]
1684 ; X86-SSE2-NEXT: pxor %xmm4, %xmm1
1685 ; X86-SSE2-NEXT: pmaxsw %xmm0, %xmm1
1686 ; X86-SSE2-NEXT: movdqa %xmm1, %xmm0
1687 ; X86-SSE2-NEXT: pxor %xmm4, %xmm0
1688 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,2,3]
1689 ; X86-SSE2-NEXT: pxor %xmm4, %xmm0
1690 ; X86-SSE2-NEXT: pmaxsw %xmm1, %xmm0
1691 ; X86-SSE2-NEXT: movdqa %xmm0, %xmm1
1692 ; X86-SSE2-NEXT: pxor %xmm4, %xmm1
1693 ; X86-SSE2-NEXT: psrld $16, %xmm1
1694 ; X86-SSE2-NEXT: pxor %xmm4, %xmm1
1695 ; X86-SSE2-NEXT: pmaxsw %xmm0, %xmm1
1696 ; X86-SSE2-NEXT: movd %xmm1, %eax
1697 ; X86-SSE2-NEXT: xorl $32768, %eax ## imm = 0x8000
1698 ; X86-SSE2-NEXT: ## kill: def $ax killed $ax killed $eax
1699 ; X86-SSE2-NEXT: retl
1701 ; X86-SSE42-LABEL: test_reduce_v32i16:
1702 ; X86-SSE42: ## %bb.0:
1703 ; X86-SSE42-NEXT: pmaxuw %xmm3, %xmm1
1704 ; X86-SSE42-NEXT: pmaxuw %xmm2, %xmm1
1705 ; X86-SSE42-NEXT: pmaxuw %xmm0, %xmm1
1706 ; X86-SSE42-NEXT: pcmpeqd %xmm0, %xmm0
1707 ; X86-SSE42-NEXT: pxor %xmm1, %xmm0
1708 ; X86-SSE42-NEXT: phminposuw %xmm0, %xmm0
1709 ; X86-SSE42-NEXT: movd %xmm0, %eax
1710 ; X86-SSE42-NEXT: notl %eax
1711 ; X86-SSE42-NEXT: ## kill: def $ax killed $ax killed $eax
1712 ; X86-SSE42-NEXT: retl
1714 ; X86-AVX1-LABEL: test_reduce_v32i16:
1715 ; X86-AVX1: ## %bb.0:
1716 ; X86-AVX1-NEXT: vextractf128 $1, %ymm1, %xmm2
1717 ; X86-AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
1718 ; X86-AVX1-NEXT: vpmaxuw %xmm2, %xmm3, %xmm2
1719 ; X86-AVX1-NEXT: vpmaxuw %xmm2, %xmm1, %xmm1
1720 ; X86-AVX1-NEXT: vpmaxuw %xmm1, %xmm0, %xmm0
1721 ; X86-AVX1-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
1722 ; X86-AVX1-NEXT: vpxor %xmm1, %xmm0, %xmm0
1723 ; X86-AVX1-NEXT: vphminposuw %xmm0, %xmm0
1724 ; X86-AVX1-NEXT: vmovd %xmm0, %eax
1725 ; X86-AVX1-NEXT: notl %eax
1726 ; X86-AVX1-NEXT: ## kill: def $ax killed $ax killed $eax
1727 ; X86-AVX1-NEXT: vzeroupper
1728 ; X86-AVX1-NEXT: retl
1730 ; X86-AVX2-LABEL: test_reduce_v32i16:
1731 ; X86-AVX2: ## %bb.0:
1732 ; X86-AVX2-NEXT: vpmaxuw %ymm1, %ymm0, %ymm0
1733 ; X86-AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
1734 ; X86-AVX2-NEXT: vpmaxuw %xmm1, %xmm0, %xmm0
1735 ; X86-AVX2-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
1736 ; X86-AVX2-NEXT: vpxor %xmm1, %xmm0, %xmm0
1737 ; X86-AVX2-NEXT: vphminposuw %xmm0, %xmm0
1738 ; X86-AVX2-NEXT: vmovd %xmm0, %eax
1739 ; X86-AVX2-NEXT: notl %eax
1740 ; X86-AVX2-NEXT: ## kill: def $ax killed $ax killed $eax
1741 ; X86-AVX2-NEXT: vzeroupper
1742 ; X86-AVX2-NEXT: retl
1744 ; X64-SSE2-LABEL: test_reduce_v32i16:
1745 ; X64-SSE2: ## %bb.0:
1746 ; X64-SSE2-NEXT: movdqa {{.*#+}} xmm4 = [32768,32768,32768,32768,32768,32768,32768,32768]
1747 ; X64-SSE2-NEXT: pxor %xmm4, %xmm3
1748 ; X64-SSE2-NEXT: pxor %xmm4, %xmm1
1749 ; X64-SSE2-NEXT: pmaxsw %xmm3, %xmm1
1750 ; X64-SSE2-NEXT: pxor %xmm4, %xmm2
1751 ; X64-SSE2-NEXT: pmaxsw %xmm1, %xmm2
1752 ; X64-SSE2-NEXT: pxor %xmm4, %xmm0
1753 ; X64-SSE2-NEXT: pmaxsw %xmm2, %xmm0
1754 ; X64-SSE2-NEXT: movdqa %xmm0, %xmm1
1755 ; X64-SSE2-NEXT: pxor %xmm4, %xmm1
1756 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,3,0,1]
1757 ; X64-SSE2-NEXT: pxor %xmm4, %xmm1
1758 ; X64-SSE2-NEXT: pmaxsw %xmm0, %xmm1
1759 ; X64-SSE2-NEXT: movdqa %xmm1, %xmm0
1760 ; X64-SSE2-NEXT: pxor %xmm4, %xmm0
1761 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,2,3]
1762 ; X64-SSE2-NEXT: pxor %xmm4, %xmm0
1763 ; X64-SSE2-NEXT: pmaxsw %xmm1, %xmm0
1764 ; X64-SSE2-NEXT: movdqa %xmm0, %xmm1
1765 ; X64-SSE2-NEXT: pxor %xmm4, %xmm1
1766 ; X64-SSE2-NEXT: psrld $16, %xmm1
1767 ; X64-SSE2-NEXT: pxor %xmm4, %xmm1
1768 ; X64-SSE2-NEXT: pmaxsw %xmm0, %xmm1
1769 ; X64-SSE2-NEXT: movd %xmm1, %eax
1770 ; X64-SSE2-NEXT: xorl $32768, %eax ## imm = 0x8000
1771 ; X64-SSE2-NEXT: ## kill: def $ax killed $ax killed $eax
1772 ; X64-SSE2-NEXT: retq
1774 ; X64-SSE42-LABEL: test_reduce_v32i16:
1775 ; X64-SSE42: ## %bb.0:
1776 ; X64-SSE42-NEXT: pmaxuw %xmm3, %xmm1
1777 ; X64-SSE42-NEXT: pmaxuw %xmm2, %xmm1
1778 ; X64-SSE42-NEXT: pmaxuw %xmm0, %xmm1
1779 ; X64-SSE42-NEXT: pcmpeqd %xmm0, %xmm0
1780 ; X64-SSE42-NEXT: pxor %xmm1, %xmm0
1781 ; X64-SSE42-NEXT: phminposuw %xmm0, %xmm0
1782 ; X64-SSE42-NEXT: movd %xmm0, %eax
1783 ; X64-SSE42-NEXT: notl %eax
1784 ; X64-SSE42-NEXT: ## kill: def $ax killed $ax killed $eax
1785 ; X64-SSE42-NEXT: retq
1787 ; X64-AVX1-LABEL: test_reduce_v32i16:
1788 ; X64-AVX1: ## %bb.0:
1789 ; X64-AVX1-NEXT: vextractf128 $1, %ymm1, %xmm2
1790 ; X64-AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
1791 ; X64-AVX1-NEXT: vpmaxuw %xmm2, %xmm3, %xmm2
1792 ; X64-AVX1-NEXT: vpmaxuw %xmm2, %xmm1, %xmm1
1793 ; X64-AVX1-NEXT: vpmaxuw %xmm1, %xmm0, %xmm0
1794 ; X64-AVX1-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
1795 ; X64-AVX1-NEXT: vpxor %xmm1, %xmm0, %xmm0
1796 ; X64-AVX1-NEXT: vphminposuw %xmm0, %xmm0
1797 ; X64-AVX1-NEXT: vmovd %xmm0, %eax
1798 ; X64-AVX1-NEXT: notl %eax
1799 ; X64-AVX1-NEXT: ## kill: def $ax killed $ax killed $eax
1800 ; X64-AVX1-NEXT: vzeroupper
1801 ; X64-AVX1-NEXT: retq
1803 ; X64-AVX2-LABEL: test_reduce_v32i16:
1804 ; X64-AVX2: ## %bb.0:
1805 ; X64-AVX2-NEXT: vpmaxuw %ymm1, %ymm0, %ymm0
1806 ; X64-AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
1807 ; X64-AVX2-NEXT: vpmaxuw %xmm1, %xmm0, %xmm0
1808 ; X64-AVX2-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
1809 ; X64-AVX2-NEXT: vpxor %xmm1, %xmm0, %xmm0
1810 ; X64-AVX2-NEXT: vphminposuw %xmm0, %xmm0
1811 ; X64-AVX2-NEXT: vmovd %xmm0, %eax
1812 ; X64-AVX2-NEXT: notl %eax
1813 ; X64-AVX2-NEXT: ## kill: def $ax killed $ax killed $eax
1814 ; X64-AVX2-NEXT: vzeroupper
1815 ; X64-AVX2-NEXT: retq
1817 ; X64-AVX512-LABEL: test_reduce_v32i16:
1818 ; X64-AVX512: ## %bb.0:
1819 ; X64-AVX512-NEXT: vextracti64x4 $1, %zmm0, %ymm1
1820 ; X64-AVX512-NEXT: vpmaxuw %ymm1, %ymm0, %ymm0
1821 ; X64-AVX512-NEXT: vextracti128 $1, %ymm0, %xmm1
1822 ; X64-AVX512-NEXT: vpmaxuw %xmm1, %xmm0, %xmm0
1823 ; X64-AVX512-NEXT: vpternlogq $15, %xmm0, %xmm0, %xmm0
1824 ; X64-AVX512-NEXT: vphminposuw %xmm0, %xmm0
1825 ; X64-AVX512-NEXT: vmovd %xmm0, %eax
1826 ; X64-AVX512-NEXT: notl %eax
1827 ; X64-AVX512-NEXT: ## kill: def $ax killed $ax killed $eax
1828 ; X64-AVX512-NEXT: vzeroupper
1829 ; X64-AVX512-NEXT: retq
1830 %1 = shufflevector <32 x i16> %a0, <32 x i16> undef, <32 x i32> <i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
1831 %2 = icmp ugt <32 x i16> %a0, %1
1832 %3 = select <32 x i1> %2, <32 x i16> %a0, <32 x i16> %1
1833 %4 = shufflevector <32 x i16> %3, <32 x i16> undef, <32 x i32> <i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
1834 %5 = icmp ugt <32 x i16> %3, %4
1835 %6 = select <32 x i1> %5, <32 x i16> %3, <32 x i16> %4
1836 %7 = shufflevector <32 x i16> %6, <32 x i16> undef, <32 x i32> <i32 4, i32 5, i32 6, i32 7, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
1837 %8 = icmp ugt <32 x i16> %6, %7
1838 %9 = select <32 x i1> %8, <32 x i16> %6, <32 x i16> %7
1839 %10 = shufflevector <32 x i16> %9, <32 x i16> undef, <32 x i32> <i32 2, i32 3, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
1840 %11 = icmp ugt <32 x i16> %9, %10
1841 %12 = select <32 x i1> %11, <32 x i16> %9, <32 x i16> %10
1842 %13 = shufflevector <32 x i16> %12, <32 x i16> undef, <32 x i32> <i32 1, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
1843 %14 = icmp ugt <32 x i16> %12, %13
1844 %15 = select <32 x i1> %14, <32 x i16> %12, <32 x i16> %13
1845 %16 = extractelement <32 x i16> %15, i32 0
1849 define i8 @test_reduce_v64i8(<64 x i8> %a0) {
1850 ; X86-SSE2-LABEL: test_reduce_v64i8:
1851 ; X86-SSE2: ## %bb.0:
1852 ; X86-SSE2-NEXT: pmaxub %xmm3, %xmm1
1853 ; X86-SSE2-NEXT: pmaxub %xmm2, %xmm1
1854 ; X86-SSE2-NEXT: pmaxub %xmm0, %xmm1
1855 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm1[2,3,0,1]
1856 ; X86-SSE2-NEXT: pmaxub %xmm1, %xmm0
1857 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,2,3]
1858 ; X86-SSE2-NEXT: pmaxub %xmm0, %xmm1
1859 ; X86-SSE2-NEXT: movdqa %xmm1, %xmm0
1860 ; X86-SSE2-NEXT: psrld $16, %xmm0
1861 ; X86-SSE2-NEXT: pmaxub %xmm1, %xmm0
1862 ; X86-SSE2-NEXT: movdqa %xmm0, %xmm1
1863 ; X86-SSE2-NEXT: psrlw $8, %xmm1
1864 ; X86-SSE2-NEXT: pmaxub %xmm0, %xmm1
1865 ; X86-SSE2-NEXT: movd %xmm1, %eax
1866 ; X86-SSE2-NEXT: ## kill: def $al killed $al killed $eax
1867 ; X86-SSE2-NEXT: retl
1869 ; X86-SSE42-LABEL: test_reduce_v64i8:
1870 ; X86-SSE42: ## %bb.0:
1871 ; X86-SSE42-NEXT: pmaxub %xmm3, %xmm1
1872 ; X86-SSE42-NEXT: pmaxub %xmm2, %xmm1
1873 ; X86-SSE42-NEXT: pmaxub %xmm0, %xmm1
1874 ; X86-SSE42-NEXT: pcmpeqd %xmm0, %xmm0
1875 ; X86-SSE42-NEXT: pxor %xmm1, %xmm0
1876 ; X86-SSE42-NEXT: movdqa %xmm0, %xmm1
1877 ; X86-SSE42-NEXT: psrlw $8, %xmm1
1878 ; X86-SSE42-NEXT: pminub %xmm0, %xmm1
1879 ; X86-SSE42-NEXT: phminposuw %xmm1, %xmm0
1880 ; X86-SSE42-NEXT: pextrb $0, %xmm0, %eax
1881 ; X86-SSE42-NEXT: notb %al
1882 ; X86-SSE42-NEXT: ## kill: def $al killed $al killed $eax
1883 ; X86-SSE42-NEXT: retl
1885 ; X86-AVX1-LABEL: test_reduce_v64i8:
1886 ; X86-AVX1: ## %bb.0:
1887 ; X86-AVX1-NEXT: vextractf128 $1, %ymm1, %xmm2
1888 ; X86-AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
1889 ; X86-AVX1-NEXT: vpmaxub %xmm2, %xmm3, %xmm2
1890 ; X86-AVX1-NEXT: vpmaxub %xmm2, %xmm1, %xmm1
1891 ; X86-AVX1-NEXT: vpmaxub %xmm1, %xmm0, %xmm0
1892 ; X86-AVX1-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
1893 ; X86-AVX1-NEXT: vpxor %xmm1, %xmm0, %xmm0
1894 ; X86-AVX1-NEXT: vpsrlw $8, %xmm0, %xmm1
1895 ; X86-AVX1-NEXT: vpminub %xmm1, %xmm0, %xmm0
1896 ; X86-AVX1-NEXT: vphminposuw %xmm0, %xmm0
1897 ; X86-AVX1-NEXT: vpextrb $0, %xmm0, %eax
1898 ; X86-AVX1-NEXT: notb %al
1899 ; X86-AVX1-NEXT: ## kill: def $al killed $al killed $eax
1900 ; X86-AVX1-NEXT: vzeroupper
1901 ; X86-AVX1-NEXT: retl
1903 ; X86-AVX2-LABEL: test_reduce_v64i8:
1904 ; X86-AVX2: ## %bb.0:
1905 ; X86-AVX2-NEXT: vpmaxub %ymm1, %ymm0, %ymm0
1906 ; X86-AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
1907 ; X86-AVX2-NEXT: vpmaxub %xmm1, %xmm0, %xmm0
1908 ; X86-AVX2-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
1909 ; X86-AVX2-NEXT: vpxor %xmm1, %xmm0, %xmm0
1910 ; X86-AVX2-NEXT: vpsrlw $8, %xmm0, %xmm1
1911 ; X86-AVX2-NEXT: vpminub %xmm1, %xmm0, %xmm0
1912 ; X86-AVX2-NEXT: vphminposuw %xmm0, %xmm0
1913 ; X86-AVX2-NEXT: vpextrb $0, %xmm0, %eax
1914 ; X86-AVX2-NEXT: notb %al
1915 ; X86-AVX2-NEXT: ## kill: def $al killed $al killed $eax
1916 ; X86-AVX2-NEXT: vzeroupper
1917 ; X86-AVX2-NEXT: retl
1919 ; X64-SSE2-LABEL: test_reduce_v64i8:
1920 ; X64-SSE2: ## %bb.0:
1921 ; X64-SSE2-NEXT: pmaxub %xmm3, %xmm1
1922 ; X64-SSE2-NEXT: pmaxub %xmm2, %xmm1
1923 ; X64-SSE2-NEXT: pmaxub %xmm0, %xmm1
1924 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm1[2,3,0,1]
1925 ; X64-SSE2-NEXT: pmaxub %xmm1, %xmm0
1926 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,2,3]
1927 ; X64-SSE2-NEXT: pmaxub %xmm0, %xmm1
1928 ; X64-SSE2-NEXT: movdqa %xmm1, %xmm0
1929 ; X64-SSE2-NEXT: psrld $16, %xmm0
1930 ; X64-SSE2-NEXT: pmaxub %xmm1, %xmm0
1931 ; X64-SSE2-NEXT: movdqa %xmm0, %xmm1
1932 ; X64-SSE2-NEXT: psrlw $8, %xmm1
1933 ; X64-SSE2-NEXT: pmaxub %xmm0, %xmm1
1934 ; X64-SSE2-NEXT: movd %xmm1, %eax
1935 ; X64-SSE2-NEXT: ## kill: def $al killed $al killed $eax
1936 ; X64-SSE2-NEXT: retq
1938 ; X64-SSE42-LABEL: test_reduce_v64i8:
1939 ; X64-SSE42: ## %bb.0:
1940 ; X64-SSE42-NEXT: pmaxub %xmm3, %xmm1
1941 ; X64-SSE42-NEXT: pmaxub %xmm2, %xmm1
1942 ; X64-SSE42-NEXT: pmaxub %xmm0, %xmm1
1943 ; X64-SSE42-NEXT: pcmpeqd %xmm0, %xmm0
1944 ; X64-SSE42-NEXT: pxor %xmm1, %xmm0
1945 ; X64-SSE42-NEXT: movdqa %xmm0, %xmm1
1946 ; X64-SSE42-NEXT: psrlw $8, %xmm1
1947 ; X64-SSE42-NEXT: pminub %xmm0, %xmm1
1948 ; X64-SSE42-NEXT: phminposuw %xmm1, %xmm0
1949 ; X64-SSE42-NEXT: pextrb $0, %xmm0, %eax
1950 ; X64-SSE42-NEXT: notb %al
1951 ; X64-SSE42-NEXT: ## kill: def $al killed $al killed $eax
1952 ; X64-SSE42-NEXT: retq
1954 ; X64-AVX1-LABEL: test_reduce_v64i8:
1955 ; X64-AVX1: ## %bb.0:
1956 ; X64-AVX1-NEXT: vextractf128 $1, %ymm1, %xmm2
1957 ; X64-AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
1958 ; X64-AVX1-NEXT: vpmaxub %xmm2, %xmm3, %xmm2
1959 ; X64-AVX1-NEXT: vpmaxub %xmm2, %xmm1, %xmm1
1960 ; X64-AVX1-NEXT: vpmaxub %xmm1, %xmm0, %xmm0
1961 ; X64-AVX1-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
1962 ; X64-AVX1-NEXT: vpxor %xmm1, %xmm0, %xmm0
1963 ; X64-AVX1-NEXT: vpsrlw $8, %xmm0, %xmm1
1964 ; X64-AVX1-NEXT: vpminub %xmm1, %xmm0, %xmm0
1965 ; X64-AVX1-NEXT: vphminposuw %xmm0, %xmm0
1966 ; X64-AVX1-NEXT: vpextrb $0, %xmm0, %eax
1967 ; X64-AVX1-NEXT: notb %al
1968 ; X64-AVX1-NEXT: ## kill: def $al killed $al killed $eax
1969 ; X64-AVX1-NEXT: vzeroupper
1970 ; X64-AVX1-NEXT: retq
1972 ; X64-AVX2-LABEL: test_reduce_v64i8:
1973 ; X64-AVX2: ## %bb.0:
1974 ; X64-AVX2-NEXT: vpmaxub %ymm1, %ymm0, %ymm0
1975 ; X64-AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
1976 ; X64-AVX2-NEXT: vpmaxub %xmm1, %xmm0, %xmm0
1977 ; X64-AVX2-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
1978 ; X64-AVX2-NEXT: vpxor %xmm1, %xmm0, %xmm0
1979 ; X64-AVX2-NEXT: vpsrlw $8, %xmm0, %xmm1
1980 ; X64-AVX2-NEXT: vpminub %xmm1, %xmm0, %xmm0
1981 ; X64-AVX2-NEXT: vphminposuw %xmm0, %xmm0
1982 ; X64-AVX2-NEXT: vpextrb $0, %xmm0, %eax
1983 ; X64-AVX2-NEXT: notb %al
1984 ; X64-AVX2-NEXT: ## kill: def $al killed $al killed $eax
1985 ; X64-AVX2-NEXT: vzeroupper
1986 ; X64-AVX2-NEXT: retq
1988 ; X64-AVX512-LABEL: test_reduce_v64i8:
1989 ; X64-AVX512: ## %bb.0:
1990 ; X64-AVX512-NEXT: vextracti64x4 $1, %zmm0, %ymm1
1991 ; X64-AVX512-NEXT: vpmaxub %ymm1, %ymm0, %ymm0
1992 ; X64-AVX512-NEXT: vextracti128 $1, %ymm0, %xmm1
1993 ; X64-AVX512-NEXT: vpmaxub %xmm1, %xmm0, %xmm0
1994 ; X64-AVX512-NEXT: vpternlogq $15, %xmm0, %xmm0, %xmm0
1995 ; X64-AVX512-NEXT: vpsrlw $8, %xmm0, %xmm1
1996 ; X64-AVX512-NEXT: vpminub %xmm1, %xmm0, %xmm0
1997 ; X64-AVX512-NEXT: vphminposuw %xmm0, %xmm0
1998 ; X64-AVX512-NEXT: vpextrb $0, %xmm0, %eax
1999 ; X64-AVX512-NEXT: notb %al
2000 ; X64-AVX512-NEXT: ## kill: def $al killed $al killed $eax
2001 ; X64-AVX512-NEXT: vzeroupper
2002 ; X64-AVX512-NEXT: retq
2003 %1 = shufflevector <64 x i8> %a0, <64 x i8> undef, <64 x i32> <i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
2004 %2 = icmp ugt <64 x i8> %a0, %1
2005 %3 = select <64 x i1> %2, <64 x i8> %a0, <64 x i8> %1
2006 %4 = shufflevector <64 x i8> %3, <64 x i8> undef, <64 x i32> <i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
2007 %5 = icmp ugt <64 x i8> %3, %4
2008 %6 = select <64 x i1> %5, <64 x i8> %3, <64 x i8> %4
2009 %7 = shufflevector <64 x i8> %6, <64 x i8> undef, <64 x i32> <i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
2010 %8 = icmp ugt <64 x i8> %6, %7
2011 %9 = select <64 x i1> %8, <64 x i8> %6, <64 x i8> %7
2012 %10 = shufflevector <64 x i8> %9, <64 x i8> undef, <64 x i32> <i32 4, i32 5, i32 6, i32 7, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
2013 %11 = icmp ugt <64 x i8> %9, %10
2014 %12 = select <64 x i1> %11, <64 x i8> %9, <64 x i8> %10
2015 %13 = shufflevector <64 x i8> %12, <64 x i8> undef, <64 x i32> <i32 2, i32 3, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
2016 %14 = icmp ugt <64 x i8> %12, %13
2017 %15 = select <64 x i1> %14, <64 x i8> %12, <64 x i8> %13
2018 %16 = shufflevector <64 x i8> %15, <64 x i8> undef, <64 x i32> <i32 1, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
2019 %17 = icmp ugt <64 x i8> %15, %16
2020 %18 = select <64 x i1> %17, <64 x i8> %15, <64 x i8> %16
2021 %19 = extractelement <64 x i8> %18, i32 0
2026 ; Partial Vector Reductions
2029 define i16 @test_reduce_v16i16_v8i16(<16 x i16> %a0) {
2030 ; X86-SSE2-LABEL: test_reduce_v16i16_v8i16:
2031 ; X86-SSE2: ## %bb.0:
2032 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
2033 ; X86-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [32768,32768,32768,32768,32768,32768,32768,32768]
2034 ; X86-SSE2-NEXT: pxor %xmm2, %xmm0
2035 ; X86-SSE2-NEXT: pxor %xmm2, %xmm1
2036 ; X86-SSE2-NEXT: pmaxsw %xmm0, %xmm1
2037 ; X86-SSE2-NEXT: movdqa %xmm1, %xmm0
2038 ; X86-SSE2-NEXT: pxor %xmm2, %xmm0
2039 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,2,3]
2040 ; X86-SSE2-NEXT: pxor %xmm2, %xmm0
2041 ; X86-SSE2-NEXT: pmaxsw %xmm1, %xmm0
2042 ; X86-SSE2-NEXT: movdqa %xmm0, %xmm1
2043 ; X86-SSE2-NEXT: pxor %xmm2, %xmm1
2044 ; X86-SSE2-NEXT: psrld $16, %xmm1
2045 ; X86-SSE2-NEXT: pxor %xmm2, %xmm1
2046 ; X86-SSE2-NEXT: pmaxsw %xmm0, %xmm1
2047 ; X86-SSE2-NEXT: movd %xmm1, %eax
2048 ; X86-SSE2-NEXT: xorl $32768, %eax ## imm = 0x8000
2049 ; X86-SSE2-NEXT: ## kill: def $ax killed $ax killed $eax
2050 ; X86-SSE2-NEXT: retl
2052 ; X86-SSE42-LABEL: test_reduce_v16i16_v8i16:
2053 ; X86-SSE42: ## %bb.0:
2054 ; X86-SSE42-NEXT: pcmpeqd %xmm1, %xmm1
2055 ; X86-SSE42-NEXT: pxor %xmm0, %xmm1
2056 ; X86-SSE42-NEXT: phminposuw %xmm1, %xmm0
2057 ; X86-SSE42-NEXT: movd %xmm0, %eax
2058 ; X86-SSE42-NEXT: notl %eax
2059 ; X86-SSE42-NEXT: ## kill: def $ax killed $ax killed $eax
2060 ; X86-SSE42-NEXT: retl
2062 ; X86-AVX-LABEL: test_reduce_v16i16_v8i16:
2063 ; X86-AVX: ## %bb.0:
2064 ; X86-AVX-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
2065 ; X86-AVX-NEXT: vpxor %xmm1, %xmm0, %xmm0
2066 ; X86-AVX-NEXT: vphminposuw %xmm0, %xmm0
2067 ; X86-AVX-NEXT: vmovd %xmm0, %eax
2068 ; X86-AVX-NEXT: notl %eax
2069 ; X86-AVX-NEXT: ## kill: def $ax killed $ax killed $eax
2070 ; X86-AVX-NEXT: vzeroupper
2071 ; X86-AVX-NEXT: retl
2073 ; X64-SSE2-LABEL: test_reduce_v16i16_v8i16:
2074 ; X64-SSE2: ## %bb.0:
2075 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
2076 ; X64-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [32768,32768,32768,32768,32768,32768,32768,32768]
2077 ; X64-SSE2-NEXT: pxor %xmm2, %xmm0
2078 ; X64-SSE2-NEXT: pxor %xmm2, %xmm1
2079 ; X64-SSE2-NEXT: pmaxsw %xmm0, %xmm1
2080 ; X64-SSE2-NEXT: movdqa %xmm1, %xmm0
2081 ; X64-SSE2-NEXT: pxor %xmm2, %xmm0
2082 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,2,3]
2083 ; X64-SSE2-NEXT: pxor %xmm2, %xmm0
2084 ; X64-SSE2-NEXT: pmaxsw %xmm1, %xmm0
2085 ; X64-SSE2-NEXT: movdqa %xmm0, %xmm1
2086 ; X64-SSE2-NEXT: pxor %xmm2, %xmm1
2087 ; X64-SSE2-NEXT: psrld $16, %xmm1
2088 ; X64-SSE2-NEXT: pxor %xmm2, %xmm1
2089 ; X64-SSE2-NEXT: pmaxsw %xmm0, %xmm1
2090 ; X64-SSE2-NEXT: movd %xmm1, %eax
2091 ; X64-SSE2-NEXT: xorl $32768, %eax ## imm = 0x8000
2092 ; X64-SSE2-NEXT: ## kill: def $ax killed $ax killed $eax
2093 ; X64-SSE2-NEXT: retq
2095 ; X64-SSE42-LABEL: test_reduce_v16i16_v8i16:
2096 ; X64-SSE42: ## %bb.0:
2097 ; X64-SSE42-NEXT: pcmpeqd %xmm1, %xmm1
2098 ; X64-SSE42-NEXT: pxor %xmm0, %xmm1
2099 ; X64-SSE42-NEXT: phminposuw %xmm1, %xmm0
2100 ; X64-SSE42-NEXT: movd %xmm0, %eax
2101 ; X64-SSE42-NEXT: notl %eax
2102 ; X64-SSE42-NEXT: ## kill: def $ax killed $ax killed $eax
2103 ; X64-SSE42-NEXT: retq
2105 ; X64-AVX1-LABEL: test_reduce_v16i16_v8i16:
2106 ; X64-AVX1: ## %bb.0:
2107 ; X64-AVX1-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
2108 ; X64-AVX1-NEXT: vpxor %xmm1, %xmm0, %xmm0
2109 ; X64-AVX1-NEXT: vphminposuw %xmm0, %xmm0
2110 ; X64-AVX1-NEXT: vmovd %xmm0, %eax
2111 ; X64-AVX1-NEXT: notl %eax
2112 ; X64-AVX1-NEXT: ## kill: def $ax killed $ax killed $eax
2113 ; X64-AVX1-NEXT: vzeroupper
2114 ; X64-AVX1-NEXT: retq
2116 ; X64-AVX2-LABEL: test_reduce_v16i16_v8i16:
2117 ; X64-AVX2: ## %bb.0:
2118 ; X64-AVX2-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
2119 ; X64-AVX2-NEXT: vpxor %xmm1, %xmm0, %xmm0
2120 ; X64-AVX2-NEXT: vphminposuw %xmm0, %xmm0
2121 ; X64-AVX2-NEXT: vmovd %xmm0, %eax
2122 ; X64-AVX2-NEXT: notl %eax
2123 ; X64-AVX2-NEXT: ## kill: def $ax killed $ax killed $eax
2124 ; X64-AVX2-NEXT: vzeroupper
2125 ; X64-AVX2-NEXT: retq
2127 ; X64-AVX512-LABEL: test_reduce_v16i16_v8i16:
2128 ; X64-AVX512: ## %bb.0:
2129 ; X64-AVX512-NEXT: vpternlogq $15, %xmm0, %xmm0, %xmm0
2130 ; X64-AVX512-NEXT: vphminposuw %xmm0, %xmm0
2131 ; X64-AVX512-NEXT: vmovd %xmm0, %eax
2132 ; X64-AVX512-NEXT: notl %eax
2133 ; X64-AVX512-NEXT: ## kill: def $ax killed $ax killed $eax
2134 ; X64-AVX512-NEXT: vzeroupper
2135 ; X64-AVX512-NEXT: retq
2136 %1 = shufflevector <16 x i16> %a0, <16 x i16> undef, <16 x i32> <i32 4, i32 5, i32 6, i32 7, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
2137 %2 = icmp ugt <16 x i16> %a0, %1
2138 %3 = select <16 x i1> %2, <16 x i16> %a0, <16 x i16> %1
2139 %4 = shufflevector <16 x i16> %3, <16 x i16> undef, <16 x i32> <i32 2, i32 3, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
2140 %5 = icmp ugt <16 x i16> %3, %4
2141 %6 = select <16 x i1> %5, <16 x i16> %3, <16 x i16> %4
2142 %7 = shufflevector <16 x i16> %6, <16 x i16> undef, <16 x i32> <i32 1, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
2143 %8 = icmp ugt <16 x i16> %6, %7
2144 %9 = select <16 x i1> %8, <16 x i16> %6, <16 x i16> %7
2145 %10 = extractelement <16 x i16> %9, i32 0
; Same partial reduction as test_reduce_v16i16_v8i16 but on a v32i16 source:
; only the low 8 x i16 lanes are reduced (umax), so codegen should be
; identical to the v16i16 case — no 256/512-bit ops are needed.
2149 define i16 @test_reduce_v32i16_v8i16(<32 x i16> %a0) {
2150 ; X86-SSE2-LABEL: test_reduce_v32i16_v8i16:
2151 ; X86-SSE2: ## %bb.0:
2152 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
2153 ; X86-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [32768,32768,32768,32768,32768,32768,32768,32768]
2154 ; X86-SSE2-NEXT: pxor %xmm2, %xmm0
2155 ; X86-SSE2-NEXT: pxor %xmm2, %xmm1
2156 ; X86-SSE2-NEXT: pmaxsw %xmm0, %xmm1
2157 ; X86-SSE2-NEXT: movdqa %xmm1, %xmm0
2158 ; X86-SSE2-NEXT: pxor %xmm2, %xmm0
2159 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,2,3]
2160 ; X86-SSE2-NEXT: pxor %xmm2, %xmm0
2161 ; X86-SSE2-NEXT: pmaxsw %xmm1, %xmm0
2162 ; X86-SSE2-NEXT: movdqa %xmm0, %xmm1
2163 ; X86-SSE2-NEXT: pxor %xmm2, %xmm1
2164 ; X86-SSE2-NEXT: psrld $16, %xmm1
2165 ; X86-SSE2-NEXT: pxor %xmm2, %xmm1
2166 ; X86-SSE2-NEXT: pmaxsw %xmm0, %xmm1
2167 ; X86-SSE2-NEXT: movd %xmm1, %eax
2168 ; X86-SSE2-NEXT: xorl $32768, %eax ## imm = 0x8000
2169 ; X86-SSE2-NEXT: ## kill: def $ax killed $ax killed $eax
2170 ; X86-SSE2-NEXT: retl
2172 ; X86-SSE42-LABEL: test_reduce_v32i16_v8i16:
2173 ; X86-SSE42: ## %bb.0:
2174 ; X86-SSE42-NEXT: pcmpeqd %xmm1, %xmm1
2175 ; X86-SSE42-NEXT: pxor %xmm0, %xmm1
2176 ; X86-SSE42-NEXT: phminposuw %xmm1, %xmm0
2177 ; X86-SSE42-NEXT: movd %xmm0, %eax
2178 ; X86-SSE42-NEXT: notl %eax
2179 ; X86-SSE42-NEXT: ## kill: def $ax killed $ax killed $eax
2180 ; X86-SSE42-NEXT: retl
2182 ; X86-AVX-LABEL: test_reduce_v32i16_v8i16:
2183 ; X86-AVX: ## %bb.0:
2184 ; X86-AVX-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
2185 ; X86-AVX-NEXT: vpxor %xmm1, %xmm0, %xmm0
2186 ; X86-AVX-NEXT: vphminposuw %xmm0, %xmm0
2187 ; X86-AVX-NEXT: vmovd %xmm0, %eax
2188 ; X86-AVX-NEXT: notl %eax
2189 ; X86-AVX-NEXT: ## kill: def $ax killed $ax killed $eax
2190 ; X86-AVX-NEXT: vzeroupper
2191 ; X86-AVX-NEXT: retl
2193 ; X64-SSE2-LABEL: test_reduce_v32i16_v8i16:
2194 ; X64-SSE2: ## %bb.0:
2195 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
2196 ; X64-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [32768,32768,32768,32768,32768,32768,32768,32768]
2197 ; X64-SSE2-NEXT: pxor %xmm2, %xmm0
2198 ; X64-SSE2-NEXT: pxor %xmm2, %xmm1
2199 ; X64-SSE2-NEXT: pmaxsw %xmm0, %xmm1
2200 ; X64-SSE2-NEXT: movdqa %xmm1, %xmm0
2201 ; X64-SSE2-NEXT: pxor %xmm2, %xmm0
2202 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,2,3]
2203 ; X64-SSE2-NEXT: pxor %xmm2, %xmm0
2204 ; X64-SSE2-NEXT: pmaxsw %xmm1, %xmm0
2205 ; X64-SSE2-NEXT: movdqa %xmm0, %xmm1
2206 ; X64-SSE2-NEXT: pxor %xmm2, %xmm1
2207 ; X64-SSE2-NEXT: psrld $16, %xmm1
2208 ; X64-SSE2-NEXT: pxor %xmm2, %xmm1
2209 ; X64-SSE2-NEXT: pmaxsw %xmm0, %xmm1
2210 ; X64-SSE2-NEXT: movd %xmm1, %eax
2211 ; X64-SSE2-NEXT: xorl $32768, %eax ## imm = 0x8000
2212 ; X64-SSE2-NEXT: ## kill: def $ax killed $ax killed $eax
2213 ; X64-SSE2-NEXT: retq
2215 ; X64-SSE42-LABEL: test_reduce_v32i16_v8i16:
2216 ; X64-SSE42: ## %bb.0:
2217 ; X64-SSE42-NEXT: pcmpeqd %xmm1, %xmm1
2218 ; X64-SSE42-NEXT: pxor %xmm0, %xmm1
2219 ; X64-SSE42-NEXT: phminposuw %xmm1, %xmm0
2220 ; X64-SSE42-NEXT: movd %xmm0, %eax
2221 ; X64-SSE42-NEXT: notl %eax
2222 ; X64-SSE42-NEXT: ## kill: def $ax killed $ax killed $eax
2223 ; X64-SSE42-NEXT: retq
2225 ; X64-AVX1-LABEL: test_reduce_v32i16_v8i16:
2226 ; X64-AVX1: ## %bb.0:
2227 ; X64-AVX1-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
2228 ; X64-AVX1-NEXT: vpxor %xmm1, %xmm0, %xmm0
2229 ; X64-AVX1-NEXT: vphminposuw %xmm0, %xmm0
2230 ; X64-AVX1-NEXT: vmovd %xmm0, %eax
2231 ; X64-AVX1-NEXT: notl %eax
2232 ; X64-AVX1-NEXT: ## kill: def $ax killed $ax killed $eax
2233 ; X64-AVX1-NEXT: vzeroupper
2234 ; X64-AVX1-NEXT: retq
2236 ; X64-AVX2-LABEL: test_reduce_v32i16_v8i16:
2237 ; X64-AVX2: ## %bb.0:
2238 ; X64-AVX2-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
2239 ; X64-AVX2-NEXT: vpxor %xmm1, %xmm0, %xmm0
2240 ; X64-AVX2-NEXT: vphminposuw %xmm0, %xmm0
2241 ; X64-AVX2-NEXT: vmovd %xmm0, %eax
2242 ; X64-AVX2-NEXT: notl %eax
2243 ; X64-AVX2-NEXT: ## kill: def $ax killed $ax killed $eax
2244 ; X64-AVX2-NEXT: vzeroupper
2245 ; X64-AVX2-NEXT: retq
2247 ; X64-AVX512-LABEL: test_reduce_v32i16_v8i16:
2248 ; X64-AVX512: ## %bb.0:
2249 ; X64-AVX512-NEXT: vpternlogq $15, %xmm0, %xmm0, %xmm0
2250 ; X64-AVX512-NEXT: vphminposuw %xmm0, %xmm0
2251 ; X64-AVX512-NEXT: vmovd %xmm0, %eax
2252 ; X64-AVX512-NEXT: notl %eax
2253 ; X64-AVX512-NEXT: ## kill: def $ax killed $ax killed $eax
2254 ; X64-AVX512-NEXT: vzeroupper
2255 ; X64-AVX512-NEXT: retq
; IR: three rounds of (shufflevector, icmp ugt, select) over the low 8 of
; the 32 lanes; the upper lanes never feed the result.
2256 %1 = shufflevector <32 x i16> %a0, <32 x i16> undef, <32 x i32> <i32 4, i32 5, i32 6, i32 7, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
2257 %2 = icmp ugt <32 x i16> %a0, %1
2258 %3 = select <32 x i1> %2, <32 x i16> %a0, <32 x i16> %1
2259 %4 = shufflevector <32 x i16> %3, <32 x i16> undef, <32 x i32> <i32 2, i32 3, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
2260 %5 = icmp ugt <32 x i16> %3, %4
2261 %6 = select <32 x i1> %5, <32 x i16> %3, <32 x i16> %4
2262 %7 = shufflevector <32 x i16> %6, <32 x i16> undef, <32 x i32> <i32 1, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
2263 %8 = icmp ugt <32 x i16> %6, %7
2264 %9 = select <32 x i1> %8, <32 x i16> %6, <32 x i16> %7
2265 %10 = extractelement <32 x i16> %9, i32 0
; Reduce the low half (16 x i8) of a v32i8 to its unsigned maximum via a
; log2 shuffle/umax tree (lanes 8-15, 4-7, 2-3, then 1), reading lane 0.
; SSE2 has a native unsigned byte max (pmaxub), so no bias trick is needed;
; SSE4.2+/AVX NOT the input, fold odd bytes into even with psrlw $8 + pminub,
; then use phminposuw, with notb recovering the max byte.
2269 define i8 @test_reduce_v32i8_v16i8(<32 x i8> %a0) {
2270 ; X86-SSE2-LABEL: test_reduce_v32i8_v16i8:
2271 ; X86-SSE2: ## %bb.0:
2272 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
2273 ; X86-SSE2-NEXT: pmaxub %xmm0, %xmm1
2274 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,2,3]
2275 ; X86-SSE2-NEXT: pmaxub %xmm1, %xmm0
2276 ; X86-SSE2-NEXT: movdqa %xmm0, %xmm1
2277 ; X86-SSE2-NEXT: psrld $16, %xmm1
2278 ; X86-SSE2-NEXT: pmaxub %xmm0, %xmm1
2279 ; X86-SSE2-NEXT: movdqa %xmm1, %xmm0
2280 ; X86-SSE2-NEXT: psrlw $8, %xmm0
2281 ; X86-SSE2-NEXT: pmaxub %xmm1, %xmm0
2282 ; X86-SSE2-NEXT: movd %xmm0, %eax
2283 ; X86-SSE2-NEXT: ## kill: def $al killed $al killed $eax
2284 ; X86-SSE2-NEXT: retl
2286 ; X86-SSE42-LABEL: test_reduce_v32i8_v16i8:
2287 ; X86-SSE42: ## %bb.0:
2288 ; X86-SSE42-NEXT: pcmpeqd %xmm1, %xmm1
2289 ; X86-SSE42-NEXT: pxor %xmm0, %xmm1
2290 ; X86-SSE42-NEXT: movdqa %xmm1, %xmm0
2291 ; X86-SSE42-NEXT: psrlw $8, %xmm0
2292 ; X86-SSE42-NEXT: pminub %xmm1, %xmm0
2293 ; X86-SSE42-NEXT: phminposuw %xmm0, %xmm0
2294 ; X86-SSE42-NEXT: pextrb $0, %xmm0, %eax
2295 ; X86-SSE42-NEXT: notb %al
2296 ; X86-SSE42-NEXT: ## kill: def $al killed $al killed $eax
2297 ; X86-SSE42-NEXT: retl
2299 ; X86-AVX-LABEL: test_reduce_v32i8_v16i8:
2300 ; X86-AVX: ## %bb.0:
2301 ; X86-AVX-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
2302 ; X86-AVX-NEXT: vpxor %xmm1, %xmm0, %xmm0
2303 ; X86-AVX-NEXT: vpsrlw $8, %xmm0, %xmm1
2304 ; X86-AVX-NEXT: vpminub %xmm1, %xmm0, %xmm0
2305 ; X86-AVX-NEXT: vphminposuw %xmm0, %xmm0
2306 ; X86-AVX-NEXT: vpextrb $0, %xmm0, %eax
2307 ; X86-AVX-NEXT: notb %al
2308 ; X86-AVX-NEXT: ## kill: def $al killed $al killed $eax
2309 ; X86-AVX-NEXT: vzeroupper
2310 ; X86-AVX-NEXT: retl
2312 ; X64-SSE2-LABEL: test_reduce_v32i8_v16i8:
2313 ; X64-SSE2: ## %bb.0:
2314 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
2315 ; X64-SSE2-NEXT: pmaxub %xmm0, %xmm1
2316 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,2,3]
2317 ; X64-SSE2-NEXT: pmaxub %xmm1, %xmm0
2318 ; X64-SSE2-NEXT: movdqa %xmm0, %xmm1
2319 ; X64-SSE2-NEXT: psrld $16, %xmm1
2320 ; X64-SSE2-NEXT: pmaxub %xmm0, %xmm1
2321 ; X64-SSE2-NEXT: movdqa %xmm1, %xmm0
2322 ; X64-SSE2-NEXT: psrlw $8, %xmm0
2323 ; X64-SSE2-NEXT: pmaxub %xmm1, %xmm0
2324 ; X64-SSE2-NEXT: movd %xmm0, %eax
2325 ; X64-SSE2-NEXT: ## kill: def $al killed $al killed $eax
2326 ; X64-SSE2-NEXT: retq
2328 ; X64-SSE42-LABEL: test_reduce_v32i8_v16i8:
2329 ; X64-SSE42: ## %bb.0:
2330 ; X64-SSE42-NEXT: pcmpeqd %xmm1, %xmm1
2331 ; X64-SSE42-NEXT: pxor %xmm0, %xmm1
2332 ; X64-SSE42-NEXT: movdqa %xmm1, %xmm0
2333 ; X64-SSE42-NEXT: psrlw $8, %xmm0
2334 ; X64-SSE42-NEXT: pminub %xmm1, %xmm0
2335 ; X64-SSE42-NEXT: phminposuw %xmm0, %xmm0
2336 ; X64-SSE42-NEXT: pextrb $0, %xmm0, %eax
2337 ; X64-SSE42-NEXT: notb %al
2338 ; X64-SSE42-NEXT: ## kill: def $al killed $al killed $eax
2339 ; X64-SSE42-NEXT: retq
2341 ; X64-AVX1-LABEL: test_reduce_v32i8_v16i8:
2342 ; X64-AVX1: ## %bb.0:
2343 ; X64-AVX1-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
2344 ; X64-AVX1-NEXT: vpxor %xmm1, %xmm0, %xmm0
2345 ; X64-AVX1-NEXT: vpsrlw $8, %xmm0, %xmm1
2346 ; X64-AVX1-NEXT: vpminub %xmm1, %xmm0, %xmm0
2347 ; X64-AVX1-NEXT: vphminposuw %xmm0, %xmm0
2348 ; X64-AVX1-NEXT: vpextrb $0, %xmm0, %eax
2349 ; X64-AVX1-NEXT: notb %al
2350 ; X64-AVX1-NEXT: ## kill: def $al killed $al killed $eax
2351 ; X64-AVX1-NEXT: vzeroupper
2352 ; X64-AVX1-NEXT: retq
2354 ; X64-AVX2-LABEL: test_reduce_v32i8_v16i8:
2355 ; X64-AVX2: ## %bb.0:
2356 ; X64-AVX2-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
2357 ; X64-AVX2-NEXT: vpxor %xmm1, %xmm0, %xmm0
2358 ; X64-AVX2-NEXT: vpsrlw $8, %xmm0, %xmm1
2359 ; X64-AVX2-NEXT: vpminub %xmm1, %xmm0, %xmm0
2360 ; X64-AVX2-NEXT: vphminposuw %xmm0, %xmm0
2361 ; X64-AVX2-NEXT: vpextrb $0, %xmm0, %eax
2362 ; X64-AVX2-NEXT: notb %al
2363 ; X64-AVX2-NEXT: ## kill: def $al killed $al killed $eax
2364 ; X64-AVX2-NEXT: vzeroupper
2365 ; X64-AVX2-NEXT: retq
2367 ; X64-AVX512-LABEL: test_reduce_v32i8_v16i8:
2368 ; X64-AVX512: ## %bb.0:
2369 ; X64-AVX512-NEXT: vpternlogq $15, %xmm0, %xmm0, %xmm0
2370 ; X64-AVX512-NEXT: vpsrlw $8, %xmm0, %xmm1
2371 ; X64-AVX512-NEXT: vpminub %xmm1, %xmm0, %xmm0
2372 ; X64-AVX512-NEXT: vphminposuw %xmm0, %xmm0
2373 ; X64-AVX512-NEXT: vpextrb $0, %xmm0, %eax
2374 ; X64-AVX512-NEXT: notb %al
2375 ; X64-AVX512-NEXT: ## kill: def $al killed $al killed $eax
2376 ; X64-AVX512-NEXT: vzeroupper
2377 ; X64-AVX512-NEXT: retq
; IR: four rounds of (shufflevector, icmp ugt, select), halving the active
; lanes each round; only the low 16 of the 32 byte lanes participate.
2378 %1 = shufflevector <32 x i8> %a0, <32 x i8> undef, <32 x i32> <i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
2379 %2 = icmp ugt <32 x i8> %a0, %1
2380 %3 = select <32 x i1> %2, <32 x i8> %a0, <32 x i8> %1
2381 %4 = shufflevector <32 x i8> %3, <32 x i8> undef, <32 x i32> <i32 4, i32 5, i32 6, i32 7, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
2382 %5 = icmp ugt <32 x i8> %3, %4
2383 %6 = select <32 x i1> %5, <32 x i8> %3, <32 x i8> %4
2384 %7 = shufflevector <32 x i8> %6, <32 x i8> undef, <32 x i32> <i32 2, i32 3, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
2385 %8 = icmp ugt <32 x i8> %6, %7
2386 %9 = select <32 x i1> %8, <32 x i8> %6, <32 x i8> %7
2387 %10 = shufflevector <32 x i8> %9, <32 x i8> undef, <32 x i32> <i32 1, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
2388 %11 = icmp ugt <32 x i8> %9, %10
2389 %12 = select <32 x i1> %11, <32 x i8> %9, <32 x i8> %10
2390 %13 = extractelement <32 x i8> %12, i32 0
; Same partial reduction as test_reduce_v32i8_v16i8 but on a v64i8 source:
; only the low 16 x i8 lanes are reduced (umax), so codegen should be
; identical to the v32i8 case — no 256/512-bit ops are needed.
2394 define i8 @test_reduce_v64i8_v16i8(<64 x i8> %a0) {
2395 ; X86-SSE2-LABEL: test_reduce_v64i8_v16i8:
2396 ; X86-SSE2: ## %bb.0:
2397 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
2398 ; X86-SSE2-NEXT: pmaxub %xmm0, %xmm1
2399 ; X86-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,2,3]
2400 ; X86-SSE2-NEXT: pmaxub %xmm1, %xmm0
2401 ; X86-SSE2-NEXT: movdqa %xmm0, %xmm1
2402 ; X86-SSE2-NEXT: psrld $16, %xmm1
2403 ; X86-SSE2-NEXT: pmaxub %xmm0, %xmm1
2404 ; X86-SSE2-NEXT: movdqa %xmm1, %xmm0
2405 ; X86-SSE2-NEXT: psrlw $8, %xmm0
2406 ; X86-SSE2-NEXT: pmaxub %xmm1, %xmm0
2407 ; X86-SSE2-NEXT: movd %xmm0, %eax
2408 ; X86-SSE2-NEXT: ## kill: def $al killed $al killed $eax
2409 ; X86-SSE2-NEXT: retl
2411 ; X86-SSE42-LABEL: test_reduce_v64i8_v16i8:
2412 ; X86-SSE42: ## %bb.0:
2413 ; X86-SSE42-NEXT: pcmpeqd %xmm1, %xmm1
2414 ; X86-SSE42-NEXT: pxor %xmm0, %xmm1
2415 ; X86-SSE42-NEXT: movdqa %xmm1, %xmm0
2416 ; X86-SSE42-NEXT: psrlw $8, %xmm0
2417 ; X86-SSE42-NEXT: pminub %xmm1, %xmm0
2418 ; X86-SSE42-NEXT: phminposuw %xmm0, %xmm0
2419 ; X86-SSE42-NEXT: pextrb $0, %xmm0, %eax
2420 ; X86-SSE42-NEXT: notb %al
2421 ; X86-SSE42-NEXT: ## kill: def $al killed $al killed $eax
2422 ; X86-SSE42-NEXT: retl
2424 ; X86-AVX-LABEL: test_reduce_v64i8_v16i8:
2425 ; X86-AVX: ## %bb.0:
2426 ; X86-AVX-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
2427 ; X86-AVX-NEXT: vpxor %xmm1, %xmm0, %xmm0
2428 ; X86-AVX-NEXT: vpsrlw $8, %xmm0, %xmm1
2429 ; X86-AVX-NEXT: vpminub %xmm1, %xmm0, %xmm0
2430 ; X86-AVX-NEXT: vphminposuw %xmm0, %xmm0
2431 ; X86-AVX-NEXT: vpextrb $0, %xmm0, %eax
2432 ; X86-AVX-NEXT: notb %al
2433 ; X86-AVX-NEXT: ## kill: def $al killed $al killed $eax
2434 ; X86-AVX-NEXT: vzeroupper
2435 ; X86-AVX-NEXT: retl
2437 ; X64-SSE2-LABEL: test_reduce_v64i8_v16i8:
2438 ; X64-SSE2: ## %bb.0:
2439 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
2440 ; X64-SSE2-NEXT: pmaxub %xmm0, %xmm1
2441 ; X64-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,2,3]
2442 ; X64-SSE2-NEXT: pmaxub %xmm1, %xmm0
2443 ; X64-SSE2-NEXT: movdqa %xmm0, %xmm1
2444 ; X64-SSE2-NEXT: psrld $16, %xmm1
2445 ; X64-SSE2-NEXT: pmaxub %xmm0, %xmm1
2446 ; X64-SSE2-NEXT: movdqa %xmm1, %xmm0
2447 ; X64-SSE2-NEXT: psrlw $8, %xmm0
2448 ; X64-SSE2-NEXT: pmaxub %xmm1, %xmm0
2449 ; X64-SSE2-NEXT: movd %xmm0, %eax
2450 ; X64-SSE2-NEXT: ## kill: def $al killed $al killed $eax
2451 ; X64-SSE2-NEXT: retq
2453 ; X64-SSE42-LABEL: test_reduce_v64i8_v16i8:
2454 ; X64-SSE42: ## %bb.0:
2455 ; X64-SSE42-NEXT: pcmpeqd %xmm1, %xmm1
2456 ; X64-SSE42-NEXT: pxor %xmm0, %xmm1
2457 ; X64-SSE42-NEXT: movdqa %xmm1, %xmm0
2458 ; X64-SSE42-NEXT: psrlw $8, %xmm0
2459 ; X64-SSE42-NEXT: pminub %xmm1, %xmm0
2460 ; X64-SSE42-NEXT: phminposuw %xmm0, %xmm0
2461 ; X64-SSE42-NEXT: pextrb $0, %xmm0, %eax
2462 ; X64-SSE42-NEXT: notb %al
2463 ; X64-SSE42-NEXT: ## kill: def $al killed $al killed $eax
2464 ; X64-SSE42-NEXT: retq
2466 ; X64-AVX1-LABEL: test_reduce_v64i8_v16i8:
2467 ; X64-AVX1: ## %bb.0:
2468 ; X64-AVX1-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
2469 ; X64-AVX1-NEXT: vpxor %xmm1, %xmm0, %xmm0
2470 ; X64-AVX1-NEXT: vpsrlw $8, %xmm0, %xmm1
2471 ; X64-AVX1-NEXT: vpminub %xmm1, %xmm0, %xmm0
2472 ; X64-AVX1-NEXT: vphminposuw %xmm0, %xmm0
2473 ; X64-AVX1-NEXT: vpextrb $0, %xmm0, %eax
2474 ; X64-AVX1-NEXT: notb %al
2475 ; X64-AVX1-NEXT: ## kill: def $al killed $al killed $eax
2476 ; X64-AVX1-NEXT: vzeroupper
2477 ; X64-AVX1-NEXT: retq
2479 ; X64-AVX2-LABEL: test_reduce_v64i8_v16i8:
2480 ; X64-AVX2: ## %bb.0:
2481 ; X64-AVX2-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1
2482 ; X64-AVX2-NEXT: vpxor %xmm1, %xmm0, %xmm0
2483 ; X64-AVX2-NEXT: vpsrlw $8, %xmm0, %xmm1
2484 ; X64-AVX2-NEXT: vpminub %xmm1, %xmm0, %xmm0
2485 ; X64-AVX2-NEXT: vphminposuw %xmm0, %xmm0
2486 ; X64-AVX2-NEXT: vpextrb $0, %xmm0, %eax
2487 ; X64-AVX2-NEXT: notb %al
2488 ; X64-AVX2-NEXT: ## kill: def $al killed $al killed $eax
2489 ; X64-AVX2-NEXT: vzeroupper
2490 ; X64-AVX2-NEXT: retq
2492 ; X64-AVX512-LABEL: test_reduce_v64i8_v16i8:
2493 ; X64-AVX512: ## %bb.0:
2494 ; X64-AVX512-NEXT: vpternlogq $15, %xmm0, %xmm0, %xmm0
2495 ; X64-AVX512-NEXT: vpsrlw $8, %xmm0, %xmm1
2496 ; X64-AVX512-NEXT: vpminub %xmm1, %xmm0, %xmm0
2497 ; X64-AVX512-NEXT: vphminposuw %xmm0, %xmm0
2498 ; X64-AVX512-NEXT: vpextrb $0, %xmm0, %eax
2499 ; X64-AVX512-NEXT: notb %al
2500 ; X64-AVX512-NEXT: ## kill: def $al killed $al killed $eax
2501 ; X64-AVX512-NEXT: vzeroupper
2502 ; X64-AVX512-NEXT: retq
; IR: four rounds of (shufflevector, icmp ugt, select) over the low 16 of
; the 64 byte lanes; the upper lanes never feed the result.
2503 %1 = shufflevector <64 x i8> %a0, <64 x i8> undef, <64 x i32> <i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
2504 %2 = icmp ugt <64 x i8> %a0, %1
2505 %3 = select <64 x i1> %2, <64 x i8> %a0, <64 x i8> %1
2506 %4 = shufflevector <64 x i8> %3, <64 x i8> undef, <64 x i32> <i32 4, i32 5, i32 6, i32 7, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
2507 %5 = icmp ugt <64 x i8> %3, %4
2508 %6 = select <64 x i1> %5, <64 x i8> %3, <64 x i8> %4
2509 %7 = shufflevector <64 x i8> %6, <64 x i8> undef, <64 x i32> <i32 2, i32 3, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
2510 %8 = icmp ugt <64 x i8> %6, %7
2511 %9 = select <64 x i1> %8, <64 x i8> %6, <64 x i8> %7
2512 %10 = shufflevector <64 x i8> %9, <64 x i8> undef, <64 x i32> <i32 1, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
2513 %11 = icmp ugt <64 x i8> %9, %10
2514 %12 = select <64 x i1> %11, <64 x i8> %9, <64 x i8> %10
2515 %13 = extractelement <64 x i8> %12, i32 0