1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc -mtriple=x86_64-unknown-linux-gnu -mattr=+sse2 < %s | FileCheck %s --check-prefixes=CHECK,CHECK-SSE,CHECK-SSE2
3 ; RUN: llc -mtriple=x86_64-unknown-linux-gnu -mattr=+sse4.1 < %s | FileCheck %s --check-prefixes=CHECK,CHECK-SSE,CHECK-SSE41
4 ; RUN: llc -mtriple=x86_64-unknown-linux-gnu -mattr=+avx < %s | FileCheck %s --check-prefixes=CHECK,CHECK-AVX,CHECK-AVX1
5 ; RUN: llc -mtriple=x86_64-unknown-linux-gnu -mattr=+avx2 < %s | FileCheck %s --check-prefixes=CHECK,CHECK-AVX,CHECK-AVX2
6 ; RUN: llc -mtriple=x86_64-unknown-linux-gnu -mattr=+avx512f,+avx512vl < %s | FileCheck %s --check-prefixes=CHECK,CHECK-AVX,CHECK-AVX512VL
; Splat odd divisor: urem <4 x i32> by 25 compared eq against zero. The checks
; below verify each subtarget lowers this without a real division (SSE2 uses a
; pmuludq/pxor/pcmpgtd sequence; SSE4.1+/AVX use pmulld+pminud+pcmpeqd).
9 define <4 x i32> @test_urem_odd_25(<4 x i32> %X) nounwind {
10 ; CHECK-SSE2-LABEL: test_urem_odd_25:
11 ; CHECK-SSE2: # %bb.0:
12 ; CHECK-SSE2-NEXT: movdqa {{.*#+}} xmm1 = [3264175145,3264175145,3264175145,3264175145]
13 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm0[1,1,3,3]
14 ; CHECK-SSE2-NEXT: pmuludq %xmm1, %xmm0
15 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
16 ; CHECK-SSE2-NEXT: pmuludq %xmm1, %xmm2
17 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm2[0,2,2,3]
18 ; CHECK-SSE2-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
19 ; CHECK-SSE2-NEXT: pxor {{.*}}(%rip), %xmm0
20 ; CHECK-SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
21 ; CHECK-SSE2-NEXT: pandn {{.*}}(%rip), %xmm0
22 ; CHECK-SSE2-NEXT: retq
24 ; CHECK-SSE41-LABEL: test_urem_odd_25:
25 ; CHECK-SSE41: # %bb.0:
26 ; CHECK-SSE41-NEXT: pmulld {{.*}}(%rip), %xmm0
27 ; CHECK-SSE41-NEXT: movdqa {{.*#+}} xmm1 = [171798691,171798691,171798691,171798691]
28 ; CHECK-SSE41-NEXT: pminud %xmm0, %xmm1
29 ; CHECK-SSE41-NEXT: pcmpeqd %xmm1, %xmm0
30 ; CHECK-SSE41-NEXT: psrld $31, %xmm0
31 ; CHECK-SSE41-NEXT: retq
33 ; CHECK-AVX1-LABEL: test_urem_odd_25:
34 ; CHECK-AVX1: # %bb.0:
35 ; CHECK-AVX1-NEXT: vpmulld {{.*}}(%rip), %xmm0, %xmm0
36 ; CHECK-AVX1-NEXT: vpminud {{.*}}(%rip), %xmm0, %xmm1
37 ; CHECK-AVX1-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
38 ; CHECK-AVX1-NEXT: vpsrld $31, %xmm0, %xmm0
39 ; CHECK-AVX1-NEXT: retq
41 ; CHECK-AVX2-LABEL: test_urem_odd_25:
42 ; CHECK-AVX2: # %bb.0:
43 ; CHECK-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [3264175145,3264175145,3264175145,3264175145]
44 ; CHECK-AVX2-NEXT: vpmulld %xmm1, %xmm0, %xmm0
45 ; CHECK-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [171798691,171798691,171798691,171798691]
46 ; CHECK-AVX2-NEXT: vpminud %xmm1, %xmm0, %xmm1
47 ; CHECK-AVX2-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
48 ; CHECK-AVX2-NEXT: vpsrld $31, %xmm0, %xmm0
49 ; CHECK-AVX2-NEXT: retq
51 ; CHECK-AVX512VL-LABEL: test_urem_odd_25:
52 ; CHECK-AVX512VL: # %bb.0:
53 ; CHECK-AVX512VL-NEXT: vpmulld {{.*}}(%rip){1to4}, %xmm0, %xmm0
54 ; CHECK-AVX512VL-NEXT: vpminud {{.*}}(%rip){1to4}, %xmm0, %xmm1
55 ; CHECK-AVX512VL-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
56 ; CHECK-AVX512VL-NEXT: vpsrld $31, %xmm0, %xmm0
57 ; CHECK-AVX512VL-NEXT: retq
58 %urem = urem <4 x i32> %X, <i32 25, i32 25, i32 25, i32 25>
59 %cmp = icmp eq <4 x i32> %urem, <i32 0, i32 0, i32 0, i32 0>
60 %ret = zext <4 x i1> %cmp to <4 x i32>
; Splat even divisor: urem <4 x i32> by 100 compared eq against zero. Even
; divisors need an extra shift; note AVX512VL folds the shift into vprord
; (rotate) after the multiply, while the older targets do a full
; mulhi/shift/mul/sub remainder computation before the compare.
65 define <4 x i32> @test_urem_even_100(<4 x i32> %X) nounwind {
66 ; CHECK-SSE2-LABEL: test_urem_even_100:
67 ; CHECK-SSE2: # %bb.0:
68 ; CHECK-SSE2-NEXT: movdqa {{.*#+}} xmm1 = [1374389535,1374389535,1374389535,1374389535]
69 ; CHECK-SSE2-NEXT: movdqa %xmm0, %xmm2
70 ; CHECK-SSE2-NEXT: pmuludq %xmm1, %xmm2
71 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,3,2,3]
72 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm0[1,1,3,3]
73 ; CHECK-SSE2-NEXT: pmuludq %xmm1, %xmm3
74 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm3[1,3,2,3]
75 ; CHECK-SSE2-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
76 ; CHECK-SSE2-NEXT: psrld $5, %xmm2
77 ; CHECK-SSE2-NEXT: movdqa {{.*#+}} xmm1 = [100,100,100,100]
78 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[1,1,3,3]
79 ; CHECK-SSE2-NEXT: pmuludq %xmm1, %xmm2
80 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
81 ; CHECK-SSE2-NEXT: pmuludq %xmm1, %xmm3
82 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm3[0,2,2,3]
83 ; CHECK-SSE2-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
84 ; CHECK-SSE2-NEXT: psubd %xmm2, %xmm0
85 ; CHECK-SSE2-NEXT: pxor %xmm1, %xmm1
86 ; CHECK-SSE2-NEXT: pcmpeqd %xmm1, %xmm0
87 ; CHECK-SSE2-NEXT: psrld $31, %xmm0
88 ; CHECK-SSE2-NEXT: retq
90 ; CHECK-SSE41-LABEL: test_urem_even_100:
91 ; CHECK-SSE41: # %bb.0:
92 ; CHECK-SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
93 ; CHECK-SSE41-NEXT: movdqa {{.*#+}} xmm2 = [1374389535,1374389535,1374389535,1374389535]
94 ; CHECK-SSE41-NEXT: pmuludq %xmm2, %xmm1
95 ; CHECK-SSE41-NEXT: pmuludq %xmm0, %xmm2
96 ; CHECK-SSE41-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
97 ; CHECK-SSE41-NEXT: pblendw {{.*#+}} xmm2 = xmm2[0,1],xmm1[2,3],xmm2[4,5],xmm1[6,7]
98 ; CHECK-SSE41-NEXT: psrld $5, %xmm2
99 ; CHECK-SSE41-NEXT: pmulld {{.*}}(%rip), %xmm2
100 ; CHECK-SSE41-NEXT: psubd %xmm2, %xmm0
101 ; CHECK-SSE41-NEXT: pxor %xmm1, %xmm1
102 ; CHECK-SSE41-NEXT: pcmpeqd %xmm1, %xmm0
103 ; CHECK-SSE41-NEXT: psrld $31, %xmm0
104 ; CHECK-SSE41-NEXT: retq
106 ; CHECK-AVX1-LABEL: test_urem_even_100:
107 ; CHECK-AVX1: # %bb.0:
108 ; CHECK-AVX1-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
109 ; CHECK-AVX1-NEXT: vmovdqa {{.*#+}} xmm2 = [1374389535,1374389535,1374389535,1374389535]
110 ; CHECK-AVX1-NEXT: vpmuludq %xmm2, %xmm1, %xmm1
111 ; CHECK-AVX1-NEXT: vpmuludq %xmm2, %xmm0, %xmm2
112 ; CHECK-AVX1-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
113 ; CHECK-AVX1-NEXT: vpblendw {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3],xmm2[4,5],xmm1[6,7]
114 ; CHECK-AVX1-NEXT: vpsrld $5, %xmm1, %xmm1
115 ; CHECK-AVX1-NEXT: vpmulld {{.*}}(%rip), %xmm1, %xmm1
116 ; CHECK-AVX1-NEXT: vpsubd %xmm1, %xmm0, %xmm0
117 ; CHECK-AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
118 ; CHECK-AVX1-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
119 ; CHECK-AVX1-NEXT: vpsrld $31, %xmm0, %xmm0
120 ; CHECK-AVX1-NEXT: retq
122 ; CHECK-AVX2-LABEL: test_urem_even_100:
123 ; CHECK-AVX2: # %bb.0:
124 ; CHECK-AVX2-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
125 ; CHECK-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm2 = [1374389535,1374389535,1374389535,1374389535]
126 ; CHECK-AVX2-NEXT: vpmuludq %xmm2, %xmm1, %xmm1
127 ; CHECK-AVX2-NEXT: vpmuludq %xmm2, %xmm0, %xmm2
128 ; CHECK-AVX2-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
129 ; CHECK-AVX2-NEXT: vpblendd {{.*#+}} xmm1 = xmm2[0],xmm1[1],xmm2[2],xmm1[3]
130 ; CHECK-AVX2-NEXT: vpsrld $5, %xmm1, %xmm1
131 ; CHECK-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm2 = [100,100,100,100]
132 ; CHECK-AVX2-NEXT: vpmulld %xmm2, %xmm1, %xmm1
133 ; CHECK-AVX2-NEXT: vpsubd %xmm1, %xmm0, %xmm0
134 ; CHECK-AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
135 ; CHECK-AVX2-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
136 ; CHECK-AVX2-NEXT: vpsrld $31, %xmm0, %xmm0
137 ; CHECK-AVX2-NEXT: retq
139 ; CHECK-AVX512VL-LABEL: test_urem_even_100:
140 ; CHECK-AVX512VL: # %bb.0:
141 ; CHECK-AVX512VL-NEXT: vpmulld {{.*}}(%rip){1to4}, %xmm0, %xmm0
142 ; CHECK-AVX512VL-NEXT: vprord $2, %xmm0, %xmm0
143 ; CHECK-AVX512VL-NEXT: vpminud {{.*}}(%rip){1to4}, %xmm0, %xmm1
144 ; CHECK-AVX512VL-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
145 ; CHECK-AVX512VL-NEXT: vpsrld $31, %xmm0, %xmm0
146 ; CHECK-AVX512VL-NEXT: retq
147 %urem = urem <4 x i32> %X, <i32 100, i32 100, i32 100, i32 100>
148 %cmp = icmp eq <4 x i32> %urem, <i32 0, i32 0, i32 0, i32 0>
149 %ret = zext <4 x i1> %cmp to <4 x i32>
153 ; Negative divisors should be negated, and thus these are still splat vectors.
; Mixed-sign odd divisor <25, -25, -25, 25>: still lowered divisionless, but
; the multiplier/limit constants are per-lane rather than true splats (see the
; [3264175145,1030792151,...] and [171798691,1,...] vectors below).
156 define <4 x i32> @test_urem_odd_neg25(<4 x i32> %X) nounwind {
157 ; CHECK-SSE2-LABEL: test_urem_odd_neg25:
158 ; CHECK-SSE2: # %bb.0:
159 ; CHECK-SSE2-NEXT: movdqa {{.*#+}} xmm1 = [3264175145,1030792151,1030792151,3264175145]
160 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm0[1,1,3,3]
161 ; CHECK-SSE2-NEXT: pmuludq %xmm1, %xmm0
162 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
163 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
164 ; CHECK-SSE2-NEXT: pmuludq %xmm2, %xmm1
165 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
166 ; CHECK-SSE2-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
167 ; CHECK-SSE2-NEXT: pxor {{.*}}(%rip), %xmm0
168 ; CHECK-SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
169 ; CHECK-SSE2-NEXT: pandn {{.*}}(%rip), %xmm0
170 ; CHECK-SSE2-NEXT: retq
172 ; CHECK-SSE41-LABEL: test_urem_odd_neg25:
173 ; CHECK-SSE41: # %bb.0:
174 ; CHECK-SSE41-NEXT: pmulld {{.*}}(%rip), %xmm0
175 ; CHECK-SSE41-NEXT: movdqa {{.*#+}} xmm1 = [171798691,1,1,171798691]
176 ; CHECK-SSE41-NEXT: pminud %xmm0, %xmm1
177 ; CHECK-SSE41-NEXT: pcmpeqd %xmm1, %xmm0
178 ; CHECK-SSE41-NEXT: psrld $31, %xmm0
179 ; CHECK-SSE41-NEXT: retq
181 ; CHECK-AVX-LABEL: test_urem_odd_neg25:
182 ; CHECK-AVX: # %bb.0:
183 ; CHECK-AVX-NEXT: vpmulld {{.*}}(%rip), %xmm0, %xmm0
184 ; CHECK-AVX-NEXT: vpminud {{.*}}(%rip), %xmm0, %xmm1
185 ; CHECK-AVX-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
186 ; CHECK-AVX-NEXT: vpsrld $31, %xmm0, %xmm0
187 ; CHECK-AVX-NEXT: retq
188 %urem = urem <4 x i32> %X, <i32 25, i32 -25, i32 -25, i32 25>
189 %cmp = icmp eq <4 x i32> %urem, <i32 0, i32 0, i32 0, i32 0>
190 %ret = zext <4 x i1> %cmp to <4 x i32>
; Mixed-sign even divisor <-100, 100, -100, 100>: per-lane magic multipliers
; and shift amounts (psrld $5 / psrld $27 blends, vpsrlvd on AVX2), followed by
; the multiply-subtract remainder computation and compare against zero.
195 define <4 x i32> @test_urem_even_neg100(<4 x i32> %X) nounwind {
196 ; CHECK-SSE2-LABEL: test_urem_even_neg100:
197 ; CHECK-SSE2: # %bb.0:
198 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
199 ; CHECK-SSE2-NEXT: pmuludq {{.*}}(%rip), %xmm1
200 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,3,2,3]
201 ; CHECK-SSE2-NEXT: movdqa %xmm0, %xmm2
202 ; CHECK-SSE2-NEXT: psrld $2, %xmm2
203 ; CHECK-SSE2-NEXT: pmuludq {{.*}}(%rip), %xmm2
204 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,3,2,3]
205 ; CHECK-SSE2-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
206 ; CHECK-SSE2-NEXT: movdqa %xmm2, %xmm1
207 ; CHECK-SSE2-NEXT: psrld $27, %xmm1
208 ; CHECK-SSE2-NEXT: pmuludq {{.*}}(%rip), %xmm1
209 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
210 ; CHECK-SSE2-NEXT: psrld $5, %xmm2
211 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
212 ; CHECK-SSE2-NEXT: pmuludq {{.*}}(%rip), %xmm2
213 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
214 ; CHECK-SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1]
215 ; CHECK-SSE2-NEXT: psubd %xmm1, %xmm0
216 ; CHECK-SSE2-NEXT: pxor %xmm1, %xmm1
217 ; CHECK-SSE2-NEXT: pcmpeqd %xmm1, %xmm0
218 ; CHECK-SSE2-NEXT: psrld $31, %xmm0
219 ; CHECK-SSE2-NEXT: retq
221 ; CHECK-SSE41-LABEL: test_urem_even_neg100:
222 ; CHECK-SSE41: # %bb.0:
223 ; CHECK-SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
224 ; CHECK-SSE41-NEXT: pmuludq {{.*}}(%rip), %xmm1
225 ; CHECK-SSE41-NEXT: movdqa %xmm0, %xmm2
226 ; CHECK-SSE41-NEXT: psrld $2, %xmm2
227 ; CHECK-SSE41-NEXT: pmuludq {{.*}}(%rip), %xmm2
228 ; CHECK-SSE41-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
229 ; CHECK-SSE41-NEXT: pblendw {{.*#+}} xmm2 = xmm2[0,1],xmm1[2,3],xmm2[4,5],xmm1[6,7]
230 ; CHECK-SSE41-NEXT: movdqa %xmm2, %xmm1
231 ; CHECK-SSE41-NEXT: psrld $5, %xmm1
232 ; CHECK-SSE41-NEXT: psrld $27, %xmm2
233 ; CHECK-SSE41-NEXT: pblendw {{.*#+}} xmm2 = xmm2[0,1],xmm1[2,3],xmm2[4,5],xmm1[6,7]
234 ; CHECK-SSE41-NEXT: pmulld {{.*}}(%rip), %xmm2
235 ; CHECK-SSE41-NEXT: psubd %xmm2, %xmm0
236 ; CHECK-SSE41-NEXT: pxor %xmm1, %xmm1
237 ; CHECK-SSE41-NEXT: pcmpeqd %xmm1, %xmm0
238 ; CHECK-SSE41-NEXT: psrld $31, %xmm0
239 ; CHECK-SSE41-NEXT: retq
241 ; CHECK-AVX1-LABEL: test_urem_even_neg100:
242 ; CHECK-AVX1: # %bb.0:
243 ; CHECK-AVX1-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
244 ; CHECK-AVX1-NEXT: vpmuludq {{.*}}(%rip), %xmm1, %xmm1
245 ; CHECK-AVX1-NEXT: vpsrld $2, %xmm0, %xmm2
246 ; CHECK-AVX1-NEXT: vpmuludq {{.*}}(%rip), %xmm2, %xmm2
247 ; CHECK-AVX1-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
248 ; CHECK-AVX1-NEXT: vpblendw {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3],xmm2[4,5],xmm1[6,7]
249 ; CHECK-AVX1-NEXT: vpsrld $5, %xmm1, %xmm2
250 ; CHECK-AVX1-NEXT: vpsrld $27, %xmm1, %xmm1
251 ; CHECK-AVX1-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1],xmm2[2,3],xmm1[4,5],xmm2[6,7]
252 ; CHECK-AVX1-NEXT: vpmulld {{.*}}(%rip), %xmm1, %xmm1
253 ; CHECK-AVX1-NEXT: vpsubd %xmm1, %xmm0, %xmm0
254 ; CHECK-AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
255 ; CHECK-AVX1-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
256 ; CHECK-AVX1-NEXT: vpsrld $31, %xmm0, %xmm0
257 ; CHECK-AVX1-NEXT: retq
259 ; CHECK-AVX2-LABEL: test_urem_even_neg100:
260 ; CHECK-AVX2: # %bb.0:
261 ; CHECK-AVX2-NEXT: vpsrlvd {{.*}}(%rip), %xmm0, %xmm1
262 ; CHECK-AVX2-NEXT: vpshufd {{.*#+}} xmm2 = xmm1[1,1,3,3]
263 ; CHECK-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm3 = [1374389535,1374389535,1374389535,1374389535]
264 ; CHECK-AVX2-NEXT: vpmuludq %xmm3, %xmm2, %xmm2
265 ; CHECK-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm3 = [536870925,536870925,536870925,536870925]
266 ; CHECK-AVX2-NEXT: vpmuludq %xmm3, %xmm1, %xmm1
267 ; CHECK-AVX2-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
268 ; CHECK-AVX2-NEXT: vpblendd {{.*#+}} xmm1 = xmm1[0],xmm2[1],xmm1[2],xmm2[3]
269 ; CHECK-AVX2-NEXT: vpsrlvd {{.*}}(%rip), %xmm1, %xmm1
270 ; CHECK-AVX2-NEXT: vpmulld {{.*}}(%rip), %xmm1, %xmm1
271 ; CHECK-AVX2-NEXT: vpsubd %xmm1, %xmm0, %xmm0
272 ; CHECK-AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
273 ; CHECK-AVX2-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
274 ; CHECK-AVX2-NEXT: vpsrld $31, %xmm0, %xmm0
275 ; CHECK-AVX2-NEXT: retq
277 ; CHECK-AVX512VL-LABEL: test_urem_even_neg100:
278 ; CHECK-AVX512VL: # %bb.0:
279 ; CHECK-AVX512VL-NEXT: vpmulld {{.*}}(%rip), %xmm0, %xmm0
280 ; CHECK-AVX512VL-NEXT: vprord $2, %xmm0, %xmm0
281 ; CHECK-AVX512VL-NEXT: vpminud {{.*}}(%rip), %xmm0, %xmm1
282 ; CHECK-AVX512VL-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
283 ; CHECK-AVX512VL-NEXT: vpsrld $31, %xmm0, %xmm0
284 ; CHECK-AVX512VL-NEXT: retq
285 %urem = urem <4 x i32> %X, <i32 -100, i32 100, i32 -100, i32 100>
286 %cmp = icmp eq <4 x i32> %urem, <i32 0, i32 0, i32 0, i32 0>
287 %ret = zext <4 x i1> %cmp to <4 x i32>
291 ;------------------------------------------------------------------------------;
292 ; Comparison constant has undef elements.
293 ;------------------------------------------------------------------------------;
; Splat divisor 25 but the icmp's zero constant has an undef lane. The undef
; blocks the pmulld+pminud trick used in test_urem_odd_25, so all targets fall
; back to the explicit mulhi/shift/mul/sub remainder sequence before comparing.
295 define <4 x i32> @test_urem_odd_undef1(<4 x i32> %X) nounwind {
296 ; CHECK-SSE2-LABEL: test_urem_odd_undef1:
297 ; CHECK-SSE2: # %bb.0:
298 ; CHECK-SSE2-NEXT: movdqa {{.*#+}} xmm1 = [1374389535,1374389535,1374389535,1374389535]
299 ; CHECK-SSE2-NEXT: movdqa %xmm0, %xmm2
300 ; CHECK-SSE2-NEXT: pmuludq %xmm1, %xmm2
301 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,3,2,3]
302 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm0[1,1,3,3]
303 ; CHECK-SSE2-NEXT: pmuludq %xmm1, %xmm3
304 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm3[1,3,2,3]
305 ; CHECK-SSE2-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
306 ; CHECK-SSE2-NEXT: psrld $3, %xmm2
307 ; CHECK-SSE2-NEXT: movdqa {{.*#+}} xmm1 = [25,25,25,25]
308 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[1,1,3,3]
309 ; CHECK-SSE2-NEXT: pmuludq %xmm1, %xmm2
310 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
311 ; CHECK-SSE2-NEXT: pmuludq %xmm1, %xmm3
312 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm3[0,2,2,3]
313 ; CHECK-SSE2-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
314 ; CHECK-SSE2-NEXT: psubd %xmm2, %xmm0
315 ; CHECK-SSE2-NEXT: pxor %xmm1, %xmm1
316 ; CHECK-SSE2-NEXT: pcmpeqd %xmm1, %xmm0
317 ; CHECK-SSE2-NEXT: psrld $31, %xmm0
318 ; CHECK-SSE2-NEXT: retq
320 ; CHECK-SSE41-LABEL: test_urem_odd_undef1:
321 ; CHECK-SSE41: # %bb.0:
322 ; CHECK-SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
323 ; CHECK-SSE41-NEXT: movdqa {{.*#+}} xmm2 = [1374389535,1374389535,1374389535,1374389535]
324 ; CHECK-SSE41-NEXT: pmuludq %xmm2, %xmm1
325 ; CHECK-SSE41-NEXT: pmuludq %xmm0, %xmm2
326 ; CHECK-SSE41-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
327 ; CHECK-SSE41-NEXT: pblendw {{.*#+}} xmm2 = xmm2[0,1],xmm1[2,3],xmm2[4,5],xmm1[6,7]
328 ; CHECK-SSE41-NEXT: psrld $3, %xmm2
329 ; CHECK-SSE41-NEXT: pmulld {{.*}}(%rip), %xmm2
330 ; CHECK-SSE41-NEXT: psubd %xmm2, %xmm0
331 ; CHECK-SSE41-NEXT: pxor %xmm1, %xmm1
332 ; CHECK-SSE41-NEXT: pcmpeqd %xmm1, %xmm0
333 ; CHECK-SSE41-NEXT: psrld $31, %xmm0
334 ; CHECK-SSE41-NEXT: retq
336 ; CHECK-AVX1-LABEL: test_urem_odd_undef1:
337 ; CHECK-AVX1: # %bb.0:
338 ; CHECK-AVX1-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
339 ; CHECK-AVX1-NEXT: vmovdqa {{.*#+}} xmm2 = [1374389535,1374389535,1374389535,1374389535]
340 ; CHECK-AVX1-NEXT: vpmuludq %xmm2, %xmm1, %xmm1
341 ; CHECK-AVX1-NEXT: vpmuludq %xmm2, %xmm0, %xmm2
342 ; CHECK-AVX1-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
343 ; CHECK-AVX1-NEXT: vpblendw {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3],xmm2[4,5],xmm1[6,7]
344 ; CHECK-AVX1-NEXT: vpsrld $3, %xmm1, %xmm1
345 ; CHECK-AVX1-NEXT: vpmulld {{.*}}(%rip), %xmm1, %xmm1
346 ; CHECK-AVX1-NEXT: vpsubd %xmm1, %xmm0, %xmm0
347 ; CHECK-AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
348 ; CHECK-AVX1-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
349 ; CHECK-AVX1-NEXT: vpsrld $31, %xmm0, %xmm0
350 ; CHECK-AVX1-NEXT: retq
352 ; CHECK-AVX2-LABEL: test_urem_odd_undef1:
353 ; CHECK-AVX2: # %bb.0:
354 ; CHECK-AVX2-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
355 ; CHECK-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm2 = [1374389535,1374389535,1374389535,1374389535]
356 ; CHECK-AVX2-NEXT: vpmuludq %xmm2, %xmm1, %xmm1
357 ; CHECK-AVX2-NEXT: vpmuludq %xmm2, %xmm0, %xmm2
358 ; CHECK-AVX2-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
359 ; CHECK-AVX2-NEXT: vpblendd {{.*#+}} xmm1 = xmm2[0],xmm1[1],xmm2[2],xmm1[3]
360 ; CHECK-AVX2-NEXT: vpsrld $3, %xmm1, %xmm1
361 ; CHECK-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm2 = [25,25,25,25]
362 ; CHECK-AVX2-NEXT: vpmulld %xmm2, %xmm1, %xmm1
363 ; CHECK-AVX2-NEXT: vpsubd %xmm1, %xmm0, %xmm0
364 ; CHECK-AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
365 ; CHECK-AVX2-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
366 ; CHECK-AVX2-NEXT: vpsrld $31, %xmm0, %xmm0
367 ; CHECK-AVX2-NEXT: retq
369 ; CHECK-AVX512VL-LABEL: test_urem_odd_undef1:
370 ; CHECK-AVX512VL: # %bb.0:
371 ; CHECK-AVX512VL-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
372 ; CHECK-AVX512VL-NEXT: vpbroadcastd {{.*#+}} xmm2 = [1374389535,1374389535,1374389535,1374389535]
373 ; CHECK-AVX512VL-NEXT: vpmuludq %xmm2, %xmm1, %xmm1
374 ; CHECK-AVX512VL-NEXT: vpmuludq %xmm2, %xmm0, %xmm2
375 ; CHECK-AVX512VL-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
376 ; CHECK-AVX512VL-NEXT: vpblendd {{.*#+}} xmm1 = xmm2[0],xmm1[1],xmm2[2],xmm1[3]
377 ; CHECK-AVX512VL-NEXT: vpsrld $3, %xmm1, %xmm1
378 ; CHECK-AVX512VL-NEXT: vpmulld {{.*}}(%rip){1to4}, %xmm1, %xmm1
379 ; CHECK-AVX512VL-NEXT: vpsubd %xmm1, %xmm0, %xmm0
380 ; CHECK-AVX512VL-NEXT: vpxor %xmm1, %xmm1, %xmm1
381 ; CHECK-AVX512VL-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
382 ; CHECK-AVX512VL-NEXT: vpsrld $31, %xmm0, %xmm0
383 ; CHECK-AVX512VL-NEXT: retq
384 %urem = urem <4 x i32> %X, <i32 25, i32 25, i32 25, i32 25>
385 %cmp = icmp eq <4 x i32> %urem, <i32 0, i32 0, i32 undef, i32 0>
386 %ret = zext <4 x i1> %cmp to <4 x i32>
; Same as test_urem_odd_undef1 but with the even divisor 100: the undef lane in
; the comparison constant forces the full mulhi (psrld $5) + mul + sub
; remainder expansion on every target instead of the pminud trick.
390 define <4 x i32> @test_urem_even_undef1(<4 x i32> %X) nounwind {
391 ; CHECK-SSE2-LABEL: test_urem_even_undef1:
392 ; CHECK-SSE2: # %bb.0:
393 ; CHECK-SSE2-NEXT: movdqa {{.*#+}} xmm1 = [1374389535,1374389535,1374389535,1374389535]
394 ; CHECK-SSE2-NEXT: movdqa %xmm0, %xmm2
395 ; CHECK-SSE2-NEXT: pmuludq %xmm1, %xmm2
396 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,3,2,3]
397 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm0[1,1,3,3]
398 ; CHECK-SSE2-NEXT: pmuludq %xmm1, %xmm3
399 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm3[1,3,2,3]
400 ; CHECK-SSE2-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
401 ; CHECK-SSE2-NEXT: psrld $5, %xmm2
402 ; CHECK-SSE2-NEXT: movdqa {{.*#+}} xmm1 = [100,100,100,100]
403 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[1,1,3,3]
404 ; CHECK-SSE2-NEXT: pmuludq %xmm1, %xmm2
405 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
406 ; CHECK-SSE2-NEXT: pmuludq %xmm1, %xmm3
407 ; CHECK-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm3[0,2,2,3]
408 ; CHECK-SSE2-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
409 ; CHECK-SSE2-NEXT: psubd %xmm2, %xmm0
410 ; CHECK-SSE2-NEXT: pxor %xmm1, %xmm1
411 ; CHECK-SSE2-NEXT: pcmpeqd %xmm1, %xmm0
412 ; CHECK-SSE2-NEXT: psrld $31, %xmm0
413 ; CHECK-SSE2-NEXT: retq
415 ; CHECK-SSE41-LABEL: test_urem_even_undef1:
416 ; CHECK-SSE41: # %bb.0:
417 ; CHECK-SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
418 ; CHECK-SSE41-NEXT: movdqa {{.*#+}} xmm2 = [1374389535,1374389535,1374389535,1374389535]
419 ; CHECK-SSE41-NEXT: pmuludq %xmm2, %xmm1
420 ; CHECK-SSE41-NEXT: pmuludq %xmm0, %xmm2
421 ; CHECK-SSE41-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
422 ; CHECK-SSE41-NEXT: pblendw {{.*#+}} xmm2 = xmm2[0,1],xmm1[2,3],xmm2[4,5],xmm1[6,7]
423 ; CHECK-SSE41-NEXT: psrld $5, %xmm2
424 ; CHECK-SSE41-NEXT: pmulld {{.*}}(%rip), %xmm2
425 ; CHECK-SSE41-NEXT: psubd %xmm2, %xmm0
426 ; CHECK-SSE41-NEXT: pxor %xmm1, %xmm1
427 ; CHECK-SSE41-NEXT: pcmpeqd %xmm1, %xmm0
428 ; CHECK-SSE41-NEXT: psrld $31, %xmm0
429 ; CHECK-SSE41-NEXT: retq
431 ; CHECK-AVX1-LABEL: test_urem_even_undef1:
432 ; CHECK-AVX1: # %bb.0:
433 ; CHECK-AVX1-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
434 ; CHECK-AVX1-NEXT: vmovdqa {{.*#+}} xmm2 = [1374389535,1374389535,1374389535,1374389535]
435 ; CHECK-AVX1-NEXT: vpmuludq %xmm2, %xmm1, %xmm1
436 ; CHECK-AVX1-NEXT: vpmuludq %xmm2, %xmm0, %xmm2
437 ; CHECK-AVX1-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
438 ; CHECK-AVX1-NEXT: vpblendw {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3],xmm2[4,5],xmm1[6,7]
439 ; CHECK-AVX1-NEXT: vpsrld $5, %xmm1, %xmm1
440 ; CHECK-AVX1-NEXT: vpmulld {{.*}}(%rip), %xmm1, %xmm1
441 ; CHECK-AVX1-NEXT: vpsubd %xmm1, %xmm0, %xmm0
442 ; CHECK-AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
443 ; CHECK-AVX1-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
444 ; CHECK-AVX1-NEXT: vpsrld $31, %xmm0, %xmm0
445 ; CHECK-AVX1-NEXT: retq
447 ; CHECK-AVX2-LABEL: test_urem_even_undef1:
448 ; CHECK-AVX2: # %bb.0:
449 ; CHECK-AVX2-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
450 ; CHECK-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm2 = [1374389535,1374389535,1374389535,1374389535]
451 ; CHECK-AVX2-NEXT: vpmuludq %xmm2, %xmm1, %xmm1
452 ; CHECK-AVX2-NEXT: vpmuludq %xmm2, %xmm0, %xmm2
453 ; CHECK-AVX2-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
454 ; CHECK-AVX2-NEXT: vpblendd {{.*#+}} xmm1 = xmm2[0],xmm1[1],xmm2[2],xmm1[3]
455 ; CHECK-AVX2-NEXT: vpsrld $5, %xmm1, %xmm1
456 ; CHECK-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm2 = [100,100,100,100]
457 ; CHECK-AVX2-NEXT: vpmulld %xmm2, %xmm1, %xmm1
458 ; CHECK-AVX2-NEXT: vpsubd %xmm1, %xmm0, %xmm0
459 ; CHECK-AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
460 ; CHECK-AVX2-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
461 ; CHECK-AVX2-NEXT: vpsrld $31, %xmm0, %xmm0
462 ; CHECK-AVX2-NEXT: retq
464 ; CHECK-AVX512VL-LABEL: test_urem_even_undef1:
465 ; CHECK-AVX512VL: # %bb.0:
466 ; CHECK-AVX512VL-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
467 ; CHECK-AVX512VL-NEXT: vpbroadcastd {{.*#+}} xmm2 = [1374389535,1374389535,1374389535,1374389535]
468 ; CHECK-AVX512VL-NEXT: vpmuludq %xmm2, %xmm1, %xmm1
469 ; CHECK-AVX512VL-NEXT: vpmuludq %xmm2, %xmm0, %xmm2
470 ; CHECK-AVX512VL-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
471 ; CHECK-AVX512VL-NEXT: vpblendd {{.*#+}} xmm1 = xmm2[0],xmm1[1],xmm2[2],xmm1[3]
472 ; CHECK-AVX512VL-NEXT: vpsrld $5, %xmm1, %xmm1
473 ; CHECK-AVX512VL-NEXT: vpmulld {{.*}}(%rip){1to4}, %xmm1, %xmm1
474 ; CHECK-AVX512VL-NEXT: vpsubd %xmm1, %xmm0, %xmm0
475 ; CHECK-AVX512VL-NEXT: vpxor %xmm1, %xmm1, %xmm1
476 ; CHECK-AVX512VL-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
477 ; CHECK-AVX512VL-NEXT: vpsrld $31, %xmm0, %xmm0
478 ; CHECK-AVX512VL-NEXT: retq
479 %urem = urem <4 x i32> %X, <i32 100, i32 100, i32 100, i32 100>
480 %cmp = icmp eq <4 x i32> %urem, <i32 0, i32 0, i32 undef, i32 0>
481 %ret = zext <4 x i1> %cmp to <4 x i32>
485 ;------------------------------------------------------------------------------;
487 ;------------------------------------------------------------------------------;
; urem by 1 is always 0, so (x urem 1) == 0 constant-folds to all-true;
; every target just materializes the <1,1,1,1> splat.
489 define <4 x i32> @test_urem_one_eq(<4 x i32> %X) nounwind {
490 ; CHECK-SSE-LABEL: test_urem_one_eq:
491 ; CHECK-SSE: # %bb.0:
492 ; CHECK-SSE-NEXT: movaps {{.*#+}} xmm0 = [1,1,1,1]
493 ; CHECK-SSE-NEXT: retq
495 ; CHECK-AVX1-LABEL: test_urem_one_eq:
496 ; CHECK-AVX1: # %bb.0:
497 ; CHECK-AVX1-NEXT: vmovaps {{.*#+}} xmm0 = [1,1,1,1]
498 ; CHECK-AVX1-NEXT: retq
500 ; CHECK-AVX2-LABEL: test_urem_one_eq:
501 ; CHECK-AVX2: # %bb.0:
502 ; CHECK-AVX2-NEXT: vbroadcastss {{.*#+}} xmm0 = [1,1,1,1]
503 ; CHECK-AVX2-NEXT: retq
505 ; CHECK-AVX512VL-LABEL: test_urem_one_eq:
506 ; CHECK-AVX512VL: # %bb.0:
507 ; CHECK-AVX512VL-NEXT: vbroadcastss {{.*#+}} xmm0 = [1,1,1,1]
508 ; CHECK-AVX512VL-NEXT: retq
509 %urem = urem <4 x i32> %X, <i32 1, i32 1, i32 1, i32 1>
510 %cmp = icmp eq <4 x i32> %urem, <i32 0, i32 0, i32 0, i32 0>
511 %ret = zext <4 x i1> %cmp to <4 x i32>
; Inverse of test_urem_one_eq: (x urem 1) != 0 is always false, so the result
; constant-folds to the zero vector (a single xorps).
514 define <4 x i32> @test_urem_one_ne(<4 x i32> %X) nounwind {
515 ; CHECK-SSE-LABEL: test_urem_one_ne:
516 ; CHECK-SSE: # %bb.0:
517 ; CHECK-SSE-NEXT: xorps %xmm0, %xmm0
518 ; CHECK-SSE-NEXT: retq
520 ; CHECK-AVX-LABEL: test_urem_one_ne:
521 ; CHECK-AVX: # %bb.0:
522 ; CHECK-AVX-NEXT: vxorps %xmm0, %xmm0, %xmm0
523 ; CHECK-AVX-NEXT: retq
524 %urem = urem <4 x i32> %X, <i32 1, i32 1, i32 1, i32 1>
525 %cmp = icmp ne <4 x i32> %urem, <i32 0, i32 0, i32 0, i32 0>
526 %ret = zext <4 x i1> %cmp to <4 x i32>
530 ; We can lower remainder of division by powers of two much better elsewhere.
; Power-of-two divisor (16): the remainder is just a mask of the low bits,
; so lowering is pand (vpandd {1to4} broadcast on AVX512VL) + compare with zero.
531 define <4 x i32> @test_urem_pow2(<4 x i32> %X) nounwind {
532 ; CHECK-SSE-LABEL: test_urem_pow2:
533 ; CHECK-SSE: # %bb.0:
534 ; CHECK-SSE-NEXT: pand {{.*}}(%rip), %xmm0
535 ; CHECK-SSE-NEXT: pxor %xmm1, %xmm1
536 ; CHECK-SSE-NEXT: pcmpeqd %xmm1, %xmm0
537 ; CHECK-SSE-NEXT: psrld $31, %xmm0
538 ; CHECK-SSE-NEXT: retq
540 ; CHECK-AVX1-LABEL: test_urem_pow2:
541 ; CHECK-AVX1: # %bb.0:
542 ; CHECK-AVX1-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
543 ; CHECK-AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
544 ; CHECK-AVX1-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
545 ; CHECK-AVX1-NEXT: vpsrld $31, %xmm0, %xmm0
546 ; CHECK-AVX1-NEXT: retq
548 ; CHECK-AVX2-LABEL: test_urem_pow2:
549 ; CHECK-AVX2: # %bb.0:
550 ; CHECK-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [15,15,15,15]
551 ; CHECK-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
552 ; CHECK-AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
553 ; CHECK-AVX2-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
554 ; CHECK-AVX2-NEXT: vpsrld $31, %xmm0, %xmm0
555 ; CHECK-AVX2-NEXT: retq
557 ; CHECK-AVX512VL-LABEL: test_urem_pow2:
558 ; CHECK-AVX512VL: # %bb.0:
559 ; CHECK-AVX512VL-NEXT: vpandd {{.*}}(%rip){1to4}, %xmm0, %xmm0
560 ; CHECK-AVX512VL-NEXT: vpxor %xmm1, %xmm1, %xmm1
561 ; CHECK-AVX512VL-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
562 ; CHECK-AVX512VL-NEXT: vpsrld $31, %xmm0, %xmm0
563 ; CHECK-AVX512VL-NEXT: retq
564 %urem = urem <4 x i32> %X, <i32 16, i32 16, i32 16, i32 16>
565 %cmp = icmp eq <4 x i32> %urem, <i32 0, i32 0, i32 0, i32 0>
566 %ret = zext <4 x i1> %cmp to <4 x i32>
570 ; We could lower remainder of division by INT_MIN much better elsewhere.
; Divisor 2^31 (0x80000000): a special power-of-two case, lowered as a mask of
; the low 31 bits ([2147483647,...] pand) followed by the compare with zero.
571 define <4 x i32> @test_urem_int_min(<4 x i32> %X) nounwind {
572 ; CHECK-SSE-LABEL: test_urem_int_min:
573 ; CHECK-SSE: # %bb.0:
574 ; CHECK-SSE-NEXT: pand {{.*}}(%rip), %xmm0
575 ; CHECK-SSE-NEXT: pxor %xmm1, %xmm1
576 ; CHECK-SSE-NEXT: pcmpeqd %xmm1, %xmm0
577 ; CHECK-SSE-NEXT: psrld $31, %xmm0
578 ; CHECK-SSE-NEXT: retq
580 ; CHECK-AVX1-LABEL: test_urem_int_min:
581 ; CHECK-AVX1: # %bb.0:
582 ; CHECK-AVX1-NEXT: vpand {{.*}}(%rip), %xmm0, %xmm0
583 ; CHECK-AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
584 ; CHECK-AVX1-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
585 ; CHECK-AVX1-NEXT: vpsrld $31, %xmm0, %xmm0
586 ; CHECK-AVX1-NEXT: retq
588 ; CHECK-AVX2-LABEL: test_urem_int_min:
589 ; CHECK-AVX2: # %bb.0:
590 ; CHECK-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [2147483647,2147483647,2147483647,2147483647]
591 ; CHECK-AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0
592 ; CHECK-AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
593 ; CHECK-AVX2-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
594 ; CHECK-AVX2-NEXT: vpsrld $31, %xmm0, %xmm0
595 ; CHECK-AVX2-NEXT: retq
597 ; CHECK-AVX512VL-LABEL: test_urem_int_min:
598 ; CHECK-AVX512VL: # %bb.0:
599 ; CHECK-AVX512VL-NEXT: vpandd {{.*}}(%rip){1to4}, %xmm0, %xmm0
600 ; CHECK-AVX512VL-NEXT: vpxor %xmm1, %xmm1, %xmm1
601 ; CHECK-AVX512VL-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
602 ; CHECK-AVX512VL-NEXT: vpsrld $31, %xmm0, %xmm0
603 ; CHECK-AVX512VL-NEXT: retq
604 %urem = urem <4 x i32> %X, <i32 2147483648, i32 2147483648, i32 2147483648, i32 2147483648>
605 %cmp = icmp eq <4 x i32> %urem, <i32 0, i32 0, i32 0, i32 0>
606 %ret = zext <4 x i1> %cmp to <4 x i32>
610 ; We could lower remainder of division by all-ones much better elsewhere.
; All-ones divisor (UINT_MAX): lowered by negating the input (psubd from zero)
; and then the usual unsigned compare-against-limit sequence; the note above
; says this could be handled better elsewhere.
611 define <4 x i32> @test_urem_allones(<4 x i32> %X) nounwind {
612 ; CHECK-SSE2-LABEL: test_urem_allones:
613 ; CHECK-SSE2: # %bb.0:
614 ; CHECK-SSE2-NEXT: pxor %xmm1, %xmm1
615 ; CHECK-SSE2-NEXT: psubd %xmm0, %xmm1
616 ; CHECK-SSE2-NEXT: pxor {{.*}}(%rip), %xmm1
617 ; CHECK-SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm1
618 ; CHECK-SSE2-NEXT: pandn {{.*}}(%rip), %xmm1
619 ; CHECK-SSE2-NEXT: movdqa %xmm1, %xmm0
620 ; CHECK-SSE2-NEXT: retq
622 ; CHECK-SSE41-LABEL: test_urem_allones:
623 ; CHECK-SSE41: # %bb.0:
624 ; CHECK-SSE41-NEXT: pxor %xmm1, %xmm1
625 ; CHECK-SSE41-NEXT: psubd %xmm0, %xmm1
626 ; CHECK-SSE41-NEXT: movdqa {{.*#+}} xmm0 = [1,1,1,1]
627 ; CHECK-SSE41-NEXT: pminud %xmm1, %xmm0
628 ; CHECK-SSE41-NEXT: pcmpeqd %xmm1, %xmm0
629 ; CHECK-SSE41-NEXT: psrld $31, %xmm0
630 ; CHECK-SSE41-NEXT: retq
632 ; CHECK-AVX1-LABEL: test_urem_allones:
633 ; CHECK-AVX1: # %bb.0:
634 ; CHECK-AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1
635 ; CHECK-AVX1-NEXT: vpsubd %xmm0, %xmm1, %xmm0
636 ; CHECK-AVX1-NEXT: vpminud {{.*}}(%rip), %xmm0, %xmm1
637 ; CHECK-AVX1-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
638 ; CHECK-AVX1-NEXT: vpsrld $31, %xmm0, %xmm0
639 ; CHECK-AVX1-NEXT: retq
641 ; CHECK-AVX2-LABEL: test_urem_allones:
642 ; CHECK-AVX2: # %bb.0:
643 ; CHECK-AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
644 ; CHECK-AVX2-NEXT: vpsubd %xmm0, %xmm1, %xmm0
645 ; CHECK-AVX2-NEXT: vpbroadcastd {{.*#+}} xmm1 = [1,1,1,1]
646 ; CHECK-AVX2-NEXT: vpminud %xmm1, %xmm0, %xmm1
647 ; CHECK-AVX2-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
648 ; CHECK-AVX2-NEXT: vpsrld $31, %xmm0, %xmm0
649 ; CHECK-AVX2-NEXT: retq
651 ; CHECK-AVX512VL-LABEL: test_urem_allones:
652 ; CHECK-AVX512VL: # %bb.0:
653 ; CHECK-AVX512VL-NEXT: vpxor %xmm1, %xmm1, %xmm1
654 ; CHECK-AVX512VL-NEXT: vpsubd %xmm0, %xmm1, %xmm0
655 ; CHECK-AVX512VL-NEXT: vpminud {{.*}}(%rip){1to4}, %xmm0, %xmm1
656 ; CHECK-AVX512VL-NEXT: vpcmpeqd %xmm1, %xmm0, %xmm0
657 ; CHECK-AVX512VL-NEXT: vpsrld $31, %xmm0, %xmm0
658 ; CHECK-AVX512VL-NEXT: retq
659 %urem = urem <4 x i32> %X, <i32 4294967295, i32 4294967295, i32 4294967295, i32 4294967295>
660 %cmp = icmp eq <4 x i32> %urem, <i32 0, i32 0, i32 0, i32 0>
661 %ret = zext <4 x i1> %cmp to <4 x i32>