; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mcpu=x86-64-v3 | FileCheck %s --check-prefixes=CHECK-X64,CHECK-X64-V3
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mcpu=x86-64-v4 | FileCheck %s --check-prefixes=CHECK-X64,CHECK-X64-V4
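
;; Each divisor below is a select with an identity arm (<1,1>) or an arm
;; containing zero: folding the divide across the select is only legal
;; when the divisor is provably never zero.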
define <2 x i64> @udiv_identity_const(<2 x i1> %c, <2 x i64> %x) {
; CHECK-X64-V3-LABEL: udiv_identity_const:
; CHECK-X64-V3: # %bb.0:
; CHECK-X64-V3-NEXT: vpsllq $63, %xmm0, %xmm0
; CHECK-X64-V3-NEXT: vmovddup {{.*#+}} xmm2 = [1,1]
; CHECK-X64-V3-NEXT: # xmm2 = mem[0,0]
; CHECK-X64-V3-NEXT: vblendvpd %xmm0, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm2, %xmm0
; CHECK-X64-V3-NEXT: vpextrq $1, %xmm0, %rcx
; CHECK-X64-V3-NEXT: vpextrq $1, %xmm1, %rax
; CHECK-X64-V3-NEXT: xorl %edx, %edx
; CHECK-X64-V3-NEXT: divq %rcx
; CHECK-X64-V3-NEXT: movq %rax, %rcx
; CHECK-X64-V3-NEXT: vmovq %xmm0, %rsi
; CHECK-X64-V3-NEXT: vmovq %xmm1, %rax
; CHECK-X64-V3-NEXT: xorl %edx, %edx
; CHECK-X64-V3-NEXT: divq %rsi
; CHECK-X64-V3-NEXT: vmovq %rcx, %xmm0
; CHECK-X64-V3-NEXT: vmovq %rax, %xmm1
; CHECK-X64-V3-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; CHECK-X64-V3-NEXT: retq
;
; CHECK-X64-V4-LABEL: udiv_identity_const:
; CHECK-X64-V4: # %bb.0:
; CHECK-X64-V4-NEXT: vpsllq $63, %xmm0, %xmm0
; CHECK-X64-V4-NEXT: vpmovq2m %xmm0, %k1
; CHECK-X64-V4-NEXT: vpextrq $1, %xmm1, %rdx
; CHECK-X64-V4-NEXT: movabsq $3353953467947191203, %rax # imm = 0x2E8BA2E8BA2E8BA3
; CHECK-X64-V4-NEXT: mulxq %rax, %rcx, %rcx
; CHECK-X64-V4-NEXT: vmovq %rcx, %xmm0
; CHECK-X64-V4-NEXT: vmovq %xmm1, %rdx
; CHECK-X64-V4-NEXT: mulxq %rax, %rax, %rax
; CHECK-X64-V4-NEXT: vmovq %rax, %xmm2
; CHECK-X64-V4-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm2[0],xmm0[0]
; CHECK-X64-V4-NEXT: vpsrlq $1, %xmm0, %xmm1 {%k1}
; CHECK-X64-V4-NEXT: vmovdqa %xmm1, %xmm0
; CHECK-X64-V4-NEXT: retq
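;; Both arms of the divisor (11 and 1) are odd, so bit 0 of the select is
;; always set and the divisor is known never zero. The V4 lowering turns
;; the divide by 11 into a multiply: 3353953467947191203 = ceil(2^65/11),
;; and high64(x * m) >> 1 == x/11 for all 64-bit x.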
  %d = select <2 x i1> %c, <2 x i64> <i64 11, i64 11>, <2 x i64> <i64 1, i64 1>
  %r = udiv <2 x i64> %x, %d
  ret <2 x i64> %r
}

define <2 x i64> @udiv_identity_const_todo_getter_nonzero(<2 x i1> %c, <2 x i64> %x) {
; CHECK-X64-V3-LABEL: udiv_identity_const_todo_getter_nonzero:
; CHECK-X64-V3: # %bb.0:
; CHECK-X64-V3-NEXT: vpsllq $63, %xmm0, %xmm0
; CHECK-X64-V3-NEXT: vmovddup {{.*#+}} xmm2 = [1,1]
; CHECK-X64-V3-NEXT: # xmm2 = mem[0,0]
; CHECK-X64-V3-NEXT: vblendvpd %xmm0, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm2, %xmm0
; CHECK-X64-V3-NEXT: vpextrq $1, %xmm0, %rcx
; CHECK-X64-V3-NEXT: vpextrq $1, %xmm1, %rax
; CHECK-X64-V3-NEXT: xorl %edx, %edx
; CHECK-X64-V3-NEXT: divq %rcx
; CHECK-X64-V3-NEXT: movq %rax, %rcx
; CHECK-X64-V3-NEXT: vmovq %xmm0, %rsi
; CHECK-X64-V3-NEXT: vmovq %xmm1, %rax
; CHECK-X64-V3-NEXT: xorl %edx, %edx
; CHECK-X64-V3-NEXT: divq %rsi
; CHECK-X64-V3-NEXT: vmovq %rcx, %xmm0
; CHECK-X64-V3-NEXT: vmovq %rax, %xmm1
; CHECK-X64-V3-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; CHECK-X64-V3-NEXT: retq
;
; CHECK-X64-V4-LABEL: udiv_identity_const_todo_getter_nonzero:
; CHECK-X64-V4: # %bb.0:
; CHECK-X64-V4-NEXT: vpsllq $63, %xmm0, %xmm0
; CHECK-X64-V4-NEXT: vpmovq2m %xmm0, %k1
; CHECK-X64-V4-NEXT: vpextrq $1, %xmm1, %rdx
; CHECK-X64-V4-NEXT: movabsq $-3689348814741910323, %rax # imm = 0xCCCCCCCCCCCCCCCD
; CHECK-X64-V4-NEXT: mulxq %rax, %rcx, %rcx
; CHECK-X64-V4-NEXT: vmovq %rcx, %xmm0
; CHECK-X64-V4-NEXT: vmovq %xmm1, %rdx
; CHECK-X64-V4-NEXT: mulxq %rax, %rax, %rax
; CHECK-X64-V4-NEXT: vmovq %rax, %xmm2
; CHECK-X64-V4-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm2[0],xmm0[0]
; CHECK-X64-V4-NEXT: vpsrlq $3, %xmm0, %xmm1 {%k1}
; CHECK-X64-V4-NEXT: vmovdqa %xmm1, %xmm0
; CHECK-X64-V4-NEXT: retq
;; This currently fails: `10` is even, so the arms of the select
;; (10 = 0b1010 and 1 = 0b0001) share no common set bit, and the default
;; case of isKnownNeverZero cannot guarantee any one bit of the divisor.
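;; In the V4 output, 0xCCCCCCCCCCCCCCCD = 14757395258967641293 is
;; ceil(2^67/10), so high64(x * m) >> 3 == x/10 for all 64-bit x.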
  %d = select <2 x i1> %c, <2 x i64> <i64 10, i64 10>, <2 x i64> <i64 1, i64 1>
  %r = udiv <2 x i64> %x, %d
  ret <2 x i64> %r
}

define <2 x i64> @udiv_identity_non_zero(<2 x i1> %c, <2 x i64> %x, <2 x i64> %y) {
; CHECK-X64-V3-LABEL: udiv_identity_non_zero:
; CHECK-X64-V3: # %bb.0:
; CHECK-X64-V3-NEXT: vpsllq $63, %xmm0, %xmm0
; CHECK-X64-V3-NEXT: vpcmpeqd %xmm3, %xmm3, %xmm3
; CHECK-X64-V3-NEXT: vpsubq %xmm3, %xmm2, %xmm2
; CHECK-X64-V3-NEXT: vmovddup {{.*#+}} xmm3 = [1,1]
; CHECK-X64-V3-NEXT: # xmm3 = mem[0,0]
; CHECK-X64-V3-NEXT: vblendvpd %xmm0, %xmm2, %xmm3, %xmm0
; CHECK-X64-V3-NEXT: vpextrq $1, %xmm0, %rcx
; CHECK-X64-V3-NEXT: vpextrq $1, %xmm1, %rax
; CHECK-X64-V3-NEXT: xorl %edx, %edx
; CHECK-X64-V3-NEXT: divq %rcx
; CHECK-X64-V3-NEXT: movq %rax, %rcx
; CHECK-X64-V3-NEXT: vmovq %xmm0, %rsi
; CHECK-X64-V3-NEXT: vmovq %xmm1, %rax
; CHECK-X64-V3-NEXT: xorl %edx, %edx
; CHECK-X64-V3-NEXT: divq %rsi
; CHECK-X64-V3-NEXT: vmovq %rcx, %xmm0
; CHECK-X64-V3-NEXT: vmovq %rax, %xmm1
; CHECK-X64-V3-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; CHECK-X64-V3-NEXT: retq
;
; CHECK-X64-V4-LABEL: udiv_identity_non_zero:
; CHECK-X64-V4: # %bb.0:
; CHECK-X64-V4-NEXT: vpcmpeqd %xmm3, %xmm3, %xmm3
; CHECK-X64-V4-NEXT: vpsubq %xmm3, %xmm2, %xmm2
; CHECK-X64-V4-NEXT: vpextrq $1, %xmm2, %rcx
; CHECK-X64-V4-NEXT: vpextrq $1, %xmm1, %rax
; CHECK-X64-V4-NEXT: xorl %edx, %edx
; CHECK-X64-V4-NEXT: divq %rcx
; CHECK-X64-V4-NEXT: movq %rax, %rcx
; CHECK-X64-V4-NEXT: vmovq %xmm2, %rsi
; CHECK-X64-V4-NEXT: vmovq %xmm1, %rax
; CHECK-X64-V4-NEXT: xorl %edx, %edx
; CHECK-X64-V4-NEXT: divq %rsi
; CHECK-X64-V4-NEXT: vpsllq $63, %xmm0, %xmm0
; CHECK-X64-V4-NEXT: vpmovq2m %xmm0, %k1
; CHECK-X64-V4-NEXT: vmovq %rcx, %xmm0
; CHECK-X64-V4-NEXT: vmovq %rax, %xmm2
; CHECK-X64-V4-NEXT: vpunpcklqdq {{.*#+}} xmm1 {%k1} = xmm2[0],xmm0[0]
; CHECK-X64-V4-NEXT: vmovdqa %xmm1, %xmm0
; CHECK-X64-V4-NEXT: retq
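;; `add nuw` cannot wrap, so %non_zero = %y + 1 is at least 1 in every
;; lane and the divisor is never zero. Note that the V4 code merges the
;; quotients into %x under {%k1} instead of blending the divisor first.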
  %non_zero = add nsw nuw <2 x i64> %y, <i64 1, i64 1>
  %d = select <2 x i1> %c, <2 x i64> %non_zero, <2 x i64> <i64 1, i64 1>
  %r = udiv <2 x i64> %x, %d
  ret <2 x i64> %r
}

define <2 x i64> @udiv_identity_zero(<2 x i1> %c, <2 x i64> %x) {
; CHECK-X64-V3-LABEL: udiv_identity_zero:
; CHECK-X64-V3: # %bb.0:
; CHECK-X64-V3-NEXT: vpandn {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
; CHECK-X64-V3-NEXT: vpextrq $1, %xmm1, %rax
; CHECK-X64-V3-NEXT: vpextrq $1, %xmm0, %rcx
; CHECK-X64-V3-NEXT: xorl %edx, %edx
; CHECK-X64-V3-NEXT: divq %rcx
; CHECK-X64-V3-NEXT: movq %rax, %rcx
; CHECK-X64-V3-NEXT: vmovq %xmm1, %rax
; CHECK-X64-V3-NEXT: vmovq %xmm0, %rsi
; CHECK-X64-V3-NEXT: xorl %edx, %edx
; CHECK-X64-V3-NEXT: divq %rsi
; CHECK-X64-V3-NEXT: vmovq %rcx, %xmm0
; CHECK-X64-V3-NEXT: vmovq %rax, %xmm1
; CHECK-X64-V3-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; CHECK-X64-V3-NEXT: retq
;
; CHECK-X64-V4-LABEL: udiv_identity_zero:
; CHECK-X64-V4: # %bb.0:
; CHECK-X64-V4-NEXT: vpsllq $63, %xmm0, %xmm0
; CHECK-X64-V4-NEXT: vpmovq2m %xmm0, %k0
; CHECK-X64-V4-NEXT: knotw %k0, %k1
; CHECK-X64-V4-NEXT: vpbroadcastq {{.*#+}} xmm0 {%k1} {z} = [1,1]
; CHECK-X64-V4-NEXT: vpextrq $1, %xmm1, %rax
; CHECK-X64-V4-NEXT: vpextrq $1, %xmm0, %rcx
; CHECK-X64-V4-NEXT: xorl %edx, %edx
; CHECK-X64-V4-NEXT: divq %rcx
; CHECK-X64-V4-NEXT: movq %rax, %rcx
; CHECK-X64-V4-NEXT: vmovq %xmm1, %rax
; CHECK-X64-V4-NEXT: vmovq %xmm0, %rsi
; CHECK-X64-V4-NEXT: xorl %edx, %edx
; CHECK-X64-V4-NEXT: divq %rsi
; CHECK-X64-V4-NEXT: vmovq %rcx, %xmm0
; CHECK-X64-V4-NEXT: vmovq %rax, %xmm1
; CHECK-X64-V4-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; CHECK-X64-V4-NEXT: retq
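;; Here the true arm of the divisor is zero, so the select is not known
;; non-zero and must be materialized (an andnot against a constant-pool
;; splat on V3, a zero-masked broadcast of <1,1> on V4) before the
;; scalar divides.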
  %d = select <2 x i1> %c, <2 x i64> zeroinitializer, <2 x i64> <i64 1, i64 1>
  %r = udiv <2 x i64> %x, %d
  ret <2 x i64> %r
}

define <2 x i64> @udiv_identity_partial_zero(<2 x i1> %c, <2 x i64> %x) {
; CHECK-X64-V3-LABEL: udiv_identity_partial_zero:
; CHECK-X64-V3: # %bb.0:
; CHECK-X64-V3-NEXT: vpsllq $63, %xmm0, %xmm0
; CHECK-X64-V3-NEXT: vmovddup {{.*#+}} xmm2 = [1,1]
; CHECK-X64-V3-NEXT: # xmm2 = mem[0,0]
; CHECK-X64-V3-NEXT: vblendvpd %xmm0, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm2, %xmm0
; CHECK-X64-V3-NEXT: vpextrq $1, %xmm0, %rcx
; CHECK-X64-V3-NEXT: vpextrq $1, %xmm1, %rax
; CHECK-X64-V3-NEXT: xorl %edx, %edx
; CHECK-X64-V3-NEXT: divq %rcx
; CHECK-X64-V3-NEXT: movq %rax, %rcx
; CHECK-X64-V3-NEXT: vmovq %xmm0, %rsi
; CHECK-X64-V3-NEXT: vmovq %xmm1, %rax
; CHECK-X64-V3-NEXT: xorl %edx, %edx
; CHECK-X64-V3-NEXT: divq %rsi
; CHECK-X64-V3-NEXT: vmovq %rcx, %xmm0
; CHECK-X64-V3-NEXT: vmovq %rax, %xmm1
; CHECK-X64-V3-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; CHECK-X64-V3-NEXT: retq
;
; CHECK-X64-V4-LABEL: udiv_identity_partial_zero:
; CHECK-X64-V4: # %bb.0:
; CHECK-X64-V4-NEXT: vpsllq $63, %xmm0, %xmm0
; CHECK-X64-V4-NEXT: vpmovq2m %xmm0, %k1
; CHECK-X64-V4-NEXT: vpbroadcastq {{.*#+}} xmm0 = [1,1]
; CHECK-X64-V4-NEXT: vmovdqa64 {{.*#+}} xmm0 {%k1} = [0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0]
; CHECK-X64-V4-NEXT: vpextrq $1, %xmm0, %rcx
; CHECK-X64-V4-NEXT: vpextrq $1, %xmm1, %rax
; CHECK-X64-V4-NEXT: xorl %edx, %edx
; CHECK-X64-V4-NEXT: divq %rcx
; CHECK-X64-V4-NEXT: movq %rax, %rcx
; CHECK-X64-V4-NEXT: vmovq %xmm0, %rsi
; CHECK-X64-V4-NEXT: vmovq %xmm1, %rax
; CHECK-X64-V4-NEXT: xorl %edx, %edx
; CHECK-X64-V4-NEXT: divq %rsi
; CHECK-X64-V4-NEXT: vmovq %rcx, %xmm0
; CHECK-X64-V4-NEXT: vmovq %rax, %xmm1
; CHECK-X64-V4-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; CHECK-X64-V4-NEXT: retq
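;; Lane 0 of the true arm is zero, so the divisor as a whole is not known
;; non-zero even though lane 1 (5 vs. 1) would be safe on its own.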
  %d = select <2 x i1> %c, <2 x i64> <i64 0, i64 5>, <2 x i64> <i64 1, i64 1>
  %r = udiv <2 x i64> %x, %d
  ret <2 x i64> %r
}

define <2 x i64> @urem_identity_const(<2 x i1> %c, <2 x i64> %x) {
; CHECK-X64-V3-LABEL: urem_identity_const:
; CHECK-X64-V3: # %bb.0:
; CHECK-X64-V3-NEXT: vpsllq $63, %xmm0, %xmm0
; CHECK-X64-V3-NEXT: vmovddup {{.*#+}} xmm2 = [11,11]
; CHECK-X64-V3-NEXT: # xmm2 = mem[0,0]
; CHECK-X64-V3-NEXT: vblendvpd %xmm0, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm2, %xmm0
; CHECK-X64-V3-NEXT: vpextrq $1, %xmm0, %rcx
; CHECK-X64-V3-NEXT: vpextrq $1, %xmm1, %rax
; CHECK-X64-V3-NEXT: xorl %edx, %edx
; CHECK-X64-V3-NEXT: divq %rcx
; CHECK-X64-V3-NEXT: movq %rdx, %rcx
; CHECK-X64-V3-NEXT: vmovq %xmm0, %rsi
; CHECK-X64-V3-NEXT: vmovq %xmm1, %rax
; CHECK-X64-V3-NEXT: xorl %edx, %edx
; CHECK-X64-V3-NEXT: divq %rsi
; CHECK-X64-V3-NEXT: vmovq %rcx, %xmm0
; CHECK-X64-V3-NEXT: vmovq %rdx, %xmm1
; CHECK-X64-V3-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; CHECK-X64-V3-NEXT: retq
;
; CHECK-X64-V4-LABEL: urem_identity_const:
; CHECK-X64-V4: # %bb.0:
; CHECK-X64-V4-NEXT: vpsllq $63, %xmm0, %xmm0
; CHECK-X64-V4-NEXT: vpmovq2m %xmm0, %k1
; CHECK-X64-V4-NEXT: vpbroadcastq {{.*#+}} xmm0 = [11,11]
; CHECK-X64-V4-NEXT: vpbroadcastq {{.*#+}} xmm0 {%k1} = [1,1]
; CHECK-X64-V4-NEXT: vpextrq $1, %xmm0, %rcx
; CHECK-X64-V4-NEXT: vpextrq $1, %xmm1, %rax
; CHECK-X64-V4-NEXT: xorl %edx, %edx
; CHECK-X64-V4-NEXT: divq %rcx
; CHECK-X64-V4-NEXT: movq %rdx, %rcx
; CHECK-X64-V4-NEXT: vmovq %xmm0, %rsi
; CHECK-X64-V4-NEXT: vmovq %xmm1, %rax
; CHECK-X64-V4-NEXT: xorl %edx, %edx
; CHECK-X64-V4-NEXT: divq %rsi
; CHECK-X64-V4-NEXT: vmovq %rcx, %xmm0
; CHECK-X64-V4-NEXT: vmovq %rdx, %xmm1
; CHECK-X64-V4-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; CHECK-X64-V4-NEXT: retq
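;; urem variant with the identity arm <1,1> in the true position
;; (`x urem 1` is 0); the remainders are taken from %rdx after each divq.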
  %d = select <2 x i1> %c, <2 x i64> <i64 1, i64 1>, <2 x i64> <i64 11, i64 11>
  %r = urem <2 x i64> %x, %d
  ret <2 x i64> %r
}

define <2 x i64> @sdiv_identity_const(<2 x i1> %c, <2 x i64> %x) {
; CHECK-X64-V3-LABEL: sdiv_identity_const:
; CHECK-X64-V3: # %bb.0:
; CHECK-X64-V3-NEXT: vpsllq $63, %xmm0, %xmm0
; CHECK-X64-V3-NEXT: vmovddup {{.*#+}} xmm2 = [1,1]
; CHECK-X64-V3-NEXT: # xmm2 = mem[0,0]
; CHECK-X64-V3-NEXT: vblendvpd %xmm0, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm2, %xmm0
; CHECK-X64-V3-NEXT: vpextrq $1, %xmm0, %rcx
; CHECK-X64-V3-NEXT: vpextrq $1, %xmm1, %rax
; CHECK-X64-V3-NEXT: cqto
; CHECK-X64-V3-NEXT: idivq %rcx
; CHECK-X64-V3-NEXT: movq %rax, %rcx
; CHECK-X64-V3-NEXT: vmovq %xmm0, %rsi
; CHECK-X64-V3-NEXT: vmovq %xmm1, %rax
; CHECK-X64-V3-NEXT: cqto
; CHECK-X64-V3-NEXT: idivq %rsi
; CHECK-X64-V3-NEXT: vmovq %rcx, %xmm0
; CHECK-X64-V3-NEXT: vmovq %rax, %xmm1
; CHECK-X64-V3-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; CHECK-X64-V3-NEXT: retq
;
; CHECK-X64-V4-LABEL: sdiv_identity_const:
; CHECK-X64-V4: # %bb.0:
; CHECK-X64-V4-NEXT: vpsllq $63, %xmm0, %xmm0
; CHECK-X64-V4-NEXT: vpmovq2m %xmm0, %k1
; CHECK-X64-V4-NEXT: vpbroadcastq {{.*#+}} xmm0 = [1,1]
; CHECK-X64-V4-NEXT: vmovdqa64 {{.*#+}} xmm0 {%k1} = [11,13]
; CHECK-X64-V4-NEXT: vpextrq $1, %xmm0, %rcx
; CHECK-X64-V4-NEXT: vpextrq $1, %xmm1, %rax
; CHECK-X64-V4-NEXT: cqto
; CHECK-X64-V4-NEXT: idivq %rcx
; CHECK-X64-V4-NEXT: movq %rax, %rcx
; CHECK-X64-V4-NEXT: vmovq %xmm0, %rsi
; CHECK-X64-V4-NEXT: vmovq %xmm1, %rax
; CHECK-X64-V4-NEXT: cqto
; CHECK-X64-V4-NEXT: idivq %rsi
; CHECK-X64-V4-NEXT: vmovq %rcx, %xmm0
; CHECK-X64-V4-NEXT: vmovq %rax, %xmm1
; CHECK-X64-V4-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; CHECK-X64-V4-NEXT: retq
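;; Signed variant: 11, 13, and 1 are all odd. The scalar path uses
;; cqto/idivq, sign-extending %rax into %rdx:%rax instead of zeroing %edx.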
  %d = select <2 x i1> %c, <2 x i64> <i64 11, i64 13>, <2 x i64> <i64 1, i64 1>
  %r = sdiv <2 x i64> %x, %d
  ret <2 x i64> %r
}

define <2 x i64> @sdiv_identity_const_todo_better_nonzero(<2 x i1> %c, <2 x i64> %x) {
; CHECK-X64-V3-LABEL: sdiv_identity_const_todo_better_nonzero:
; CHECK-X64-V3: # %bb.0:
; CHECK-X64-V3-NEXT: vpsllq $63, %xmm0, %xmm0
; CHECK-X64-V3-NEXT: vmovddup {{.*#+}} xmm2 = [1,1]
; CHECK-X64-V3-NEXT: # xmm2 = mem[0,0]
; CHECK-X64-V3-NEXT: vblendvpd %xmm0, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm2, %xmm0
; CHECK-X64-V3-NEXT: vpextrq $1, %xmm0, %rcx
; CHECK-X64-V3-NEXT: vpextrq $1, %xmm1, %rax
; CHECK-X64-V3-NEXT: cqto
; CHECK-X64-V3-NEXT: idivq %rcx
; CHECK-X64-V3-NEXT: movq %rax, %rcx
; CHECK-X64-V3-NEXT: vmovq %xmm0, %rsi
; CHECK-X64-V3-NEXT: vmovq %xmm1, %rax
; CHECK-X64-V3-NEXT: cqto
; CHECK-X64-V3-NEXT: idivq %rsi
; CHECK-X64-V3-NEXT: vmovq %rcx, %xmm0
; CHECK-X64-V3-NEXT: vmovq %rax, %xmm1
; CHECK-X64-V3-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; CHECK-X64-V3-NEXT: retq
;
; CHECK-X64-V4-LABEL: sdiv_identity_const_todo_better_nonzero:
; CHECK-X64-V4: # %bb.0:
; CHECK-X64-V4-NEXT: vpsllq $63, %xmm0, %xmm0
; CHECK-X64-V4-NEXT: vpmovq2m %xmm0, %k1
; CHECK-X64-V4-NEXT: vpbroadcastq {{.*#+}} xmm0 = [1,1]
; CHECK-X64-V4-NEXT: vmovdqa64 {{.*#+}} xmm0 {%k1} = [11,17]
; CHECK-X64-V4-NEXT: vpextrq $1, %xmm0, %rcx
; CHECK-X64-V4-NEXT: vpextrq $1, %xmm1, %rax
; CHECK-X64-V4-NEXT: cqto
; CHECK-X64-V4-NEXT: idivq %rcx
; CHECK-X64-V4-NEXT: movq %rax, %rcx
; CHECK-X64-V4-NEXT: vmovq %xmm0, %rsi
; CHECK-X64-V4-NEXT: vmovq %xmm1, %rax
; CHECK-X64-V4-NEXT: cqto
; CHECK-X64-V4-NEXT: idivq %rsi
; CHECK-X64-V4-NEXT: vmovq %rcx, %xmm0
; CHECK-X64-V4-NEXT: vmovq %rax, %xmm1
; CHECK-X64-V4-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; CHECK-X64-V4-NEXT: retq
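;; Same pattern with <11,17>; judging by the name, this case is expected
;; to eventually get a better lowering from a stronger non-zero proof.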
  %d = select <2 x i1> %c, <2 x i64> <i64 11, i64 17>, <2 x i64> <i64 1, i64 1>
  %r = sdiv <2 x i64> %x, %d
  ret <2 x i64> %r
}

define <2 x i64> @srem_identity_const(<2 x i1> %c, <2 x i64> %x) {
; CHECK-X64-V3-LABEL: srem_identity_const:
; CHECK-X64-V3: # %bb.0:
; CHECK-X64-V3-NEXT: vpsllq $63, %xmm0, %xmm0
; CHECK-X64-V3-NEXT: vmovddup {{.*#+}} xmm2 = [11,11]
; CHECK-X64-V3-NEXT: # xmm2 = mem[0,0]
; CHECK-X64-V3-NEXT: vblendvpd %xmm0, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm2, %xmm0
; CHECK-X64-V3-NEXT: vpextrq $1, %xmm0, %rcx
; CHECK-X64-V3-NEXT: vpextrq $1, %xmm1, %rax
; CHECK-X64-V3-NEXT: cqto
; CHECK-X64-V3-NEXT: idivq %rcx
; CHECK-X64-V3-NEXT: movq %rdx, %rcx
; CHECK-X64-V3-NEXT: vmovq %xmm0, %rsi
; CHECK-X64-V3-NEXT: vmovq %xmm1, %rax
; CHECK-X64-V3-NEXT: cqto
; CHECK-X64-V3-NEXT: idivq %rsi
; CHECK-X64-V3-NEXT: vmovq %rcx, %xmm0
; CHECK-X64-V3-NEXT: vmovq %rdx, %xmm1
; CHECK-X64-V3-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; CHECK-X64-V3-NEXT: retq
;
; CHECK-X64-V4-LABEL: srem_identity_const:
; CHECK-X64-V4: # %bb.0:
; CHECK-X64-V4-NEXT: vpsllq $63, %xmm0, %xmm0
; CHECK-X64-V4-NEXT: vpmovq2m %xmm0, %k1
; CHECK-X64-V4-NEXT: vpbroadcastq {{.*#+}} xmm0 = [11,11]
; CHECK-X64-V4-NEXT: vpbroadcastq {{.*#+}} xmm0 {%k1} = [1,1]
; CHECK-X64-V4-NEXT: vpextrq $1, %xmm0, %rcx
; CHECK-X64-V4-NEXT: vpextrq $1, %xmm1, %rax
; CHECK-X64-V4-NEXT: cqto
; CHECK-X64-V4-NEXT: idivq %rcx
; CHECK-X64-V4-NEXT: movq %rdx, %rcx
; CHECK-X64-V4-NEXT: vmovq %xmm0, %rsi
; CHECK-X64-V4-NEXT: vmovq %xmm1, %rax
; CHECK-X64-V4-NEXT: cqto
; CHECK-X64-V4-NEXT: idivq %rsi
; CHECK-X64-V4-NEXT: vmovq %rcx, %xmm0
; CHECK-X64-V4-NEXT: vmovq %rdx, %xmm1
; CHECK-X64-V4-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; CHECK-X64-V4-NEXT: retq
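;; srem counterpart of urem_identity_const; idivq leaves the signed
;; remainder in %rdx.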
  %d = select <2 x i1> %c, <2 x i64> <i64 1, i64 1>, <2 x i64> <i64 11, i64 11>
  %r = srem <2 x i64> %x, %d
  ret <2 x i64> %r
}

define <2 x i64> @udivrem_identity_const(<2 x i1> %c, <2 x i64> %x) {
; CHECK-X64-V3-LABEL: udivrem_identity_const:
; CHECK-X64-V3: # %bb.0:
; CHECK-X64-V3-NEXT: vpsllq $63, %xmm0, %xmm0
; CHECK-X64-V3-NEXT: vmovddup {{.*#+}} xmm2 = [1,1]
; CHECK-X64-V3-NEXT: # xmm2 = mem[0,0]
; CHECK-X64-V3-NEXT: vblendvpd %xmm0, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm2, %xmm0
; CHECK-X64-V3-NEXT: vpextrq $1, %xmm0, %rcx
; CHECK-X64-V3-NEXT: vpextrq $1, %xmm1, %rax
; CHECK-X64-V3-NEXT: xorl %edx, %edx
; CHECK-X64-V3-NEXT: divq %rcx
; CHECK-X64-V3-NEXT: movq %rax, %rcx
; CHECK-X64-V3-NEXT: movq %rdx, %rsi
; CHECK-X64-V3-NEXT: vmovq %xmm0, %rdi
; CHECK-X64-V3-NEXT: vmovq %xmm1, %rax
; CHECK-X64-V3-NEXT: xorl %edx, %edx
; CHECK-X64-V3-NEXT: divq %rdi
; CHECK-X64-V3-NEXT: vmovq %rcx, %xmm0
; CHECK-X64-V3-NEXT: vmovq %rax, %xmm1
; CHECK-X64-V3-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; CHECK-X64-V3-NEXT: vmovq %rsi, %xmm1
; CHECK-X64-V3-NEXT: vmovq %rdx, %xmm2
; CHECK-X64-V3-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
; CHECK-X64-V3-NEXT: vpaddq %xmm1, %xmm0, %xmm0
; CHECK-X64-V3-NEXT: retq
;
; CHECK-X64-V4-LABEL: udivrem_identity_const:
; CHECK-X64-V4: # %bb.0:
; CHECK-X64-V4-NEXT: vpsllq $63, %xmm0, %xmm0
; CHECK-X64-V4-NEXT: vpmovq2m %xmm0, %k1
; CHECK-X64-V4-NEXT: vpbroadcastq {{.*#+}} xmm0 = [1,1]
; CHECK-X64-V4-NEXT: vpbroadcastq {{.*#+}} xmm0 {%k1} = [11,11]
; CHECK-X64-V4-NEXT: vpextrq $1, %xmm0, %rcx
; CHECK-X64-V4-NEXT: vpextrq $1, %xmm1, %rax
; CHECK-X64-V4-NEXT: xorl %edx, %edx
; CHECK-X64-V4-NEXT: divq %rcx
; CHECK-X64-V4-NEXT: movq %rax, %rcx
; CHECK-X64-V4-NEXT: movq %rdx, %rsi
; CHECK-X64-V4-NEXT: vmovq %xmm0, %rdi
; CHECK-X64-V4-NEXT: vmovq %xmm1, %rax
; CHECK-X64-V4-NEXT: xorl %edx, %edx
; CHECK-X64-V4-NEXT: divq %rdi
; CHECK-X64-V4-NEXT: vmovq %rcx, %xmm0
; CHECK-X64-V4-NEXT: vmovq %rax, %xmm1
; CHECK-X64-V4-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; CHECK-X64-V4-NEXT: vmovq %rsi, %xmm1
; CHECK-X64-V4-NEXT: vmovq %rdx, %xmm2
; CHECK-X64-V4-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
; CHECK-X64-V4-NEXT: vpaddq %xmm1, %xmm0, %xmm0
; CHECK-X64-V4-NEXT: retq
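;; div/rem pair sharing one divisor: each divq produces the quotient in
;; %rax and the remainder in %rdx at once, so a single divide per lane
;; serves both the udiv and the urem.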
  %d = select <2 x i1> %c, <2 x i64> <i64 11, i64 11>, <2 x i64> <i64 1, i64 1>
  %div = udiv <2 x i64> %x, %d
  %rem = urem <2 x i64> %x, %d
  %r = add <2 x i64> %div, %rem
  ret <2 x i64> %r
}

define <2 x i64> @sdivrem_identity_const(<2 x i1> %c, <2 x i64> %x) {
; CHECK-X64-V3-LABEL: sdivrem_identity_const:
; CHECK-X64-V3: # %bb.0:
; CHECK-X64-V3-NEXT: vpsllq $63, %xmm0, %xmm0
; CHECK-X64-V3-NEXT: vmovddup {{.*#+}} xmm2 = [1,1]
; CHECK-X64-V3-NEXT: # xmm2 = mem[0,0]
; CHECK-X64-V3-NEXT: vblendvpd %xmm0, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm2, %xmm0
; CHECK-X64-V3-NEXT: vpextrq $1, %xmm0, %rcx
; CHECK-X64-V3-NEXT: vpextrq $1, %xmm1, %rax
; CHECK-X64-V3-NEXT: cqto
; CHECK-X64-V3-NEXT: idivq %rcx
; CHECK-X64-V3-NEXT: movq %rax, %rcx
; CHECK-X64-V3-NEXT: movq %rdx, %rsi
; CHECK-X64-V3-NEXT: vmovq %xmm0, %rdi
; CHECK-X64-V3-NEXT: vmovq %xmm1, %rax
; CHECK-X64-V3-NEXT: cqto
; CHECK-X64-V3-NEXT: idivq %rdi
; CHECK-X64-V3-NEXT: vmovq %rcx, %xmm0
; CHECK-X64-V3-NEXT: vmovq %rax, %xmm1
; CHECK-X64-V3-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; CHECK-X64-V3-NEXT: vmovq %rsi, %xmm1
; CHECK-X64-V3-NEXT: vmovq %rdx, %xmm2
; CHECK-X64-V3-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
; CHECK-X64-V3-NEXT: vpaddq %xmm1, %xmm0, %xmm0
; CHECK-X64-V3-NEXT: retq
;
; CHECK-X64-V4-LABEL: sdivrem_identity_const:
; CHECK-X64-V4: # %bb.0:
; CHECK-X64-V4-NEXT: vpsllq $63, %xmm0, %xmm0
; CHECK-X64-V4-NEXT: vpmovq2m %xmm0, %k1
; CHECK-X64-V4-NEXT: vpbroadcastq {{.*#+}} xmm0 = [1,1]
; CHECK-X64-V4-NEXT: vpbroadcastq {{.*#+}} xmm0 {%k1} = [11,11]
; CHECK-X64-V4-NEXT: vpextrq $1, %xmm0, %rcx
; CHECK-X64-V4-NEXT: vpextrq $1, %xmm1, %rax
; CHECK-X64-V4-NEXT: cqto
; CHECK-X64-V4-NEXT: idivq %rcx
; CHECK-X64-V4-NEXT: movq %rax, %rcx
; CHECK-X64-V4-NEXT: movq %rdx, %rsi
; CHECK-X64-V4-NEXT: vmovq %xmm0, %rdi
; CHECK-X64-V4-NEXT: vmovq %xmm1, %rax
; CHECK-X64-V4-NEXT: cqto
; CHECK-X64-V4-NEXT: idivq %rdi
; CHECK-X64-V4-NEXT: vmovq %rcx, %xmm0
; CHECK-X64-V4-NEXT: vmovq %rax, %xmm1
; CHECK-X64-V4-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; CHECK-X64-V4-NEXT: vmovq %rsi, %xmm1
; CHECK-X64-V4-NEXT: vmovq %rdx, %xmm2
; CHECK-X64-V4-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
; CHECK-X64-V4-NEXT: vpaddq %xmm1, %xmm0, %xmm0
; CHECK-X64-V4-NEXT: retq
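;; Signed div/rem pair: likewise a single idivq per lane yields both the
;; quotient (%rax) and the remainder (%rdx).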
  %d = select <2 x i1> %c, <2 x i64> <i64 11, i64 11>, <2 x i64> <i64 1, i64 1>
  %div = sdiv <2 x i64> %x, %d
  %rem = srem <2 x i64> %x, %d
  %r = add <2 x i64> %div, %rem
  ret <2 x i64> %r
}

;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line: