1 # NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
2 # RUN: llc -mtriple aarch64-apple-darwin -debugify-and-strip-all-safe -run-pass=aarch64-prelegalizer-combiner -global-isel -verify-machineinstrs %s -o - | FileCheck %s
4 # Check that we propagate the G_SEXT to the sources of the phi operand.
# Expected combine: the G_SEXT is sunk into each predecessor (extending the
# incoming G_CONSTANTs to s64) and the G_PHI itself is rewritten to produce s64.
6 name: sext_icst_through_phi
7 tracksRegLiveness: true
9 ; CHECK-LABEL: name: sext_icst_through_phi
11 ; CHECK: successors: %bb.1(0x40000000), %bb.2(0x40000000)
12 ; CHECK: liveins: $w0, $w1
13 ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $w0
14 ; CHECK: %one:_(s32) = G_CONSTANT i32 2
15 ; CHECK: %cmp:_(s1) = G_ICMP intpred(sle), [[COPY]](s32), %one
16 ; CHECK: G_BRCOND %cmp(s1), %bb.2
19 ; CHECK: successors: %bb.3(0x80000000)
20 ; CHECK: %cst32_4:_(s32) = G_CONSTANT i32 4
21 ; CHECK: [[SEXT:%[0-9]+]]:_(s64) = G_SEXT %cst32_4(s32)
24 ; CHECK: successors: %bb.3(0x80000000)
25 ; CHECK: %cst32_10:_(s32) = G_CONSTANT i32 10
26 ; CHECK: [[SEXT1:%[0-9]+]]:_(s64) = G_SEXT %cst32_10(s32)
28 ; CHECK: %ext:_(s64) = G_PHI [[SEXT]](s64), %bb.1, [[SEXT1]](s64), %bb.2
29 ; CHECK: $x0 = COPY %ext(s64)
30 ; CHECK: RET_ReallyLR implicit $x0
36 %zero:_(s32) = G_CONSTANT i32 0
37 %one:_(s32) = G_CONSTANT i32 2
38 %cmp:_(s1) = G_ICMP intpred(sgt), %0(s32), %one
39 G_BRCOND %cmp(s1), %bb.2
43 %cst32_4:_(s32) = G_CONSTANT i32 4
47 %cst32_10:_(s32) = G_CONSTANT i32 10
50 %phi:_(s32) = G_PHI %cst32_4(s32), %bb.2, %cst32_10(s32), %bb.3
51 %ext:_(s64) = G_SEXT %phi
53 RET_ReallyLR implicit $x0
57 # Check that we propagate the G_ZEXT to the sources of the phi operand.
# Same shape as the G_SEXT case above: the extend is sunk into each
# predecessor and the G_PHI becomes s64.
59 name: zext_icst_through_phi
60 tracksRegLiveness: true
62 ; CHECK-LABEL: name: zext_icst_through_phi
64 ; CHECK: successors: %bb.1(0x40000000), %bb.2(0x40000000)
65 ; CHECK: liveins: $w0, $w1
66 ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $w0
67 ; CHECK: %one:_(s32) = G_CONSTANT i32 2
68 ; CHECK: %cmp:_(s1) = G_ICMP intpred(sle), [[COPY]](s32), %one
69 ; CHECK: G_BRCOND %cmp(s1), %bb.2
72 ; CHECK: successors: %bb.3(0x80000000)
73 ; CHECK: %cst32_4:_(s32) = G_CONSTANT i32 4
74 ; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT %cst32_4(s32)
77 ; CHECK: successors: %bb.3(0x80000000)
78 ; CHECK: %cst32_10:_(s32) = G_CONSTANT i32 10
79 ; CHECK: [[ZEXT1:%[0-9]+]]:_(s64) = G_ZEXT %cst32_10(s32)
81 ; CHECK: %ext:_(s64) = G_PHI [[ZEXT]](s64), %bb.1, [[ZEXT1]](s64), %bb.2
82 ; CHECK: $x0 = COPY %ext(s64)
83 ; CHECK: RET_ReallyLR implicit $x0
89 %zero:_(s32) = G_CONSTANT i32 0
90 %one:_(s32) = G_CONSTANT i32 2
91 %cmp:_(s1) = G_ICMP intpred(sgt), %0(s32), %one
92 G_BRCOND %cmp(s1), %bb.2
96 %cst32_4:_(s32) = G_CONSTANT i32 4
100 %cst32_10:_(s32) = G_CONSTANT i32 10
103 %phi:_(s32) = G_PHI %cst32_4(s32), %bb.2, %cst32_10(s32), %bb.3
104 %ext:_(s64) = G_ZEXT %phi
106 RET_ReallyLR implicit $x0
110 # Don't handle vectors because of potential cost issues.
# Negative test: the CHECK lines assert the G_SEXT remains after the G_PHI
# (no sinking into the predecessors) for <4 x s32> -> <4 x s64>.
112 name: sext_load_through_phi_vector
113 tracksRegLiveness: true
115 ; CHECK-LABEL: name: sext_load_through_phi_vector
117 ; CHECK: successors: %bb.1(0x40000000), %bb.2(0x40000000)
118 ; CHECK: liveins: $x0, $q0, $q1
119 ; CHECK: %ptr:_(p0) = COPY $x0
120 ; CHECK: %cmp:_(s1) = G_IMPLICIT_DEF
121 ; CHECK: G_BRCOND %cmp(s1), %bb.2
124 ; CHECK: successors: %bb.3(0x80000000)
125 ; CHECK: %ld1:_(<4 x s32>) = G_LOAD %ptr(p0) :: (load (<4 x s32>))
128 ; CHECK: successors: %bb.3(0x80000000)
129 ; CHECK: %ld2:_(<4 x s32>) = G_LOAD %ptr(p0) :: (load (<4 x s32>))
131 ; CHECK: %phi:_(<4 x s32>) = G_PHI %ld1(<4 x s32>), %bb.1, %ld2(<4 x s32>), %bb.2
132 ; CHECK: %ext:_(<4 x s64>) = G_SEXT %phi(<4 x s32>)
133 ; CHECK: G_STORE %ext(<4 x s64>), %ptr(p0) :: (store (<4 x s64>))
134 ; CHECK: RET_ReallyLR
136 liveins: $x0, $q0, $q1
138 %0:_(<4 x s32>) = COPY $q0
139 %1:_(<4 x s32>) = COPY $q1
140 %ptr:_(p0) = COPY $x0
141 %cmp:_(s1) = G_IMPLICIT_DEF
142 G_BRCOND %cmp(s1), %bb.2
146 %ld1:_(<4 x s32>) = G_LOAD %ptr(p0) :: (load (<4 x s32>))
150 %ld2:_(<4 x s32>) = G_LOAD %ptr(p0) :: (load (<4 x s32>))
153 %phi:_(<4 x s32>) = G_PHI %ld1(<4 x s32>), %bb.2, %ld2(<4 x s32>), %bb.3
154 %ext:_(<4 x s64>) = G_SEXT %phi
155 G_STORE %ext(<4 x s64>), %ptr(p0) :: (store (<4 x s64>))
161 # Check that we don't propagate if the extend is used by a G_PTR_ADD, which on
162 # AArch64 has a good chance of folding in the extend.
# Negative test: the CHECK lines assert the G_PHI stays s32 and the single-use
# G_SEXT remains after it, feeding the G_PTR_ADD.
164 name: sext_icst_through_phi_used_by_ptradd
165 tracksRegLiveness: true
167 ; CHECK-LABEL: name: sext_icst_through_phi_used_by_ptradd
169 ; CHECK: successors: %bb.1(0x40000000), %bb.2(0x40000000)
170 ; CHECK: liveins: $w0, $w1, $x2
171 ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $w0
172 ; CHECK: %base:_(p0) = COPY $x2
173 ; CHECK: %one:_(s32) = G_CONSTANT i32 2
174 ; CHECK: %cmp:_(s1) = G_ICMP intpred(sle), [[COPY]](s32), %one
175 ; CHECK: G_BRCOND %cmp(s1), %bb.2
178 ; CHECK: successors: %bb.3(0x80000000)
179 ; CHECK: %cst32_4:_(s32) = G_CONSTANT i32 4
182 ; CHECK: successors: %bb.3(0x80000000)
183 ; CHECK: %cst32_10:_(s32) = G_CONSTANT i32 10
185 ; CHECK: %phi:_(s32) = G_PHI %cst32_4(s32), %bb.1, %cst32_10(s32), %bb.2
186 ; CHECK: %ext:_(s64) = G_SEXT %phi(s32)
187 ; CHECK: %ptr:_(p0) = G_PTR_ADD %base, %ext(s64)
188 ; CHECK: $x0 = COPY %ptr(p0)
189 ; CHECK: RET_ReallyLR implicit $x0
191 liveins: $w0, $w1, $x2
195 %base:_(p0) = COPY $x2
196 %zero:_(s32) = G_CONSTANT i32 0
197 %one:_(s32) = G_CONSTANT i32 2
198 %cmp:_(s1) = G_ICMP intpred(sgt), %0(s32), %one
199 G_BRCOND %cmp(s1), %bb.2
203 %cst32_4:_(s32) = G_CONSTANT i32 4
207 %cst32_10:_(s32) = G_CONSTANT i32 10
210 %phi:_(s32) = G_PHI %cst32_4(s32), %bb.2, %cst32_10(s32), %bb.3
211 %ext:_(s64) = G_SEXT %phi
212 %ptr:_(p0) = G_PTR_ADD %base, %ext
214 RET_ReallyLR implicit $x0
218 # Same as above but we do it here because the extend has multiple users, so
219 # it probably won't cost extra instructions if we remove it.
# Positive test: with a second user ($x1) of %ext, the CHECK lines assert the
# G_SEXT is sunk into the predecessors and the G_PHI becomes s64.
221 name: sext_icst_through_phi_used_by_ptradd_multiuse
222 tracksRegLiveness: true
224 ; CHECK-LABEL: name: sext_icst_through_phi_used_by_ptradd_multiuse
226 ; CHECK: successors: %bb.1(0x40000000), %bb.2(0x40000000)
227 ; CHECK: liveins: $w0, $w1, $x2
228 ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $w0
229 ; CHECK: %base:_(p0) = COPY $x2
230 ; CHECK: %one:_(s32) = G_CONSTANT i32 2
231 ; CHECK: %cmp:_(s1) = G_ICMP intpred(sle), [[COPY]](s32), %one
232 ; CHECK: G_BRCOND %cmp(s1), %bb.2
235 ; CHECK: successors: %bb.3(0x80000000)
236 ; CHECK: %cst32_4:_(s32) = G_CONSTANT i32 4
237 ; CHECK: [[SEXT:%[0-9]+]]:_(s64) = G_SEXT %cst32_4(s32)
240 ; CHECK: successors: %bb.3(0x80000000)
241 ; CHECK: %cst32_10:_(s32) = G_CONSTANT i32 10
242 ; CHECK: [[SEXT1:%[0-9]+]]:_(s64) = G_SEXT %cst32_10(s32)
244 ; CHECK: %ext:_(s64) = G_PHI [[SEXT]](s64), %bb.1, [[SEXT1]](s64), %bb.2
245 ; CHECK: %ptr:_(p0) = G_PTR_ADD %base, %ext(s64)
246 ; CHECK: $x0 = COPY %ptr(p0)
247 ; CHECK: $x1 = COPY %ext(s64)
248 ; CHECK: RET_ReallyLR implicit $x0
250 liveins: $w0, $w1, $x2
254 %base:_(p0) = COPY $x2
255 %zero:_(s32) = G_CONSTANT i32 0
256 %one:_(s32) = G_CONSTANT i32 2
257 %cmp:_(s1) = G_ICMP intpred(sgt), %0(s32), %one
258 G_BRCOND %cmp(s1), %bb.2
262 %cst32_4:_(s32) = G_CONSTANT i32 4
266 %cst32_10:_(s32) = G_CONSTANT i32 10
269 %phi:_(s32) = G_PHI %cst32_4(s32), %bb.2, %cst32_10(s32), %bb.3
270 %ext:_(s64) = G_SEXT %phi
271 %ptr:_(p0) = G_PTR_ADD %base, %ext
274 RET_ReallyLR implicit $x0
278 # Check we don't propagate if there are more than 2 unique incoming values in the phi.
279 # Doing so might cause too much code bloat.
# Negative test: with 3 unique incoming constants, the CHECK lines assert the
# G_PHI stays s32 and the G_ZEXT remains after it.
281 name: zext_icst_through_phi_too_many_incoming
282 tracksRegLiveness: true
284 ; CHECK-LABEL: name: zext_icst_through_phi_too_many_incoming
286 ; CHECK: successors: %bb.1(0x40000000), %bb.2(0x40000000)
287 ; CHECK: liveins: $w0, $w1
288 ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $w0
289 ; CHECK: %one:_(s32) = G_CONSTANT i32 2
290 ; CHECK: %cmp:_(s1) = G_ICMP intpred(sle), [[COPY]](s32), %one
291 ; CHECK: G_BRCOND %cmp(s1), %bb.2
294 ; CHECK: successors: %bb.3(0x40000000), %bb.4(0x40000000)
295 ; CHECK: %cst32_4:_(s32) = G_CONSTANT i32 4
296 ; CHECK: %cond:_(s1) = G_IMPLICIT_DEF
297 ; CHECK: G_BRCOND %cond(s1), %bb.3
300 ; CHECK: successors: %bb.4(0x80000000)
301 ; CHECK: %cst32_10:_(s32) = G_CONSTANT i32 10
304 ; CHECK: successors: %bb.4(0x80000000)
305 ; CHECK: %cst32_42:_(s32) = G_CONSTANT i32 42
307 ; CHECK: %phi:_(s32) = G_PHI %cst32_4(s32), %bb.1, %cst32_10(s32), %bb.2, %cst32_42(s32), %bb.3
308 ; CHECK: %ext:_(s64) = G_ZEXT %phi(s32)
309 ; CHECK: $x0 = COPY %ext(s64)
310 ; CHECK: RET_ReallyLR implicit $x0
316 %zero:_(s32) = G_CONSTANT i32 0
317 %one:_(s32) = G_CONSTANT i32 2
318 %cmp:_(s1) = G_ICMP intpred(sgt), %0(s32), %one
319 G_BRCOND %cmp(s1), %bb.2
323 %cst32_4:_(s32) = G_CONSTANT i32 4
324 %cond:_(s1) = G_IMPLICIT_DEF
325 G_BRCOND %cond, %bb.5
329 %cst32_10:_(s32) = G_CONSTANT i32 10
333 %cst32_42:_(s32) = G_CONSTANT i32 42
336 %phi:_(s32) = G_PHI %cst32_4(s32), %bb.2, %cst32_10(s32), %bb.3, %cst32_42(s32), %bb.5
337 %ext:_(s64) = G_ZEXT %phi
339 RET_ReallyLR implicit $x0
343 # Check that we don't propagate if the extension would be of a non-allowed inst.
# Negative test: the incoming value %add is a G_ADD (not a constant/load), so
# the CHECK lines assert the G_SEXT stays after the s32 G_PHI.
345 name: sext_add_through_phi
346 tracksRegLiveness: true
348 ; CHECK-LABEL: name: sext_add_through_phi
350 ; CHECK: successors: %bb.1(0x40000000), %bb.2(0x40000000)
351 ; CHECK: liveins: $w0, $w1
352 ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $w0
353 ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $w1
354 ; CHECK: %one:_(s32) = G_CONSTANT i32 2
355 ; CHECK: %cmp:_(s1) = G_ICMP intpred(sle), [[COPY]](s32), %one
356 ; CHECK: G_BRCOND %cmp(s1), %bb.2
359 ; CHECK: successors: %bb.3(0x80000000)
360 ; CHECK: %add:_(s32) = G_ADD [[COPY]], [[COPY1]]
363 ; CHECK: successors: %bb.3(0x80000000)
364 ; CHECK: %cst32_10:_(s32) = G_CONSTANT i32 10
366 ; CHECK: %phi:_(s32) = G_PHI %add(s32), %bb.1, %cst32_10(s32), %bb.2
367 ; CHECK: %ext:_(s64) = G_SEXT %phi(s32)
368 ; CHECK: $x0 = COPY %ext(s64)
369 ; CHECK: RET_ReallyLR implicit $x0
375 %zero:_(s32) = G_CONSTANT i32 0
376 %one:_(s32) = G_CONSTANT i32 2
377 %cmp:_(s1) = G_ICMP intpred(sgt), %0(s32), %one
378 G_BRCOND %cmp(s1), %bb.2
382 %add:_(s32) = G_ADD %0, %1
386 %cst32_10:_(s32) = G_CONSTANT i32 10
389 %phi:_(s32) = G_PHI %add(s32), %bb.2, %cst32_10(s32), %bb.3
390 %ext:_(s64) = G_SEXT %phi
392 RET_ReallyLR implicit $x0
396 # Same as above but allowed with a G_ANYEXT.
# Positive test: for G_ANYEXT the G_ADD incoming value is acceptable, so the
# CHECK lines assert the extend is sunk into both predecessors and the G_PHI
# becomes s64.
398 name: anyext_add_through_phi
399 tracksRegLiveness: true
401 ; CHECK-LABEL: name: anyext_add_through_phi
403 ; CHECK: successors: %bb.1(0x40000000), %bb.2(0x40000000)
404 ; CHECK: liveins: $w0, $w1
405 ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $w0
406 ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $w1
407 ; CHECK: %one:_(s32) = G_CONSTANT i32 2
408 ; CHECK: %cmp:_(s1) = G_ICMP intpred(sle), [[COPY]](s32), %one
409 ; CHECK: G_BRCOND %cmp(s1), %bb.2
412 ; CHECK: successors: %bb.3(0x80000000)
413 ; CHECK: %add:_(s32) = G_ADD [[COPY]], [[COPY1]]
414 ; CHECK: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT %add(s32)
417 ; CHECK: successors: %bb.3(0x80000000)
418 ; CHECK: %cst32_10:_(s32) = G_CONSTANT i32 10
419 ; CHECK: [[ANYEXT1:%[0-9]+]]:_(s64) = G_ANYEXT %cst32_10(s32)
421 ; CHECK: %ext:_(s64) = G_PHI [[ANYEXT]](s64), %bb.1, [[ANYEXT1]](s64), %bb.2
422 ; CHECK: $x0 = COPY %ext(s64)
423 ; CHECK: RET_ReallyLR implicit $x0
429 %zero:_(s32) = G_CONSTANT i32 0
430 %one:_(s32) = G_CONSTANT i32 2
431 %cmp:_(s1) = G_ICMP intpred(sgt), %0(s32), %one
432 G_BRCOND %cmp(s1), %bb.2
436 %add:_(s32) = G_ADD %0, %1
440 %cst32_10:_(s32) = G_CONSTANT i32 10
443 %phi:_(s32) = G_PHI %add(s32), %bb.2, %cst32_10(s32), %bb.3
444 %ext:_(s64) = G_ANYEXT %phi
446 RET_ReallyLR implicit $x0