# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple aarch64-unknown-unknown -run-pass=instruction-select -global-isel-abort=1 -verify-machineinstrs %s -o - | FileCheck %s
#
# Check that we can continue matching when we are in a situation where we will
# emit a TB(N)Z.
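#
# For example, in fold_zext below the G_ZEXT feeding the G_AND can be looked
# through while matching the test bit, so the branch reads bit 3 directly from
# the unextended 32-bit source:
#
#   %fold_me:gpr(s64) = G_ZEXT %copy(s32)
#   %and:gpr(s64) = G_AND %fold_me, %bit    (%bit = 8, i.e. bit 3)
#   %cmp:gpr(s32) = G_ICMP intpred(ne), %and(s64), %zero
#
# and the expected selection is: TBNZW %copy, 3, %bb.1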
...
---
name: fold_zext
alignment: 4
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  ; CHECK-LABEL: name: fold_zext
  ; CHECK: bb.0:
  ; CHECK-NEXT: successors: %bb.0(0x40000000), %bb.1(0x40000000)
  ; CHECK-NEXT: liveins: $x0
  ; CHECK-NEXT: {{  $}}
  ; CHECK-NEXT: %copy:gpr32 = COPY $w0
  ; CHECK-NEXT: TBNZW %copy, 3, %bb.1
  ; CHECK-NEXT: B %bb.0
  ; CHECK-NEXT: {{  $}}
  ; CHECK-NEXT: bb.1:
  ; CHECK-NEXT: RET_ReallyLR
  bb.0:
    successors: %bb.0, %bb.1
    liveins: $x0
    %copy:gpr(s32) = COPY $w0
    %bit:gpr(s64) = G_CONSTANT i64 8
    %zero:gpr(s64) = G_CONSTANT i64 0
    %fold_me:gpr(s64) = G_ZEXT %copy(s32)
    %and:gpr(s64) = G_AND %fold_me, %bit
    %cmp:gpr(s32) = G_ICMP intpred(ne), %and(s64), %zero
    G_BRCOND %cmp(s32), %bb.1
    G_BR %bb.0
  bb.1:
    RET_ReallyLR
...
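# The same fold should apply when the extended value comes from a G_ANYEXT
# rather than a G_ZEXT.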
---
name: fold_anyext
alignment: 4
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  ; CHECK-LABEL: name: fold_anyext
  ; CHECK: bb.0:
  ; CHECK-NEXT: successors: %bb.0(0x40000000), %bb.1(0x40000000)
  ; CHECK-NEXT: liveins: $x0
  ; CHECK-NEXT: {{  $}}
  ; CHECK-NEXT: %copy:gpr32 = COPY $w0
  ; CHECK-NEXT: TBNZW %copy, 3, %bb.1
  ; CHECK-NEXT: B %bb.0
  ; CHECK-NEXT: {{  $}}
  ; CHECK-NEXT: bb.1:
  ; CHECK-NEXT: RET_ReallyLR
  bb.0:
    successors: %bb.0, %bb.1
    liveins: $x0
    %copy:gpr(s32) = COPY $w0
    %bit:gpr(s64) = G_CONSTANT i64 8
    %zero:gpr(s64) = G_CONSTANT i64 0
    %fold_me:gpr(s64) = G_ANYEXT %copy(s32)
    %and:gpr(s64) = G_AND %fold_me, %bit
    %cmp:gpr(s32) = G_ICMP intpred(ne), %and(s64), %zero
    G_BRCOND %cmp(s32), %bb.1
    G_BR %bb.0
  bb.1:
    RET_ReallyLR
...
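# We should be able to look through more than one extend here (a G_ZEXT
# followed by a G_ANYEXT) on the way back to the tested value.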
---
name: fold_multiple
alignment: 4
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  ; CHECK-LABEL: name: fold_multiple
  ; CHECK: bb.0:
  ; CHECK-NEXT: successors: %bb.0(0x40000000), %bb.1(0x40000000)
  ; CHECK-NEXT: liveins: $h0
  ; CHECK-NEXT: {{  $}}
  ; CHECK-NEXT: [[SUBREG_TO_REG:%[0-9]+]]:fpr32 = SUBREG_TO_REG 0, $h0, %subreg.hsub
  ; CHECK-NEXT: %copy:gpr32all = COPY [[SUBREG_TO_REG]]
  ; CHECK-NEXT: [[COPY:%[0-9]+]]:gpr32 = COPY %copy
  ; CHECK-NEXT: TBNZW [[COPY]], 3, %bb.1
  ; CHECK-NEXT: B %bb.0
  ; CHECK-NEXT: {{  $}}
  ; CHECK-NEXT: bb.1:
  ; CHECK-NEXT: RET_ReallyLR
  bb.0:
    successors: %bb.0, %bb.1
    liveins: $h0
    %copy:gpr(s16) = COPY $h0
    %bit:gpr(s64) = G_CONSTANT i64 8
    %zero:gpr(s64) = G_CONSTANT i64 0
    %ext1:gpr(s32) = G_ZEXT %copy(s16)
    %ext2:gpr(s64) = G_ANYEXT %ext1(s32)
    %and:gpr(s64) = G_AND %ext2, %bit
    %cmp:gpr(s32) = G_ICMP intpred(ne), %and(s64), %zero
    G_BRCOND %cmp(s32), %bb.1
    G_BR %bb.0
  bb.1:
    RET_ReallyLR
...
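# Negative test: %zext is used by both the G_AND and the copy into $x0, so it
# cannot be folded away. The extend stays, and the TBNZW tests a copy of its
# 32-bit sub-register instead.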
---
name: dont_fold_more_than_one_use
alignment: 4
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  ; CHECK-LABEL: name: dont_fold_more_than_one_use
  ; CHECK: bb.0:
  ; CHECK-NEXT: successors: %bb.0(0x40000000), %bb.1(0x40000000)
  ; CHECK-NEXT: liveins: $x0
  ; CHECK-NEXT: {{  $}}
  ; CHECK-NEXT: %copy:gpr32 = COPY $w0
  ; CHECK-NEXT: [[ORRWrs:%[0-9]+]]:gpr32 = ORRWrs $wzr, %copy, 0
  ; CHECK-NEXT: %zext:gpr64 = SUBREG_TO_REG 0, [[ORRWrs]], %subreg.sub_32
  ; CHECK-NEXT: [[COPY:%[0-9]+]]:gpr32all = COPY %zext.sub_32
  ; CHECK-NEXT: [[COPY1:%[0-9]+]]:gpr32 = COPY [[COPY]]
  ; CHECK-NEXT: TBNZW [[COPY1]], 3, %bb.1
  ; CHECK-NEXT: B %bb.0
  ; CHECK-NEXT: {{  $}}
  ; CHECK-NEXT: bb.1:
  ; CHECK-NEXT: $x0 = COPY %zext
  ; CHECK-NEXT: RET_ReallyLR implicit $x0
  bb.0:
    successors: %bb.0, %bb.1
    liveins: $x0
    %copy:gpr(s32) = COPY $w0
    %bit:gpr(s64) = G_CONSTANT i64 8
    %zero:gpr(s64) = G_CONSTANT i64 0
    %zext:gpr(s64) = G_ZEXT %copy(s32)
    %and:gpr(s64) = G_AND %zext, %bit
    %cmp:gpr(s32) = G_ICMP intpred(ne), %and(s64), %zero
    G_BRCOND %cmp(s32), %bb.1
    G_BR %bb.0
  bb.1:
    $x0 = COPY %zext:gpr(s64)
    RET_ReallyLR implicit $x0
...