1 # NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
2 # RUN: llc -mtriple aarch64-unknown-unknown -run-pass=instruction-select -global-isel-abort=1 -verify-machineinstrs %s -o - | FileCheck %s
4 # Check folding an AND into a G_BRCOND which has been matched as a TB(N)Z.
12 ; CHECK-LABEL: name: fold_and_rhs
14 ; CHECK: successors: %bb.0(0x40000000), %bb.1(0x40000000)
15 ; CHECK: %copy:gpr64all = COPY $x0
16 ; CHECK: [[COPY:%[0-9]+]]:gpr32all = COPY %copy.sub_32
17 ; CHECK: [[COPY1:%[0-9]+]]:gpr32 = COPY [[COPY]]
18 ; CHECK: TBNZW [[COPY1]], 3, %bb.1
23 successors: %bb.0, %bb.1
25 %copy:gpr(s64) = COPY $x0
26 %bit:gpr(s64) = G_CONSTANT i64 8
27 %zero:gpr(s64) = G_CONSTANT i64 0
28 %fold_cst:gpr(s64) = G_CONSTANT i64 8
30 ; tbnz (and x, 8), 3 == tbnz x, 3 because the third bit of x & 8 is 1 when
31 ; the third bit of x is 1.
32 %fold_me:gpr(s64) = G_AND %copy, %fold_cst
34 %and:gpr(s64) = G_AND %fold_me, %bit
35 %cmp:gpr(s32) = G_ICMP intpred(ne), %and(s64), %zero
47 ; CHECK-LABEL: name: fold_and_lhs
49 ; CHECK: successors: %bb.0(0x40000000), %bb.1(0x40000000)
50 ; CHECK: %copy:gpr64all = COPY $x0
51 ; CHECK: [[COPY:%[0-9]+]]:gpr32all = COPY %copy.sub_32
52 ; CHECK: [[COPY1:%[0-9]+]]:gpr32 = COPY [[COPY]]
53 ; CHECK: TBNZW [[COPY1]], 3, %bb.1
58 successors: %bb.0, %bb.1
60 %copy:gpr(s64) = COPY $x0
61 %bit:gpr(s64) = G_CONSTANT i64 8
62 %zero:gpr(s64) = G_CONSTANT i64 0
63 %fold_cst:gpr(s64) = G_CONSTANT i64 8
65 ; Same as above, but with the constant on the other side.
66 %fold_me:gpr(s64) = G_AND %fold_cst, %copy
68 %and:gpr(s64) = G_AND %fold_me, %bit
69 %cmp:gpr(s32) = G_ICMP intpred(ne), %and(s64), %zero
81 ; CHECK-LABEL: name: dont_fold_and
83 ; CHECK: successors: %bb.0(0x40000000), %bb.1(0x40000000)
84 ; CHECK: %copy:gpr64 = COPY $x0
85 ; CHECK: %fold_me:gpr64sp = ANDXri %copy, 4098
86 ; CHECK: [[COPY:%[0-9]+]]:gpr32all = COPY %fold_me.sub_32
87 ; CHECK: [[COPY1:%[0-9]+]]:gpr32 = COPY [[COPY]]
88 ; CHECK: TBNZW [[COPY1]], 3, %bb.1
93 successors: %bb.0, %bb.1
95 %copy:gpr(s64) = COPY $x0
96 %bit:gpr(s64) = G_CONSTANT i64 8
97 %zero:gpr(s64) = G_CONSTANT i64 0
99 ; tbnz (and x, 7), 3 != tbnz x, 3, because the third bit of x & 7 is always 0,
101 %fold_cst:gpr(s64) = G_CONSTANT i64 7
103 %fold_me:gpr(s64) = G_AND %copy, %fold_cst
104 %and:gpr(s64) = G_AND %fold_me, %bit
105 %cmp:gpr(s32) = G_ICMP intpred(ne), %and(s64), %zero