# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple aarch64-unknown-unknown -run-pass=instruction-select -verify-machineinstrs %s -o - | FileCheck %s
# Check folding an AND into a G_BRCOND which has been matched as a TB(N)Z.
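#
# The selected pattern is G_BRCOND of (G_ICMP ne (G_AND x, 2^bit), 0), which is
# matched as a TB(N)Z on that bit. An extra G_AND feeding the compare can be
# folded away whenever the tested bit is set in its constant mask, since that
# bit of the AND result then equals the same bit of x.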
; CHECK-LABEL: name: fold_and_rhs
; CHECK: successors: %bb.0(0x40000000), %bb.1(0x40000000)
; CHECK: %copy:gpr64all = COPY $x0
; CHECK: [[COPY:%[0-9]+]]:gpr32all = COPY %copy.sub_32
; CHECK: [[COPY1:%[0-9]+]]:gpr32 = COPY [[COPY]]
; CHECK: TBNZW [[COPY1]], 3, %bb.1
successors: %bb.0, %bb.1
%copy:gpr(s64) = COPY $x0
%bit:gpr(s64) = G_CONSTANT i64 8
%zero:gpr(s64) = G_CONSTANT i64 0
%fold_cst:gpr(s64) = G_CONSTANT i64 8
; tbnz (and x, 8), 3 == tbnz x, 3 because the third bit of x & 8 is 1 when
; the third bit of x is 1.
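; The inner G_AND is therefore expected to be dropped: the CHECK lines above
; show TBNZW testing bit 3 of a 32-bit copy of %copy, with no AND emitted.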
%fold_me:gpr(s64) = G_AND %copy, %fold_cst
%and:gpr(s64) = G_AND %fold_me, %bit
%cmp:gpr(s32) = G_ICMP intpred(ne), %and(s64), %zero
%cmp_trunc:gpr(s1) = G_TRUNC %cmp(s32)
G_BRCOND %cmp_trunc(s1), %bb.1
; CHECK-LABEL: name: fold_and_lhs
; CHECK: successors: %bb.0(0x40000000), %bb.1(0x40000000)
; CHECK: %copy:gpr64all = COPY $x0
; CHECK: [[COPY:%[0-9]+]]:gpr32all = COPY %copy.sub_32
; CHECK: [[COPY1:%[0-9]+]]:gpr32 = COPY [[COPY]]
; CHECK: TBNZW [[COPY1]], 3, %bb.1
successors: %bb.0, %bb.1
%copy:gpr(s64) = COPY $x0
%bit:gpr(s64) = G_CONSTANT i64 8
%zero:gpr(s64) = G_CONSTANT i64 0
%fold_cst:gpr(s64) = G_CONSTANT i64 8
; Same as above, but with the constant on the other side.
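; G_AND is commutative, so the same fold applies and the CHECK lines above
; again expect a bare TBNZW on bit 3 with no AND emitted.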
%fold_me:gpr(s64) = G_AND %fold_cst, %copy
%and:gpr(s64) = G_AND %fold_me, %bit
%cmp:gpr(s32) = G_ICMP intpred(ne), %and(s64), %zero
%cmp_trunc:gpr(s1) = G_TRUNC %cmp(s32)
G_BRCOND %cmp_trunc(s1), %bb.1
; CHECK-LABEL: name: dont_fold_and
; CHECK: successors: %bb.0(0x40000000), %bb.1(0x40000000)
; CHECK: %copy:gpr64 = COPY $x0
; CHECK: %fold_me:gpr64sp = ANDXri %copy, 4098
; CHECK: [[COPY:%[0-9]+]]:gpr32all = COPY %fold_me.sub_32
; CHECK: [[COPY1:%[0-9]+]]:gpr32 = COPY [[COPY]]
; CHECK: TBNZW [[COPY1]], 3, %bb.1
successors: %bb.0, %bb.1
%copy:gpr(s64) = COPY $x0
%bit:gpr(s64) = G_CONSTANT i64 8
%zero:gpr(s64) = G_CONSTANT i64 0
; tbnz (and x, 7), 3 != tbnz x, 3, because the third bit of x & 7 is always
; zero.
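; Here the G_AND cannot be folded away; the CHECK lines above expect it to
; survive as an ANDXri, with the TBNZW testing bit 3 of that result instead.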
%fold_cst:gpr(s64) = G_CONSTANT i64 7
%fold_me:gpr(s64) = G_AND %copy, %fold_cst
%and:gpr(s64) = G_AND %fold_me, %bit
%cmp:gpr(s32) = G_ICMP intpred(ne), %and(s64), %zero
%cmp_trunc:gpr(s1) = G_TRUNC %cmp(s32)
G_BRCOND %cmp_trunc(s1), %bb.1