# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple aarch64 -debugify-and-strip-all-safe -run-pass=aarch64-prelegalizer-combiner -verify-machineinstrs %s -o - | FileCheck %s
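#
# Check that (xor (and x, y), y) is combined into (and (not x), y), i.e. a
# G_XOR of x with -1 followed by a G_AND with y, and that the combine is
# skipped when the G_AND uses a different register or has more than one use.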
---
name: fold_scalar
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $w0, $w1
    ; (xor (and x, y), y) -> (and (not x), y)
    ; CHECK-LABEL: name: fold_scalar
    ; CHECK: liveins: $w0, $w1
    ; CHECK: %x:_(s32) = COPY $w0
    ; CHECK: %y:_(s32) = COPY $w1
    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1
    ; CHECK: [[XOR:%[0-9]+]]:_(s32) = G_XOR %x, [[C]]
    ; CHECK: %xor:_(s32) = G_AND [[XOR]], %y
    ; CHECK: $w0 = COPY %xor(s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %x:_(s32) = COPY $w0
    %y:_(s32) = COPY $w1
    %and:_(s32) = G_AND %x, %y
    %xor:_(s32) = G_XOR %and, %y
    $w0 = COPY %xor(s32)
    RET_ReallyLR implicit $w0
...
---
name: fold_vector
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $x0, $x1
    ; Same fold as above, but with vector types.
    ; CHECK-LABEL: name: fold_vector
    ; CHECK: liveins: $x0, $x1
    ; CHECK: %x:_(<2 x s32>) = COPY $x0
    ; CHECK: %y:_(<2 x s32>) = COPY $x1
    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1
    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[C]](s32), [[C]](s32)
    ; CHECK: [[XOR:%[0-9]+]]:_(<2 x s32>) = G_XOR %x, [[BUILD_VECTOR]]
    ; CHECK: %xor:_(<2 x s32>) = G_AND [[XOR]], %y
    ; CHECK: $x0 = COPY %xor(<2 x s32>)
    ; CHECK: RET_ReallyLR implicit $x0
    %x:_(<2 x s32>) = COPY $x0
    %y:_(<2 x s32>) = COPY $x1
    %and:_(<2 x s32>) = G_AND %x, %y
    %xor:_(<2 x s32>) = G_XOR %and, %y
    $x0 = COPY %xor(<2 x s32>)
    RET_ReallyLR implicit $x0
...
---
name: fold_commuted_and
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $w0, $w1
    ; (xor (and y, x), y) -> (and (not x), y)
    ; CHECK-LABEL: name: fold_commuted_and
    ; CHECK: liveins: $w0, $w1
    ; CHECK: %x:_(s32) = COPY $w0
    ; CHECK: %y:_(s32) = COPY $w1
    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1
    ; CHECK: [[XOR:%[0-9]+]]:_(s32) = G_XOR %x, [[C]]
    ; CHECK: %xor:_(s32) = G_AND [[XOR]], %y
    ; CHECK: $w0 = COPY %xor(s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %x:_(s32) = COPY $w0
    %y:_(s32) = COPY $w1
    %and:_(s32) = G_AND %y, %x
    %xor:_(s32) = G_XOR %and, %y
    $w0 = COPY %xor(s32)
    RET_ReallyLR implicit $w0
...
---
name: fold_commuted_xor
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $w0, $w1
    ; (xor y, (and x, y)) -> (and (not x), y)
    ; CHECK-LABEL: name: fold_commuted_xor
    ; CHECK: liveins: $w0, $w1
    ; CHECK: %x:_(s32) = COPY $w0
    ; CHECK: %y:_(s32) = COPY $w1
    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1
    ; CHECK: [[XOR:%[0-9]+]]:_(s32) = G_XOR %x, [[C]]
    ; CHECK: %xor:_(s32) = G_AND [[XOR]], %y
    ; CHECK: $w0 = COPY %xor(s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %x:_(s32) = COPY $w0
    %y:_(s32) = COPY $w1
    %and:_(s32) = G_AND %x, %y
    %xor:_(s32) = G_XOR %y, %and
    $w0 = COPY %xor(s32)
    RET_ReallyLR implicit $w0
...
---
name: fold_commuted_xor_and
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $w0, $w1
    ; (xor y, (and y, x)) -> (and (not x), y)
    ; CHECK-LABEL: name: fold_commuted_xor_and
    ; CHECK: liveins: $w0, $w1
    ; CHECK: %x:_(s32) = COPY $w0
    ; CHECK: %y:_(s32) = COPY $w1
    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1
    ; CHECK: [[XOR:%[0-9]+]]:_(s32) = G_XOR %x, [[C]]
    ; CHECK: %xor:_(s32) = G_AND [[XOR]], %y
    ; CHECK: $w0 = COPY %xor(s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %x:_(s32) = COPY $w0
    %y:_(s32) = COPY $w1
    %and:_(s32) = G_AND %y, %x
    %xor:_(s32) = G_XOR %y, %and
    $w0 = COPY %xor(s32)
    RET_ReallyLR implicit $w0
...
---
name: dont_fold_different_regs
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $w0, $w1, $w2
    ; The G_AND does not share any registers with the G_XOR
    ; CHECK-LABEL: name: dont_fold_different_regs
    ; CHECK: liveins: $w0, $w1, $w2
    ; CHECK: %x:_(s32) = COPY $w0
    ; CHECK: %y:_(s32) = COPY $w1
    ; CHECK: %z:_(s32) = COPY $w2
    ; CHECK: %and:_(s32) = G_AND %x, %z
    ; CHECK: %xor:_(s32) = G_XOR %and, %y
    ; CHECK: $w0 = COPY %xor(s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %x:_(s32) = COPY $w0
    %y:_(s32) = COPY $w1
    %z:_(s32) = COPY $w2
    %and:_(s32) = G_AND %x, %z
    %xor:_(s32) = G_XOR %and, %y
    $w0 = COPY %xor(s32)
    RET_ReallyLR implicit $w0
...
---
name: dont_fold_more_than_one_use
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $w0, $w1, $w2
    ; Don't fold when the G_AND is used outside the G_XOR.
    ;
    ; CHECK-LABEL: name: dont_fold_more_than_one_use
    ; CHECK: liveins: $w0, $w1, $w2
    ; CHECK: %x:_(s32) = COPY $w0
    ; CHECK: %y:_(s32) = COPY $w1
    ; CHECK: %z:_(s32) = COPY $w2
    ; CHECK: %and:_(s32) = G_AND %x, %z
    ; CHECK: %xor:_(s32) = G_XOR %and, %y
    ; CHECK: %add:_(s32) = G_ADD %and, %xor
    ; CHECK: $w0 = COPY %add(s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %x:_(s32) = COPY $w0
    %y:_(s32) = COPY $w1
    %z:_(s32) = COPY $w2
    %and:_(s32) = G_AND %x, %z
    %xor:_(s32) = G_XOR %and, %y
    %add:_(s32) = G_ADD %and, %xor
    $w0 = COPY %add(s32)
    RET_ReallyLR implicit $w0
...