; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt < %s -passes=instcombine -S | FileCheck %s
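
; The pattern being canonicalized here is
;   (1 << x) | ((1 << x) - 1)
; i.e. a mask with bits 0..x (inclusive) set, which can instead be computed as
;   -1 >> ((bitwidth - 1) - x)
; The tests below cover scalar and splat-vector versions, extra uses of the
; intermediate values, and variants where the two shifts are not CSE'd.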

declare void @use8(i8)

define i8 @t0(i8 %x) {
; CHECK-LABEL: @t0(
; CHECK-NEXT:    [[TMP1:%.*]] = sub i8 7, [[X:%.*]]
; CHECK-NEXT:    [[MASK:%.*]] = lshr i8 -1, [[TMP1]]
; CHECK-NEXT:    ret i8 [[MASK]]
;
  %bitmask = shl i8 1, %x
  %lowbitmask = add i8 %bitmask, -1
  %mask = or i8 %lowbitmask, %bitmask
  ret i8 %mask
}

; Same, but different bit width
define i16 @t1(i16 %x) {
; CHECK-LABEL: @t1(
; CHECK-NEXT:    [[TMP1:%.*]] = sub i16 15, [[X:%.*]]
; CHECK-NEXT:    [[MASK:%.*]] = lshr i16 -1, [[TMP1]]
; CHECK-NEXT:    ret i16 [[MASK]]
;
  %bitmask = shl i16 1, %x
  %lowbitmask = add i16 %bitmask, -1
  %mask = or i16 %lowbitmask, %bitmask
  ret i16 %mask
}
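
; Splat vector versions of the same pattern, including variants where some
; constant elements are poison.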

define <2 x i8> @t2_vec(<2 x i8> %x) {
; CHECK-LABEL: @t2_vec(
; CHECK-NEXT:    [[TMP1:%.*]] = sub <2 x i8> splat (i8 7), [[X:%.*]]
; CHECK-NEXT:    [[MASK:%.*]] = lshr <2 x i8> splat (i8 -1), [[TMP1]]
; CHECK-NEXT:    ret <2 x i8> [[MASK]]
;
  %bitmask = shl <2 x i8> <i8 1, i8 1>, %x
  %lowbitmask = add <2 x i8> %bitmask, <i8 -1, i8 -1>
  %mask = or <2 x i8> %lowbitmask, %bitmask
  ret <2 x i8> %mask
}

define <3 x i8> @t3_vec_poison0(<3 x i8> %x) {
; CHECK-LABEL: @t3_vec_poison0(
; CHECK-NEXT:    [[TMP1:%.*]] = sub <3 x i8> splat (i8 7), [[X:%.*]]
; CHECK-NEXT:    [[MASK:%.*]] = lshr <3 x i8> splat (i8 -1), [[TMP1]]
; CHECK-NEXT:    ret <3 x i8> [[MASK]]
;
  %bitmask = shl <3 x i8> <i8 1, i8 poison, i8 1>, %x
  %lowbitmask = add <3 x i8> %bitmask, <i8 -1, i8 -1, i8 -1>
  %mask = or <3 x i8> %lowbitmask, %bitmask
  ret <3 x i8> %mask
}

define <3 x i8> @t4_vec_poison1(<3 x i8> %x) {
; CHECK-LABEL: @t4_vec_poison1(
; CHECK-NEXT:    [[TMP1:%.*]] = sub <3 x i8> splat (i8 7), [[X:%.*]]
; CHECK-NEXT:    [[MASK:%.*]] = lshr <3 x i8> splat (i8 -1), [[TMP1]]
; CHECK-NEXT:    ret <3 x i8> [[MASK]]
;
  %bitmask = shl <3 x i8> <i8 1, i8 1, i8 1>, %x
  %lowbitmask = add <3 x i8> %bitmask, <i8 -1, i8 poison, i8 -1>
  %mask = or <3 x i8> %lowbitmask, %bitmask
  ret <3 x i8> %mask
}

define <3 x i8> @t5_vec_poison2(<3 x i8> %x) {
; CHECK-LABEL: @t5_vec_poison2(
; CHECK-NEXT:    [[TMP1:%.*]] = sub <3 x i8> splat (i8 7), [[X:%.*]]
; CHECK-NEXT:    [[MASK:%.*]] = lshr <3 x i8> splat (i8 -1), [[TMP1]]
; CHECK-NEXT:    ret <3 x i8> [[MASK]]
;
  %bitmask = shl <3 x i8> <i8 1, i8 1, i8 poison>, %x
  %lowbitmask = add <3 x i8> %bitmask, <i8 -1, i8 poison, i8 -1>
  %mask = or <3 x i8> %lowbitmask, %bitmask
  ret <3 x i8> %mask
}
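
; Tests where intermediate values of the pattern have extra uses.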

define i8 @t6_extrause0(i8 %x) {
; CHECK-LABEL: @t6_extrause0(
; CHECK-NEXT:    [[BITMASK:%.*]] = shl nuw i8 1, [[X:%.*]]
; CHECK-NEXT:    call void @use8(i8 [[BITMASK]])
; CHECK-NEXT:    [[TMP1:%.*]] = sub i8 7, [[X]]
; CHECK-NEXT:    [[MASK:%.*]] = lshr i8 -1, [[TMP1]]
; CHECK-NEXT:    ret i8 [[MASK]]
;
  %bitmask = shl i8 1, %x
  call void @use8(i8 %bitmask)
  %lowbitmask = add i8 %bitmask, -1
  %mask = or i8 %lowbitmask, %bitmask
  ret i8 %mask
}
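
; In the next two tests the low-bit mask has an extra use, and the original
; instruction sequence is kept as-is.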

define i8 @t7_extrause1(i8 %x) {
; CHECK-LABEL: @t7_extrause1(
; CHECK-NEXT:    [[BITMASK:%.*]] = shl nuw i8 1, [[X:%.*]]
; CHECK-NEXT:    [[LOWBITMASK:%.*]] = add i8 [[BITMASK]], -1
; CHECK-NEXT:    call void @use8(i8 [[LOWBITMASK]])
; CHECK-NEXT:    [[MASK:%.*]] = or i8 [[LOWBITMASK]], [[BITMASK]]
; CHECK-NEXT:    ret i8 [[MASK]]
;
  %bitmask = shl i8 1, %x
  %lowbitmask = add i8 %bitmask, -1
  call void @use8(i8 %lowbitmask)
  %mask = or i8 %lowbitmask, %bitmask
  ret i8 %mask
}

define i8 @t8_extrause2(i8 %x) {
; CHECK-LABEL: @t8_extrause2(
; CHECK-NEXT:    [[BITMASK:%.*]] = shl nuw i8 1, [[X:%.*]]
; CHECK-NEXT:    call void @use8(i8 [[BITMASK]])
; CHECK-NEXT:    [[LOWBITMASK:%.*]] = add i8 [[BITMASK]], -1
; CHECK-NEXT:    call void @use8(i8 [[LOWBITMASK]])
; CHECK-NEXT:    [[MASK:%.*]] = or i8 [[LOWBITMASK]], [[BITMASK]]
; CHECK-NEXT:    ret i8 [[MASK]]
;
  %bitmask = shl i8 1, %x
  call void @use8(i8 %bitmask)
  %lowbitmask = add i8 %bitmask, -1
  call void @use8(i8 %lowbitmask)
  %mask = or i8 %lowbitmask, %bitmask
  ret i8 %mask
}
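
; Tests where the two (1 << x) shifts have not been CSE'd into a single
; instruction, so the 'or' sees two distinct shl instructions.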

define i8 @t9_nocse(i8 %x) {
; CHECK-LABEL: @t9_nocse(
; CHECK-NEXT:    [[BITMASK1:%.*]] = shl nuw i8 1, [[X:%.*]]
; CHECK-NEXT:    [[NOTMASK:%.*]] = shl nsw i8 -1, [[X]]
; CHECK-NEXT:    [[LOWBITMASK:%.*]] = xor i8 [[NOTMASK]], -1
; CHECK-NEXT:    [[MASK:%.*]] = or i8 [[BITMASK1]], [[LOWBITMASK]]
; CHECK-NEXT:    ret i8 [[MASK]]
;
  %bitmask0 = shl i8 1, %x
  %bitmask1 = shl i8 1, %x
  %lowbitmask = add i8 %bitmask0, -1
  %mask = or i8 %lowbitmask, %bitmask1
  ret i8 %mask
}

; Non-CSE'd extra uses tests
define i8 @t10_nocse_extrause0(i8 %x) {
; CHECK-LABEL: @t10_nocse_extrause0(
; CHECK-NEXT:    [[BITMASK0:%.*]] = shl nuw i8 1, [[X:%.*]]
; CHECK-NEXT:    call void @use8(i8 [[BITMASK0]])
; CHECK-NEXT:    [[TMP1:%.*]] = sub i8 7, [[X]]
; CHECK-NEXT:    [[MASK:%.*]] = lshr i8 -1, [[TMP1]]
; CHECK-NEXT:    ret i8 [[MASK]]
;
  %bitmask0 = shl i8 1, %x
  call void @use8(i8 %bitmask0)
  %bitmask1 = shl i8 1, %x
  %lowbitmask = add i8 %bitmask0, -1
  %mask = or i8 %lowbitmask, %bitmask1
  ret i8 %mask
}

define i8 @t11_nocse_extrause1(i8 %x) {
; CHECK-LABEL: @t11_nocse_extrause1(
; CHECK-NEXT:    [[BITMASK1:%.*]] = shl nuw i8 1, [[X:%.*]]
; CHECK-NEXT:    call void @use8(i8 [[BITMASK1]])
; CHECK-NEXT:    [[NOTMASK:%.*]] = shl nsw i8 -1, [[X]]
; CHECK-NEXT:    [[LOWBITMASK:%.*]] = xor i8 [[NOTMASK]], -1
; CHECK-NEXT:    [[MASK:%.*]] = or i8 [[BITMASK1]], [[LOWBITMASK]]
; CHECK-NEXT:    ret i8 [[MASK]]
;
  %bitmask0 = shl i8 1, %x
  %bitmask1 = shl i8 1, %x
  call void @use8(i8 %bitmask1)
  %lowbitmask = add i8 %bitmask0, -1
  %mask = or i8 %lowbitmask, %bitmask1
  ret i8 %mask
}

define i8 @t12_nocse_extrause2(i8 %x) {
; CHECK-LABEL: @t12_nocse_extrause2(
; CHECK-NEXT:    [[BITMASK1:%.*]] = shl nuw i8 1, [[X:%.*]]
; CHECK-NEXT:    [[NOTMASK:%.*]] = shl nsw i8 -1, [[X]]
; CHECK-NEXT:    [[LOWBITMASK:%.*]] = xor i8 [[NOTMASK]], -1
; CHECK-NEXT:    call void @use8(i8 [[LOWBITMASK]])
; CHECK-NEXT:    [[MASK:%.*]] = or i8 [[BITMASK1]], [[LOWBITMASK]]
; CHECK-NEXT:    ret i8 [[MASK]]
;
  %bitmask0 = shl i8 1, %x
  %bitmask1 = shl i8 1, %x
  %lowbitmask = add i8 %bitmask0, -1
  call void @use8(i8 %lowbitmask)
  %mask = or i8 %lowbitmask, %bitmask1
  ret i8 %mask
}

define i8 @t13_nocse_extrause3(i8 %x) {
; CHECK-LABEL: @t13_nocse_extrause3(
; CHECK-NEXT:    [[BITMASK0:%.*]] = shl nuw i8 1, [[X:%.*]]
; CHECK-NEXT:    call void @use8(i8 [[BITMASK0]])
; CHECK-NEXT:    [[BITMASK1:%.*]] = shl nuw i8 1, [[X]]
; CHECK-NEXT:    call void @use8(i8 [[BITMASK1]])
; CHECK-NEXT:    [[TMP1:%.*]] = sub i8 7, [[X]]
; CHECK-NEXT:    [[MASK:%.*]] = lshr i8 -1, [[TMP1]]
; CHECK-NEXT:    ret i8 [[MASK]]
;
  %bitmask0 = shl i8 1, %x
  call void @use8(i8 %bitmask0)
  %bitmask1 = shl i8 1, %x
  call void @use8(i8 %bitmask1)
  %lowbitmask = add i8 %bitmask0, -1
  %mask = or i8 %lowbitmask, %bitmask1
  ret i8 %mask
}

define i8 @t14_nocse_extrause4(i8 %x) {
; CHECK-LABEL: @t14_nocse_extrause4(
; CHECK-NEXT:    [[BITMASK0:%.*]] = shl nuw i8 1, [[X:%.*]]
; CHECK-NEXT:    call void @use8(i8 [[BITMASK0]])
; CHECK-NEXT:    [[LOWBITMASK:%.*]] = add i8 [[BITMASK0]], -1
; CHECK-NEXT:    call void @use8(i8 [[LOWBITMASK]])
; CHECK-NEXT:    [[TMP1:%.*]] = sub i8 7, [[X]]
; CHECK-NEXT:    [[MASK:%.*]] = lshr i8 -1, [[TMP1]]
; CHECK-NEXT:    ret i8 [[MASK]]
;
  %bitmask0 = shl i8 1, %x
  call void @use8(i8 %bitmask0)
  %bitmask1 = shl i8 1, %x
  %lowbitmask = add i8 %bitmask0, -1
  call void @use8(i8 %lowbitmask)
  %mask = or i8 %lowbitmask, %bitmask1
  ret i8 %mask
}

define i8 @t15_nocse_extrause5(i8 %x) {
; CHECK-LABEL: @t15_nocse_extrause5(
; CHECK-NEXT:    [[BITMASK1:%.*]] = shl nuw i8 1, [[X:%.*]]
; CHECK-NEXT:    call void @use8(i8 [[BITMASK1]])
; CHECK-NEXT:    [[NOTMASK:%.*]] = shl nsw i8 -1, [[X]]
; CHECK-NEXT:    [[LOWBITMASK:%.*]] = xor i8 [[NOTMASK]], -1
; CHECK-NEXT:    call void @use8(i8 [[LOWBITMASK]])
; CHECK-NEXT:    [[MASK:%.*]] = or i8 [[BITMASK1]], [[LOWBITMASK]]
; CHECK-NEXT:    ret i8 [[MASK]]
;
  %bitmask0 = shl i8 1, %x
  %bitmask1 = shl i8 1, %x
  call void @use8(i8 %bitmask1)
  %lowbitmask = add i8 %bitmask0, -1
  call void @use8(i8 %lowbitmask)
  %mask = or i8 %lowbitmask, %bitmask1
  ret i8 %mask
}

define i8 @t16_nocse_extrause6(i8 %x) {
; CHECK-LABEL: @t16_nocse_extrause6(
; CHECK-NEXT:    [[BITMASK0:%.*]] = shl nuw i8 1, [[X:%.*]]
; CHECK-NEXT:    call void @use8(i8 [[BITMASK0]])
; CHECK-NEXT:    [[BITMASK1:%.*]] = shl nuw i8 1, [[X]]
; CHECK-NEXT:    call void @use8(i8 [[BITMASK1]])
; CHECK-NEXT:    [[LOWBITMASK:%.*]] = add i8 [[BITMASK0]], -1
; CHECK-NEXT:    call void @use8(i8 [[LOWBITMASK]])
; CHECK-NEXT:    [[MASK:%.*]] = or i8 [[LOWBITMASK]], [[BITMASK1]]
; CHECK-NEXT:    ret i8 [[MASK]]
;
  %bitmask0 = shl i8 1, %x
  call void @use8(i8 %bitmask0)
  %bitmask1 = shl i8 1, %x
  call void @use8(i8 %bitmask1)
  %lowbitmask = add i8 %bitmask0, -1
  call void @use8(i8 %lowbitmask)
  %mask = or i8 %lowbitmask, %bitmask1
  ret i8 %mask
}

; Non-CSE'd test with mismatching X's.
define i8 @t17_nocse_mismatching_x(i8 %x0, i8 %x1) {
; CHECK-LABEL: @t17_nocse_mismatching_x(
; CHECK-NEXT:    [[BITMASK1:%.*]] = shl nuw i8 1, [[X1:%.*]]
; CHECK-NEXT:    [[NOTMASK:%.*]] = shl nsw i8 -1, [[X0:%.*]]
; CHECK-NEXT:    [[LOWBITMASK:%.*]] = xor i8 [[NOTMASK]], -1
; CHECK-NEXT:    [[MASK:%.*]] = or i8 [[BITMASK1]], [[LOWBITMASK]]
; CHECK-NEXT:    ret i8 [[MASK]]
;
  %bitmask0 = shl i8 1, %x0
  %bitmask1 = shl i8 1, %x1
  %lowbitmask = add i8 %bitmask0, -1
  %mask = or i8 %lowbitmask, %bitmask1
  ret i8 %mask
}