; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt -passes=constraint-elimination -S %s | FileCheck %s

; Attribute group #0 is defined at the end of the file (outside this chunk).
declare void @llvm.assume(i1 noundef) #0
; All GEPs in the chain are inbounds and `%dst uge %lower` is assumed, so
; every `uge %lower` comparison of the offset pointers is folded to true
; (the CHECK lines assert `xor i1 true, true` / `xor i1 [[RES_1]], true`).
define i1 @gep_add_1_uge_inbounds(ptr %dst, ptr %lower) {
; CHECK-LABEL: @gep_add_1_uge_inbounds(
; CHECK-NEXT:    [[PRE:%.*]] = icmp uge ptr [[DST:%.*]], [[LOWER:%.*]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[PRE]])
; CHECK-NEXT:    [[DST_ADD_3:%.*]] = getelementptr inbounds i8, ptr [[DST]], i64 3
; CHECK-NEXT:    [[DST_ADD_1:%.*]] = getelementptr inbounds i8, ptr [[DST]], i64 1
; CHECK-NEXT:    [[DST_ADD_2:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_1]], i64 1
; CHECK-NEXT:    [[RES_1:%.*]] = xor i1 true, true
; CHECK-NEXT:    [[DST_ADD_4:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_3]], i64 3
; CHECK-NEXT:    [[RES_2:%.*]] = xor i1 [[RES_1]], true
; CHECK-NEXT:    ret i1 [[RES_2]]
;
  %pre = icmp uge ptr %dst, %lower
  call void @llvm.assume(i1 %pre)
  %dst.add.3 = getelementptr inbounds i8, ptr %dst, i64 3
  %dst.add.1 = getelementptr inbounds i8, ptr %dst, i64 1
  %cmp.add.1 = icmp uge ptr %dst.add.1, %lower
  %dst.add.2 = getelementptr inbounds i8, ptr %dst.add.1, i64 1
  %cmp.add.3 = icmp uge ptr %dst.add.3, %lower
  %res.1 = xor i1 %cmp.add.1, %cmp.add.3
  %dst.add.4 = getelementptr inbounds i8, ptr %dst.add.3, i64 3
  %cmp.add.4 = icmp uge ptr %dst.add.4, %lower
  %res.2 = xor i1 %res.1, %cmp.add.4
  ret i1 %res.2
}
; Same chain as @gep_add_1_uge_inbounds but stepping in <vscale x 4 x i8>
; units. The CHECK lines show none of the comparisons are simplified —
; presumably because the scalable element size is not a compile-time constant.
define i1 @gep_add_1_uge_inbounds_scalable_vector(ptr %dst, ptr %lower) {
; CHECK-LABEL: @gep_add_1_uge_inbounds_scalable_vector(
; CHECK-NEXT:    [[PRE:%.*]] = icmp uge ptr [[DST:%.*]], [[LOWER:%.*]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[PRE]])
; CHECK-NEXT:    [[DST_ADD_3:%.*]] = getelementptr inbounds <vscale x 4 x i8>, ptr [[DST]], i64 3
; CHECK-NEXT:    [[DST_ADD_1:%.*]] = getelementptr inbounds <vscale x 4 x i8>, ptr [[DST]], i64 1
; CHECK-NEXT:    [[CMP_ADD_1:%.*]] = icmp uge ptr [[DST_ADD_1]], [[LOWER]]
; CHECK-NEXT:    [[DST_ADD_2:%.*]] = getelementptr inbounds <vscale x 4 x i8>, ptr [[DST_ADD_1]], i64 1
; CHECK-NEXT:    [[CMP_ADD_3:%.*]] = icmp uge ptr [[DST_ADD_3]], [[LOWER]]
; CHECK-NEXT:    [[RES_1:%.*]] = xor i1 [[CMP_ADD_1]], [[CMP_ADD_3]]
; CHECK-NEXT:    [[DST_ADD_4:%.*]] = getelementptr inbounds <vscale x 4 x i8>, ptr [[DST_ADD_3]], i64 3
; CHECK-NEXT:    [[CMP_ADD_4:%.*]] = icmp uge ptr [[DST_ADD_4]], [[LOWER]]
; CHECK-NEXT:    [[RES_2:%.*]] = xor i1 [[RES_1]], [[CMP_ADD_4]]
; CHECK-NEXT:    ret i1 [[RES_2]]
;
  %pre = icmp uge ptr %dst, %lower
  call void @llvm.assume(i1 %pre)
  %dst.add.3 = getelementptr inbounds <vscale x 4 x i8>, ptr %dst, i64 3
  %dst.add.1 = getelementptr inbounds <vscale x 4 x i8>, ptr %dst, i64 1
  %cmp.add.1 = icmp uge ptr %dst.add.1, %lower
  %dst.add.2 = getelementptr inbounds <vscale x 4 x i8>, ptr %dst.add.1, i64 1
  %cmp.add.3 = icmp uge ptr %dst.add.3, %lower
  %res.1 = xor i1 %cmp.add.1, %cmp.add.3
  %dst.add.4 = getelementptr inbounds <vscale x 4 x i8>, ptr %dst.add.3, i64 3
  %cmp.add.4 = icmp uge ptr %dst.add.4, %lower
  %res.2 = xor i1 %res.1, %cmp.add.4
  ret i1 %res.2
}
; Only the GEPs based directly on %dst are inbounds; %dst.add.2 and
; %dst.add.4 are not. The CHECK lines show %cmp.add.1 and %cmp.add.3 (whose
; pointers come from inbounds GEPs off %dst) still fold to true, while
; %cmp.add.4 (via the non-inbounds %dst.add.4) is kept.
define i1 @gep_add_1_uge_only_inner_inbounds(ptr %dst, ptr %lower) {
; CHECK-LABEL: @gep_add_1_uge_only_inner_inbounds(
; CHECK-NEXT:    [[PRE:%.*]] = icmp uge ptr [[DST:%.*]], [[LOWER:%.*]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[PRE]])
; CHECK-NEXT:    [[DST_ADD_3:%.*]] = getelementptr inbounds i8, ptr [[DST]], i64 3
; CHECK-NEXT:    [[DST_ADD_1:%.*]] = getelementptr inbounds i8, ptr [[DST]], i64 1
; CHECK-NEXT:    [[DST_ADD_2:%.*]] = getelementptr i8, ptr [[DST_ADD_1]], i64 1
; CHECK-NEXT:    [[RES_1:%.*]] = xor i1 true, true
; CHECK-NEXT:    [[DST_ADD_4:%.*]] = getelementptr i8, ptr [[DST_ADD_3]], i64 3
; CHECK-NEXT:    [[CMP_ADD_4:%.*]] = icmp uge ptr [[DST_ADD_4]], [[LOWER]]
; CHECK-NEXT:    [[RES_2:%.*]] = xor i1 [[RES_1]], [[CMP_ADD_4]]
; CHECK-NEXT:    ret i1 [[RES_2]]
;
  %pre = icmp uge ptr %dst, %lower
  call void @llvm.assume(i1 %pre)
  %dst.add.3 = getelementptr inbounds i8, ptr %dst, i64 3
  %dst.add.1 = getelementptr inbounds i8, ptr %dst, i64 1
  %cmp.add.1 = icmp uge ptr %dst.add.1, %lower
  %dst.add.2 = getelementptr i8, ptr %dst.add.1, i64 1
  %cmp.add.3 = icmp uge ptr %dst.add.3, %lower
  %res.1 = xor i1 %cmp.add.1, %cmp.add.3
  %dst.add.4 = getelementptr i8, ptr %dst.add.3, i64 3
  %cmp.add.4 = icmp uge ptr %dst.add.4, %lower
  %res.2 = xor i1 %res.1, %cmp.add.4
  ret i1 %res.2
}
; The GEPs based directly on %dst are NOT inbounds (only the chained
; %dst.add.2 / %dst.add.4 are). The CHECK lines show no comparison is
; simplified in this case.
define i1 @gep_add_1_uge_only_outer_inbounds(ptr %dst, ptr %lower) {
; CHECK-LABEL: @gep_add_1_uge_only_outer_inbounds(
; CHECK-NEXT:    [[PRE:%.*]] = icmp uge ptr [[DST:%.*]], [[LOWER:%.*]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[PRE]])
; CHECK-NEXT:    [[DST_ADD_3:%.*]] = getelementptr i8, ptr [[DST]], i64 3
; CHECK-NEXT:    [[DST_ADD_1:%.*]] = getelementptr i8, ptr [[DST]], i64 1
; CHECK-NEXT:    [[CMP_ADD_1:%.*]] = icmp uge ptr [[DST_ADD_1]], [[LOWER]]
; CHECK-NEXT:    [[DST_ADD_2:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_1]], i64 1
; CHECK-NEXT:    [[CMP_ADD_3:%.*]] = icmp uge ptr [[DST_ADD_3]], [[LOWER]]
; CHECK-NEXT:    [[RES_1:%.*]] = xor i1 [[CMP_ADD_1]], [[CMP_ADD_3]]
; CHECK-NEXT:    [[DST_ADD_4:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_3]], i64 3
; CHECK-NEXT:    [[CMP_ADD_4:%.*]] = icmp uge ptr [[DST_ADD_4]], [[LOWER]]
; CHECK-NEXT:    [[RES_2:%.*]] = xor i1 [[RES_1]], [[CMP_ADD_4]]
; CHECK-NEXT:    ret i1 [[RES_2]]
;
  %pre = icmp uge ptr %dst, %lower
  call void @llvm.assume(i1 %pre)
  %dst.add.3 = getelementptr i8, ptr %dst, i64 3
  %dst.add.1 = getelementptr i8, ptr %dst, i64 1
  %cmp.add.1 = icmp uge ptr %dst.add.1, %lower
  %dst.add.2 = getelementptr inbounds i8, ptr %dst.add.1, i64 1
  %cmp.add.3 = icmp uge ptr %dst.add.3, %lower
  %res.1 = xor i1 %cmp.add.1, %cmp.add.3
  %dst.add.4 = getelementptr inbounds i8, ptr %dst.add.3, i64 3
  %cmp.add.4 = icmp uge ptr %dst.add.4, %lower
  %res.2 = xor i1 %res.1, %cmp.add.4
  ret i1 %res.2
}
; No GEP in the chain is inbounds; the CHECK lines show no comparison is
; simplified.
define i1 @gep_add_1_uge_no_inbounds(ptr %dst, ptr %lower) {
; CHECK-LABEL: @gep_add_1_uge_no_inbounds(
; CHECK-NEXT:    [[PRE:%.*]] = icmp uge ptr [[DST:%.*]], [[LOWER:%.*]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[PRE]])
; CHECK-NEXT:    [[DST_ADD_3:%.*]] = getelementptr i8, ptr [[DST]], i64 3
; CHECK-NEXT:    [[DST_ADD_1:%.*]] = getelementptr i8, ptr [[DST]], i64 1
; CHECK-NEXT:    [[CMP_ADD_1:%.*]] = icmp uge ptr [[DST_ADD_1]], [[LOWER]]
; CHECK-NEXT:    [[DST_ADD_2:%.*]] = getelementptr i8, ptr [[DST_ADD_1]], i64 1
; CHECK-NEXT:    [[CMP_ADD_3:%.*]] = icmp uge ptr [[DST_ADD_3]], [[LOWER]]
; CHECK-NEXT:    [[RES_1:%.*]] = xor i1 [[CMP_ADD_1]], [[CMP_ADD_3]]
; CHECK-NEXT:    [[DST_ADD_4:%.*]] = getelementptr i8, ptr [[DST_ADD_3]], i64 3
; CHECK-NEXT:    [[CMP_ADD_4:%.*]] = icmp uge ptr [[DST_ADD_4]], [[LOWER]]
; CHECK-NEXT:    [[RES_2:%.*]] = xor i1 [[RES_1]], [[CMP_ADD_4]]
; CHECK-NEXT:    ret i1 [[RES_2]]
;
  %pre = icmp uge ptr %dst, %lower
  call void @llvm.assume(i1 %pre)
  %dst.add.3 = getelementptr i8, ptr %dst, i64 3
  %dst.add.1 = getelementptr i8, ptr %dst, i64 1
  %cmp.add.1 = icmp uge ptr %dst.add.1, %lower
  %dst.add.2 = getelementptr i8, ptr %dst.add.1, i64 1
  %cmp.add.3 = icmp uge ptr %dst.add.3, %lower
  %res.1 = xor i1 %cmp.add.1, %cmp.add.3
  %dst.add.4 = getelementptr i8, ptr %dst.add.3, i64 3
  %cmp.add.4 = icmp uge ptr %dst.add.4, %lower
  %res.2 = xor i1 %res.1, %cmp.add.4
  ret i1 %res.2
}
; With `%dst + 4 ult %upper` assumed (all GEPs inbounds), the CHECK lines
; show the ult comparisons at offsets 3 and 4 from %dst fold to true, while
; the comparison at offset 5 (%dst.add.5) is kept.
define i1 @gep_add_1_ult(ptr %dst, ptr %lower, ptr %upper) {
; CHECK-LABEL: @gep_add_1_ult(
; CHECK-NEXT:    [[END:%.*]] = getelementptr inbounds i8, ptr [[DST:%.*]], i64 4
; CHECK-NEXT:    [[PRE:%.*]] = icmp ult ptr [[END]], [[UPPER:%.*]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[PRE]])
; CHECK-NEXT:    [[DST_ADD_1:%.*]] = getelementptr inbounds i8, ptr [[DST]], i64 1
; CHECK-NEXT:    [[DST_ADD_3:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_1]], i64 2
; CHECK-NEXT:    [[DST_ADD_4:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_1]], i64 3
; CHECK-NEXT:    [[RES_1:%.*]] = xor i1 true, true
; CHECK-NEXT:    [[DST_ADD_5:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_1]], i64 4
; CHECK-NEXT:    [[CMP_ADD_5:%.*]] = icmp ult ptr [[DST_ADD_5]], [[UPPER]]
; CHECK-NEXT:    [[RES_2:%.*]] = xor i1 [[RES_1]], [[CMP_ADD_5]]
; CHECK-NEXT:    ret i1 [[RES_2]]
;
  %end = getelementptr inbounds i8, ptr %dst, i64 4
  %pre = icmp ult ptr %end, %upper
  call void @llvm.assume(i1 %pre)
  %dst.add.1 = getelementptr inbounds i8, ptr %dst, i64 1
  %dst.add.3 = getelementptr inbounds i8, ptr %dst.add.1, i64 2
  %cmp.add.3 = icmp ult ptr %dst.add.3, %upper
  %dst.add.4 = getelementptr inbounds i8, ptr %dst.add.1, i64 3
  %cmp.add.4 = icmp ult ptr %dst.add.4, %upper
  %res.1 = xor i1 %cmp.add.3, %cmp.add.4
  %dst.add.5 = getelementptr inbounds i8, ptr %dst.add.1, i64 4
  %cmp.add.5 = icmp ult ptr %dst.add.5, %upper
  %res.2 = xor i1 %res.1, %cmp.add.5
  ret i1 %res.2
}
; Variable index: with `%idx != 0` and `%dst + %idx ult %upper` assumed, the
; CHECK lines show `%dst.add.idx + 1 ule %upper` folds to true while the
; +2 comparison is kept.
define i1 @gep_add_ult_var_idx(ptr %dst, ptr %upper, i8 %idx) {
; CHECK-LABEL: @gep_add_ult_var_idx(
; CHECK-NEXT:    [[NOT_ZERO:%.*]] = icmp ne i8 [[IDX:%.*]], 0
; CHECK-NEXT:    call void @llvm.assume(i1 [[NOT_ZERO]])
; CHECK-NEXT:    [[IDX_EXT:%.*]] = zext i8 [[IDX]] to i16
; CHECK-NEXT:    [[DST_ADD_IDX:%.*]] = getelementptr inbounds i8, ptr [[DST:%.*]], i16 [[IDX_EXT]]
; CHECK-NEXT:    [[PRE:%.*]] = icmp ult ptr [[DST_ADD_IDX]], [[UPPER:%.*]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[PRE]])
; CHECK-NEXT:    [[DST_ADD_1:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_IDX]], i64 1
; CHECK-NEXT:    [[DST_ADD_2:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_IDX]], i64 2
; CHECK-NEXT:    [[CMP_ADD_2:%.*]] = icmp ule ptr [[DST_ADD_2]], [[UPPER]]
; CHECK-NEXT:    [[RES_1:%.*]] = xor i1 true, [[CMP_ADD_2]]
; CHECK-NEXT:    ret i1 [[RES_1]]
;
  %not.zero = icmp ne i8 %idx, 0
  call void @llvm.assume(i1 %not.zero)
  %idx.ext = zext i8 %idx to i16
  %dst.add.idx = getelementptr inbounds i8, ptr %dst, i16 %idx.ext
  %pre = icmp ult ptr %dst.add.idx, %upper
  call void @llvm.assume(i1 %pre)
  %dst.add.1 = getelementptr inbounds i8, ptr %dst.add.idx, i64 1
  %cmp.add.1 = icmp ule ptr %dst.add.1, %upper
  %dst.add.2 = getelementptr inbounds i8, ptr %dst.add.idx, i64 2
  %cmp.add.2 = icmp ule ptr %dst.add.2, %upper
  %res.1 = xor i1 %cmp.add.1, %cmp.add.2
  ret i1 %res.1
}
; Like @gep_add_ult_var_idx but with the stronger precondition `%idx sgt 1`;
; the CHECK lines show the same result: only the +1 comparison folds to true.
define i1 @gep_add_ult_var_idx_sgt_1(ptr %dst, ptr %upper, i8 %idx) {
; CHECK-LABEL: @gep_add_ult_var_idx_sgt_1(
; CHECK-NEXT:    [[SGT_1:%.*]] = icmp sgt i8 [[IDX:%.*]], 1
; CHECK-NEXT:    call void @llvm.assume(i1 [[SGT_1]])
; CHECK-NEXT:    [[IDX_EXT:%.*]] = zext i8 [[IDX]] to i16
; CHECK-NEXT:    [[DST_ADD_IDX:%.*]] = getelementptr inbounds i8, ptr [[DST:%.*]], i16 [[IDX_EXT]]
; CHECK-NEXT:    [[PRE:%.*]] = icmp ult ptr [[DST_ADD_IDX]], [[UPPER:%.*]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[PRE]])
; CHECK-NEXT:    [[DST_ADD_1:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_IDX]], i64 1
; CHECK-NEXT:    [[DST_ADD_2:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_IDX]], i64 2
; CHECK-NEXT:    [[CMP_ADD_2:%.*]] = icmp ule ptr [[DST_ADD_2]], [[UPPER]]
; CHECK-NEXT:    [[RES_1:%.*]] = xor i1 true, [[CMP_ADD_2]]
; CHECK-NEXT:    ret i1 [[RES_1]]
;
  %sgt.1 = icmp sgt i8 %idx, 1
  call void @llvm.assume(i1 %sgt.1)
  %idx.ext = zext i8 %idx to i16
  %dst.add.idx = getelementptr inbounds i8, ptr %dst, i16 %idx.ext
  %pre = icmp ult ptr %dst.add.idx, %upper
  call void @llvm.assume(i1 %pre)
  %dst.add.1 = getelementptr inbounds i8, ptr %dst.add.idx, i64 1
  %cmp.add.1 = icmp ule ptr %dst.add.1, %upper
  %dst.add.2 = getelementptr inbounds i8, ptr %dst.add.idx, i64 2
  %cmp.add.2 = icmp ule ptr %dst.add.2, %upper
  %res.1 = xor i1 %cmp.add.1, %cmp.add.2
  ret i1 %res.1
}
; With `%len != 0`, `%dst + %len + 1 ult %upper`, and `%idx ult %len` all
; assumed, and all GEPs inbounds, the CHECK lines show the comparisons at
; offsets +1 and +2 from %dst.add.idx fold to true while +3 is kept.
define i1 @gep_add_1_ult_var_idx_inbounds(ptr %dst, ptr %upper, i8 %len, i8 %idx) {
; CHECK-LABEL: @gep_add_1_ult_var_idx_inbounds(
; CHECK-NEXT:    [[NOT_ZERO:%.*]] = icmp ne i8 [[LEN:%.*]], 0
; CHECK-NEXT:    call void @llvm.assume(i1 [[NOT_ZERO]])
; CHECK-NEXT:    [[LEN_EXT:%.*]] = zext i8 [[LEN]] to i16
; CHECK-NEXT:    [[DST_ADD_LEN:%.*]] = getelementptr inbounds i8, ptr [[DST:%.*]], i16 [[LEN_EXT]]
; CHECK-NEXT:    [[DST_ADD_1:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_LEN]], i64 1
; CHECK-NEXT:    [[CMP_ADD_1:%.*]] = icmp ult ptr [[DST_ADD_1]], [[UPPER:%.*]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[CMP_ADD_1]])
; CHECK-NEXT:    [[CMP_IDX_ULT_LEN:%.*]] = icmp ult i8 [[IDX:%.*]], [[LEN]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[CMP_IDX_ULT_LEN]])
; CHECK-NEXT:    [[IDX_EXT:%.*]] = zext i8 [[IDX]] to i16
; CHECK-NEXT:    [[DST_ADD_IDX:%.*]] = getelementptr inbounds i8, ptr [[DST]], i16 [[IDX_EXT]]
; CHECK-NEXT:    [[DST_ADD_IDX_1:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_IDX]], i64 1
; CHECK-NEXT:    [[DST_ADD_IDX_2:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_IDX]], i64 2
; CHECK-NEXT:    [[RES_1:%.*]] = xor i1 true, true
; CHECK-NEXT:    [[DST_ADD_IDX_3:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_IDX]], i64 3
; CHECK-NEXT:    [[CMP_IDX_3:%.*]] = icmp ult ptr [[DST_ADD_IDX_3]], [[UPPER]]
; CHECK-NEXT:    [[RES_2:%.*]] = xor i1 [[RES_1]], [[CMP_IDX_3]]
; CHECK-NEXT:    ret i1 [[RES_2]]
;
  %not.zero = icmp ne i8 %len, 0
  call void @llvm.assume(i1 %not.zero)
  %len.ext = zext i8 %len to i16
  %dst.add.len = getelementptr inbounds i8, ptr %dst, i16 %len.ext
  %dst.add.1 = getelementptr inbounds i8, ptr %dst.add.len, i64 1
  %cmp.add.1 = icmp ult ptr %dst.add.1, %upper
  call void @llvm.assume(i1 %cmp.add.1)
  %cmp.idx.ult.len = icmp ult i8 %idx, %len
  call void @llvm.assume(i1 %cmp.idx.ult.len)
  %idx.ext = zext i8 %idx to i16
  %dst.add.idx = getelementptr inbounds i8, ptr %dst, i16 %idx.ext
  %dst.add.idx.1 = getelementptr inbounds i8, ptr %dst.add.idx, i64 1
  %cmp.idx.1 = icmp ult ptr %dst.add.idx.1, %upper
  %dst.add.idx.2 = getelementptr inbounds i8, ptr %dst.add.idx, i64 2
  %cmp.idx.2 = icmp ult ptr %dst.add.idx.2, %upper
  %res.1 = xor i1 %cmp.idx.1, %cmp.idx.2
  %dst.add.idx.3 = getelementptr inbounds i8, ptr %dst.add.idx, i64 3
  %cmp.idx.3 = icmp ult ptr %dst.add.idx.3, %upper
  %res.2 = xor i1 %res.1, %cmp.idx.3
  ret i1 %res.2
}
; Same as @gep_add_1_ult_var_idx_inbounds except the offset GEPs from
; %dst.add.idx are NOT inbounds; the CHECK lines show no comparison is
; simplified.
define i1 @gep_add_1_ult_var_idx_only_inner_inbounds(ptr %dst, ptr %upper, i8 %len, i8 %idx) {
; CHECK-LABEL: @gep_add_1_ult_var_idx_only_inner_inbounds(
; CHECK-NEXT:    [[NOT_ZERO:%.*]] = icmp ne i8 [[LEN:%.*]], 0
; CHECK-NEXT:    call void @llvm.assume(i1 [[NOT_ZERO]])
; CHECK-NEXT:    [[LEN_EXT:%.*]] = zext i8 [[LEN]] to i16
; CHECK-NEXT:    [[DST_ADD_LEN:%.*]] = getelementptr inbounds i8, ptr [[DST:%.*]], i16 [[LEN_EXT]]
; CHECK-NEXT:    [[DST_ADD_1:%.*]] = getelementptr inbounds i8, ptr [[DST_ADD_LEN]], i64 1
; CHECK-NEXT:    [[CMP_ADD_1:%.*]] = icmp ult ptr [[DST_ADD_1]], [[UPPER:%.*]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[CMP_ADD_1]])
; CHECK-NEXT:    [[CMP_IDX_ULT_LEN:%.*]] = icmp ult i8 [[IDX:%.*]], [[LEN]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[CMP_IDX_ULT_LEN]])
; CHECK-NEXT:    [[IDX_EXT:%.*]] = zext i8 [[IDX]] to i16
; CHECK-NEXT:    [[DST_ADD_IDX:%.*]] = getelementptr inbounds i8, ptr [[DST]], i16 [[IDX_EXT]]
; CHECK-NEXT:    [[DST_ADD_IDX_1:%.*]] = getelementptr i8, ptr [[DST_ADD_IDX]], i64 1
; CHECK-NEXT:    [[CMP_IDX_1:%.*]] = icmp ult ptr [[DST_ADD_IDX_1]], [[UPPER]]
; CHECK-NEXT:    [[DST_ADD_IDX_2:%.*]] = getelementptr i8, ptr [[DST_ADD_IDX]], i64 2
; CHECK-NEXT:    [[CMP_IDX_2:%.*]] = icmp ult ptr [[DST_ADD_IDX_2]], [[UPPER]]
; CHECK-NEXT:    [[RES_1:%.*]] = xor i1 [[CMP_IDX_1]], [[CMP_IDX_2]]
; CHECK-NEXT:    [[DST_ADD_IDX_3:%.*]] = getelementptr i8, ptr [[DST_ADD_IDX]], i64 3
; CHECK-NEXT:    [[CMP_IDX_3:%.*]] = icmp ult ptr [[DST_ADD_IDX_3]], [[UPPER]]
; CHECK-NEXT:    [[RES_2:%.*]] = xor i1 [[RES_1]], [[CMP_IDX_3]]
; CHECK-NEXT:    ret i1 [[RES_2]]
;
  %not.zero = icmp ne i8 %len, 0
  call void @llvm.assume(i1 %not.zero)
  %len.ext = zext i8 %len to i16
  %dst.add.len = getelementptr inbounds i8, ptr %dst, i16 %len.ext
  %dst.add.1 = getelementptr inbounds i8, ptr %dst.add.len, i64 1
  %cmp.add.1 = icmp ult ptr %dst.add.1, %upper
  call void @llvm.assume(i1 %cmp.add.1)
  %cmp.idx.ult.len = icmp ult i8 %idx, %len
  call void @llvm.assume(i1 %cmp.idx.ult.len)
  %idx.ext = zext i8 %idx to i16
  %dst.add.idx = getelementptr inbounds i8, ptr %dst, i16 %idx.ext
  %dst.add.idx.1 = getelementptr i8, ptr %dst.add.idx, i64 1
  %cmp.idx.1 = icmp ult ptr %dst.add.idx.1, %upper
  %dst.add.idx.2 = getelementptr i8, ptr %dst.add.idx, i64 2
  %cmp.idx.2 = icmp ult ptr %dst.add.idx.2, %upper
  %res.1 = xor i1 %cmp.idx.1, %cmp.idx.2
  %dst.add.idx.3 = getelementptr i8, ptr %dst.add.idx, i64 3
  %cmp.idx.3 = icmp ult ptr %dst.add.idx.3, %upper
  %res.2 = xor i1 %res.1, %cmp.idx.3
  ret i1 %res.2
}
; Same chain with no inbounds on any GEP; the CHECK lines show no comparison
; is simplified.
define i1 @gep_add_1_ult_var_idx_no_inbounds(ptr %dst, ptr %upper, i8 %len, i8 %idx) {
; CHECK-LABEL: @gep_add_1_ult_var_idx_no_inbounds(
; CHECK-NEXT:    [[NOT_ZERO:%.*]] = icmp ne i8 [[LEN:%.*]], 0
; CHECK-NEXT:    call void @llvm.assume(i1 [[NOT_ZERO]])
; CHECK-NEXT:    [[LEN_EXT:%.*]] = zext i8 [[LEN]] to i16
; CHECK-NEXT:    [[DST_ADD_LEN:%.*]] = getelementptr i8, ptr [[DST:%.*]], i16 [[LEN_EXT]]
; CHECK-NEXT:    [[DST_ADD_1:%.*]] = getelementptr i8, ptr [[DST_ADD_LEN]], i64 1
; CHECK-NEXT:    [[CMP_ADD_1:%.*]] = icmp ult ptr [[DST_ADD_1]], [[UPPER:%.*]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[CMP_ADD_1]])
; CHECK-NEXT:    [[CMP_IDX_ULT_LEN:%.*]] = icmp ult i8 [[IDX:%.*]], [[LEN]]
; CHECK-NEXT:    call void @llvm.assume(i1 [[CMP_IDX_ULT_LEN]])
; CHECK-NEXT:    [[IDX_EXT:%.*]] = zext i8 [[IDX]] to i16
; CHECK-NEXT:    [[DST_ADD_IDX:%.*]] = getelementptr i8, ptr [[DST]], i16 [[IDX_EXT]]
; CHECK-NEXT:    [[DST_ADD_IDX_1:%.*]] = getelementptr i8, ptr [[DST_ADD_IDX]], i64 1
; CHECK-NEXT:    [[CMP_IDX_1:%.*]] = icmp ult ptr [[DST_ADD_IDX_1]], [[UPPER]]
; CHECK-NEXT:    [[DST_ADD_IDX_2:%.*]] = getelementptr i8, ptr [[DST_ADD_IDX]], i64 2
; CHECK-NEXT:    [[CMP_IDX_2:%.*]] = icmp ult ptr [[DST_ADD_IDX_2]], [[UPPER]]
; CHECK-NEXT:    [[RES_1:%.*]] = xor i1 [[CMP_IDX_1]], [[CMP_IDX_2]]
; CHECK-NEXT:    [[DST_ADD_IDX_3:%.*]] = getelementptr i8, ptr [[DST_ADD_IDX]], i64 3
; CHECK-NEXT:    [[CMP_IDX_3:%.*]] = icmp ult ptr [[DST_ADD_IDX_3]], [[UPPER]]
; CHECK-NEXT:    [[RES_2:%.*]] = xor i1 [[RES_1]], [[CMP_IDX_3]]
; CHECK-NEXT:    ret i1 [[RES_2]]
;
  %not.zero = icmp ne i8 %len, 0
  call void @llvm.assume(i1 %not.zero)
  %len.ext = zext i8 %len to i16
  %dst.add.len = getelementptr i8, ptr %dst, i16 %len.ext
  %dst.add.1 = getelementptr i8, ptr %dst.add.len, i64 1
  %cmp.add.1 = icmp ult ptr %dst.add.1, %upper
  call void @llvm.assume(i1 %cmp.add.1)
  %cmp.idx.ult.len = icmp ult i8 %idx, %len
  call void @llvm.assume(i1 %cmp.idx.ult.len)
  %idx.ext = zext i8 %idx to i16
  %dst.add.idx = getelementptr i8, ptr %dst, i16 %idx.ext
  %dst.add.idx.1 = getelementptr i8, ptr %dst.add.idx, i64 1
  %cmp.idx.1 = icmp ult ptr %dst.add.idx.1, %upper
  %dst.add.idx.2 = getelementptr i8, ptr %dst.add.idx, i64 2
  %cmp.idx.2 = icmp ult ptr %dst.add.idx.2, %upper
  %res.1 = xor i1 %cmp.idx.1, %cmp.idx.2
  %dst.add.idx.3 = getelementptr i8, ptr %dst.add.idx, i64 3
  %cmp.idx.3 = icmp ult ptr %dst.add.idx.3, %upper
  %res.2 = xor i1 %res.1, %cmp.idx.3
  ret i1 %res.2
}
364 define i1 @test_chained_no_inbounds(ptr %A, ptr %B) {
365 ; CHECK-LABEL: @test_chained_no_inbounds(
367 ; CHECK-NEXT: [[B_1:%.*]] = getelementptr i8, ptr [[B:%.*]], i64 1
368 ; CHECK-NEXT: [[B_2:%.*]] = getelementptr i8, ptr [[B_1]], i64 1
369 ; CHECK-NEXT: [[C_1:%.*]] = icmp ugt ptr [[A:%.*]], null
370 ; CHECK-NEXT: [[C_2:%.*]] = icmp ugt ptr [[B_1]], [[B_2]]
371 ; CHECK-NEXT: [[OR:%.*]] = or i1 [[C_1]], [[C_2]]
372 ; CHECK-NEXT: br i1 [[OR]], label [[THEN:%.*]], label [[ELSE:%.*]]
374 ; CHECK-NEXT: ret i1 true
376 ; CHECK-NEXT: ret i1 false
379 %B.1 = getelementptr i8, ptr %B, i64 1
380 %B.2 = getelementptr i8, ptr %B.1, i64 1
381 %c.1 = icmp ugt ptr %A, null
382 %c.2 = icmp ugt ptr %B.1, %B.2
383 %or = or i1 %c.1, %c.2
384 br i1 %or, label %then, label %else