// Test CodeGen for Security Check Overflow Builtins.

// RUN: %clang_cc1 -triple "i686-unknown-unknown" -emit-llvm -x c %s -o - | FileCheck -DLONG_TYPE=i32 -DLONG_MAX=2147483647 %s
// RUN: %clang_cc1 -triple "x86_64-unknown-unknown" -emit-llvm -x c %s -o - | FileCheck -DLONG_TYPE=i64 -DLONG_MAX=9223372036854775807 %s
// RUN: %clang_cc1 -triple "x86_64-mingw32" -emit-llvm -x c %s -o - | FileCheck -DLONG_TYPE=i32 -DLONG_MAX=2147483647 %s

extern unsigned UnsignedErrorCode;
extern unsigned long UnsignedLongErrorCode;
extern unsigned long long UnsignedLongLongErrorCode;
extern int IntErrorCode;
extern long LongErrorCode;
extern long long LongLongErrorCode;
void overflowed(void);

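// Each test below follows the same pattern: declare a destination variable,
// call one of the __builtin_*_overflow forms, and either call overflowed() or
// return the matching error code when the builtin reports overflow. The CHECK
// lines verify the llvm.*.with.overflow intrinsic emitted for each
// operand/result combination.
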
unsigned test_add_overflow_uint_uint_uint(unsigned x, unsigned y) {
  // CHECK-LABEL: define {{(dso_local )?}}i32 @test_add_overflow_uint_uint_uint
  // CHECK: [[S:%.+]] = call { i32, i1 } @llvm.uadd.with.overflow.i32(i32 %{{.+}}, i32 %{{.+}})
  // CHECK-DAG: [[Q:%.+]] = extractvalue { i32, i1 } [[S]], 0
  // CHECK-DAG: [[C:%.+]] = extractvalue { i32, i1 } [[S]], 1
  // CHECK: store i32 [[Q]], ptr
  unsigned r;
  if (__builtin_add_overflow(x, y, &r))
    overflowed();
  return r;
}

int test_add_overflow_int_int_int(int x, int y) {
  // CHECK-LABEL: define {{(dso_local )?}}i32 @test_add_overflow_int_int_int
  // CHECK: [[S:%.+]] = call { i32, i1 } @llvm.sadd.with.overflow.i32(i32 %{{.+}}, i32 %{{.+}})
  // CHECK-DAG: [[C:%.+]] = extractvalue { i32, i1 } [[S]], 1
  // CHECK-DAG: [[Q:%.+]] = extractvalue { i32, i1 } [[S]], 0
  // CHECK: store i32 [[Q]], ptr
  int r;
  if (__builtin_add_overflow(x, y, &r))
    overflowed();
  return r;
}

int test_add_overflow_xint31_xint31_xint31(_BitInt(31) x, _BitInt(31) y) {
  // CHECK-LABEL: define {{(dso_local )?}}i32 @test_add_overflow_xint31_xint31_xint31({{.+}})
  // CHECK: [[S:%.+]] = call { i31, i1 } @llvm.sadd.with.overflow.i31(i31 %{{.+}}, i31 %{{.+}})
  // CHECK-DAG: [[C:%.+]] = extractvalue { i31, i1 } [[S]], 1
  // CHECK-DAG: [[Q:%.+]] = extractvalue { i31, i1 } [[S]], 0
  // CHECK: store i31 [[Q]], ptr
  _BitInt(31) r;
  if (__builtin_add_overflow(x, y, &r))
    overflowed();
  return r;
}

unsigned test_sub_overflow_uint_uint_uint(unsigned x, unsigned y) {
  // CHECK-LABEL: define {{(dso_local )?}}i32 @test_sub_overflow_uint_uint_uint
  // CHECK: [[S:%.+]] = call { i32, i1 } @llvm.usub.with.overflow.i32(i32 %{{.+}}, i32 %{{.+}})
  // CHECK-DAG: [[Q:%.+]] = extractvalue { i32, i1 } [[S]], 0
  // CHECK-DAG: [[C:%.+]] = extractvalue { i32, i1 } [[S]], 1
  // CHECK: store i32 [[Q]], ptr
  unsigned r;
  if (__builtin_sub_overflow(x, y, &r))
    overflowed();
  return r;
}

int test_sub_overflow_int_int_int(int x, int y) {
  // CHECK-LABEL: define {{(dso_local )?}}i32 @test_sub_overflow_int_int_int
  // CHECK: [[S:%.+]] = call { i32, i1 } @llvm.ssub.with.overflow.i32(i32 %{{.+}}, i32 %{{.+}})
  // CHECK-DAG: [[C:%.+]] = extractvalue { i32, i1 } [[S]], 1
  // CHECK-DAG: [[Q:%.+]] = extractvalue { i32, i1 } [[S]], 0
  // CHECK: store i32 [[Q]], ptr
  int r;
  if (__builtin_sub_overflow(x, y, &r))
    overflowed();
  return r;
}

int test_sub_overflow_xint31_xint31_xint31(_BitInt(31) x, _BitInt(31) y) {
  // CHECK-LABEL: define {{(dso_local )?}}i32 @test_sub_overflow_xint31_xint31_xint31({{.+}})
  // CHECK: [[S:%.+]] = call { i31, i1 } @llvm.ssub.with.overflow.i31(i31 %{{.+}}, i31 %{{.+}})
  // CHECK-DAG: [[C:%.+]] = extractvalue { i31, i1 } [[S]], 1
  // CHECK-DAG: [[Q:%.+]] = extractvalue { i31, i1 } [[S]], 0
  // CHECK: store i31 [[Q]], ptr
  _BitInt(31) r;
  if (__builtin_sub_overflow(x, y, &r))
    overflowed();
  return r;
}

unsigned test_mul_overflow_uint_uint_uint(unsigned x, unsigned y) {
  // CHECK-LABEL: define {{(dso_local )?}}i32 @test_mul_overflow_uint_uint_uint
  // CHECK: [[S:%.+]] = call { i32, i1 } @llvm.umul.with.overflow.i32(i32 %{{.+}}, i32 %{{.+}})
  // CHECK-DAG: [[Q:%.+]] = extractvalue { i32, i1 } [[S]], 0
  // CHECK-DAG: [[C:%.+]] = extractvalue { i32, i1 } [[S]], 1
  // CHECK: store i32 [[Q]], ptr
  // CHECK: br i1 [[C]]
  unsigned r;
  if (__builtin_mul_overflow(x, y, &r))
    overflowed();
  return r;
}

int test_mul_overflow_uint_uint_int(unsigned x, unsigned y) {
  // CHECK-LABEL: define {{(dso_local )?}}i32 @test_mul_overflow_uint_uint_int
  // CHECK: [[S:%.+]] = call { i32, i1 } @llvm.umul.with.overflow.i32(i32 %{{.+}}, i32 %{{.+}})
  // CHECK-DAG: [[Q:%.+]] = extractvalue { i32, i1 } [[S]], 0
  // CHECK-DAG: [[C:%.+]] = extractvalue { i32, i1 } [[S]], 1
  // CHECK: [[C1:%.+]] = icmp ugt i32 [[Q]], 2147483647
  // CHECK: [[C2:%.+]] = or i1 [[C]], [[C1]]
  // CHECK: store i32 [[Q]], ptr
  // CHECK: br i1 [[C2]]
  int r;
  if (__builtin_mul_overflow(x, y, &r))
    overflowed();
  return r;
}

int test_mul_overflow_uint_uint_int_volatile(unsigned x, unsigned y) {
  // CHECK-LABEL: define {{(dso_local )?}}i32 @test_mul_overflow_uint_uint_int_volatile
  // CHECK: [[S:%.+]] = call { i32, i1 } @llvm.umul.with.overflow.i32(i32 %{{.+}}, i32 %{{.+}})
  // CHECK-DAG: [[Q:%.+]] = extractvalue { i32, i1 } [[S]], 0
  // CHECK-DAG: [[C:%.+]] = extractvalue { i32, i1 } [[S]], 1
  // CHECK: [[C1:%.+]] = icmp ugt i32 [[Q]], 2147483647
  // CHECK: [[C2:%.+]] = or i1 [[C]], [[C1]]
  // CHECK: store volatile i32 [[Q]], ptr
  // CHECK: br i1 [[C2]]
  volatile int r;
  if (__builtin_mul_overflow(x, y, &r))
    overflowed();
  return r;
}

long test_mul_overflow_ulong_ulong_long(unsigned long x, unsigned long y) {
  // CHECK-LABEL: @test_mul_overflow_ulong_ulong_long
  // CHECK: [[S:%.+]] = call { [[LONG_TYPE]], i1 } @llvm.umul.with.overflow.[[LONG_TYPE]]([[LONG_TYPE]] %{{.+}}, [[LONG_TYPE]] %{{.+}})
  // CHECK-DAG: [[Q:%.+]] = extractvalue { [[LONG_TYPE]], i1 } [[S]], 0
  // CHECK-DAG: [[C:%.+]] = extractvalue { [[LONG_TYPE]], i1 } [[S]], 1
  // CHECK: [[C1:%.+]] = icmp ugt [[LONG_TYPE]] [[Q]], [[LONG_MAX]]
  // CHECK: [[C2:%.+]] = or i1 [[C]], [[C1]]
  // LONG64: store [[LONG_TYPE]] [[Q]], ptr
  // LONG64: br i1 [[C2]]
  long r;
  if (__builtin_mul_overflow(x, y, &r))
    overflowed();
  return r;
}

int test_mul_overflow_int_int_int(int x, int y) {
  // CHECK-LABEL: define {{(dso_local )?}}i32 @test_mul_overflow_int_int_int
  // CHECK: [[S:%.+]] = call { i32, i1 } @llvm.smul.with.overflow.i32(i32 %{{.+}}, i32 %{{.+}})
  // CHECK-DAG: [[C:%.+]] = extractvalue { i32, i1 } [[S]], 1
  // CHECK-DAG: [[Q:%.+]] = extractvalue { i32, i1 } [[S]], 0
  // CHECK: store i32 [[Q]], ptr
  // CHECK: br i1 [[C]]
  int r;
  if (__builtin_mul_overflow(x, y, &r))
    overflowed();
  return r;
}

int test_mul_overflow_xint31_xint31_xint31(_BitInt(31) x, _BitInt(31) y) {
  // CHECK-LABEL: define {{(dso_local )?}}i32 @test_mul_overflow_xint31_xint31_xint31({{.+}})
  // CHECK: [[S:%.+]] = call { i31, i1 } @llvm.smul.with.overflow.i31(i31 %{{.+}}, i31 %{{.+}})
  // CHECK-DAG: [[C:%.+]] = extractvalue { i31, i1 } [[S]], 1
  // CHECK-DAG: [[Q:%.+]] = extractvalue { i31, i1 } [[S]], 0
  // CHECK: store i31 [[Q]], ptr
  // CHECK: br i1 [[C]]
  _BitInt(31) r;
  if (__builtin_mul_overflow(x, y, &r))
    overflowed();
  return r;
}

int test_mul_overflow_xint127_xint127_xint127(_BitInt(127) x, _BitInt(127) y) {
  // CHECK-LABEL: define {{(dso_local )?}}i32 @test_mul_overflow_xint127_xint127_xint127({{.+}})
  // CHECK: [[S:%.+]] = call { i127, i1 } @llvm.smul.with.overflow.i127(i127 %{{.+}}, i127 %{{.+}})
  // CHECK-DAG: [[C:%.+]] = extractvalue { i127, i1 } [[S]], 1
  // CHECK-DAG: [[Q:%.+]] = extractvalue { i127, i1 } [[S]], 0
  // CHECK: store i127 [[Q]], ptr
  // CHECK: br i1 [[C]]
  _BitInt(127) r;
  if (__builtin_mul_overflow(x, y, &r))
    overflowed();
  return r;
}

int test_mul_overflow_xint128_xint128_xint128(_BitInt(128) x, _BitInt(128) y) {
  // CHECK-LABEL: define {{(dso_local )?}}i32 @test_mul_overflow_xint128_xint128_xint128({{.+}})
  // CHECK: [[S:%.+]] = call { i128, i1 } @llvm.smul.with.overflow.i128(i128 %{{.+}}, i128 %{{.+}})
  // CHECK-DAG: [[C:%.+]] = extractvalue { i128, i1 } [[S]], 1
  // CHECK-DAG: [[Q:%.+]] = extractvalue { i128, i1 } [[S]], 0
  // CHECK: store i128 [[Q]], ptr
  // CHECK: br i1 [[C]]
  _BitInt(128) r;
  if (__builtin_mul_overflow(x, y, &r))
    overflowed();
  return r;
}

int test_add_overflow_uint_int_int(unsigned x, int y) {
  // CHECK-LABEL: define {{(dso_local )?}}i32 @test_add_overflow_uint_int_int
  // CHECK: [[XE:%.+]] = zext i32 %{{.+}} to i33
  // CHECK: [[YE:%.+]] = sext i32 %{{.+}} to i33
  // CHECK: [[S:%.+]] = call { i33, i1 } @llvm.sadd.with.overflow.i33(i33 [[XE]], i33 [[YE]])
  // CHECK-DAG: [[Q:%.+]] = extractvalue { i33, i1 } [[S]], 0
  // CHECK-DAG: [[C1:%.+]] = extractvalue { i33, i1 } [[S]], 1
  // CHECK: [[QT:%.+]] = trunc i33 [[Q]] to i32
  // CHECK: [[QTE:%.+]] = sext i32 [[QT]] to i33
  // CHECK: [[C2:%.+]] = icmp ne i33 [[Q]], [[QTE]]
  // CHECK: [[C3:%.+]] = or i1 [[C1]], [[C2]]
  // CHECK: store i32 [[QT]], ptr
  // CHECK: br i1 [[C3]]
  int r;
  if (__builtin_add_overflow(x, y, &r))
    overflowed();
  return r;
}

_Bool test_add_overflow_uint_uint_bool(unsigned x, unsigned y) {
  // CHECK-LABEL: define {{.*}} i1 @test_add_overflow_uint_uint_bool
  // CHECK: [[S:%.+]] = call { i32, i1 } @llvm.uadd.with.overflow.i32(i32 %{{.+}}, i32 %{{.+}})
  // CHECK-DAG: [[Q:%.+]] = extractvalue { i32, i1 } [[S]], 0
  // CHECK-DAG: [[C1:%.+]] = extractvalue { i32, i1 } [[S]], 1
  // CHECK: [[QT:%.+]] = trunc i32 [[Q]] to i1
  // CHECK: [[QTE:%.+]] = zext i1 [[QT]] to i32
  // CHECK: [[C2:%.+]] = icmp ne i32 [[Q]], [[QTE]]
  // CHECK: [[C3:%.+]] = or i1 [[C1]], [[C2]]
  // CHECK: [[QT2:%.+]] = zext i1 [[QT]] to i8
  // CHECK: store i8 [[QT2]], ptr
  // CHECK: br i1 [[C3]]
  _Bool r;
  if (__builtin_add_overflow(x, y, &r))
    overflowed();
  return r;
}

unsigned test_add_overflow_bool_bool_uint(_Bool x, _Bool y) {
  // CHECK-LABEL: define {{(dso_local )?}}i32 @test_add_overflow_bool_bool_uint
  // CHECK: [[XE:%.+]] = zext i1 %{{.+}} to i32
  // CHECK: [[YE:%.+]] = zext i1 %{{.+}} to i32
  // CHECK: [[S:%.+]] = call { i32, i1 } @llvm.uadd.with.overflow.i32(i32 [[XE]], i32 [[YE]])
  // CHECK-DAG: [[Q:%.+]] = extractvalue { i32, i1 } [[S]], 0
  // CHECK-DAG: [[C:%.+]] = extractvalue { i32, i1 } [[S]], 1
  // CHECK: store i32 [[Q]], ptr
  // CHECK: br i1 [[C]]
  unsigned r;
  if (__builtin_add_overflow(x, y, &r))
    overflowed();
  return r;
}

_Bool test_add_overflow_bool_bool_bool(_Bool x, _Bool y) {
  // CHECK-LABEL: define {{.*}} i1 @test_add_overflow_bool_bool_bool
  // CHECK: [[S:%.+]] = call { i1, i1 } @llvm.uadd.with.overflow.i1(i1 %{{.+}}, i1 %{{.+}})
  // CHECK-DAG: [[Q:%.+]] = extractvalue { i1, i1 } [[S]], 0
  // CHECK-DAG: [[C:%.+]] = extractvalue { i1, i1 } [[S]], 1
  // CHECK: [[QT2:%.+]] = zext i1 [[Q]] to i8
  // CHECK: store i8 [[QT2]], ptr
  // CHECK: br i1 [[C]]
  _Bool r;
  if (__builtin_add_overflow(x, y, &r))
    overflowed();
  return r;
}

int test_add_overflow_volatile(int x, int y) {
  // CHECK-LABEL: define {{(dso_local )?}}i32 @test_add_overflow_volatile
  // CHECK: [[S:%.+]] = call { i32, i1 } @llvm.sadd.with.overflow.i32(i32 %{{.+}}, i32 %{{.+}})
  // CHECK-DAG: [[Q:%.+]] = extractvalue { i32, i1 } [[S]], 0
  // CHECK-DAG: [[C:%.+]] = extractvalue { i32, i1 } [[S]], 1
  // CHECK: store volatile i32 [[Q]], ptr
  // CHECK: br i1 [[C]]
  volatile int result;
  if (__builtin_add_overflow(x, y, &result))
    overflowed();
  return result;
}

unsigned test_uadd_overflow(unsigned x, unsigned y) {
  // CHECK: @test_uadd_overflow
  // CHECK: %{{.+}} = call { i32, i1 } @llvm.uadd.with.overflow.i32(i32 %{{.+}}, i32 %{{.+}})
  unsigned result;
  if (__builtin_uadd_overflow(x, y, &result))
    return UnsignedErrorCode;
  return result;
}

unsigned long test_uaddl_overflow(unsigned long x, unsigned long y) {
  // CHECK: @test_uaddl_overflow([[UL:i32|i64]] noundef %x
  // CHECK: %{{.+}} = call { [[UL]], i1 } @llvm.uadd.with.overflow.[[UL]]([[UL]] %{{.+}}, [[UL]] %{{.+}})
  unsigned long result;
  if (__builtin_uaddl_overflow(x, y, &result))
    return UnsignedLongErrorCode;
  return result;
}

unsigned long long test_uaddll_overflow(unsigned long long x, unsigned long long y) {
  // CHECK: @test_uaddll_overflow
  // CHECK: %{{.+}} = call { i64, i1 } @llvm.uadd.with.overflow.i64(i64 %{{.+}}, i64 %{{.+}})
  unsigned long long result;
  if (__builtin_uaddll_overflow(x, y, &result))
    return UnsignedLongLongErrorCode;
  return result;
}

unsigned test_usub_overflow(unsigned x, unsigned y) {
  // CHECK: @test_usub_overflow
  // CHECK: %{{.+}} = call { i32, i1 } @llvm.usub.with.overflow.i32(i32 %{{.+}}, i32 %{{.+}})
  unsigned result;
  if (__builtin_usub_overflow(x, y, &result))
    return UnsignedErrorCode;
  return result;
}

unsigned long test_usubl_overflow(unsigned long x, unsigned long y) {
  // CHECK: @test_usubl_overflow([[UL:i32|i64]] noundef %x
  // CHECK: %{{.+}} = call { [[UL]], i1 } @llvm.usub.with.overflow.[[UL]]([[UL]] %{{.+}}, [[UL]] %{{.+}})
  unsigned long result;
  if (__builtin_usubl_overflow(x, y, &result))
    return UnsignedLongErrorCode;
  return result;
}

unsigned long long test_usubll_overflow(unsigned long long x, unsigned long long y) {
  // CHECK: @test_usubll_overflow
  // CHECK: %{{.+}} = call { i64, i1 } @llvm.usub.with.overflow.i64(i64 %{{.+}}, i64 %{{.+}})
  unsigned long long result;
  if (__builtin_usubll_overflow(x, y, &result))
    return UnsignedLongLongErrorCode;
  return result;
}

unsigned test_umul_overflow(unsigned x, unsigned y) {
  // CHECK: @test_umul_overflow
  // CHECK: %{{.+}} = call { i32, i1 } @llvm.umul.with.overflow.i32(i32 %{{.+}}, i32 %{{.+}})
  unsigned result;
  if (__builtin_umul_overflow(x, y, &result))
    return UnsignedErrorCode;
  return result;
}

unsigned long test_umull_overflow(unsigned long x, unsigned long y) {
  // CHECK: @test_umull_overflow([[UL:i32|i64]] noundef %x
  // CHECK: %{{.+}} = call { [[UL]], i1 } @llvm.umul.with.overflow.[[UL]]([[UL]] %{{.+}}, [[UL]] %{{.+}})
  unsigned long result;
  if (__builtin_umull_overflow(x, y, &result))
    return UnsignedLongErrorCode;
  return result;
}

unsigned long long test_umulll_overflow(unsigned long long x, unsigned long long y) {
  // CHECK: @test_umulll_overflow
  // CHECK: %{{.+}} = call { i64, i1 } @llvm.umul.with.overflow.i64(i64 %{{.+}}, i64 %{{.+}})
  unsigned long long result;
  if (__builtin_umulll_overflow(x, y, &result))
    return UnsignedLongLongErrorCode;
  return result;
}

int test_sadd_overflow(int x, int y) {
  // CHECK: @test_sadd_overflow
  // CHECK: %{{.+}} = call { i32, i1 } @llvm.sadd.with.overflow.i32(i32 %{{.+}}, i32 %{{.+}})
  int result;
  if (__builtin_sadd_overflow(x, y, &result))
    return IntErrorCode;
  return result;
}

long test_saddl_overflow(long x, long y) {
  // CHECK: @test_saddl_overflow([[UL:i32|i64]] noundef %x
  // CHECK: %{{.+}} = call { [[UL]], i1 } @llvm.sadd.with.overflow.[[UL]]([[UL]] %{{.+}}, [[UL]] %{{.+}})
  long result;
  if (__builtin_saddl_overflow(x, y, &result))
    return LongErrorCode;
  return result;
}

long long test_saddll_overflow(long long x, long long y) {
  // CHECK: @test_saddll_overflow
  // CHECK: %{{.+}} = call { i64, i1 } @llvm.sadd.with.overflow.i64(i64 %{{.+}}, i64 %{{.+}})
  long long result;
  if (__builtin_saddll_overflow(x, y, &result))
    return LongLongErrorCode;
  return result;
}

int test_ssub_overflow(int x, int y) {
  // CHECK: @test_ssub_overflow
  // CHECK: %{{.+}} = call { i32, i1 } @llvm.ssub.with.overflow.i32(i32 %{{.+}}, i32 %{{.+}})
  int result;
  if (__builtin_ssub_overflow(x, y, &result))
    return IntErrorCode;
  return result;
}

long test_ssubl_overflow(long x, long y) {
  // CHECK: @test_ssubl_overflow([[UL:i32|i64]] noundef %x
  // CHECK: %{{.+}} = call { [[UL]], i1 } @llvm.ssub.with.overflow.[[UL]]([[UL]] %{{.+}}, [[UL]] %{{.+}})
  long result;
  if (__builtin_ssubl_overflow(x, y, &result))
    return LongErrorCode;
  return result;
}

long long test_ssubll_overflow(long long x, long long y) {
  // CHECK: @test_ssubll_overflow
  // CHECK: %{{.+}} = call { i64, i1 } @llvm.ssub.with.overflow.i64(i64 %{{.+}}, i64 %{{.+}})
  long long result;
  if (__builtin_ssubll_overflow(x, y, &result))
    return LongLongErrorCode;
  return result;
}

int test_smul_overflow(int x, int y) {
  // CHECK: @test_smul_overflow
  // CHECK: %{{.+}} = call { i32, i1 } @llvm.smul.with.overflow.i32(i32 %{{.+}}, i32 %{{.+}})
  int result;
  if (__builtin_smul_overflow(x, y, &result))
    return IntErrorCode;
  return result;
}

long test_smull_overflow(long x, long y) {
  // CHECK: @test_smull_overflow([[UL:i32|i64]] noundef %x
  // CHECK: %{{.+}} = call { [[UL]], i1 } @llvm.smul.with.overflow.[[UL]]([[UL]] %{{.+}}, [[UL]] %{{.+}})
  long result;
  if (__builtin_smull_overflow(x, y, &result))
    return LongErrorCode;
  return result;
}

long long test_smulll_overflow(long long x, long long y) {
  // CHECK: @test_smulll_overflow
  // CHECK: %{{.+}} = call { i64, i1 } @llvm.smul.with.overflow.i64(i64 %{{.+}}, i64 %{{.+}})
  long long result;
  if (__builtin_smulll_overflow(x, y, &result))
    return LongLongErrorCode;
  return result;
}

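// The mixed-sign tests below exercise the lowering __builtin_mul_overflow uses
// when one operand is signed and the other unsigned: the signed operand's
// absolute value feeds llvm.umul.with.overflow, and the sign is reapplied
// afterwards while checking that the result still fits the destination type,
// as the CHECK lines verify.
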
int test_mixed_sign_mul_overflow_sext_signed_op(int x, unsigned long long y) {
  // CHECK: @test_mixed_sign_mul_overflow_sext_signed_op
  // CHECK: [[SignedOp:%.*]] = sext i32 %0 to i64
  // CHECK: [[IsNeg:%.*]] = icmp slt i64 [[SignedOp]], 0
  unsigned long long result;
  if (__builtin_mul_overflow(x, y, &result))
    return LongErrorCode;
  return result;
}

int test_mixed_sign_mul_overflow_zext_unsigned_op(long long x, unsigned y) {
  // CHECK: @test_mixed_sign_mul_overflow_zext_unsigned_op
  // CHECK: [[UnsignedOp:%.*]] = zext i32 %1 to i64
  // CHECK: [[IsNeg:%.*]] = icmp slt i64 %0, 0
  // CHECK: @llvm.umul.with.overflow.i64({{.*}}, i64 [[UnsignedOp]])
  long long result;
  if (__builtin_mul_overflow(x, y, &result))
    return LongErrorCode;
  return result;
}

int test_mixed_sign_mull_overflow(int x, unsigned y) {
  // CHECK: @test_mixed_sign_mull_overflow
  // CHECK: [[IsNeg:%.*]] = icmp slt i32 [[Op1:%.*]], 0
  // CHECK-NEXT: [[Signed:%.*]] = sub i32 0, [[Op1]]
  // CHECK-NEXT: [[AbsSigned:%.*]] = select i1 [[IsNeg]], i32 [[Signed]], i32 [[Op1]]
  // CHECK-NEXT: call { i32, i1 } @llvm.umul.with.overflow.i32(i32 [[AbsSigned]], i32 %{{.*}})
  // CHECK-NEXT: [[UnsignedOFlow:%.*]] = extractvalue { i32, i1 } %{{.*}}, 1
  // CHECK-NEXT: [[UnsignedResult:%.*]] = extractvalue { i32, i1 } %{{.*}}, 0
  // CHECK-NEXT: [[IsNegZext:%.*]] = zext i1 [[IsNeg]] to i32
  // CHECK-NEXT: [[MaxResult:%.*]] = add i32 2147483647, [[IsNegZext]]
  // CHECK-NEXT: [[SignedOFlow:%.*]] = icmp ugt i32 [[UnsignedResult]], [[MaxResult]]
  // CHECK-NEXT: [[OFlow:%.*]] = or i1 [[UnsignedOFlow]], [[SignedOFlow]]
  // CHECK-NEXT: [[NegativeResult:%.*]] = sub i32 0, [[UnsignedResult]]
  // CHECK-NEXT: [[Result:%.*]] = select i1 [[IsNeg]], i32 [[NegativeResult]], i32 [[UnsignedResult]]
  // CHECK-NEXT: store i32 [[Result]], ptr %{{.*}}, align 4
  // CHECK: br i1 [[OFlow]]
  int result;
  if (__builtin_mul_overflow(x, y, &result))
    return LongErrorCode;
  return result;
}

int test_mixed_sign_mull_overflow_unsigned(int x, unsigned y) {
  // CHECK: @test_mixed_sign_mull_overflow_unsigned
  // CHECK: [[IsNeg:%.*]] = icmp slt i32 [[Op1:%.*]], 0
  // CHECK-NEXT: [[Signed:%.*]] = sub i32 0, [[Op1]]
  // CHECK-NEXT: [[AbsSigned:%.*]] = select i1 [[IsNeg]], i32 [[Signed]], i32 [[Op1]]
  // CHECK-NEXT: call { i32, i1 } @llvm.umul.with.overflow.i32(i32 [[AbsSigned]], i32 %{{.*}})
  // CHECK-NEXT: [[UnsignedOFlow:%.*]] = extractvalue { i32, i1 } %{{.*}}, 1
  // CHECK-NEXT: [[UnsignedResult:%.*]] = extractvalue { i32, i1 } %{{.*}}, 0
  // CHECK-NEXT: [[NotNull:%.*]] = icmp ne i32 [[UnsignedResult]], 0
  // CHECK-NEXT: [[Underflow:%.*]] = and i1 [[IsNeg]], [[NotNull]]
  // CHECK-NEXT: [[OFlow:%.*]] = or i1 [[UnsignedOFlow]], [[Underflow]]
  // CHECK-NEXT: [[NegatedResult:%.*]] = sub i32 0, [[UnsignedResult]]
  // CHECK-NEXT: [[Result:%.*]] = select i1 [[IsNeg]], i32 [[NegatedResult]], i32 [[UnsignedResult]]
  // CHECK-NEXT: store i32 [[Result]], ptr %{{.*}}, align 4
  // CHECK: br i1 [[OFlow]]
  unsigned result;
  if (__builtin_mul_overflow(x, y, &result))
    return LongErrorCode;
  return result;
}

int test_mixed_sign_mull_overflow_swapped(int x, unsigned y) {
  // CHECK: @test_mixed_sign_mull_overflow_swapped
  // CHECK: call { i32, i1 } @llvm.umul.with.overflow.i32
  // CHECK: add i32 2147483647
  int result;
  if (__builtin_mul_overflow(y, x, &result))
    return LongErrorCode;
  return result;
}

long long test_mixed_sign_mulll_overflow(long long x, unsigned long long y) {
  // CHECK: @test_mixed_sign_mulll_overflow
  // CHECK: call { i64, i1 } @llvm.umul.with.overflow.i64
  // CHECK: add i64 9223372036854775807
  long long result;
  if (__builtin_mul_overflow(x, y, &result))
    return LongLongErrorCode;
  return result;
}

long long test_mixed_sign_mulll_overflow_swapped(long long x, unsigned long long y) {
  // CHECK: @test_mixed_sign_mulll_overflow_swapped
  // CHECK: call { i64, i1 } @llvm.umul.with.overflow.i64
  // CHECK: add i64 9223372036854775807
  long long result;
  if (__builtin_mul_overflow(y, x, &result))
    return LongLongErrorCode;
  return result;
}

long long test_mixed_sign_mulll_overflow_trunc_signed(long long x, unsigned long long y) {
  // CHECK: @test_mixed_sign_mulll_overflow_trunc_signed
  // CHECK: call { i64, i1 } @llvm.umul.with.overflow.i64
  // CHECK: add i64 2147483647
  int result;
  if (__builtin_mul_overflow(y, x, &result))
    return LongLongErrorCode;
  return result;
}

long long test_mixed_sign_mulll_overflow_trunc_unsigned(long long x, unsigned long long y) {
  // CHECK: @test_mixed_sign_mulll_overflow_trunc_unsigned
  // CHECK: call { i64, i1 } @llvm.umul.with.overflow.i64
  // CHECK: [[NON_ZERO:%.*]] = icmp ne i64 [[UNSIGNED_RESULT:%.*]], 0
  // CHECK-NEXT: [[UNDERFLOW:%.*]] = and i1 {{.*}}, [[NON_ZERO]]
  // CHECK-NEXT: [[OVERFLOW_PRE_TRUNC:%.*]] = or i1 {{.*}}, [[UNDERFLOW]]
  // CHECK-NEXT: [[TRUNC_OVERFLOW:%.*]] = icmp ugt i64 [[UNSIGNED_RESULT]], 4294967295
  // CHECK-NEXT: [[OVERFLOW:%.*]] = or i1 [[OVERFLOW_PRE_TRUNC]], [[TRUNC_OVERFLOW]]
  // CHECK-NEXT: [[NEGATED:%.*]] = sub i64 0, [[UNSIGNED_RESULT]]
  // CHECK-NEXT: [[RESULT:%.*]] = select i1 {{.*}}, i64 [[NEGATED]], i64 [[UNSIGNED_RESULT]]
  // CHECK-NEXT: trunc i64 [[RESULT]] to i32
  unsigned result;
  if (__builtin_mul_overflow(y, x, &result))
    return LongLongErrorCode;
  return result;
}

long long test_mixed_sign_mul_overflow_extend_signed(int x, unsigned y) {
  // CHECK: @test_mixed_sign_mul_overflow_extend_signed
  // CHECK: call { i64, i1 } @llvm.smul.with.overflow.i64
  long long result;
  if (__builtin_mul_overflow(y, x, &result))
    return LongLongErrorCode;
  return result;
}

long long test_mixed_sign_mul_overflow_extend_unsigned(int x, unsigned y) {
  // CHECK: @test_mixed_sign_mul_overflow_extend_unsigned
  // CHECK: call { i65, i1 } @llvm.smul.with.overflow.i65
  unsigned long long result;
  if (__builtin_mul_overflow(y, x, &result))
    return LongLongErrorCode;
  return result;
}