1 ; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt %s -passes=instcombine -S | FileCheck %s
4 declare { i4, i1 } @llvm.umul.with.overflow.i4(i4, i4) #1
; Positive test: the overflow bit of umul.with.overflow already implies
; %size != 0, so `overflow && (%size != 0)` folds to just the overflow bit.
define i1 @t0_umul(i4 %size, i4 %nmemb) {
; CHECK-LABEL: @t0_umul(
; CHECK-NEXT:    [[UMUL:%.*]] = tail call { i4, i1 } @llvm.umul.with.overflow.i4(i4 [[SIZE:%.*]], i4 [[NMEMB:%.*]])
; CHECK-NEXT:    [[UMUL_OV:%.*]] = extractvalue { i4, i1 } [[UMUL]], 1
; CHECK-NEXT:    ret i1 [[UMUL_OV]]
;
  %cmp = icmp ne i4 %size, 0
  %umul = tail call { i4, i1 } @llvm.umul.with.overflow.i4(i4 %size, i4 %nmemb)
  %umul.ov = extractvalue { i4, i1 } %umul, 1
  %and = select i1 %umul.ov, i1 %cmp, i1 false
  ret i1 %and
}
; Positive test: same fold as @t0_umul but with the select operands swapped
; (cmp && overflow). Hoisting the call past the short-circuiting select
; requires freezing %nmemb, as reflected in the expected output.
define i1 @t1_commutative(i4 %size, i4 %nmemb) {
; CHECK-LABEL: @t1_commutative(
; CHECK-NEXT:    [[NMEMB_FR:%.*]] = freeze i4 [[NMEMB:%.*]]
; CHECK-NEXT:    [[UMUL:%.*]] = tail call { i4, i1 } @llvm.umul.with.overflow.i4(i4 [[SIZE:%.*]], i4 [[NMEMB_FR]])
; CHECK-NEXT:    [[UMUL_OV:%.*]] = extractvalue { i4, i1 } [[UMUL]], 1
; CHECK-NEXT:    ret i1 [[UMUL_OV]]
;
  %cmp = icmp ne i4 %size, 0
  %umul = tail call { i4, i1 } @llvm.umul.with.overflow.i4(i4 %size, i4 %nmemb)
  %umul.ov = extractvalue { i4, i1 } %umul, 1
  %and = select i1 %cmp, i1 %umul.ov, i1 false ; swapped
  ret i1 %and
}
; Negative test: the icmp checks %size1, which is not an operand of the
; multiply, so the and/select pattern must NOT be folded away.
define i1 @n2_wrong_size(i4 %size0, i4 %size1, i4 %nmemb) {
; CHECK-LABEL: @n2_wrong_size(
; CHECK-NEXT:    [[CMP:%.*]] = icmp ne i4 [[SIZE1:%.*]], 0
; CHECK-NEXT:    [[UMUL:%.*]] = tail call { i4, i1 } @llvm.umul.with.overflow.i4(i4 [[SIZE0:%.*]], i4 [[NMEMB:%.*]])
; CHECK-NEXT:    [[UMUL_OV:%.*]] = extractvalue { i4, i1 } [[UMUL]], 1
; CHECK-NEXT:    [[AND:%.*]] = select i1 [[UMUL_OV]], i1 [[CMP]], i1 false
; CHECK-NEXT:    ret i1 [[AND]]
;
  %cmp = icmp ne i4 %size1, 0 ; not %size0
  %umul = tail call { i4, i1 } @llvm.umul.with.overflow.i4(i4 %size0, i4 %nmemb)
  %umul.ov = extractvalue { i4, i1 } %umul, 1
  %and = select i1 %umul.ov, i1 %cmp, i1 false
  ret i1 %and
}
; Negative test: the comparison uses 'eq' instead of 'ne', so the overflow
; bit does not subsume it; the conjunction must survive (as a plain 'and').
define i1 @n3_wrong_pred(i4 %size, i4 %nmemb) {
; CHECK-LABEL: @n3_wrong_pred(
; CHECK-NEXT:    [[CMP:%.*]] = icmp eq i4 [[SIZE:%.*]], 0
; CHECK-NEXT:    [[UMUL:%.*]] = tail call { i4, i1 } @llvm.umul.with.overflow.i4(i4 [[SIZE]], i4 [[NMEMB:%.*]])
; CHECK-NEXT:    [[UMUL_OV:%.*]] = extractvalue { i4, i1 } [[UMUL]], 1
; CHECK-NEXT:    [[AND:%.*]] = and i1 [[UMUL_OV]], [[CMP]]
; CHECK-NEXT:    ret i1 [[AND]]
;
  %cmp = icmp eq i4 %size, 0 ; not 'ne'
  %umul = tail call { i4, i1 } @llvm.umul.with.overflow.i4(i4 %size, i4 %nmemb)
  %umul.ov = extractvalue { i4, i1 } %umul, 1
  %and = select i1 %umul.ov, i1 %cmp, i1 false
  ret i1 %and
}
; Negative test: the select encodes a logical OR, not an AND, so the
; overflow-implies-nonzero fold does not apply.
define i1 @n4_not_and(i4 %size, i4 %nmemb) {
; CHECK-LABEL: @n4_not_and(
; CHECK-NEXT:    [[CMP:%.*]] = icmp ne i4 [[SIZE:%.*]], 0
; CHECK-NEXT:    [[UMUL:%.*]] = tail call { i4, i1 } @llvm.umul.with.overflow.i4(i4 [[SIZE]], i4 [[NMEMB:%.*]])
; CHECK-NEXT:    [[UMUL_OV:%.*]] = extractvalue { i4, i1 } [[UMUL]], 1
; CHECK-NEXT:    [[AND:%.*]] = or i1 [[UMUL_OV]], [[CMP]]
; CHECK-NEXT:    ret i1 [[AND]]
;
  %cmp = icmp ne i4 %size, 0
  %umul = tail call { i4, i1 } @llvm.umul.with.overflow.i4(i4 %size, i4 %nmemb)
  %umul.ov = extractvalue { i4, i1 } %umul, 1
  %and = select i1 %umul.ov, i1 true, i1 %cmp ; not 'and'
  ret i1 %and
}
; Negative test: the icmp compares against 1 instead of 0; overflow only
; implies %size != 0, so the conjunction must be kept (as a plain 'and').
define i1 @n5_not_zero(i4 %size, i4 %nmemb) {
; CHECK-LABEL: @n5_not_zero(
; CHECK-NEXT:    [[CMP:%.*]] = icmp ne i4 [[SIZE:%.*]], 1
; CHECK-NEXT:    [[UMUL:%.*]] = tail call { i4, i1 } @llvm.umul.with.overflow.i4(i4 [[SIZE]], i4 [[NMEMB:%.*]])
; CHECK-NEXT:    [[UMUL_OV:%.*]] = extractvalue { i4, i1 } [[UMUL]], 1
; CHECK-NEXT:    [[AND:%.*]] = and i1 [[UMUL_OV]], [[CMP]]
; CHECK-NEXT:    ret i1 [[AND]]
;
  %cmp = icmp ne i4 %size, 1 ; should be '0'
  %umul = tail call { i4, i1 } @llvm.umul.with.overflow.i4(i4 %size, i4 %nmemb)
  %umul.ov = extractvalue { i4, i1 } %umul, 1
  %and = select i1 %umul.ov, i1 %cmp, i1 false
  ret i1 %and
}