; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt -S -early-cse -earlycse-debug-hash < %s | FileCheck %s --check-prefixes=CHECK,NO_ASSUME
; RUN: opt -S -basic-aa -early-cse-memssa < %s | FileCheck %s --check-prefixes=CHECK,NO_ASSUME
; RUN: opt -S -basic-aa -early-cse-memssa --enable-knowledge-retention < %s | FileCheck %s --check-prefixes=CHECK,USE_ASSUME

; Opaque callee: may read/write arbitrary memory, so only !invariant.load
; lets EarlyCSE forward loads across these calls.
declare void @clobber_and_use(i32)
; All three loads are !invariant.load of the same pointer, so EarlyCSE
; folds them into one even across the clobbering calls.
define void @f_0(i32* %ptr) {
; NO_ASSUME-LABEL: @f_0(
; NO_ASSUME-NEXT: [[VAL0:%.*]] = load i32, i32* [[PTR:%.*]], align 4, !invariant.load !0
; NO_ASSUME-NEXT: call void @clobber_and_use(i32 [[VAL0]])
; NO_ASSUME-NEXT: call void @clobber_and_use(i32 [[VAL0]])
; NO_ASSUME-NEXT: call void @clobber_and_use(i32 [[VAL0]])
; NO_ASSUME-NEXT: ret void
;
; USE_ASSUME-LABEL: @f_0(
; USE_ASSUME-NEXT: [[VAL0:%.*]] = load i32, i32* [[PTR:%.*]], align 4, !invariant.load !0
; USE_ASSUME-NEXT: call void @clobber_and_use(i32 [[VAL0]])
; USE_ASSUME-NEXT: call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[PTR]], i64 4), "nonnull"(i32* [[PTR]]), "align"(i32* [[PTR]], i64 4) ]
; USE_ASSUME-NEXT: call void @clobber_and_use(i32 [[VAL0]])
; USE_ASSUME-NEXT: call void @clobber_and_use(i32 [[VAL0]])
; USE_ASSUME-NEXT: ret void
;
  %val0 = load i32, i32* %ptr, !invariant.load !{}
  call void @clobber_and_use(i32 %val0)
  %val1 = load i32, i32* %ptr, !invariant.load !{}
  call void @clobber_and_use(i32 %val1)
  %val2 = load i32, i32* %ptr, !invariant.load !{}
  call void @clobber_and_use(i32 %val2)
  ret void
}
define void @f_1(i32* %ptr) {
; We can forward invariant loads to non-invariant loads.
; NO_ASSUME-LABEL: @f_1(
; NO_ASSUME-NEXT: [[VAL0:%.*]] = load i32, i32* [[PTR:%.*]], align 4, !invariant.load !0
; NO_ASSUME-NEXT: call void @clobber_and_use(i32 [[VAL0]])
; NO_ASSUME-NEXT: call void @clobber_and_use(i32 [[VAL0]])
; NO_ASSUME-NEXT: ret void
;
; USE_ASSUME-LABEL: @f_1(
; USE_ASSUME-NEXT: [[VAL0:%.*]] = load i32, i32* [[PTR:%.*]], align 4, !invariant.load !0
; USE_ASSUME-NEXT: call void @clobber_and_use(i32 [[VAL0]])
; USE_ASSUME-NEXT: call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[PTR]], i64 4), "nonnull"(i32* [[PTR]]), "align"(i32* [[PTR]], i64 4) ]
; USE_ASSUME-NEXT: call void @clobber_and_use(i32 [[VAL0]])
; USE_ASSUME-NEXT: ret void
;
  %val0 = load i32, i32* %ptr, !invariant.load !{}
  call void @clobber_and_use(i32 %val0)
  %val1 = load i32, i32* %ptr
  call void @clobber_and_use(i32 %val1)
  ret void
}
define void @f_2(i32* %ptr) {
; We can forward a non-invariant load into an invariant load.
; NO_ASSUME-LABEL: @f_2(
; NO_ASSUME-NEXT: [[VAL0:%.*]] = load i32, i32* [[PTR:%.*]], align 4
; NO_ASSUME-NEXT: call void @clobber_and_use(i32 [[VAL0]])
; NO_ASSUME-NEXT: call void @clobber_and_use(i32 [[VAL0]])
; NO_ASSUME-NEXT: ret void
;
; USE_ASSUME-LABEL: @f_2(
; USE_ASSUME-NEXT: [[VAL0:%.*]] = load i32, i32* [[PTR:%.*]], align 4
; USE_ASSUME-NEXT: call void @clobber_and_use(i32 [[VAL0]])
; USE_ASSUME-NEXT: call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[PTR]], i64 4), "nonnull"(i32* [[PTR]]), "align"(i32* [[PTR]], i64 4) ]
; USE_ASSUME-NEXT: call void @clobber_and_use(i32 [[VAL0]])
; USE_ASSUME-NEXT: ret void
;
  %val0 = load i32, i32* %ptr
  call void @clobber_and_use(i32 %val0)
  %val1 = load i32, i32* %ptr, !invariant.load !{}
  call void @clobber_and_use(i32 %val1)
  ret void
}
; An invariant load dominating a branch is forwarded into the
; non-invariant load on the taken path.
define void @f_3(i1 %cond, i32* %ptr) {
; NO_ASSUME-LABEL: @f_3(
; NO_ASSUME-NEXT: [[VAL0:%.*]] = load i32, i32* [[PTR:%.*]], align 4, !invariant.load !0
; NO_ASSUME-NEXT: call void @clobber_and_use(i32 [[VAL0]])
; NO_ASSUME-NEXT: br i1 [[COND:%.*]], label [[LEFT:%.*]], label [[RIGHT:%.*]]
; NO_ASSUME:       left:
; NO_ASSUME-NEXT: call void @clobber_and_use(i32 [[VAL0]])
; NO_ASSUME-NEXT: ret void
; NO_ASSUME:       right:
; NO_ASSUME-NEXT: ret void
;
; USE_ASSUME-LABEL: @f_3(
; USE_ASSUME-NEXT: [[VAL0:%.*]] = load i32, i32* [[PTR:%.*]], align 4, !invariant.load !0
; USE_ASSUME-NEXT: call void @clobber_and_use(i32 [[VAL0]])
; USE_ASSUME-NEXT: br i1 [[COND:%.*]], label [[LEFT:%.*]], label [[RIGHT:%.*]]
; USE_ASSUME:       left:
; USE_ASSUME-NEXT: call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[PTR]], i64 4), "nonnull"(i32* [[PTR]]), "align"(i32* [[PTR]], i64 4) ]
; USE_ASSUME-NEXT: call void @clobber_and_use(i32 [[VAL0]])
; USE_ASSUME-NEXT: ret void
; USE_ASSUME:       right:
; USE_ASSUME-NEXT: ret void
;
  %val0 = load i32, i32* %ptr, !invariant.load !{}
  call void @clobber_and_use(i32 %val0)
  br i1 %cond, label %left, label %right

left:
  %val1 = load i32, i32* %ptr
  call void @clobber_and_use(i32 %val1)
  ret void

right:
  ret void
}
define void @f_4(i1 %cond, i32* %ptr) {
; Negative test -- can't forward %val0 to %va1 because that'll break
; def-dominates-use (%val0 is only defined on the %left path).
; CHECK-LABEL: @f_4(
; CHECK-NEXT: br i1 [[COND:%.*]], label [[LEFT:%.*]], label [[MERGE:%.*]]
; CHECK:       left:
; CHECK-NEXT: [[VAL0:%.*]] = load i32, i32* [[PTR:%.*]], align 4, !invariant.load !0
; CHECK-NEXT: call void @clobber_and_use(i32 [[VAL0]])
; CHECK-NEXT: br label [[MERGE]]
; CHECK:       merge:
; CHECK-NEXT: [[VAL1:%.*]] = load i32, i32* [[PTR]], align 4
; CHECK-NEXT: call void @clobber_and_use(i32 [[VAL1]])
; CHECK-NEXT: ret void
;
  br i1 %cond, label %left, label %merge

left:
  %val0 = load i32, i32* %ptr, !invariant.load !{}
  call void @clobber_and_use(i32 %val0)
  br label %merge

merge:
  %val1 = load i32, i32* %ptr
  call void @clobber_and_use(i32 %val1)
  ret void
}
; By assumption, the call can't change contents of p
; LangRef is a bit unclear about whether the store is reachable, so
; for the moment we chose to be conservative and just assume it's valid
; to restore the same unchanging value.
define void @test_dse1(i32* %p) {
; NO_ASSUME-LABEL: @test_dse1(
; NO_ASSUME-NEXT: [[V1:%.*]] = load i32, i32* [[P:%.*]], align 4, !invariant.load !0
; NO_ASSUME-NEXT: call void @clobber_and_use(i32 [[V1]])
; NO_ASSUME-NEXT: ret void
;
; USE_ASSUME-LABEL: @test_dse1(
; USE_ASSUME-NEXT: [[V1:%.*]] = load i32, i32* [[P:%.*]], align 4, !invariant.load !0
; USE_ASSUME-NEXT: call void @clobber_and_use(i32 [[V1]])
; USE_ASSUME-NEXT: call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P]], i64 4), "nonnull"(i32* [[P]]), "align"(i32* [[P]], i64 4) ]
; USE_ASSUME-NEXT: ret void
;
  %v1 = load i32, i32* %p, !invariant.load !{}
  call void @clobber_and_use(i32 %v1)
  store i32 %v1, i32* %p
  ret void
}
; By assumption, v1 must equal v2 (TODO)
define void @test_false_negative_dse2(i32* %p, i32 %v2) {
; CHECK-LABEL: @test_false_negative_dse2(
; CHECK-NEXT: [[V1:%.*]] = load i32, i32* [[P:%.*]], align 4, !invariant.load !0
; CHECK-NEXT: call void @clobber_and_use(i32 [[V1]])
; CHECK-NEXT: store i32 [[V2:%.*]], i32* [[P]], align 4
; CHECK-NEXT: ret void
;
  %v1 = load i32, i32* %p, !invariant.load !{}
  call void @clobber_and_use(i32 %v1)
  store i32 %v2, i32* %p
  ret void
}
; If we remove the load, we still start an invariant scope since
; it lets us remove later loads not explicitly marked invariant
define void @test_scope_start_without_load(i32* %p) {
; NO_ASSUME-LABEL: @test_scope_start_without_load(
; NO_ASSUME-NEXT: [[V1:%.*]] = load i32, i32* [[P:%.*]], align 4
; NO_ASSUME-NEXT: [[ADD:%.*]] = add i32 [[V1]], [[V1]]
; NO_ASSUME-NEXT: call void @clobber_and_use(i32 [[ADD]])
; NO_ASSUME-NEXT: call void @clobber_and_use(i32 [[V1]])
; NO_ASSUME-NEXT: ret void
;
; USE_ASSUME-LABEL: @test_scope_start_without_load(
; USE_ASSUME-NEXT: [[V1:%.*]] = load i32, i32* [[P:%.*]], align 4
; USE_ASSUME-NEXT: call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P]], i64 4), "nonnull"(i32* [[P]]), "align"(i32* [[P]], i64 4) ]
; USE_ASSUME-NEXT: [[ADD:%.*]] = add i32 [[V1]], [[V1]]
; USE_ASSUME-NEXT: call void @clobber_and_use(i32 [[ADD]])
; USE_ASSUME-NEXT: call void @clobber_and_use(i32 [[V1]])
; USE_ASSUME-NEXT: ret void
;
  %v1 = load i32, i32* %p
  %v2 = load i32, i32* %p, !invariant.load !{}
  %add = add i32 %v1, %v2
  call void @clobber_and_use(i32 %add)
  %v3 = load i32, i32* %p
  call void @clobber_and_use(i32 %v3)
  ret void
}
; If we already have an invariant scope, don't want to start a new one
; with a potentially greater generation. This hides the earlier invariant
; load.
211 define void @test_scope_restart(i32* %p) {
212 ; NO_ASSUME-LABEL: @test_scope_restart(
213 ; NO_ASSUME-NEXT: [[V1:%.*]] = load i32, i32* [[P:%.*]], align 4, !invariant.load !0
214 ; NO_ASSUME-NEXT: call void @clobber_and_use(i32 [[V1]])
215 ; NO_ASSUME-NEXT: [[ADD:%.*]] = add i32 [[V1]], [[V1]]
216 ; NO_ASSUME-NEXT: call void @clobber_and_use(i32 [[ADD]])
217 ; NO_ASSUME-NEXT: call void @clobber_and_use(i32 [[V1]])
218 ; NO_ASSUME-NEXT: ret void
220 ; USE_ASSUME-LABEL: @test_scope_restart(
221 ; USE_ASSUME-NEXT: [[V1:%.*]] = load i32, i32* [[P:%.*]], align 4, !invariant.load !0
222 ; USE_ASSUME-NEXT: call void @clobber_and_use(i32 [[V1]])
223 ; USE_ASSUME-NEXT: call void @llvm.assume(i1 true) [ "dereferenceable"(i32* [[P]], i64 4), "nonnull"(i32* [[P]]), "align"(i32* [[P]], i64 4) ]
224 ; USE_ASSUME-NEXT: [[ADD:%.*]] = add i32 [[V1]], [[V1]]
225 ; USE_ASSUME-NEXT: call void @clobber_and_use(i32 [[ADD]])
226 ; USE_ASSUME-NEXT: call void @clobber_and_use(i32 [[V1]])
227 ; USE_ASSUME-NEXT: ret void
229 %v1 = load i32, i32* %p, !invariant.load !{}
230 call void @clobber_and_use(i32 %v1)
231 %v2 = load i32, i32* %p, !invariant.load !{}
232 %add = add i32 %v1, %v2
233 call void @clobber_and_use(i32 %add)
234 %v3 = load i32, i32* %p
235 call void @clobber_and_use(i32 %v3)