//===- Loads.cpp - Local load analysis ------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines simple local analyses for load instructions.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/Loads.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Target/TargetData.h"
#include "llvm/GlobalAlias.h"
#include "llvm/GlobalVariable.h"
#include "llvm/IntrinsicInst.h"
using namespace llvm;

/// AreEquivalentAddressValues - Test if A and B will obviously have the same
/// value. This includes recognizing that %t0 and %t1 will have the same
/// value in code like this:
///   %t0 = getelementptr @a, 0, 3
///   store i32 0, i32* %t0
///   %t1 = getelementptr @a, 0, 3
///   %t2 = load i32* %t1
///
static bool AreEquivalentAddressValues(const Value *A, const Value *B) {
  // Test if the values are trivially equivalent.
  if (A == B) return true;

  // Test if the values come from identical arithmetic instructions.
  // Use isIdenticalToWhenDefined instead of isIdenticalTo because
  // this function is only used when one address use dominates the
  // other, which means that they'll always either have the same
  // value or one of them will have an undefined value.
39 if (isa
<BinaryOperator
>(A
) || isa
<CastInst
>(A
) ||
40 isa
<PHINode
>(A
) || isa
<GetElementPtrInst
>(A
))
41 if (const Instruction
*BI
= dyn_cast
<Instruction
>(B
))
42 if (cast
<Instruction
>(A
)->isIdenticalToWhenDefined(BI
))
45 // Otherwise they may not be equivalent.
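
// Illustrative sketch (not part of the original file): the scanning code later
// in this file uses AreEquivalentAddressValues to recognize that a prior store
// provides the value for a later load from a structurally identical address.
// The helper below is a hypothetical example of that query; the name
// getValueStoredToSameAddress does not exist in LLVM.
static Value *getValueStoredToSameAddress(StoreInst *SI, Value *LoadPtr) {
  // If the store writes through an address equivalent to LoadPtr, the stored
  // value is what a load from LoadPtr would produce.
  if (AreEquivalentAddressValues(SI->getOperand(1), LoadPtr))
    return SI->getOperand(0);
  return 0;
}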

/// getUnderlyingObjectWithOffset - Strip off up to MaxLookup GEPs and
/// bitcasts to get back to the underlying object being addressed, keeping
/// track of the offset in bytes from the GEPs relative to the result.
/// This is closely related to Value::getUnderlyingObject but is located
/// here to avoid making VMCore depend on TargetData.
static Value *getUnderlyingObjectWithOffset(Value *V, const TargetData *TD,
                                            uint64_t &ByteOffset,
                                            unsigned MaxLookup = 6) {
  if (!V->getType()->isPointerTy())
    return V;
  for (unsigned Count = 0; MaxLookup == 0 || Count < MaxLookup; ++Count) {
    if (GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
      if (!GEP->hasAllConstantIndices())
        return V;
      SmallVector<Value*, 8> Indices(GEP->op_begin() + 1, GEP->op_end());
      ByteOffset += TD->getIndexedOffset(GEP->getPointerOperandType(),
                                         &Indices[0], Indices.size());
      V = GEP->getPointerOperand();
    } else if (Operator::getOpcode(V) == Instruction::BitCast) {
      V = cast<Operator>(V)->getOperand(0);
    } else if (GlobalAlias *GA = dyn_cast<GlobalAlias>(V)) {
      if (GA->mayBeOverridden())
        return V;
      V = GA->getAliasee();
    } else {
      return V;
    }
    assert(V->getType()->isPointerTy() && "Unexpected operand type!");
  }
  return V;
}
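
// Illustrative sketch (not part of the original file): for an address such as
// "getelementptr ([4 x i32]* @G, i32 0, i32 2)", getUnderlyingObjectWithOffset
// returns @G with a byte offset of 8 (two i32 elements). The wrapper below is
// a hypothetical example of how a caller obtains the base and offset.
static Value *stripToBaseWithOffset(Value *Addr, const TargetData *TD,
                                    uint64_t &Offset) {
  // Start from a zero offset; the callee accumulates constant GEP offsets.
  Offset = 0;
  return getUnderlyingObjectWithOffset(Addr, TD, Offset);
}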

/// isSafeToLoadUnconditionally - Return true if we know that executing a load
/// from this value cannot trap. If it is not obviously safe to load from the
/// specified pointer, we do a quick local scan of the basic block containing
/// ScanFrom, to determine if the address is already accessed.
bool llvm::isSafeToLoadUnconditionally(Value *V, Instruction *ScanFrom,
                                       unsigned Align, const TargetData *TD) {
  uint64_t ByteOffset = 0;
  Value *Base = V;
  if (TD)
    Base = getUnderlyingObjectWithOffset(V, TD, ByteOffset);

  const Type *BaseType = 0;
  unsigned BaseAlign = 0;
  if (const AllocaInst *AI = dyn_cast<AllocaInst>(Base)) {
    // An alloca is safe to load from as long as it is suitably aligned.
    BaseType = AI->getAllocatedType();
    BaseAlign = AI->getAlignment();
  } else if (const GlobalValue *GV = dyn_cast<GlobalValue>(Base)) {
    // Global variables are safe to load from but their size cannot be
    // guaranteed if they are overridden.
    if (!isa<GlobalAlias>(GV) && !GV->mayBeOverridden()) {
      BaseType = GV->getType()->getElementType();
      BaseAlign = GV->getAlignment();
    }
  }

  if (BaseType && BaseType->isSized()) {
    if (TD && BaseAlign == 0)
      BaseAlign = TD->getPrefTypeAlignment(BaseType);

    if (Align <= BaseAlign) {
      if (!TD)
        return true; // Loading directly from an alloca or global is OK.

      // Check if the load is within the bounds of the underlying object.
      const PointerType *AddrTy = cast<PointerType>(V->getType());
      uint64_t LoadSize = TD->getTypeStoreSize(AddrTy->getElementType());
      if (ByteOffset + LoadSize <= TD->getTypeAllocSize(BaseType) &&
          (Align == 0 || (ByteOffset % Align) == 0))
        return true;
    }
  }

  // Otherwise, be a little bit aggressive by scanning the local block to see
  // if the pointer is already being loaded from or stored to. If so, the
  // previous load or store would have already trapped, so there is no harm in
  // doing an extra load (also, CSE will later eliminate the load entirely).
  BasicBlock::iterator BBI = ScanFrom, E = ScanFrom->getParent()->begin();

  while (BBI != E) {
    --BBI;

    // If we see a call which may write to memory (i.e. which might do a free)
    // the pointer could be marked invalid.
    if (isa<CallInst>(BBI) && BBI->mayWriteToMemory() &&
        !isa<DbgInfoIntrinsic>(BBI))
      return false;

    if (LoadInst *LI = dyn_cast<LoadInst>(BBI)) {
      if (AreEquivalentAddressValues(LI->getOperand(0), V)) return true;
    } else if (StoreInst *SI = dyn_cast<StoreInst>(BBI)) {
      if (AreEquivalentAddressValues(SI->getOperand(1), V)) return true;
    }
  }
  return false;
}
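
// Illustrative sketch (not part of the original file): a speculation-style
// client would typically make this query before hoisting a load above a
// branch. The helper name canHoistLoadTo is hypothetical.
static bool canHoistLoadTo(LoadInst *LI, Instruction *InsertPt,
                           const TargetData *TD) {
  // Operand 0 of a load is its pointer operand.
  return isSafeToLoadUnconditionally(LI->getOperand(0), InsertPt,
                                     LI->getAlignment(), TD);
}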

/// FindAvailableLoadedValue - Scan the ScanBB block backwards (starting at the
/// instruction before ScanFrom) checking to see if we have the value at the
/// memory address *Ptr locally available within a small number of instructions.
/// If the value is available, return it.
///
/// If not, null is returned and ScanFrom is left at the last position the
/// value is known to be live through: just past an instruction that may
/// clobber *Ptr if the scan stopped early, or at begin() if we scanned the
/// entire block without finding something that invalidates *Ptr or provides
/// it.
///
/// MaxInstsToScan specifies the maximum instructions to scan in the block. If
/// it is set to 0, it will scan the whole block. You can also optionally
/// specify an alias analysis implementation, which makes this more precise.
Value *llvm::FindAvailableLoadedValue(Value *Ptr, BasicBlock *ScanBB,
                                      BasicBlock::iterator &ScanFrom,
                                      unsigned MaxInstsToScan,
                                      AliasAnalysis *AA) {
  if (MaxInstsToScan == 0) MaxInstsToScan = ~0U;

  // If we're using alias analysis to disambiguate, get the size of *Ptr.
  uint64_t AccessSize = 0;
  if (AA) {
    const Type *AccessTy = cast<PointerType>(Ptr->getType())->getElementType();
    AccessSize = AA->getTypeStoreSize(AccessTy);
  }

  while (ScanFrom != ScanBB->begin()) {
    // We must ignore debug info directives when counting (otherwise they
    // would affect codegen).
    Instruction *Inst = --ScanFrom;
    if (isa<DbgInfoIntrinsic>(Inst))
      continue;

    // Restore ScanFrom to the expected value in case the next test succeeds.
    ScanFrom++;

    // Don't scan huge blocks.
    if (MaxInstsToScan-- == 0) return 0;

    --ScanFrom;
    // If this is a load of Ptr, the loaded value is available.
    if (LoadInst *LI = dyn_cast<LoadInst>(Inst))
      if (AreEquivalentAddressValues(LI->getOperand(0), Ptr))
        return LI;

    if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
      // If this is a store through Ptr, the value is available!
      if (AreEquivalentAddressValues(SI->getOperand(1), Ptr))
        return SI->getOperand(0);

      // If Ptr is an alloca or a global and this is a store to a different
      // alloca or global, ignore the store. This is a trivial form of alias
      // analysis that is important for reg2mem'd code.
      if ((isa<AllocaInst>(Ptr) || isa<GlobalVariable>(Ptr)) &&
          (isa<AllocaInst>(SI->getOperand(1)) ||
           isa<GlobalVariable>(SI->getOperand(1))))
        continue;

      // If we have alias analysis and it says the store won't modify the loaded
      // value, ignore the store.
      if (AA &&
          (AA->getModRefInfo(SI, Ptr, AccessSize) & AliasAnalysis::Mod) == 0)
        continue;

      // Otherwise this store may or may not alias the pointer; bail out.
      ++ScanFrom;
      return 0;
    }

    // If this is some other instruction that may clobber Ptr, bail out.
    if (Inst->mayWriteToMemory()) {
      // If alias analysis claims that it really won't modify the load,
      // ignore it.
      if (AA &&
          (AA->getModRefInfo(Inst, Ptr, AccessSize) & AliasAnalysis::Mod) == 0)
        continue;

      // It may modify the pointer; bail out.
      ++ScanFrom;
      return 0;
    }
  }

  // Got to the start of the block; we didn't find the value, but we are done
  // scanning this block.
  return 0;
}
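
// Illustrative sketch (not part of the original file): a typical caller (for
// example an instcombine- or GVN-style transform) scans backwards from a load
// to see whether the loaded value is already available. The helper name
// findValueAvailableAt and the scan limit of 6 are hypothetical.
static Value *findValueAvailableAt(LoadInst *LI, AliasAnalysis *AA) {
  // Scanning starts at the instruction before ScanFrom, i.e. just above LI.
  BasicBlock::iterator ScanFrom = LI;
  return FindAvailableLoadedValue(LI->getOperand(0), LI->getParent(),
                                  ScanFrom, 6, AA);
}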