//===- BasicAliasAnalysis.cpp - Local Alias Analysis Impl ----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file was developed by the LLVM research group and is distributed under
// the University of Illinois Open Source License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the default implementation of the Alias Analysis interface
// that simply implements a few identities (two different globals cannot alias,
// etc), but otherwise does no analysis.
//
//===----------------------------------------------------------------------===//

16 #include "llvm/Analysis/AliasAnalysis.h"
17 #include "llvm/Analysis/Passes.h"
18 #include "llvm/Constants.h"
19 #include "llvm/DerivedTypes.h"
20 #include "llvm/Function.h"
21 #include "llvm/GlobalVariable.h"
22 #include "llvm/Instructions.h"
23 #include "llvm/Pass.h"
24 #include "llvm/Target/TargetData.h"
25 #include "llvm/Support/GetElementPtrTypeIterator.h"
26 #include "llvm/Support/Visibility.h"
  /// NoAA - This class implements the -no-aa pass, which always returns "I
  /// don't know" for alias queries.  NoAA is unlike other alias analysis
  /// implementations, in that it does not chain to a previous analysis.  As
  /// such it doesn't follow many of the rules that other alias analyses must.
  ///
  struct VISIBILITY_HIDDEN NoAA : public ImmutablePass, public AliasAnalysis {
    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.addRequired<TargetData>();
    }

    virtual void initializePass() {
      TD = &getAnalysis<TargetData>();
    }

    virtual AliasResult alias(const Value *V1, unsigned V1Size,
                              const Value *V2, unsigned V2Size) {
      return MayAlias;
    }

    virtual ModRefBehavior getModRefBehavior(Function *F, CallSite CS,
                                         std::vector<PointerAccessInfo> *Info) {
      return UnknownModRefBehavior;
    }

    virtual void getArgumentAccesses(Function *F, CallSite CS,
                                     std::vector<PointerAccessInfo> &Info) {
      assert(0 && "This method may not be called on this function!");
    }

    virtual void getMustAliases(Value *P, std::vector<Value*> &RetVals) {}
    virtual bool pointsToConstantMemory(const Value *P) { return false; }
    virtual ModRefResult getModRefInfo(CallSite CS, Value *P, unsigned Size) {
      return ModRef;
    }
    virtual ModRefResult getModRefInfo(CallSite CS1, CallSite CS2) {
      return ModRef;
    }
    virtual bool hasNoModRefInfoForCalls() const { return true; }

    virtual void deleteValue(Value *V) {}
    virtual void copyValue(Value *From, Value *To) {}
  };

  // Register this pass...
  RegisterOpt<NoAA>
  U("no-aa", "No Alias Analysis (always returns 'may' alias)");

  // Declare that we implement the AliasAnalysis interface
  RegisterAnalysisGroup<AliasAnalysis, NoAA> V;
}  // End of anonymous namespace

ImmutablePass *llvm::createNoAAPass() { return new NoAA(); }

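// Illustrative usage (hypothetical command line, not from this file):
// selecting this pass on the opt command line, e.g. "opt -no-aa -licm foo.bc",
// makes every query from AA clients get the conservative MayAlias / ModRef
// answer, which is handy as a baseline when debugging transformations that
// depend on alias analysis.
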
namespace {
  /// BasicAliasAnalysis - This is the default alias analysis implementation.
  /// Because it doesn't chain to a previous alias analysis (like -no-aa), it
  /// derives from the NoAA class.
  struct VISIBILITY_HIDDEN BasicAliasAnalysis : public NoAA {
    AliasResult alias(const Value *V1, unsigned V1Size,
                      const Value *V2, unsigned V2Size);

    ModRefResult getModRefInfo(CallSite CS, Value *P, unsigned Size);
    ModRefResult getModRefInfo(CallSite CS1, CallSite CS2) {
      return NoAA::getModRefInfo(CS1, CS2);
    }

    /// hasNoModRefInfoForCalls - We can provide mod/ref information against
    /// non-escaping allocations.
    virtual bool hasNoModRefInfoForCalls() const { return false; }

    /// pointsToConstantMemory - Chase pointers until we find a (constant
    /// global) or not.
    bool pointsToConstantMemory(const Value *P);

    virtual ModRefBehavior getModRefBehavior(Function *F, CallSite CS,
                                          std::vector<PointerAccessInfo> *Info);

    // CheckGEPInstructions - Check two GEP instructions with known
    // must-aliasing base pointers.  This checks to see if the index expressions
    // preclude the pointers from aliasing...
    AliasResult
    CheckGEPInstructions(const Type *BasePtr1Ty, std::vector<Value*> &GEP1Ops,
                         unsigned G1S,
                         const Type *BasePtr2Ty, std::vector<Value*> &GEP2Ops,
                         unsigned G2S);
  };

  // Register this pass...
  RegisterOpt<BasicAliasAnalysis>
  X("basicaa", "Basic Alias Analysis (default AA impl)");

  // Declare that we implement the AliasAnalysis interface
  RegisterAnalysisGroup<AliasAnalysis, BasicAliasAnalysis, true> Y;
}  // End of anonymous namespace

ImmutablePass *llvm::createBasicAliasAnalysisPass() {
  return new BasicAliasAnalysis();
}

// hasUniqueAddress - Return true if the specified value points to something
// with a unique, discernable, address.
static inline bool hasUniqueAddress(const Value *V) {
  return isa<GlobalValue>(V) || isa<AllocationInst>(V);
}

// getUnderlyingObject - This traverses the use chain to figure out what object
// the specified value points to.  If the value points to, or is derived from,
// a unique object or an argument, return it.
static const Value *getUnderlyingObject(const Value *V) {
  if (!isa<PointerType>(V->getType())) return 0;

  // If we are at some type of object... return it.
  if (hasUniqueAddress(V) || isa<Argument>(V)) return V;

  // Traverse through different addressing mechanisms...
  if (const Instruction *I = dyn_cast<Instruction>(V)) {
    if (isa<CastInst>(I) || isa<GetElementPtrInst>(I))
      return getUnderlyingObject(I->getOperand(0));
  } else if (const ConstantExpr *CE = dyn_cast<ConstantExpr>(V)) {
    if (CE->getOpcode() == Instruction::Cast ||
        CE->getOpcode() == Instruction::GetElementPtr)
      return getUnderlyingObject(CE->getOperand(0));
  } else if (const GlobalValue *GV = dyn_cast<GlobalValue>(V)) {
    return GV;
  }
  return 0;
}

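// Illustrative example (hypothetical IR, not from a test): given
//   %A = alloca [10 x int]
//   %P = getelementptr [10 x int]* %A, long 0, long 5
//   %Q = cast int* %P to sbyte*
// getUnderlyingObject(%Q) looks through the cast and the getelementptr and
// returns the alloca %A, so the callers below can reason about whole objects
// rather than derived pointers.
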
// isGEP - Return V as a User if it is a getelementptr instruction or a
// getelementptr constant expression, otherwise return null.
static const User *isGEP(const Value *V) {
  if (isa<GetElementPtrInst>(V) ||
      (isa<ConstantExpr>(V) &&
       cast<ConstantExpr>(V)->getOpcode() == Instruction::GetElementPtr))
    return cast<User>(V);
  return 0;
}

// GetGEPOperands - Collect the index operands of V (a getelementptr
// instruction or constant expression) into GEPOps, folding through leading
// zero indices of chained GEPs, and return the ultimate base pointer.
static const Value *GetGEPOperands(const Value *V, std::vector<Value*> &GEPOps){
  assert(GEPOps.empty() && "Expect empty list to populate!");
  GEPOps.insert(GEPOps.end(), cast<User>(V)->op_begin()+1,
                cast<User>(V)->op_end());

  // Accumulate all of the chained indexes into the operand array
  V = cast<User>(V)->getOperand(0);

  while (const User *G = isGEP(V)) {
    if (!isa<Constant>(GEPOps[0]) || isa<GlobalValue>(GEPOps[0]) ||
        !cast<Constant>(GEPOps[0])->isNullValue())
      break;  // Don't handle folding arbitrary pointer offsets yet...
    GEPOps.erase(GEPOps.begin());   // Drop the zero index
    GEPOps.insert(GEPOps.begin(), G->op_begin()+1, G->op_end());
    V = G->getOperand(0);
  }
  return V;
}

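// Illustrative example (hypothetical IR): for the chain
//   %P = getelementptr { int, [4 x int] }* %S, long 0, uint 1
//   %Q = getelementptr [4 x int]* %P, long 0, long 2
// GetGEPOperands(%Q, Ops) folds the leading zero index of %Q into the outer
// getelementptr, leaving Ops = {0, 1, 2} and returning %S, so the two-level
// access can be analyzed as if it were a single getelementptr off of %S.
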
/// pointsToConstantMemory - Chase pointers until we find a (constant
/// global) or not.
bool BasicAliasAnalysis::pointsToConstantMemory(const Value *P) {
  if (const Value *V = getUnderlyingObject(P))
    if (const GlobalVariable *GV = dyn_cast<GlobalVariable>(V))
      return GV->isConstant();
  return false;
}

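// Illustrative example: a pointer derived (through casts or getelementptrs)
// from something like "%msg = internal constant [4 x sbyte] c"abc\00"" is
// reported as pointing to constant memory, while pointers into non-constant
// globals, allocas, or mallocs are not.
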
static bool AddressMightEscape(const Value *V) {
  for (Value::use_const_iterator UI = V->use_begin(), E = V->use_end();
       UI != E; ++UI) {
    const Instruction *I = cast<Instruction>(*UI);
    switch (I->getOpcode()) {
    case Instruction::Load: break;
    case Instruction::Store:
      if (I->getOperand(0) == V)
        return true; // Escapes if the pointer is stored.
      break;
    case Instruction::GetElementPtr:
      if (AddressMightEscape(I)) return true;
      break;
    case Instruction::Cast:
      if (!isa<PointerType>(I->getType()))
        return true;
      if (AddressMightEscape(I)) return true;
      break;
    case Instruction::Ret:
      // If returned, the address will escape to calling functions, but no
      // callees could modify it.
      break;
    default:
      return true;
    }
  }
  return false;
}

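// Illustrative example: for "%A = alloca int", a user such as
// "store int 4, int* %A" does not make %A escape (the pointer is only the
// address operand), but "store int* %A, int** %P" does, because the address
// itself is the value being stored and may later be reachable through %P.
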
// getModRefInfo - Check to see if the specified callsite can clobber the
// specified memory object.  Since we only look at local properties of this
// function, we really can't say much about this query.  We do, however, use
// simple "address taken" analysis on local objects.
//
AliasAnalysis::ModRefResult
BasicAliasAnalysis::getModRefInfo(CallSite CS, Value *P, unsigned Size) {
  if (!isa<Constant>(P))
    if (const AllocationInst *AI =
                  dyn_cast_or_null<AllocationInst>(getUnderlyingObject(P))) {
      // Okay, the pointer is to a stack allocated object.  If we can prove
      // that the pointer never "escapes", then we know the call cannot clobber
      // it, because it simply can't get its address.
      if (!AddressMightEscape(AI))
        return NoModRef;

      // If this is a tail call and P points to a stack location, we know that
      // the tail call cannot access or modify the local stack.
      if (CallInst *CI = dyn_cast<CallInst>(CS.getInstruction()))
        if (CI->isTailCall() && isa<AllocaInst>(AI))
          return NoModRef;
    }

  // The AliasAnalysis base class has some smarts, lets use them.
  return AliasAnalysis::getModRefInfo(CS, P, Size);
}

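// Illustrative example (hypothetical IR): in
//   %X = alloca int
//   call void %foo()
//   %V = load int* %X
// the only use of %X is the load, so AddressMightEscape(%X) is false and the
// call to %foo is reported as NoModRef with respect to %X, allowing clients
// to move or eliminate memory operations on %X across the call.
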
// alias - Provide a bunch of ad-hoc rules to disambiguate in common cases,
// such as array references.  Note that this function is heavily tail
// recursive.  Hopefully we have a smart C++ compiler.  :)
//
AliasAnalysis::AliasResult
BasicAliasAnalysis::alias(const Value *V1, unsigned V1Size,
                          const Value *V2, unsigned V2Size) {
  // Strip off any constant expression casts if they exist
  if (const ConstantExpr *CE = dyn_cast<ConstantExpr>(V1))
    if (CE->getOpcode() == Instruction::Cast &&
        isa<PointerType>(CE->getOperand(0)->getType()))
      V1 = CE->getOperand(0);
  if (const ConstantExpr *CE = dyn_cast<ConstantExpr>(V2))
    if (CE->getOpcode() == Instruction::Cast &&
        isa<PointerType>(CE->getOperand(0)->getType()))
      V2 = CE->getOperand(0);

  // Are we checking for alias of the same value?
  if (V1 == V2) return MustAlias;

  if ((!isa<PointerType>(V1->getType()) || !isa<PointerType>(V2->getType())) &&
      V1->getType() != Type::LongTy && V2->getType() != Type::LongTy)
    return NoAlias;  // Scalars cannot alias each other

  // Strip off cast instructions...
  if (const Instruction *I = dyn_cast<CastInst>(V1))
    if (isa<PointerType>(I->getOperand(0)->getType()))
      return alias(I->getOperand(0), V1Size, V2, V2Size);
  if (const Instruction *I = dyn_cast<CastInst>(V2))
    if (isa<PointerType>(I->getOperand(0)->getType()))
      return alias(V1, V1Size, I->getOperand(0), V2Size);

  // Figure out what objects these things are pointing to if we can...
  const Value *O1 = getUnderlyingObject(V1);
  const Value *O2 = getUnderlyingObject(V2);

  // Pointing at a discernible object?
  if (O1) {
    if (O2) {
      if (isa<Argument>(O1)) {
        // Incoming argument cannot alias locally allocated object!
        if (isa<AllocationInst>(O2)) return NoAlias;
        // Otherwise, nothing is known...
      } else if (isa<Argument>(O2)) {
        // Incoming argument cannot alias locally allocated object!
        if (isa<AllocationInst>(O1)) return NoAlias;
        // Otherwise, nothing is known...
      } else if (O1 != O2) {
        // If they are two different objects, we know that we have no alias...
        return NoAlias;
      }

      // If they are the same object, then we can look at the indexes.  If the
      // index off of the object is the same for both pointers, they must
      // alias.  If they are provably different, they must not alias.
      // Otherwise, we can't tell anything.
    }

    if (!isa<Argument>(O1) && isa<ConstantPointerNull>(V2))
      return NoAlias;                    // Unique values don't alias null

    if (isa<GlobalVariable>(O1) ||
        (isa<AllocationInst>(O1) &&
         !cast<AllocationInst>(O1)->isArrayAllocation()))
      if (cast<PointerType>(O1->getType())->getElementType()->isSized()) {
        // If the size of the other access is larger than the total size of the
        // global/alloca/malloc, it cannot be accessing the global (it's
        // undefined to load or store bytes before or after an object).
        const Type *ElTy = cast<PointerType>(O1->getType())->getElementType();
        unsigned GlobalSize = getTargetData().getTypeSize(ElTy);
        if (GlobalSize < V2Size && V2Size != ~0U)
          return NoAlias;
      }
  }

  if (O2) {
    if (!isa<Argument>(O2) && isa<ConstantPointerNull>(V1))
      return NoAlias;                    // Unique values don't alias null

    if (isa<GlobalVariable>(O2) ||
        (isa<AllocationInst>(O2) &&
         !cast<AllocationInst>(O2)->isArrayAllocation()))
      if (cast<PointerType>(O2->getType())->getElementType()->isSized()) {
        // If the size of the other access is larger than the total size of the
        // global/alloca/malloc, it cannot be accessing the object (it's
        // undefined to load or store bytes before or after an object).
        const Type *ElTy = cast<PointerType>(O2->getType())->getElementType();
        unsigned GlobalSize = getTargetData().getTypeSize(ElTy);
        if (GlobalSize < V1Size && V1Size != ~0U)
          return NoAlias;
      }
  }

  // If we have two gep instructions with must-alias'ing base pointers, figure
  // out if the indexes to the GEP tell us anything about the derived pointer.
  // Note that we also handle chains of getelementptr instructions as well as
  // constant expression getelementptrs here.
  //
  if (isGEP(V1) && isGEP(V2)) {
    // Drill down into the first non-gep value, to test for must-aliasing of
    // the base pointers.
    const Value *BasePtr1 = V1, *BasePtr2 = V2;
    do {
      BasePtr1 = cast<User>(BasePtr1)->getOperand(0);
    } while (isGEP(BasePtr1) &&
             cast<User>(BasePtr1)->getOperand(1) ==
       Constant::getNullValue(cast<User>(BasePtr1)->getOperand(1)->getType()));
    do {
      BasePtr2 = cast<User>(BasePtr2)->getOperand(0);
    } while (isGEP(BasePtr2) &&
             cast<User>(BasePtr2)->getOperand(1) ==
       Constant::getNullValue(cast<User>(BasePtr2)->getOperand(1)->getType()));

    // Do the base pointers alias?
    AliasResult BaseAlias = alias(BasePtr1, V1Size, BasePtr2, V2Size);
    if (BaseAlias == NoAlias) return NoAlias;
    if (BaseAlias == MustAlias) {
      // If the base pointers alias each other exactly, check to see if we can
      // figure out anything about the resultant pointers, to try to prove
      // non-aliasing.

      // Collect all of the chained GEP operands together into one simple place
      std::vector<Value*> GEP1Ops, GEP2Ops;
      BasePtr1 = GetGEPOperands(V1, GEP1Ops);
      BasePtr2 = GetGEPOperands(V2, GEP2Ops);

      // If GetGEPOperands were able to fold to the same must-aliased pointer,
      // do the comparison.
      if (BasePtr1 == BasePtr2) {
        AliasResult GAlias =
          CheckGEPInstructions(BasePtr1->getType(), GEP1Ops, V1Size,
                               BasePtr2->getType(), GEP2Ops, V2Size);
        if (GAlias != MayAlias)
          return GAlias;
      }
    }
  }

  // Check to see if these two pointers are related by a getelementptr
  // instruction.  If one pointer is a GEP with a non-zero index of the other
  // pointer, we know they cannot alias.
  //
  if (isGEP(V2)) {
    std::swap(V1, V2);
    std::swap(V1Size, V2Size);
  }

  if (V1Size != ~0U && V2Size != ~0U)
    if (const User *GEP = isGEP(V1)) {
      std::vector<Value*> GEPOperands;
      const Value *BasePtr = GetGEPOperands(V1, GEPOperands);

      AliasResult R = alias(BasePtr, V1Size, V2, V2Size);
      if (R == MustAlias) {
        // If there is at least one non-zero constant index, we know they
        // cannot alias.
        bool ConstantFound = false;
        bool AllZerosFound = true;
        for (unsigned i = 0, e = GEPOperands.size(); i != e; ++i)
          if (const Constant *C = dyn_cast<Constant>(GEPOperands[i])) {
            if (!C->isNullValue()) {
              ConstantFound = true;
              AllZerosFound = false;
              break;
            }
          } else {
            AllZerosFound = false;
          }

        // If we have getelementptr <ptr>, 0, 0, 0, 0, ... and V2 must aliases
        // the ptr, the end result is a must alias also.
        if (AllZerosFound)
          return MustAlias;

        if (ConstantFound) {
          if (V2Size <= 1 && V1Size <= 1)  // Just pointer check?
            return NoAlias;

          // Otherwise we have to check to see that the distance is more than
          // the size of the argument... build an index vector that is equal to
          // the arguments provided, except substitute 0's for any variable
          // indexes we find...
          if (cast<PointerType>(
                BasePtr->getType())->getElementType()->isSized()) {
            for (unsigned i = 0; i != GEPOperands.size(); ++i)
              if (!isa<ConstantInt>(GEPOperands[i]))
                GEPOperands[i] =
                  Constant::getNullValue(GEPOperands[i]->getType());
            int64_t Offset =
              getTargetData().getIndexedOffset(BasePtr->getType(), GEPOperands);

            if (Offset >= (int64_t)V2Size || Offset <= -(int64_t)V1Size)
              return NoAlias;
          }
        }
      }
    }

  return MayAlias;
}

// ValuesEqual - Compare two index operands for equality, looking through
// constant sign extensions so that differently typed constants can match.
static bool ValuesEqual(Value *V1, Value *V2) {
  if (V1->getType() == V2->getType())
    return V1 == V2;
  if (Constant *C1 = dyn_cast<Constant>(V1))
    if (Constant *C2 = dyn_cast<Constant>(V2)) {
      // Sign extend the constants to long types.
      C1 = ConstantExpr::getSignExtend(C1, Type::LongTy);
      C2 = ConstantExpr::getSignExtend(C2, Type::LongTy);
      return C1 == C2;
    }
  return false;
}

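// Illustrative example: ValuesEqual treats "uint 3" and "long 3" as equal by
// sign extending both constants to long, so getelementptrs that spell the
// same index with different integer types still benefit from the equal-prefix
// scan in CheckGEPInstructions below.
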
/// CheckGEPInstructions - Check two GEP instructions with known must-aliasing
/// base pointers.  This checks to see if the index expressions preclude the
/// pointers from aliasing...
AliasAnalysis::AliasResult BasicAliasAnalysis::
CheckGEPInstructions(const Type *BasePtr1Ty, std::vector<Value*> &GEP1Ops,
                     unsigned G1S,
                     const Type *BasePtr2Ty, std::vector<Value*> &GEP2Ops,
                     unsigned G2S) {
  // We currently can't handle the case when the base pointers have different
  // primitive types.  Since this is uncommon anyway, we are happy being
  // extremely conservative.
  if (BasePtr1Ty != BasePtr2Ty)
    return MayAlias;

  const PointerType *GEPPointerTy = cast<PointerType>(BasePtr1Ty);

  // Find the (possibly empty) initial sequence of equal values... which are not
  // necessarily constants.
  unsigned NumGEP1Operands = GEP1Ops.size(), NumGEP2Operands = GEP2Ops.size();
  unsigned MinOperands = std::min(NumGEP1Operands, NumGEP2Operands);
  unsigned MaxOperands = std::max(NumGEP1Operands, NumGEP2Operands);
  unsigned UnequalOper = 0;
  while (UnequalOper != MinOperands &&
         ValuesEqual(GEP1Ops[UnequalOper], GEP2Ops[UnequalOper])) {
    // Advance through the type as we go...
    ++UnequalOper;
    if (const CompositeType *CT = dyn_cast<CompositeType>(BasePtr1Ty))
      BasePtr1Ty = CT->getTypeAtIndex(GEP1Ops[UnequalOper-1]);
    else {
      // If all operands equal each other, then the derived pointers must
      // alias each other...
      BasePtr1Ty = 0;
      assert(UnequalOper == NumGEP1Operands && UnequalOper == NumGEP2Operands &&
             "Ran out of type nesting, but not out of operands?");
      return MustAlias;
    }
  }

  // If we have seen all constant operands, and run out of indexes on one of
  // the getelementptrs, check to see if the tail of the leftover one is all
  // zeros.  If so, return mustalias.
  if (UnequalOper == MinOperands) {
    if (GEP1Ops.size() < GEP2Ops.size()) std::swap(GEP1Ops, GEP2Ops);

    bool AllAreZeros = true;
    for (unsigned i = UnequalOper; i != MaxOperands; ++i)
      if (!isa<Constant>(GEP1Ops[i]) ||
          !cast<Constant>(GEP1Ops[i])->isNullValue()) {
        AllAreZeros = false;
        break;
      }
    if (AllAreZeros) return MustAlias;
    return MayAlias;
  }

  // So now we know that the indexes derived from the base pointers,
  // which are known to alias, are different.  We can still determine a
  // no-alias result if there are differing constant pairs in the index
  // chain.  For example:
  //      A[i][0] != A[j][1] iff (&A[0][1]-&A[0][0] >= std::max(G1S, G2S))
  //
  // We have to be careful here about array accesses.  In particular, consider:
  //       A[1][0] vs A[0][i]
  // In this case, we don't *know* that the array will be accessed in bounds:
  // the index could even be negative.  Because of this, we have to
  // conservatively *give up* and return may alias.  We disregard differing
  // array subscripts that are followed by a variable index without going
  // through a struct.
  //
  unsigned SizeMax = std::max(G1S, G2S);
  if (SizeMax == ~0U) return MayAlias; // Avoid frivolous work.

  // Scan for the first operand that is constant and unequal in the
  // two getelementptrs...
  unsigned FirstConstantOper = UnequalOper;
  for (; FirstConstantOper != MinOperands; ++FirstConstantOper) {
    const Value *G1Oper = GEP1Ops[FirstConstantOper];
    const Value *G2Oper = GEP2Ops[FirstConstantOper];

    if (G1Oper != G2Oper)   // Found non-equal constant indexes...
      if (Constant *G1OC = dyn_cast<ConstantInt>(const_cast<Value*>(G1Oper)))
        if (Constant *G2OC = dyn_cast<ConstantInt>(const_cast<Value*>(G2Oper))){
          if (G1OC->getType() != G2OC->getType()) {
            // Sign extend both operands to long.
            G1OC = ConstantExpr::getSignExtend(G1OC, Type::LongTy);
            G2OC = ConstantExpr::getSignExtend(G2OC, Type::LongTy);
            GEP1Ops[FirstConstantOper] = G1OC;
            GEP2Ops[FirstConstantOper] = G2OC;
          }

          if (G1OC != G2OC) {
            // Handle the "be careful" case above: if this is an array
            // subscript, scan for a subsequent variable array index.
            if (isa<ArrayType>(BasePtr1Ty)) {
              const Type *NextTy =
                cast<ArrayType>(BasePtr1Ty)->getElementType();
              bool isBadCase = false;

              for (unsigned Idx = FirstConstantOper+1;
                   Idx != MinOperands && isa<ArrayType>(NextTy); ++Idx) {
                const Value *V1 = GEP1Ops[Idx], *V2 = GEP2Ops[Idx];
                if (!isa<Constant>(V1) || !isa<Constant>(V2)) {
                  isBadCase = true;
                  break;
                }
                NextTy = cast<ArrayType>(NextTy)->getElementType();
              }

              if (isBadCase) G1OC = 0;
            }

            // Make sure they are comparable (ie, not constant expressions),
            // and make sure the GEP with the smaller leading constant is GEP1.
            if (G1OC) {
              Constant *Compare = ConstantExpr::getSetGT(G1OC, G2OC);
              if (ConstantBool *CV = dyn_cast<ConstantBool>(Compare)) {
                if (CV->getValue())   // If they are comparable and G2 > G1
                  std::swap(GEP1Ops, GEP2Ops);  // Make GEP1 < GEP2
                break;
              }
            }
          }
        }
    BasePtr1Ty = cast<CompositeType>(BasePtr1Ty)->getTypeAtIndex(G1Oper);
  }

  // No shared constant operands, and we ran out of common operands.  At this
  // point, the GEP instructions have run through all of their operands, and we
  // haven't found evidence that there are any deltas between the GEP's.
  // However, one GEP may have more operands than the other.  If this is the
  // case, there may still be hope.  Check this now.
  if (FirstConstantOper == MinOperands) {
    // Make GEP1Ops be the longer one if there is a longer one.
    if (GEP1Ops.size() < GEP2Ops.size())
      std::swap(GEP1Ops, GEP2Ops);

    // Is there anything to check?
    if (GEP1Ops.size() > MinOperands) {
      for (unsigned i = FirstConstantOper; i != MaxOperands; ++i)
        if (isa<ConstantInt>(GEP1Ops[i]) &&
            !cast<Constant>(GEP1Ops[i])->isNullValue()) {
          // Yup, there's a constant in the tail.  Set all variables to
          // constants in the GEP instruction to make it suitable for
          // TargetData::getIndexedOffset.
          for (i = 0; i != MaxOperands; ++i)
            if (!isa<ConstantInt>(GEP1Ops[i]))
              GEP1Ops[i] = Constant::getNullValue(GEP1Ops[i]->getType());
          // Okay, now get the offset.  This is the relative offset for the
          // full instruction.
          const TargetData &TD = getTargetData();
          int64_t Offset1 = TD.getIndexedOffset(GEPPointerTy, GEP1Ops);

          // Now crop off any constants from the end...
          GEP1Ops.resize(MinOperands);
          int64_t Offset2 = TD.getIndexedOffset(GEPPointerTy, GEP1Ops);

          // If the tail provided a big enough offset, return noalias!
          if ((uint64_t)(Offset2-Offset1) >= SizeMax)
            return NoAlias;
        }
    }

    // Couldn't find anything useful.
    return MayAlias;
  }

  // If there are non-equal constant arguments, then we can figure
  // out a minimum known delta between the two index expressions... at
  // this point we know that the first constant index of GEP1 is less
  // than the first constant index of GEP2.

  // Advance BasePtr[12]Ty over this first differing constant operand.
  BasePtr2Ty = cast<CompositeType>(BasePtr1Ty)->
      getTypeAtIndex(GEP2Ops[FirstConstantOper]);
  BasePtr1Ty = cast<CompositeType>(BasePtr1Ty)->
      getTypeAtIndex(GEP1Ops[FirstConstantOper]);

  // We are going to be using TargetData::getIndexedOffset to determine the
  // offset that each of the GEP's is reaching.  To do this, we have to convert
  // all variable references to constant references.  To do this, we convert
  // the initial sequence of array subscripts into constant zeros to start.
  const Type *ZeroIdxTy = GEPPointerTy;
  for (unsigned i = 0; i != FirstConstantOper; ++i) {
    if (!isa<StructType>(ZeroIdxTy))
      GEP1Ops[i] = GEP2Ops[i] = Constant::getNullValue(Type::UIntTy);

    if (const CompositeType *CT = dyn_cast<CompositeType>(ZeroIdxTy))
      ZeroIdxTy = CT->getTypeAtIndex(GEP1Ops[i]);
  }

  // We know that GEP1Ops[FirstConstantOper] & GEP2Ops[FirstConstantOper] are ok

  // Loop over the rest of the operands...
  for (unsigned i = FirstConstantOper+1; i != MaxOperands; ++i) {
    const Value *Op1 = i < GEP1Ops.size() ? GEP1Ops[i] : 0;
    const Value *Op2 = i < GEP2Ops.size() ? GEP2Ops[i] : 0;
    // If they are equal, use a zero index...
    if (Op1 == Op2 && BasePtr1Ty == BasePtr2Ty) {
      if (!isa<ConstantInt>(Op1))
        GEP1Ops[i] = GEP2Ops[i] = Constant::getNullValue(Op1->getType());
      // Otherwise, just keep the constants we have.
    } else {
      if (Op1) {
        if (const ConstantInt *Op1C = dyn_cast<ConstantInt>(Op1)) {
          // If this is an array index, make sure the array element is in range.
          if (const ArrayType *AT = dyn_cast<ArrayType>(BasePtr1Ty))
            if (Op1C->getRawValue() >= AT->getNumElements())
              return MayAlias;  // Be conservative with out-of-range accesses
        } else {
          // GEP1 is known to produce a value less than GEP2.  To be
          // conservatively correct, we must assume the largest possible
          // constant is used in this position.  This cannot be the initial
          // index to the GEP instructions (because we know we have at least
          // one element before this one with the different constant
          // arguments), so we know that the current index must be into either
          // a struct or array.  Because we know it's not constant, this cannot
          // be a structure index.  Because of this, we can calculate the
          // maximum value possible.
          //
          if (const ArrayType *AT = dyn_cast<ArrayType>(BasePtr1Ty))
            GEP1Ops[i] = ConstantSInt::get(Type::LongTy,AT->getNumElements()-1);
        }
      }

      if (Op2) {
        if (const ConstantInt *Op2C = dyn_cast<ConstantInt>(Op2)) {
          // If this is an array index, make sure the array element is in range.
          if (const ArrayType *AT = dyn_cast<ArrayType>(BasePtr1Ty))
            if (Op2C->getRawValue() >= AT->getNumElements())
              return MayAlias;  // Be conservative with out-of-range accesses
        } else {  // Conservatively assume the minimum value for this index
          GEP2Ops[i] = Constant::getNullValue(Op2->getType());
        }
      }
    }

    if (BasePtr1Ty && Op1) {
      if (const CompositeType *CT = dyn_cast<CompositeType>(BasePtr1Ty))
        BasePtr1Ty = CT->getTypeAtIndex(GEP1Ops[i]);
      else
        BasePtr1Ty = 0;
    }

    if (BasePtr2Ty && Op2) {
      if (const CompositeType *CT = dyn_cast<CompositeType>(BasePtr2Ty))
        BasePtr2Ty = CT->getTypeAtIndex(GEP2Ops[i]);
      else
        BasePtr2Ty = 0;
    }
  }

  if (GEPPointerTy->getElementType()->isSized()) {
    int64_t Offset1 = getTargetData().getIndexedOffset(GEPPointerTy, GEP1Ops);
    int64_t Offset2 = getTargetData().getIndexedOffset(GEPPointerTy, GEP2Ops);
    assert(Offset1<Offset2 && "There is at least one different constant here!");

    if ((uint64_t)(Offset2-Offset1) >= SizeMax) {
      //std::cerr << "Determined that these two GEP's don't alias ["
      //          << SizeMax << " bytes]: \n" << *GEP1 << *GEP2;
      return NoAlias;
    }
  }
  return MayAlias;
}

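// Illustrative example (hypothetical global): for a global of type
// [100 x [10 x int]], the A[i][0] vs A[j][1] comment above means the two
// pointers are separated by at least sizeof(int) bytes for any i and j, so two
// 4-byte accesses through them are disambiguated to NoAlias by the
// constant-offset check.  By contrast, &A[1][0] vs &A[0][i] only gets
// MayAlias: the variable index i is not known to stay in bounds, so the
// "isBadCase" logic above deliberately gives up.
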
// StringCompare - Ordering functor used to sort and search the tables of
// known library function names below using strcmp.
struct StringCompare {
  bool operator()(const char *LHS, const char *RHS) {
    return strcmp(LHS, RHS) < 0;
  }
};

// Note that this list cannot contain libm functions (such as acos and sqrt)
// that set errno on a domain or other error.
static const char *DoesntAccessMemoryFns[] = {
  "abs", "labs", "llabs", "imaxabs", "fabs", "fabsf", "fabsl",
  "trunc", "truncf", "truncl", "ldexp",

  "atan", "atanf", "atanl",   "atan2", "atan2f", "atan2l",
  "cos", "cosf", "cosl",
  "exp", "expf", "expl",
  "sin", "sinf", "sinl",
  "tan", "tanf", "tanl",      "tanh", "tanhf", "tanhl",

  "floor", "floorf", "floorl", "ceil", "ceilf", "ceill",

  "isalnum", "isalpha", "iscntrl", "isdigit", "isgraph", "islower", "isprint",
  "ispunct", "isspace", "isupper", "isxdigit", "tolower", "toupper",

  "iswalnum", "iswalpha", "iswcntrl", "iswdigit", "iswgraph", "iswlower",
  "iswprint", "iswpunct", "iswspace", "iswupper", "iswxdigit",

  "iswctype", "towctrans", "towlower", "towupper",

  "isinf", "isnan", "finite",

  // C99 math functions
  "copysign", "copysignf", "copysignd",
  "nexttoward", "nexttowardf", "nexttowardd",
  "nextafter", "nextafterf", "nextafterd",

  "__signbit", "__signbitf", "__signbitl",
};

static const char *OnlyReadsMemoryFns[] = {
  "atoi", "atol", "atof", "atoll", "atoq", "a64l",
  "bcmp", "memcmp", "memchr", "memrchr", "wmemcmp", "wmemchr",

  "strcmp", "strcasecmp", "strcoll", "strncmp", "strncasecmp",
  "strchr", "strcspn", "strlen", "strpbrk", "strrchr", "strspn", "strstr",

  "wcschr", "wcscmp", "wcscoll", "wcscspn", "wcslen", "wcsncmp", "wcspbrk",
  "wcsrchr", "wcsspn", "wcsstr",

  "alphasort", "alphasort64", "versionsort", "versionsort64",

  "nan", "nanf", "nand",

  "feof", "ferror", "fileno",
  "feof_unlocked", "ferror_unlocked", "fileno_unlocked"
};

AliasAnalysis::ModRefBehavior
BasicAliasAnalysis::getModRefBehavior(Function *F, CallSite CS,
                                      std::vector<PointerAccessInfo> *Info) {
  if (!F->isExternal()) return UnknownModRefBehavior;

  static std::vector<const char*> NoMemoryTable, OnlyReadsMemoryTable;

  static bool Initialized = false;
  if (!Initialized) {
    NoMemoryTable.insert(NoMemoryTable.end(),
                         DoesntAccessMemoryFns,
                         DoesntAccessMemoryFns+
                sizeof(DoesntAccessMemoryFns)/sizeof(DoesntAccessMemoryFns[0]));

    OnlyReadsMemoryTable.insert(OnlyReadsMemoryTable.end(),
                                OnlyReadsMemoryFns,
                                OnlyReadsMemoryFns+
                      sizeof(OnlyReadsMemoryFns)/sizeof(OnlyReadsMemoryFns[0]));
#define GET_MODREF_BEHAVIOR
#include "llvm/Intrinsics.gen"
#undef GET_MODREF_BEHAVIOR

    // Sort the tables the first time through.
    std::sort(NoMemoryTable.begin(), NoMemoryTable.end(), StringCompare());
    std::sort(OnlyReadsMemoryTable.begin(), OnlyReadsMemoryTable.end(),
              StringCompare());
    Initialized = true;
  }

  std::vector<const char*>::iterator Ptr =
    std::lower_bound(NoMemoryTable.begin(), NoMemoryTable.end(),
                     F->getName().c_str(), StringCompare());
  if (Ptr != NoMemoryTable.end() && *Ptr == F->getName())
    return DoesNotAccessMemory;

  Ptr = std::lower_bound(OnlyReadsMemoryTable.begin(),
                         OnlyReadsMemoryTable.end(),
                         F->getName().c_str(), StringCompare());
  if (Ptr != OnlyReadsMemoryTable.end() && *Ptr == F->getName())
    return OnlyReadsMemory;

  return UnknownModRefBehavior;
}

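// Illustrative example: a call to the external C library function "strlen"
// matches the sorted OnlyReadsMemoryTable lookup above and is classified as
// OnlyReadsMemory, a call to "fabs" is classified as DoesNotAccessMemory, and
// an external function found in neither table falls through to
// UnknownModRefBehavior.
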
// Make sure that anything that uses AliasAnalysis pulls in this file...
DEFINING_FILE_FOR(BasicAliasAnalysis)