//===--- CaptureTracking.cpp - Determine whether a pointer is captured ----===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains routines that help determine which pointers are captured.
// A pointer value is captured if the function makes a copy of any part of the
// pointer that outlives the call. Not being captured means, more or less, that
// the pointer is only dereferenced and not stored in a global. Returning part
// of the pointer as the function return value may or may not count as capturing
// the pointer, depending on the context.
//
//===----------------------------------------------------------------------===//
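// A small illustration of the notion of capture described above (the IR below
// is only an example; @g and @f are illustrative names):
//
//   @g = global ptr null
//
//   define void @f(ptr %p) {
//     store ptr %p, ptr @g   ; captures %p: the address escapes through @g
//     %v = load i32, ptr %p  ; does not capture %p: only its contents are used
//     ret void
//   }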
#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/CFG.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/Support/CommandLine.h"

using namespace llvm;

#define DEBUG_TYPE "capture-tracking"
STATISTIC(NumCaptured,          "Number of pointers maybe captured");
STATISTIC(NumNotCaptured,       "Number of pointers not captured");
STATISTIC(NumCapturedBefore,    "Number of pointers maybe captured before");
STATISTIC(NumNotCapturedBefore, "Number of pointers not captured before");
/// The default value for MaxUsesToExplore argument. It's relatively small to
/// keep the cost of analysis reasonable for clients like BasicAliasAnalysis,
/// where the results can't be cached.
/// TODO: we should probably introduce a caching CaptureTracking analysis and
/// use it where possible. The caching version can use a much higher limit, or
/// have no cap at all.
static cl::opt<unsigned>
    DefaultMaxUsesToExplore("capture-tracking-max-uses-to-explore", cl::Hidden,
                            cl::desc("Maximal number of uses to explore."),
                            cl::init(100));
unsigned llvm::getDefaultMaxUsesToExploreForCaptureTracking() {
  return DefaultMaxUsesToExplore;
}

CaptureTracker::~CaptureTracker() = default;

bool CaptureTracker::shouldExplore(const Use *U) { return true; }
bool CaptureTracker::isDereferenceableOrNull(Value *O, const DataLayout &DL) {
  // We want comparisons to null pointers to not be considered capturing,
  // but need to guard against cases like gep(p, -ptrtoint(p2)) == null,
  // which are equivalent to p == p2 and would capture the pointer.
  //
  // A dereferenceable pointer is a case where this is known to be safe,
  // because the pointer resulting from such a construction would not be
  // dereferenceable.
  //
  // It is not sufficient to check for inbounds GEP here, because GEP with
  // zero offset is always inbounds.
  bool CanBeNull, CanBeFreed;
  return O->getPointerDereferenceableBytes(DL, CanBeNull, CanBeFreed);
}
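// Illustrative IR for the guarded case mentioned above (names are examples):
//
//   %neg = sub i64 0, %p2.int              ; %p2.int = ptrtoint ptr %p2 to i64
//   %q   = getelementptr i8, ptr %p, i64 %neg
//   %c   = icmp eq ptr %q, null            ; effectively tests %p == %p2
//
// Treating %c as a harmless null check would let the comparison leak the
// address of %p, which is why the exemption is restricted to pointers known
// to be dereferenceable (or null).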
namespace {
struct SimpleCaptureTracker : public CaptureTracker {
  explicit SimpleCaptureTracker(
      const SmallPtrSetImpl<const Value *> &EphValues, bool ReturnCaptures)
      : EphValues(EphValues), ReturnCaptures(ReturnCaptures) {}

  void tooManyUses() override {
    LLVM_DEBUG(dbgs() << "Captured due to too many uses\n");
    Captured = true;
  }

  bool captured(const Use *U) override {
    if (isa<ReturnInst>(U->getUser()) && !ReturnCaptures)
      return false;

    if (EphValues.contains(U->getUser()))
      return false;

    LLVM_DEBUG(dbgs() << "Captured by: " << *U->getUser() << "\n");

    Captured = true;
    return true;
  }

  const SmallPtrSetImpl<const Value *> &EphValues;

  bool ReturnCaptures;

  bool Captured = false;
};
/// Only find pointer captures which happen before the given instruction. Uses
/// the dominator tree to determine whether one instruction is before another.
/// Only supports the case where the Value is defined in the same basic block
/// as the given instruction and the use.
struct CapturesBefore : public CaptureTracker {

  CapturesBefore(bool ReturnCaptures, const Instruction *I,
                 const DominatorTree *DT, bool IncludeI, const LoopInfo *LI)
      : BeforeHere(I), DT(DT), ReturnCaptures(ReturnCaptures),
        IncludeI(IncludeI), LI(LI) {}

  void tooManyUses() override { Captured = true; }

  bool isSafeToPrune(Instruction *I) {
    if (BeforeHere == I && !IncludeI)
      return false;

    // We explore this usage only if the usage can reach "BeforeHere".
    // If use is not reachable from entry, there is no need to explore.
    if (!DT->isReachableFromEntry(I->getParent()))
      return true;

    // Check whether there is a path from I to BeforeHere.
    return !isPotentiallyReachable(I, BeforeHere, nullptr, DT, LI);
  }

  bool captured(const Use *U) override {
    Instruction *I = cast<Instruction>(U->getUser());
    if (isa<ReturnInst>(I) && !ReturnCaptures)
      return false;

    // Check isSafeToPrune() here rather than in shouldExplore() to avoid
    // an expensive reachability query for every instruction we look at.
    // Instead we only do one for actual capturing candidates.
    if (isSafeToPrune(I))
      return false;

    Captured = true;
    return true;
  }

  const Instruction *BeforeHere;
  const DominatorTree *DT;

  bool ReturnCaptures;
  bool IncludeI;

  bool Captured = false;

  const LoopInfo *LI;
};
/// Find the 'earliest' instruction before which the pointer is known not to
/// be captured. Here an instruction A is considered earlier than instruction
/// B, if A dominates B. If 2 escapes do not dominate each other, the
/// terminator of the common dominator is chosen. If not all uses can be
/// analyzed, the earliest escape is set to the first instruction in the
/// function entry block.
// NOTE: Users have to make sure instructions compared against the earliest
// escape are not in a cycle.
struct EarliestCaptures : public CaptureTracker {

  EarliestCaptures(bool ReturnCaptures, Function &F, const DominatorTree &DT,
                   const SmallPtrSetImpl<const Value *> *EphValues)
      : EphValues(EphValues), DT(DT), ReturnCaptures(ReturnCaptures), F(F) {}

  void tooManyUses() override {
    Captured = true;
    EarliestCapture = &*F.getEntryBlock().begin();
  }

  bool captured(const Use *U) override {
    Instruction *I = cast<Instruction>(U->getUser());
    if (isa<ReturnInst>(I) && !ReturnCaptures)
      return false;

    if (EphValues && EphValues->contains(I))
      return false;

    if (!EarliestCapture)
      EarliestCapture = I;
    else
      EarliestCapture = DT.findNearestCommonDominator(EarliestCapture, I);
    Captured = true;

    // Return false to continue analysis; we need to see all potential
    // captures.
    return false;
  }

  const SmallPtrSetImpl<const Value *> *EphValues;

  Instruction *EarliestCapture = nullptr;

  const DominatorTree &DT;

  bool ReturnCaptures;

  bool Captured = false;

  Function &F;
};
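// For intuition, consider a CFG like the following (illustrative IR):
//
//   entry:  br i1 %c, label %then, label %else
//   then:   store ptr %p, ptr @g            ; escape #1
//   else:   call void @sink(ptr %p)         ; escape #2
//
// Neither escape dominates the other, so the tracker above reports the
// terminator of their common dominator (the branch in %entry) as the
// earliest capture.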
} // namespace

/// PointerMayBeCaptured - Return true if this pointer value may be captured
/// by the enclosing function (which is required to exist). This routine can
/// be expensive, so consider caching the results. The boolean ReturnCaptures
/// specifies whether returning the value (or part of it) from the function
/// counts as capturing it or not. The boolean StoreCaptures specifies whether
/// storing the value (or part of it) into memory anywhere automatically
/// counts as capturing it or not.
bool llvm::PointerMayBeCaptured(const Value *V, bool ReturnCaptures,
                                bool StoreCaptures, unsigned MaxUsesToExplore) {
  SmallPtrSet<const Value *, 1> Empty;
  return PointerMayBeCaptured(V, ReturnCaptures, StoreCaptures, Empty,
                              MaxUsesToExplore);
}
/// Variant of the above function which accepts a set of Values that are
/// ephemeral and cannot cause pointers to escape.
bool llvm::PointerMayBeCaptured(const Value *V, bool ReturnCaptures,
                                bool StoreCaptures,
                                const SmallPtrSetImpl<const Value *> &EphValues,
                                unsigned MaxUsesToExplore) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");

  // TODO: If StoreCaptures is not true, we could do Fancy analysis
  // to determine whether this store is not actually an escape point.
  // In that case, BasicAliasAnalysis should be updated as well to
  // take advantage of this.
  (void)StoreCaptures;

  LLVM_DEBUG(dbgs() << "Captured?: " << *V << " = ");

  SimpleCaptureTracker SCT(EphValues, ReturnCaptures);
  PointerMayBeCaptured(V, &SCT, MaxUsesToExplore);
  if (SCT.Captured)
    ++NumCaptured;
  else {
    ++NumNotCaptured;
    LLVM_DEBUG(dbgs() << "not captured\n");
  }
  return SCT.Captured;
}
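// Example of a typical caller (illustrative; `Obj` stands for any local
// pointer value the caller cares about):
//
//   if (!PointerMayBeCaptured(Obj, /*ReturnCaptures=*/true,
//                             /*StoreCaptures=*/true))
//     ...; // no copy of Obj escapes this function, returns included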
/// PointerMayBeCapturedBefore - Return true if this pointer value may be
/// captured by the enclosing function (which is required to exist). If a
/// DominatorTree is provided, only captures which happen before the given
/// instruction are considered. This routine can be expensive, so consider
/// caching the results. The boolean ReturnCaptures specifies whether
/// returning the value (or part of it) from the function counts as capturing
/// it or not. The boolean StoreCaptures specifies whether storing the value
/// (or part of it) into memory anywhere automatically counts as capturing it
/// or not.
bool llvm::PointerMayBeCapturedBefore(const Value *V, bool ReturnCaptures,
                                      bool StoreCaptures, const Instruction *I,
                                      const DominatorTree *DT, bool IncludeI,
                                      unsigned MaxUsesToExplore,
                                      const LoopInfo *LI) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");

  if (!DT)
    return PointerMayBeCaptured(V, ReturnCaptures, StoreCaptures,
                                MaxUsesToExplore);

  // TODO: See comment in PointerMayBeCaptured regarding what could be done
  // with StoreCaptures.

  CapturesBefore CB(ReturnCaptures, I, DT, IncludeI, LI);
  PointerMayBeCaptured(V, &CB, MaxUsesToExplore);
  if (CB.Captured)
    ++NumCapturedBefore;
  else
    ++NumNotCapturedBefore;
  return CB.Captured;
}
Instruction *
llvm::FindEarliestCapture(const Value *V, Function &F, bool ReturnCaptures,
                          bool StoreCaptures, const DominatorTree &DT,
                          const SmallPtrSetImpl<const Value *> *EphValues,
                          unsigned MaxUsesToExplore) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");

  EarliestCaptures CB(ReturnCaptures, F, DT, EphValues);
  PointerMayBeCaptured(V, &CB, MaxUsesToExplore);
  if (CB.Captured)
    ++NumCapturedBefore;
  else
    ++NumNotCapturedBefore;
  return CB.EarliestCapture;
}
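// Usage note (illustrative): a null result means no capturing use was found.
// A caller might write something like
//
//   Instruction *EC = FindEarliestCapture(Obj, F, /*ReturnCaptures=*/false,
//                                         /*StoreCaptures=*/true, DT,
//                                         /*EphValues=*/nullptr,
//                                         /*MaxUsesToExplore=*/0);
//   if (EC)
//     ...; // Obj is known not to be captured before EC
//
// where Obj, F and DT are the caller's own value, function and dominator tree.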
UseCaptureKind llvm::DetermineUseCaptureKind(
    const Use &U,
    function_ref<bool(Value *, const DataLayout &)> IsDereferenceableOrNull) {
  Instruction *I = dyn_cast<Instruction>(U.getUser());

  // TODO: Investigate non-instruction uses.
  if (!I)
    return UseCaptureKind::MAY_CAPTURE;

  switch (I->getOpcode()) {
  case Instruction::Call:
  case Instruction::Invoke: {
    auto *Call = cast<CallBase>(I);
    // Not captured if the callee is readonly, doesn't return a copy through
    // its return value and doesn't unwind (a readonly function can leak bits
    // by throwing an exception or not depending on the input value).
    if (Call->onlyReadsMemory() && Call->doesNotThrow() &&
        Call->getType()->isVoidTy())
      return UseCaptureKind::NO_CAPTURE;

    // The pointer is not captured if the returned pointer is not captured.
    // NOTE: CaptureTracking users should not assume that only functions
    // marked with nocapture do not capture. This means that places like
    // getUnderlyingObject in ValueTracking or DecomposeGEPExpression
    // in BasicAA also need to know about this property.
    if (isIntrinsicReturningPointerAliasingArgumentWithoutCapturing(Call, true))
      return UseCaptureKind::PASSTHROUGH;

    // Volatile operations effectively capture the memory location that they
    // load and store to.
    if (auto *MI = dyn_cast<MemIntrinsic>(Call))
      if (MI->isVolatile())
        return UseCaptureKind::MAY_CAPTURE;

    // Calling a function pointer does not in itself cause the pointer to
    // be captured. This is a subtle point considering that (for example)
    // the callee might return its own address. It is analogous to saying
    // that loading a value from a pointer does not cause the pointer to be
    // captured, even though the loaded value might be the pointer itself
    // (think of self-referential objects).
    if (Call->isCallee(&U))
      return UseCaptureKind::NO_CAPTURE;

    // Not captured if only passed via 'nocapture' arguments.
    if (Call->isDataOperand(&U) &&
        !Call->doesNotCapture(Call->getDataOperandNo(&U))) {
      // The parameter is not marked 'nocapture' - captured.
      return UseCaptureKind::MAY_CAPTURE;
    }
    return UseCaptureKind::NO_CAPTURE;
  }
  case Instruction::Load:
    // Volatile loads make the address observable.
    if (cast<LoadInst>(I)->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  case Instruction::VAArg:
    // "va-arg" from a pointer does not cause it to be captured.
    return UseCaptureKind::NO_CAPTURE;
  case Instruction::Store:
    // Stored the pointer - conservatively assume it may be captured.
    // Volatile stores make the address observable.
    if (U.getOperandNo() == 0 || cast<StoreInst>(I)->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  case Instruction::AtomicRMW: {
    // atomicrmw conceptually includes both a load and store from
    // the same location.
    // As with a store, the location being accessed is not captured,
    // but the value being stored is.
    // Volatile stores make the address observable.
    auto *ARMWI = cast<AtomicRMWInst>(I);
    if (U.getOperandNo() == 1 || ARMWI->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  }
  case Instruction::AtomicCmpXchg: {
    // cmpxchg conceptually includes both a load and store from
    // the same location.
    // As with a store, the location being accessed is not captured,
    // but the value being stored is.
    // Volatile stores make the address observable.
    auto *ACXI = cast<AtomicCmpXchgInst>(I);
    if (U.getOperandNo() == 1 || U.getOperandNo() == 2 || ACXI->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  }
  case Instruction::BitCast:
  case Instruction::GetElementPtr:
  case Instruction::PHI:
  case Instruction::Select:
  case Instruction::AddrSpaceCast:
    // The original value is not captured via this if the new value isn't.
    return UseCaptureKind::PASSTHROUGH;
  case Instruction::ICmp: {
    unsigned Idx = U.getOperandNo();
    unsigned OtherIdx = 1 - Idx;
    if (auto *CPN = dyn_cast<ConstantPointerNull>(I->getOperand(OtherIdx))) {
      // Don't count comparisons of a no-alias return value against null as
      // captures. This allows us to ignore comparisons of malloc results
      // with null, for example.
      if (CPN->getType()->getAddressSpace() == 0)
        if (isNoAliasCall(U.get()->stripPointerCasts()))
          return UseCaptureKind::NO_CAPTURE;
      if (!I->getFunction()->nullPointerIsDefined()) {
        auto *O = I->getOperand(Idx)->stripPointerCastsSameRepresentation();
        // Comparing a dereferenceable_or_null pointer against null cannot
        // lead to pointer escapes, because if it is not null it must be a
        // valid (in-bounds) pointer.
        const DataLayout &DL = I->getModule()->getDataLayout();
        if (IsDereferenceableOrNull && IsDereferenceableOrNull(O, DL))
          return UseCaptureKind::NO_CAPTURE;
      }
    }

    // Otherwise, be conservative. There are crazy ways to capture pointers
    // using comparisons.
    return UseCaptureKind::MAY_CAPTURE;
  }
  default:
    // Something else - be conservative and say it is captured.
    return UseCaptureKind::MAY_CAPTURE;
  }
}
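// Example of the ICmp exemption above (illustrative IR):
//
//   %p = call noalias ptr @malloc(i64 8)
//   %isnull = icmp eq ptr %p, null       ; NO_CAPTURE: noalias result vs. null
//
// Other pointer comparisons are conservatively treated as MAY_CAPTURE, since
// the comparison result can leak information about the pointer's address.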
void llvm::PointerMayBeCaptured(const Value *V, CaptureTracker *Tracker,
                                unsigned MaxUsesToExplore) {
  assert(V->getType()->isPointerTy() && "Capture is for pointers only!");
  if (MaxUsesToExplore == 0)
    MaxUsesToExplore = DefaultMaxUsesToExplore;

  SmallVector<const Use *, 20> Worklist;
  Worklist.reserve(getDefaultMaxUsesToExploreForCaptureTracking());
  SmallSet<const Use *, 20> Visited;

  auto AddUses = [&](const Value *V) {
    for (const Use &U : V->uses()) {
      // If there are lots of uses, conservatively say that the value
      // is captured to avoid taking too much compile time.
      if (Visited.size() >= MaxUsesToExplore) {
        Tracker->tooManyUses();
        return false;
      }
      if (!Visited.insert(&U).second)
        continue;
      if (!Tracker->shouldExplore(&U))
        continue;
      Worklist.push_back(&U);
    }
    return true;
  };
  if (!AddUses(V))
    return;

  auto IsDereferenceableOrNull = [Tracker](Value *V, const DataLayout &DL) {
    return Tracker->isDereferenceableOrNull(V, DL);
  };
  while (!Worklist.empty()) {
    const Use *U = Worklist.pop_back_val();
    switch (DetermineUseCaptureKind(*U, IsDereferenceableOrNull)) {
    case UseCaptureKind::NO_CAPTURE:
      continue;
    case UseCaptureKind::MAY_CAPTURE:
      if (Tracker->captured(U))
        return;
      continue;
    case UseCaptureKind::PASSTHROUGH:
      if (!AddUses(U->getUser()))
        return;
      continue;
    }
  }

  // All uses examined.
}
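// A minimal sketch of a client-defined tracker driving the traversal above
// (StoreOnlyTracker is an illustrative name, not an existing LLVM class):
//
//   struct StoreOnlyTracker : CaptureTracker {
//     bool Captured = false;
//     void tooManyUses() override { Captured = true; }
//     bool captured(const Use *U) override {
//       if (!isa<StoreInst>(U->getUser()))
//         return false; // not interesting; keep scanning other uses
//       Captured = true;
//       return true;    // stop the traversal early
//     }
//   };
//
//   StoreOnlyTracker T;
//   PointerMayBeCaptured(V, &T, /*MaxUsesToExplore=*/0); // 0 = use default cap
//   if (T.Captured) { ... }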
bool llvm::isNonEscapingLocalObject(
    const Value *V, SmallDenseMap<const Value *, bool, 8> *IsCapturedCache) {
  SmallDenseMap<const Value *, bool, 8>::iterator CacheIt;
  if (IsCapturedCache) {
    bool Inserted;
    std::tie(CacheIt, Inserted) = IsCapturedCache->insert({V, false});
    if (!Inserted)
      // Found cached result, return it!
      return CacheIt->second;
  }

  // If this is an identified function-local object, check to see if it
  // escapes.
  if (isIdentifiedFunctionLocal(V)) {
    // Set StoreCaptures to True so that we can assume in our callers that the
    // pointer is not the result of a load instruction. Currently
    // PointerMayBeCaptured doesn't have any special analysis for the
    // StoreCaptures=false case; if it did, our callers could be refined to be
    // more accurate.
    auto Ret = !PointerMayBeCaptured(V, false, /*StoreCaptures=*/true);
    if (IsCapturedCache)
      CacheIt->second = Ret;
    return Ret;
  }

  return false;
}
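// Example (illustrative IR):
//
//   %buf = alloca [16 x i8]
//   call void @llvm.memset.p0.i64(ptr %buf, i8 0, i64 16, i1 false)
//   %x = load i8, ptr %buf
//
// %buf is an identified function-local object with no capturing uses (the
// memset pointer parameter is nocapture and the load only reads through the
// pointer), so isNonEscapingLocalObject returns true for %buf.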
;