1 //===-- SafepointIRVerifier.cpp - Verify gc.statepoint invariants ---------===//
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
7 //===----------------------------------------------------------------------===//
// Run a basic correctness check on the IR to ensure that Safepoints - if
// they've been inserted - were inserted correctly.  In particular, look for use
// of non-relocated values after a safepoint.  Its primary use is to check the
// correctness of safepoint insertion immediately after insertion, but it can
// also be used to verify that later transforms have not found a way to break
// safepoint semantics.
//
// In its current form, this verifier checks a property which is sufficient, but
// not necessary for correctness.  There are some cases where an unrelocated
// pointer can be used after the safepoint.  Consider this example:
//
//    a = ...
//    b = ...
//    (a', b') = safepoint(a, b)
//    c = cmp eq a b
//
// Because it is valid to reorder 'c' above the safepoint, this is legal.  In
// practice, this is a somewhat uncommon transform, but CodeGenPrep does create
// idioms like this.  The verifier knows about these cases and avoids reporting
// false positives.
//
//===----------------------------------------------------------------------===//
33 #include "llvm/IR/SafepointIRVerifier.h"
34 #include "llvm/ADT/DenseSet.h"
35 #include "llvm/ADT/PostOrderIterator.h"
36 #include "llvm/ADT/SetOperations.h"
37 #include "llvm/ADT/SetVector.h"
38 #include "llvm/IR/BasicBlock.h"
39 #include "llvm/IR/Dominators.h"
40 #include "llvm/IR/Function.h"
41 #include "llvm/IR/InstrTypes.h"
42 #include "llvm/IR/Instructions.h"
43 #include "llvm/IR/Statepoint.h"
44 #include "llvm/IR/Value.h"
45 #include "llvm/InitializePasses.h"
46 #include "llvm/Support/Allocator.h"
47 #include "llvm/Support/CommandLine.h"
48 #include "llvm/Support/Debug.h"
49 #include "llvm/Support/raw_ostream.h"
51 #define DEBUG_TYPE "safepoint-ir-verifier"
55 /// This option is used for writing test cases. Instead of crashing the program
56 /// when verification fails, report a message to the console (for FileCheck
57 /// usage) and continue execution as if nothing happened.
58 static cl::opt
<bool> PrintOnly("safepoint-ir-verifier-print-only",
/// This CFG Deadness finds dead blocks and edges. The algorithm starts with
/// the set of blocks unreachable from entry, then propagates deadness through
/// foldable conditional branches without modifying the CFG. GVN performs a
/// similar analysis, but it changes the CFG by splitting critical edges. In
/// most cases passes rely on SimplifyCFG to clean up dead blocks, but in some
/// cases, like verification or loop passes, that is not possible.
70 const DominatorTree
*DT
= nullptr;
71 SetVector
<const BasicBlock
*> DeadBlocks
;
72 SetVector
<const Use
*> DeadEdges
; // Contains all dead edges from live blocks.
75 /// Return the edge that coresponds to the predecessor.
76 static const Use
& getEdge(const_pred_iterator
&PredIt
) {
77 auto &PU
= PredIt
.getUse();
78 return PU
.getUser()->getOperandUse(PU
.getOperandNo());
81 /// Return true if there is at least one live edge that corresponds to the
82 /// basic block InBB listed in the phi node.
83 bool hasLiveIncomingEdge(const PHINode
*PN
, const BasicBlock
*InBB
) const {
84 assert(!isDeadBlock(InBB
) && "block must be live");
85 const BasicBlock
* BB
= PN
->getParent();
87 for (const_pred_iterator
PredIt(BB
), End(BB
, true); PredIt
!= End
; ++PredIt
) {
88 if (InBB
== *PredIt
) {
89 if (!isDeadEdge(&getEdge(PredIt
)))
95 assert(Listed
&& "basic block is not found among incoming blocks");
100 bool isDeadBlock(const BasicBlock
*BB
) const {
101 return DeadBlocks
.count(BB
);
104 bool isDeadEdge(const Use
*U
) const {
105 assert(cast
<Instruction
>(U
->getUser())->isTerminator() &&
106 "edge must be operand of terminator");
107 assert(cast_or_null
<BasicBlock
>(U
->get()) &&
108 "edge must refer to basic block");
109 assert(!isDeadBlock(cast
<Instruction
>(U
->getUser())->getParent()) &&
110 "isDeadEdge() must be applied to edge from live block");
111 return DeadEdges
.count(U
);
114 bool hasLiveIncomingEdges(const BasicBlock
*BB
) const {
115 // Check if all incoming edges are dead.
116 for (const_pred_iterator
PredIt(BB
), End(BB
, true); PredIt
!= End
; ++PredIt
) {
117 auto &PU
= PredIt
.getUse();
118 const Use
&U
= PU
.getUser()->getOperandUse(PU
.getOperandNo());
119 if (!isDeadBlock(*PredIt
) && !isDeadEdge(&U
))
120 return true; // Found a live edge.
125 void processFunction(const Function
&F
, const DominatorTree
&DT
) {
128 // Start with all blocks unreachable from entry.
129 for (const BasicBlock
&BB
: F
)
130 if (!DT
.isReachableFromEntry(&BB
))
131 DeadBlocks
.insert(&BB
);
133 // Top-down walk of the dominator tree
134 ReversePostOrderTraversal
<const Function
*> RPOT(&F
);
135 for (const BasicBlock
*BB
: RPOT
) {
136 const Instruction
*TI
= BB
->getTerminator();
137 assert(TI
&& "blocks must be well formed");
139 // For conditional branches, we can perform simple conditional propagation on
140 // the condition value itself.
141 const BranchInst
*BI
= dyn_cast
<BranchInst
>(TI
);
142 if (!BI
|| !BI
->isConditional() || !isa
<Constant
>(BI
->getCondition()))
145 // If a branch has two identical successors, we cannot declare either dead.
146 if (BI
->getSuccessor(0) == BI
->getSuccessor(1))
149 ConstantInt
*Cond
= dyn_cast
<ConstantInt
>(BI
->getCondition());
153 addDeadEdge(BI
->getOperandUse(Cond
->getZExtValue() ? 1 : 2));
158 void addDeadBlock(const BasicBlock
*BB
) {
159 SmallVector
<const BasicBlock
*, 4> NewDead
;
160 SmallSetVector
<const BasicBlock
*, 4> DF
;
162 NewDead
.push_back(BB
);
163 while (!NewDead
.empty()) {
164 const BasicBlock
*D
= NewDead
.pop_back_val();
168 // All blocks dominated by D are dead.
169 SmallVector
<BasicBlock
*, 8> Dom
;
170 DT
->getDescendants(const_cast<BasicBlock
*>(D
), Dom
);
171 // Do not need to mark all in and out edges dead
172 // because BB is marked dead and this is enough
174 DeadBlocks
.insert(Dom
.begin(), Dom
.end());
176 // Figure out the dominance-frontier(D).
177 for (BasicBlock
*B
: Dom
)
178 for (BasicBlock
*S
: successors(B
))
179 if (!isDeadBlock(S
) && !hasLiveIncomingEdges(S
))
180 NewDead
.push_back(S
);
184 void addDeadEdge(const Use
&DeadEdge
) {
185 if (!DeadEdges
.insert(&DeadEdge
))
188 BasicBlock
*BB
= cast_or_null
<BasicBlock
>(DeadEdge
.get());
189 if (hasLiveIncomingEdges(BB
))
197 static void Verify(const Function
&F
, const DominatorTree
&DT
,
198 const CFGDeadness
&CD
);
201 PreservedAnalyses
SafepointIRVerifierPass::run(Function
&F
,
202 FunctionAnalysisManager
&AM
) {
203 const auto &DT
= AM
.getResult
<DominatorTreeAnalysis
>(F
);
205 CD
.processFunction(F
, DT
);
207 return PreservedAnalyses::all();
213 struct SafepointIRVerifier
: public FunctionPass
{
214 static char ID
; // Pass identification, replacement for typeid
215 SafepointIRVerifier() : FunctionPass(ID
) {
216 initializeSafepointIRVerifierPass(*PassRegistry::getPassRegistry());
219 bool runOnFunction(Function
&F
) override
{
220 auto &DT
= getAnalysis
<DominatorTreeWrapperPass
>().getDomTree();
222 CD
.processFunction(F
, DT
);
224 return false; // no modifications
227 void getAnalysisUsage(AnalysisUsage
&AU
) const override
{
228 AU
.addRequiredID(DominatorTreeWrapperPass::ID
);
229 AU
.setPreservesAll();
232 StringRef
getPassName() const override
{ return "safepoint verifier"; }
236 void llvm::verifySafepointIR(Function
&F
) {
237 SafepointIRVerifier pass
;
238 pass
.runOnFunction(F
);
241 char SafepointIRVerifier::ID
= 0;
243 FunctionPass
*llvm::createSafepointIRVerifierPass() {
244 return new SafepointIRVerifier();
247 INITIALIZE_PASS_BEGIN(SafepointIRVerifier
, "verify-safepoint-ir",
248 "Safepoint IR Verifier", false, false)
249 INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass
)
250 INITIALIZE_PASS_END(SafepointIRVerifier
, "verify-safepoint-ir",
251 "Safepoint IR Verifier", false, false)
253 static bool isGCPointerType(Type
*T
) {
254 if (auto *PT
= dyn_cast
<PointerType
>(T
))
255 // For the sake of this example GC, we arbitrarily pick addrspace(1) as our
256 // GC managed heap. We know that a pointer into this heap needs to be
257 // updated and that no other pointer does.
258 return (1 == PT
->getAddressSpace());
262 static bool containsGCPtrType(Type
*Ty
) {
263 if (isGCPointerType(Ty
))
265 if (VectorType
*VT
= dyn_cast
<VectorType
>(Ty
))
266 return isGCPointerType(VT
->getScalarType());
267 if (ArrayType
*AT
= dyn_cast
<ArrayType
>(Ty
))
268 return containsGCPtrType(AT
->getElementType());
269 if (StructType
*ST
= dyn_cast
<StructType
>(Ty
))
270 return llvm::any_of(ST
->elements(), containsGCPtrType
);
274 // Debugging aid -- prints a [Begin, End) range of values.
275 template<typename IteratorTy
>
276 static void PrintValueSet(raw_ostream
&OS
, IteratorTy Begin
, IteratorTy End
) {
278 while (Begin
!= End
) {
279 OS
<< **Begin
<< " ";
285 /// The verifier algorithm is phrased in terms of availability. The set of
286 /// values "available" at a given point in the control flow graph is the set of
287 /// correctly relocated value at that point, and is a subset of the set of
288 /// definitions dominating that point.
290 using AvailableValueSet
= DenseSet
<const Value
*>;
292 /// State we compute and track per basic block.
293 struct BasicBlockState
{
294 // Set of values available coming in, before the phi nodes
295 AvailableValueSet AvailableIn
;
297 // Set of values available going out
298 AvailableValueSet AvailableOut
;
300 // AvailableOut minus AvailableIn.
301 // All elements are Instructions
302 AvailableValueSet Contribution
;
304 // True if this block contains a safepoint and thus AvailableIn does not
305 // contribute to AvailableOut.
306 bool Cleared
= false;
/// A given derived pointer can have multiple base pointers through phi/selects.
/// This type indicates when the base pointer is exclusively constant
/// (ExclusivelySomeConstant), and if that constant is proven to be exclusively
/// null, we record that as ExclusivelyNull. In all other cases, the BaseType is
/// NonConstant.
enum BaseType {
  NonConstant = 1, // Base pointers is not exclusively constant.
  ExclusivelyNull,
  ExclusivelySomeConstant // Base pointers for a given derived pointer is from a
                          // set of constants, but they are not exclusively
                          // null.
};
322 /// Return the baseType for Val which states whether Val is exclusively
323 /// derived from constant/null, or not exclusively derived from constant.
324 /// Val is exclusively derived off a constant base when all operands of phi and
325 /// selects are derived off a constant base.
326 static enum BaseType
getBaseType(const Value
*Val
) {
328 SmallVector
<const Value
*, 32> Worklist
;
329 DenseSet
<const Value
*> Visited
;
330 bool isExclusivelyDerivedFromNull
= true;
331 Worklist
.push_back(Val
);
332 // Strip through all the bitcasts and geps to get base pointer. Also check for
333 // the exclusive value when there can be multiple base pointers (through phis
335 while(!Worklist
.empty()) {
336 const Value
*V
= Worklist
.pop_back_val();
337 if (!Visited
.insert(V
).second
)
340 if (const auto *CI
= dyn_cast
<CastInst
>(V
)) {
341 Worklist
.push_back(CI
->stripPointerCasts());
344 if (const auto *GEP
= dyn_cast
<GetElementPtrInst
>(V
)) {
345 Worklist
.push_back(GEP
->getPointerOperand());
348 // Push all the incoming values of phi node into the worklist for
350 if (const auto *PN
= dyn_cast
<PHINode
>(V
)) {
351 append_range(Worklist
, PN
->incoming_values());
354 if (const auto *SI
= dyn_cast
<SelectInst
>(V
)) {
355 // Push in the true and false values
356 Worklist
.push_back(SI
->getTrueValue());
357 Worklist
.push_back(SI
->getFalseValue());
360 if (const auto *GCRelocate
= dyn_cast
<GCRelocateInst
>(V
)) {
361 // GCRelocates do not change null-ness or constant-ness of the value.
362 // So we can continue with derived pointer this instruction relocates.
363 Worklist
.push_back(GCRelocate
->getDerivedPtr());
366 if (const auto *FI
= dyn_cast
<FreezeInst
>(V
)) {
367 // Freeze does not change null-ness or constant-ness of the value.
368 Worklist
.push_back(FI
->getOperand(0));
371 if (isa
<Constant
>(V
)) {
372 // We found at least one base pointer which is non-null, so this derived
373 // pointer is not exclusively derived from null.
374 if (V
!= Constant::getNullValue(V
->getType()))
375 isExclusivelyDerivedFromNull
= false;
376 // Continue processing the remaining values to make sure it's exclusively
380 // At this point, we know that the base pointer is not exclusively
382 return BaseType::NonConstant
;
384 // Now, we know that the base pointer is exclusively constant, but we need to
385 // differentiate between exclusive null constant and non-null constant.
386 return isExclusivelyDerivedFromNull
? BaseType::ExclusivelyNull
387 : BaseType::ExclusivelySomeConstant
;
390 static bool isNotExclusivelyConstantDerived(const Value
*V
) {
391 return getBaseType(V
) == BaseType::NonConstant
;
class InstructionVerifier;
397 /// Builds BasicBlockState for each BB of the function.
398 /// It can traverse function for verification and provides all required
401 /// GC pointer may be in one of three states: relocated, unrelocated and
403 /// Relocated pointer may be used without any restrictions.
404 /// Unrelocated pointer cannot be dereferenced, passed as argument to any call
405 /// or returned. Unrelocated pointer may be safely compared against another
406 /// unrelocated pointer or against a pointer exclusively derived from null.
407 /// Poisoned pointers are produced when we somehow derive pointer from relocated
408 /// and unrelocated pointers (e.g. phi, select). This pointers may be safely
409 /// used in a very limited number of situations. Currently the only way to use
410 /// it is comparison against constant exclusively derived from null. All
411 /// limitations arise due to their undefined state: this pointers should be
412 /// treated as relocated and unrelocated simultaneously.
413 /// Rules of deriving:
414 /// R + U = P - that's where the poisoned pointers come from
419 /// Where "+" - any operation that somehow derive pointer, U - unrelocated,
420 /// R - relocated and P - poisoned, C - constant, X - U or R or P or C or
421 /// nothing (in case when "+" is unary operation).
422 /// Deriving of pointers by itself is always safe.
423 /// NOTE: when we are making decision on the status of instruction's result:
424 /// a) for phi we need to check status of each input *at the end of
425 /// corresponding predecessor BB*.
426 /// b) for other instructions we need to check status of each input *at the
429 /// FIXME: This works fairly well except one case
431 /// p = *some GC-ptr def*
432 /// p1 = gep p, offset
440 /// p2 = phi [p, bb2] [p1, bb1]
441 /// p3 = phi [p, bb2] [p, bb1]
442 /// here p and p1 is unrelocated
443 /// p2 and p3 is poisoned (though they shouldn't be)
445 /// This leads to some weird results:
446 /// cmp eq p, p2 - illegal instruction (false-positive)
447 /// cmp eq p1, p2 - illegal instruction (false-positive)
448 /// cmp eq p, p3 - illegal instruction (false-positive)
449 /// cmp eq p, p1 - ok
450 /// To fix this we need to introduce conception of generations and be able to
451 /// check if two values belong to one generation or not. This way p2 will be
452 /// considered to be unrelocated and no false alarm will happen.
455 const CFGDeadness
&CD
;
456 SpecificBumpPtrAllocator
<BasicBlockState
> BSAllocator
;
457 DenseMap
<const BasicBlock
*, BasicBlockState
*> BlockMap
;
458 // This set contains defs of unrelocated pointers that are proved to be legal
459 // and don't need verification.
460 DenseSet
<const Instruction
*> ValidUnrelocatedDefs
;
461 // This set contains poisoned defs. They can be safely ignored during
463 DenseSet
<const Value
*> PoisonedDefs
;
466 GCPtrTracker(const Function
&F
, const DominatorTree
&DT
,
467 const CFGDeadness
&CD
);
469 bool hasLiveIncomingEdge(const PHINode
*PN
, const BasicBlock
*InBB
) const {
470 return CD
.hasLiveIncomingEdge(PN
, InBB
);
473 BasicBlockState
*getBasicBlockState(const BasicBlock
*BB
);
474 const BasicBlockState
*getBasicBlockState(const BasicBlock
*BB
) const;
476 bool isValuePoisoned(const Value
*V
) const { return PoisonedDefs
.count(V
); }
478 /// Traverse each BB of the function and call
479 /// InstructionVerifier::verifyInstruction for each possibly invalid
481 /// It destructively modifies GCPtrTracker so it's passed via rvalue reference
482 /// in order to prohibit further usages of GCPtrTracker as it'll be in
483 /// inconsistent state.
484 static void verifyFunction(GCPtrTracker
&&Tracker
,
485 InstructionVerifier
&Verifier
);
487 /// Returns true for reachable and live blocks.
488 bool isMapped(const BasicBlock
*BB
) const { return BlockMap
.contains(BB
); }
491 /// Returns true if the instruction may be safely skipped during verification.
492 bool instructionMayBeSkipped(const Instruction
*I
) const;
494 /// Iterates over all BBs from BlockMap and recalculates AvailableIn/Out for
495 /// each of them until it converges.
496 void recalculateBBsStates();
498 /// Remove from Contribution all defs that legally produce unrelocated
499 /// pointers and saves them to ValidUnrelocatedDefs.
500 /// Though Contribution should belong to BBS it is passed separately with
501 /// different const-modifier in order to emphasize (and guarantee) that only
502 /// Contribution will be changed.
503 /// Returns true if Contribution was changed otherwise false.
504 bool removeValidUnrelocatedDefs(const BasicBlock
*BB
,
505 const BasicBlockState
*BBS
,
506 AvailableValueSet
&Contribution
);
508 /// Gather all the definitions dominating the start of BB into Result. This is
509 /// simply the defs introduced by every dominating basic block and the
510 /// function arguments.
511 void gatherDominatingDefs(const BasicBlock
*BB
, AvailableValueSet
&Result
,
512 const DominatorTree
&DT
);
514 /// Compute the AvailableOut set for BB, based on the BasicBlockState BBS,
515 /// which is the BasicBlockState for BB.
516 /// ContributionChanged is set when the verifier runs for the first time
517 /// (in this case Contribution was changed from 'empty' to its initial state)
518 /// or when Contribution of this BB was changed since last computation.
519 static void transferBlock(const BasicBlock
*BB
, BasicBlockState
&BBS
,
520 bool ContributionChanged
);
522 /// Model the effect of an instruction on the set of available values.
523 static void transferInstruction(const Instruction
&I
, bool &Cleared
,
524 AvailableValueSet
&Available
);
527 /// It is a visitor for GCPtrTracker::verifyFunction. It decides if the
528 /// instruction (which uses heap reference) is legal or not, given our safepoint
530 class InstructionVerifier
{
531 bool AnyInvalidUses
= false;
534 void verifyInstruction(const GCPtrTracker
*Tracker
, const Instruction
&I
,
535 const AvailableValueSet
&AvailableSet
);
537 bool hasAnyInvalidUses() const { return AnyInvalidUses
; }
540 void reportInvalidUse(const Value
&V
, const Instruction
&I
);
542 } // end anonymous namespace
544 GCPtrTracker::GCPtrTracker(const Function
&F
, const DominatorTree
&DT
,
545 const CFGDeadness
&CD
) : F(F
), CD(CD
) {
546 // Calculate Contribution of each live BB.
547 // Allocate BB states for live blocks.
548 for (const BasicBlock
&BB
: F
)
549 if (!CD
.isDeadBlock(&BB
)) {
550 BasicBlockState
*BBS
= new (BSAllocator
.Allocate()) BasicBlockState
;
551 for (const auto &I
: BB
)
552 transferInstruction(I
, BBS
->Cleared
, BBS
->Contribution
);
556 // Initialize AvailableIn/Out sets of each BB using only information about
558 for (auto &BBI
: BlockMap
) {
559 gatherDominatingDefs(BBI
.first
, BBI
.second
->AvailableIn
, DT
);
560 transferBlock(BBI
.first
, *BBI
.second
, true);
563 // Simulate the flow of defs through the CFG and recalculate AvailableIn/Out
564 // sets of each BB until it converges. If any def is proved to be an
565 // unrelocated pointer, it will be removed from all BBSs.
566 recalculateBBsStates();
569 BasicBlockState
*GCPtrTracker::getBasicBlockState(const BasicBlock
*BB
) {
570 return BlockMap
.lookup(BB
);
573 const BasicBlockState
*GCPtrTracker::getBasicBlockState(
574 const BasicBlock
*BB
) const {
575 return const_cast<GCPtrTracker
*>(this)->getBasicBlockState(BB
);
578 bool GCPtrTracker::instructionMayBeSkipped(const Instruction
*I
) const {
579 // Poisoned defs are skipped since they are always safe by itself by
580 // definition (for details see comment to this class).
581 return ValidUnrelocatedDefs
.count(I
) || PoisonedDefs
.count(I
);
584 void GCPtrTracker::verifyFunction(GCPtrTracker
&&Tracker
,
585 InstructionVerifier
&Verifier
) {
586 // We need RPO here to a) report always the first error b) report errors in
587 // same order from run to run.
588 ReversePostOrderTraversal
<const Function
*> RPOT(&Tracker
.F
);
589 for (const BasicBlock
*BB
: RPOT
) {
590 BasicBlockState
*BBS
= Tracker
.getBasicBlockState(BB
);
594 // We destructively modify AvailableIn as we traverse the block instruction
596 AvailableValueSet
&AvailableSet
= BBS
->AvailableIn
;
597 for (const Instruction
&I
: *BB
) {
598 if (Tracker
.instructionMayBeSkipped(&I
))
599 continue; // This instruction shouldn't be added to AvailableSet.
601 Verifier
.verifyInstruction(&Tracker
, I
, AvailableSet
);
603 // Model the effect of current instruction on AvailableSet to keep the set
604 // relevant at each point of BB.
605 bool Cleared
= false;
606 transferInstruction(I
, Cleared
, AvailableSet
);
612 void GCPtrTracker::recalculateBBsStates() {
613 SetVector
<const BasicBlock
*> Worklist
;
614 // TODO: This order is suboptimal, it's better to replace it with priority
615 // queue where priority is RPO number of BB.
616 for (auto &BBI
: BlockMap
)
617 Worklist
.insert(BBI
.first
);
619 // This loop iterates the AvailableIn/Out sets until it converges.
620 // The AvailableIn and AvailableOut sets decrease as we iterate.
621 while (!Worklist
.empty()) {
622 const BasicBlock
*BB
= Worklist
.pop_back_val();
623 BasicBlockState
*BBS
= getBasicBlockState(BB
);
625 continue; // Ignore dead successors.
627 size_t OldInCount
= BBS
->AvailableIn
.size();
628 for (const_pred_iterator
PredIt(BB
), End(BB
, true); PredIt
!= End
; ++PredIt
) {
629 const BasicBlock
*PBB
= *PredIt
;
630 BasicBlockState
*PBBS
= getBasicBlockState(PBB
);
631 if (PBBS
&& !CD
.isDeadEdge(&CFGDeadness::getEdge(PredIt
)))
632 set_intersect(BBS
->AvailableIn
, PBBS
->AvailableOut
);
635 assert(OldInCount
>= BBS
->AvailableIn
.size() && "invariant!");
637 bool InputsChanged
= OldInCount
!= BBS
->AvailableIn
.size();
638 bool ContributionChanged
=
639 removeValidUnrelocatedDefs(BB
, BBS
, BBS
->Contribution
);
640 if (!InputsChanged
&& !ContributionChanged
)
643 size_t OldOutCount
= BBS
->AvailableOut
.size();
644 transferBlock(BB
, *BBS
, ContributionChanged
);
645 if (OldOutCount
!= BBS
->AvailableOut
.size()) {
646 assert(OldOutCount
> BBS
->AvailableOut
.size() && "invariant!");
647 Worklist
.insert(succ_begin(BB
), succ_end(BB
));
652 bool GCPtrTracker::removeValidUnrelocatedDefs(const BasicBlock
*BB
,
653 const BasicBlockState
*BBS
,
654 AvailableValueSet
&Contribution
) {
655 assert(&BBS
->Contribution
== &Contribution
&&
656 "Passed Contribution should be from the passed BasicBlockState!");
657 AvailableValueSet AvailableSet
= BBS
->AvailableIn
;
658 bool ContributionChanged
= false;
659 // For explanation why instructions are processed this way see
660 // "Rules of deriving" in the comment to this class.
661 for (const Instruction
&I
: *BB
) {
662 bool ValidUnrelocatedPointerDef
= false;
663 bool PoisonedPointerDef
= false;
664 // TODO: `select` instructions should be handled here too.
665 if (const PHINode
*PN
= dyn_cast
<PHINode
>(&I
)) {
666 if (containsGCPtrType(PN
->getType())) {
667 // If both is true, output is poisoned.
668 bool HasRelocatedInputs
= false;
669 bool HasUnrelocatedInputs
= false;
670 for (unsigned i
= 0, e
= PN
->getNumIncomingValues(); i
!= e
; ++i
) {
671 const BasicBlock
*InBB
= PN
->getIncomingBlock(i
);
672 if (!isMapped(InBB
) ||
673 !CD
.hasLiveIncomingEdge(PN
, InBB
))
674 continue; // Skip dead block or dead edge.
676 const Value
*InValue
= PN
->getIncomingValue(i
);
678 if (isNotExclusivelyConstantDerived(InValue
)) {
679 if (isValuePoisoned(InValue
)) {
680 // If any of inputs is poisoned, output is always poisoned too.
681 HasRelocatedInputs
= true;
682 HasUnrelocatedInputs
= true;
685 if (BlockMap
[InBB
]->AvailableOut
.count(InValue
))
686 HasRelocatedInputs
= true;
688 HasUnrelocatedInputs
= true;
691 if (HasUnrelocatedInputs
) {
692 if (HasRelocatedInputs
)
693 PoisonedPointerDef
= true;
695 ValidUnrelocatedPointerDef
= true;
698 } else if ((isa
<GetElementPtrInst
>(I
) || isa
<BitCastInst
>(I
)) &&
699 containsGCPtrType(I
.getType())) {
700 // GEP/bitcast of unrelocated pointer is legal by itself but this def
701 // shouldn't appear in any AvailableSet.
702 for (const Value
*V
: I
.operands())
703 if (containsGCPtrType(V
->getType()) &&
704 isNotExclusivelyConstantDerived(V
) && !AvailableSet
.count(V
)) {
705 if (isValuePoisoned(V
))
706 PoisonedPointerDef
= true;
708 ValidUnrelocatedPointerDef
= true;
712 assert(!(ValidUnrelocatedPointerDef
&& PoisonedPointerDef
) &&
713 "Value cannot be both unrelocated and poisoned!");
714 if (ValidUnrelocatedPointerDef
) {
715 // Remove def of unrelocated pointer from Contribution of this BB and
716 // trigger update of all its successors.
717 Contribution
.erase(&I
);
718 PoisonedDefs
.erase(&I
);
719 ValidUnrelocatedDefs
.insert(&I
);
720 LLVM_DEBUG(dbgs() << "Removing urelocated " << I
721 << " from Contribution of " << BB
->getName() << "\n");
722 ContributionChanged
= true;
723 } else if (PoisonedPointerDef
) {
724 // Mark pointer as poisoned, remove its def from Contribution and trigger
725 // update of all successors.
726 Contribution
.erase(&I
);
727 PoisonedDefs
.insert(&I
);
728 LLVM_DEBUG(dbgs() << "Removing poisoned " << I
<< " from Contribution of "
729 << BB
->getName() << "\n");
730 ContributionChanged
= true;
732 bool Cleared
= false;
733 transferInstruction(I
, Cleared
, AvailableSet
);
737 return ContributionChanged
;
740 void GCPtrTracker::gatherDominatingDefs(const BasicBlock
*BB
,
741 AvailableValueSet
&Result
,
742 const DominatorTree
&DT
) {
743 DomTreeNode
*DTN
= DT
[const_cast<BasicBlock
*>(BB
)];
745 assert(DTN
&& "Unreachable blocks are ignored");
746 while (DTN
->getIDom()) {
747 DTN
= DTN
->getIDom();
748 auto BBS
= getBasicBlockState(DTN
->getBlock());
749 assert(BBS
&& "immediate dominator cannot be dead for a live block");
750 const auto &Defs
= BBS
->Contribution
;
751 Result
.insert(Defs
.begin(), Defs
.end());
752 // If this block is 'Cleared', then nothing LiveIn to this block can be
753 // available after this block completes. Note: This turns out to be
754 // really important for reducing memory consuption of the initial available
755 // sets and thus peak memory usage by this verifier.
760 for (const Argument
&A
: BB
->getParent()->args())
761 if (containsGCPtrType(A
.getType()))
765 void GCPtrTracker::transferBlock(const BasicBlock
*BB
, BasicBlockState
&BBS
,
766 bool ContributionChanged
) {
767 const AvailableValueSet
&AvailableIn
= BBS
.AvailableIn
;
768 AvailableValueSet
&AvailableOut
= BBS
.AvailableOut
;
771 // AvailableOut will change only when Contribution changed.
772 if (ContributionChanged
)
773 AvailableOut
= BBS
.Contribution
;
775 // Otherwise, we need to reduce the AvailableOut set by things which are no
776 // longer in our AvailableIn
777 AvailableValueSet Temp
= BBS
.Contribution
;
778 set_union(Temp
, AvailableIn
);
779 AvailableOut
= std::move(Temp
);
782 LLVM_DEBUG(dbgs() << "Transfered block " << BB
->getName() << " from ";
783 PrintValueSet(dbgs(), AvailableIn
.begin(), AvailableIn
.end());
785 PrintValueSet(dbgs(), AvailableOut
.begin(), AvailableOut
.end());
789 void GCPtrTracker::transferInstruction(const Instruction
&I
, bool &Cleared
,
790 AvailableValueSet
&Available
) {
791 if (isa
<GCStatepointInst
>(I
)) {
794 } else if (containsGCPtrType(I
.getType()))
795 Available
.insert(&I
);
798 void InstructionVerifier::verifyInstruction(
799 const GCPtrTracker
*Tracker
, const Instruction
&I
,
800 const AvailableValueSet
&AvailableSet
) {
801 if (const PHINode
*PN
= dyn_cast
<PHINode
>(&I
)) {
802 if (containsGCPtrType(PN
->getType()))
803 for (unsigned i
= 0, e
= PN
->getNumIncomingValues(); i
!= e
; ++i
) {
804 const BasicBlock
*InBB
= PN
->getIncomingBlock(i
);
805 const BasicBlockState
*InBBS
= Tracker
->getBasicBlockState(InBB
);
807 !Tracker
->hasLiveIncomingEdge(PN
, InBB
))
808 continue; // Skip dead block or dead edge.
810 const Value
*InValue
= PN
->getIncomingValue(i
);
812 if (isNotExclusivelyConstantDerived(InValue
) &&
813 !InBBS
->AvailableOut
.count(InValue
))
814 reportInvalidUse(*InValue
, *PN
);
816 } else if (isa
<CmpInst
>(I
) &&
817 containsGCPtrType(I
.getOperand(0)->getType())) {
818 Value
*LHS
= I
.getOperand(0), *RHS
= I
.getOperand(1);
819 enum BaseType baseTyLHS
= getBaseType(LHS
),
820 baseTyRHS
= getBaseType(RHS
);
822 // Returns true if LHS and RHS are unrelocated pointers and they are
823 // valid unrelocated uses.
824 auto hasValidUnrelocatedUse
= [&AvailableSet
, Tracker
, baseTyLHS
, baseTyRHS
,
826 // A cmp instruction has valid unrelocated pointer operands only if
827 // both operands are unrelocated pointers.
828 // In the comparison between two pointers, if one is an unrelocated
829 // use, the other *should be* an unrelocated use, for this
830 // instruction to contain valid unrelocated uses. This unrelocated
831 // use can be a null constant as well, or another unrelocated
833 if (AvailableSet
.count(LHS
) || AvailableSet
.count(RHS
))
835 // Constant pointers (that are not exclusively null) may have
836 // meaning in different VMs, so we cannot reorder the compare
837 // against constant pointers before the safepoint. In other words,
838 // comparison of an unrelocated use against a non-null constant
840 if ((baseTyLHS
== BaseType::ExclusivelySomeConstant
&&
841 baseTyRHS
== BaseType::NonConstant
) ||
842 (baseTyLHS
== BaseType::NonConstant
&&
843 baseTyRHS
== BaseType::ExclusivelySomeConstant
))
846 // If one of pointers is poisoned and other is not exclusively derived
847 // from null it is an invalid expression: it produces poisoned result
848 // and unless we want to track all defs (not only gc pointers) the only
849 // option is to prohibit such instructions.
850 if ((Tracker
->isValuePoisoned(LHS
) && baseTyRHS
!= ExclusivelyNull
) ||
851 (Tracker
->isValuePoisoned(RHS
) && baseTyLHS
!= ExclusivelyNull
))
854 // All other cases are valid cases enumerated below:
855 // 1. Comparison between an exclusively derived null pointer and a
856 // constant base pointer.
857 // 2. Comparison between an exclusively derived null pointer and a
858 // non-constant unrelocated base pointer.
859 // 3. Comparison between 2 unrelocated pointers.
860 // 4. Comparison between a pointer exclusively derived from null and a
861 // non-constant poisoned pointer.
864 if (!hasValidUnrelocatedUse()) {
865 // Print out all non-constant derived pointers that are unrelocated
866 // uses, which are invalid.
867 if (baseTyLHS
== BaseType::NonConstant
&& !AvailableSet
.count(LHS
))
868 reportInvalidUse(*LHS
, I
);
869 if (baseTyRHS
== BaseType::NonConstant
&& !AvailableSet
.count(RHS
))
870 reportInvalidUse(*RHS
, I
);
873 for (const Value
*V
: I
.operands())
874 if (containsGCPtrType(V
->getType()) &&
875 isNotExclusivelyConstantDerived(V
) && !AvailableSet
.count(V
))
876 reportInvalidUse(*V
, I
);
880 void InstructionVerifier::reportInvalidUse(const Value
&V
,
881 const Instruction
&I
) {
882 errs() << "Illegal use of unrelocated value found!\n";
883 errs() << "Def: " << V
<< "\n";
884 errs() << "Use: " << I
<< "\n";
887 AnyInvalidUses
= true;
890 static void Verify(const Function
&F
, const DominatorTree
&DT
,
891 const CFGDeadness
&CD
) {
892 LLVM_DEBUG(dbgs() << "Verifying gc pointers in function: " << F
.getName()
895 dbgs() << "Verifying gc pointers in function: " << F
.getName() << "\n";
897 GCPtrTracker
Tracker(F
, DT
, CD
);
899 // We now have all the information we need to decide if the use of a heap
900 // reference is legal or not, given our safepoint semantics.
902 InstructionVerifier Verifier
;
903 GCPtrTracker::verifyFunction(std::move(Tracker
), Verifier
);
905 if (PrintOnly
&& !Verifier
.hasAnyInvalidUses()) {
906 dbgs() << "No illegal uses found by SafepointIRVerifier in: " << F
.getName()