//===- CoreEngine.cpp - Path-Sensitive Dataflow Engine -------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
//  This file defines a generic engine for intraprocedural, path-sensitive,
//  dataflow analysis via graph reachability engine.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/Stmt.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Analysis/AnalysisDeclContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/ProgramPoint.h"
#include "clang/Basic/LLVM.h"
#include "clang/StaticAnalyzer/Core/AnalyzerOptions.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/BlockCounter.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExplodedGraph.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/FunctionSummary.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/WorkList.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/ErrorHandling.h"
#include <algorithm>
#include <cassert>
#include <memory>
#include <optional>
#include <utility>

using namespace clang;
using namespace ento;

#define DEBUG_TYPE "CoreEngine"

STATISTIC(NumSteps,
          "The # of steps executed.");
STATISTIC(NumSTUSteps, "The # of STU steps executed.");
STATISTIC(NumCTUSteps, "The # of CTU steps executed.");
STATISTIC(NumReachedMaxSteps,
          "The # of times we reached the max number of steps.");
STATISTIC(NumPathsExplored,
          "The # of paths explored by the analyzer.");

//===----------------------------------------------------------------------===//
// Core analysis engine.
//===----------------------------------------------------------------------===//

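/// generateWorkList - Construct the worklist implementation that matches the
/// exploration strategy selected in AnalyzerOptions.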
static std::unique_ptr<WorkList> generateWorkList(AnalyzerOptions &Opts) {
  switch (Opts.getExplorationStrategy()) {
  case ExplorationStrategyKind::DFS:
    return WorkList::makeDFS();
  case ExplorationStrategyKind::BFS:
    return WorkList::makeBFS();
  case ExplorationStrategyKind::BFSBlockDFSContents:
    return WorkList::makeBFSBlockDFSContents();
  case ExplorationStrategyKind::UnexploredFirst:
    return WorkList::makeUnexploredFirst();
  case ExplorationStrategyKind::UnexploredFirstQueue:
    return WorkList::makeUnexploredFirstPriorityQueue();
  case ExplorationStrategyKind::UnexploredFirstLocationQueue:
    return WorkList::makeUnexploredFirstPriorityLocationQueue();
  }
  llvm_unreachable("Unknown AnalyzerOptions::ExplorationStrategyKind");
}

CoreEngine::CoreEngine(ExprEngine &exprengine, FunctionSummariesTy *FS,
                       AnalyzerOptions &Opts)
    : ExprEng(exprengine), WList(generateWorkList(Opts)),
      CTUWList(Opts.IsNaiveCTUEnabled ? generateWorkList(Opts) : nullptr),
      BCounterFactory(G.getAllocator()), FunctionSummaries(FS) {}

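/// setBlockCounter - Propagate the current block visit counts to the primary
/// worklist and, when cross-translation-unit analysis is enabled, to the CTU
/// worklist as well.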
void CoreEngine::setBlockCounter(BlockCounter C) {
  WList->setBlockCounter(C);
  if (CTUWList)
    CTUWList->setBlockCounter(C);
}

/// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps.
bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned MaxSteps,
                                 ProgramStateRef InitState) {
  if (G.num_roots() == 0) { // Initialize the analysis by constructing
    // the root if none exists.

    const CFGBlock *Entry = &(L->getCFG()->getEntry());

    assert(Entry->empty() && "Entry block must be empty.");

    assert(Entry->succ_size() == 1 && "Entry block must have 1 successor.");

    // Mark the entry block as visited.
    FunctionSummaries->markVisitedBasicBlock(Entry->getBlockID(),
                                             L->getDecl(),
                                             L->getCFG()->getNumBlockIDs());

    // Get the solitary successor.
    const CFGBlock *Succ = *(Entry->succ_begin());

    // Construct an edge representing the
    // starting location in the function.
    BlockEdge StartLoc(Entry, Succ, L);

    // Set the current block counter to being empty.
    setBlockCounter(BCounterFactory.GetEmptyCounter());

    if (!InitState)
      InitState = ExprEng.getInitialState(L);

    bool IsNew;
    ExplodedNode *Node = G.getNode(StartLoc, InitState, false, &IsNew);
    assert(IsNew);
    G.addRoot(Node);

    NodeBuilderContext BuilderCtx(*this, StartLoc.getDst(), Node);
    ExplodedNodeSet DstBegin;
    ExprEng.processBeginOfFunction(BuilderCtx, Node, DstBegin, StartLoc);

    enqueue(DstBegin);
  }

  // Check if we have a steps limit.
  bool UnlimitedSteps = MaxSteps == 0;

  // Cap our pre-reservation in the event that the user specifies
  // a very large number of maximum steps.
  const unsigned PreReservationCap = 4000000;
  if (!UnlimitedSteps)
    G.reserve(std::min(MaxSteps, PreReservationCap));

  auto ProcessWList = [this, UnlimitedSteps](unsigned MaxSteps) {
    unsigned Steps = MaxSteps;
    while (WList->hasWork()) {
      if (!UnlimitedSteps) {
        if (Steps == 0) {
          NumReachedMaxSteps++;
          break;
        }
        --Steps;
      }

      NumSteps++;

      const WorkListUnit &WU = WList->dequeue();

      // Set the current block counter.
      setBlockCounter(WU.getBlockCounter());

      // Retrieve the node.
      ExplodedNode *Node = WU.getNode();

      dispatchWorkItem(Node, Node->getLocation(), WU);
    }
    return MaxSteps - Steps;
  };
  const unsigned STUSteps = ProcessWList(MaxSteps);

  if (CTUWList) {
    NumSTUSteps += STUSteps;
    const unsigned MinCTUSteps =
        this->ExprEng.getAnalysisManager().options.CTUMaxNodesMin;
    const unsigned Pct =
        this->ExprEng.getAnalysisManager().options.CTUMaxNodesPercentage;
    unsigned MaxCTUSteps = std::max(STUSteps * Pct / 100, MinCTUSteps);

    WList = std::move(CTUWList);
    const unsigned CTUSteps = ProcessWList(MaxCTUSteps);
    NumCTUSteps += CTUSteps;
  }

  ExprEng.processEndWorklist();
  return WList->hasWork();
}

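/// dispatchWorkItem - Route a dequeued node to the handler that corresponds
/// to the kind of its program point.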
void CoreEngine::dispatchWorkItem(ExplodedNode *Pred, ProgramPoint Loc,
                                  const WorkListUnit &WU) {
  // Dispatch on the location type.
  switch (Loc.getKind()) {
  case ProgramPoint::BlockEdgeKind:
    HandleBlockEdge(Loc.castAs<BlockEdge>(), Pred);
    break;

  case ProgramPoint::BlockEntranceKind:
    HandleBlockEntrance(Loc.castAs<BlockEntrance>(), Pred);
    break;

  case ProgramPoint::BlockExitKind:
    assert(false && "BlockExit location never occur in forward analysis.");
    break;

  case ProgramPoint::CallEnterKind:
    HandleCallEnter(Loc.castAs<CallEnter>(), Pred);
    break;

  case ProgramPoint::CallExitBeginKind:
    ExprEng.processCallExit(Pred);
    break;

  case ProgramPoint::EpsilonKind: {
    assert(Pred->hasSinglePred() &&
           "Assume epsilon has exactly one predecessor by construction");
    ExplodedNode *PNode = Pred->getFirstPred();
    dispatchWorkItem(Pred, PNode->getLocation(), WU);
    break;
  }
  default:
    assert(Loc.getAs<PostStmt>() ||
           Loc.getAs<PostInitializer>() ||
           Loc.getAs<PostImplicitCall>() ||
           Loc.getAs<CallExitEnd>() ||
           Loc.getAs<LoopExit>() ||
           Loc.getAs<PostAllocatorCall>());
    HandlePostStmt(WU.getBlock(), WU.getIndex(), Pred);
    break;
  }
}

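/// ExecuteWorkListWithInitialState - Run the worklist algorithm from the
/// given initial state and collect the resulting end-of-path nodes into Dst.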
bool CoreEngine::ExecuteWorkListWithInitialState(const LocationContext *L,
                                                 unsigned Steps,
                                                 ProgramStateRef InitState,
                                                 ExplodedNodeSet &Dst) {
  bool DidNotFinish = ExecuteWorkList(L, Steps, InitState);
  for (ExplodedGraph::eop_iterator I = G.eop_begin(), E = G.eop_end(); I != E;
       ++I) {
    Dst.Add(*I);
  }
  return DidNotFinish;
}

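/// HandleBlockEdge - Process a transition along a CFG edge: mark the
/// destination block as visited, finish the path if it enters the EXIT block,
/// and otherwise hand the block entrance over to the ExprEngine.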
void CoreEngine::HandleBlockEdge(const BlockEdge &L, ExplodedNode *Pred) {
  const CFGBlock *Blk = L.getDst();
  NodeBuilderContext BuilderCtx(*this, Blk, Pred);

  // Mark this block as visited.
  const LocationContext *LC = Pred->getLocationContext();
  FunctionSummaries->markVisitedBasicBlock(Blk->getBlockID(),
                                           LC->getDecl(),
                                           LC->getCFG()->getNumBlockIDs());

  // Display a prunable path note to the user if it's a virtual bases branch
  // and we're taking the path that skips virtual base constructors.
  if (L.getSrc()->getTerminator().isVirtualBaseBranch() &&
      L.getDst() == *L.getSrc()->succ_begin()) {
    ProgramPoint P = L.withTag(getDataTags().make<NoteTag>(
        [](BugReporterContext &, PathSensitiveBugReport &) -> std::string {
          // TODO: Just call out the name of the most derived class
          // when we can.
          return "Virtual base initialization skipped because "
                 "it has already been handled by the most derived class";
        },
        /*IsPrunable=*/true));
    // Perform the transition.
    ExplodedNodeSet Dst;
    NodeBuilder Bldr(Pred, Dst, BuilderCtx);
    Pred = Bldr.generateNode(P, Pred->getState(), Pred);
    if (!Pred)
      return;
  }

  // Check if we are entering the EXIT block.
  if (Blk == &(L.getLocationContext()->getCFG()->getExit())) {
    assert(L.getLocationContext()->getCFG()->getExit().empty() &&
           "EXIT block cannot contain Stmts.");

    // Get the return statement, if any.
    const ReturnStmt *RS = nullptr;
    if (!L.getSrc()->empty()) {
      CFGElement LastElement = L.getSrc()->back();
      if (std::optional<CFGStmt> LastStmt = LastElement.getAs<CFGStmt>()) {
        RS = dyn_cast<ReturnStmt>(LastStmt->getStmt());
      } else if (std::optional<CFGAutomaticObjDtor> AutoDtor =
                     LastElement.getAs<CFGAutomaticObjDtor>()) {
        RS = dyn_cast<ReturnStmt>(AutoDtor->getTriggerStmt());
      }
    }

    // Process the final state transition.
    ExprEng.processEndOfFunction(BuilderCtx, Pred, RS);

    // This path is done. Don't enqueue any more nodes.
    return;
  }

  // Call into the ExprEngine to process entering the CFGBlock.
  ExplodedNodeSet dstNodes;
  BlockEntrance BE(Blk, Pred->getLocationContext());
  NodeBuilderWithSinks nodeBuilder(Pred, dstNodes, BuilderCtx, BE);
  ExprEng.processCFGBlockEntrance(L, nodeBuilder, Pred);

  // Auto-generate a node.
  if (!nodeBuilder.hasGeneratedNodes()) {
    nodeBuilder.generateNode(Pred->State, Pred);
  }

  // Enqueue nodes onto the worklist.
  enqueue(dstNodes);
}

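/// HandleBlockEntrance - Bump the visit count for the entered block, then
/// process its first CFG element or, if the block is empty, its exit.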
void CoreEngine::HandleBlockEntrance(const BlockEntrance &L,
                                     ExplodedNode *Pred) {
  // Increment the block counter.
  const LocationContext *LC = Pred->getLocationContext();
  unsigned BlockId = L.getBlock()->getBlockID();
  BlockCounter Counter = WList->getBlockCounter();
  Counter = BCounterFactory.IncrementCount(Counter, LC->getStackFrame(),
                                           BlockId);
  setBlockCounter(Counter);

  // Process the entrance of the block.
  if (std::optional<CFGElement> E = L.getFirstElement()) {
    NodeBuilderContext Ctx(*this, L.getBlock(), Pred);
    ExprEng.processCFGElement(*E, Pred, 0, &Ctx);
  } else
    HandleBlockExit(L.getBlock(), Pred);
}

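/// HandleBlockExit - Dispatch on the block's terminator (branches, switches,
/// gotos, etc.); blocks without a terminator simply follow their single
/// successor edge.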
void CoreEngine::HandleBlockExit(const CFGBlock *B, ExplodedNode *Pred) {
  if (const Stmt *Term = B->getTerminatorStmt()) {
    switch (Term->getStmtClass()) {
    default:
      llvm_unreachable("Analysis for this terminator not implemented.");

    case Stmt::CXXBindTemporaryExprClass:
      HandleCleanupTemporaryBranch(
          cast<CXXBindTemporaryExpr>(Term), B, Pred);
      return;

    // Model static initializers.
    case Stmt::DeclStmtClass:
      HandleStaticInit(cast<DeclStmt>(Term), B, Pred);
      return;

    case Stmt::BinaryOperatorClass: // '&&' and '||'
      HandleBranch(cast<BinaryOperator>(Term)->getLHS(), Term, B, Pred);
      return;

    case Stmt::BinaryConditionalOperatorClass:
    case Stmt::ConditionalOperatorClass:
      HandleBranch(cast<AbstractConditionalOperator>(Term)->getCond(),
                   Term, B, Pred);
      return;

      // FIXME: Use constant-folding in CFG construction to simplify this
      // case.

    case Stmt::ChooseExprClass:
      HandleBranch(cast<ChooseExpr>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::CXXTryStmtClass:
      // Generate a node for each of the successors.
      // Our logic for EH analysis can certainly be improved.
      for (CFGBlock::const_succ_iterator it = B->succ_begin(),
           et = B->succ_end(); it != et; ++it) {
        if (const CFGBlock *succ = *it) {
          generateNode(BlockEdge(B, succ, Pred->getLocationContext()),
                       Pred->State, Pred);
        }
      }
      return;

    case Stmt::DoStmtClass:
      HandleBranch(cast<DoStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::CXXForRangeStmtClass:
      HandleBranch(cast<CXXForRangeStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::ForStmtClass:
      HandleBranch(cast<ForStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::SEHLeaveStmtClass:
    case Stmt::ContinueStmtClass:
    case Stmt::BreakStmtClass:
    case Stmt::GotoStmtClass:
      break;

    case Stmt::IfStmtClass:
      HandleBranch(cast<IfStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::IndirectGotoStmtClass: {
      // Only 1 successor: the indirect goto dispatch block.
      assert(B->succ_size() == 1);

      IndirectGotoNodeBuilder
          builder(Pred, B, cast<IndirectGotoStmt>(Term)->getTarget(),
                  *(B->succ_begin()), this);

      ExprEng.processIndirectGoto(builder);
      return;
    }

    case Stmt::ObjCForCollectionStmtClass:
      // In the case of ObjCForCollectionStmt, it appears twice in a CFG:
      //
      //  (1) inside a basic block, which represents the binding of the
      //      'element' variable to a value.
      //  (2) in a terminator, which represents the branch.
      //
      // For (1), ExprEngine will bind a value (i.e., 0 or 1) indicating
      // whether or not the collection contains any more elements. We cannot
      // just test to see if the element is nil because a container can
      // contain nil elements.
      HandleBranch(Term, Term, B, Pred);
      return;

    case Stmt::SwitchStmtClass: {
      SwitchNodeBuilder builder(Pred, B, cast<SwitchStmt>(Term)->getCond(),
                                this);

      ExprEng.processSwitch(builder);
      return;
    }

    case Stmt::WhileStmtClass:
      HandleBranch(cast<WhileStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::GCCAsmStmtClass:
      assert(cast<GCCAsmStmt>(Term)->isAsmGoto() &&
             "Encountered GCCAsmStmt without labels");
      // TODO: Handle jumping to labels
      return;
    }
  }

  if (B->getTerminator().isVirtualBaseBranch()) {
    HandleVirtualBaseBranch(B, Pred);
    return;
  }

  assert(B->succ_size() == 1 &&
         "Blocks with no terminator should have at most 1 successor.");

  generateNode(BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()),
               Pred->State, Pred);
}

void CoreEngine::HandleCallEnter(const CallEnter &CE, ExplodedNode *Pred) {
  NodeBuilderContext BuilderCtx(*this, CE.getEntry(), Pred);
  ExprEng.processCallEnter(BuilderCtx, CE, Pred);
}

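/// HandleBranch - Let the ExprEngine evaluate a two-way branch condition and
/// enqueue the resulting frontier nodes.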
void CoreEngine::HandleBranch(const Stmt *Cond, const Stmt *Term,
                              const CFGBlock *B, ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  ExprEng.processBranch(Cond, Ctx, Pred, Dst, *(B->succ_begin()),
                        *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandleCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
                                              const CFGBlock *B,
                                              ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  ExprEng.processCleanupTemporaryBranch(BTE, Ctx, Pred, Dst, *(B->succ_begin()),
                                        *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandleStaticInit(const DeclStmt *DS, const CFGBlock *B,
                                  ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  ExprEng.processStaticInitializer(DS, Ctx, Pred, Dst,
                                   *(B->succ_begin()), *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandlePostStmt(const CFGBlock *B, unsigned StmtIdx,
                                ExplodedNode *Pred) {
  assert(B);
  assert(!B->empty());

  if (StmtIdx == B->size())
    HandleBlockExit(B, Pred);
  else {
    NodeBuilderContext Ctx(*this, B, Pred);
    ExprEng.processCFGElement((*B)[StmtIdx], Pred, StmtIdx, &Ctx);
  }
}

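/// HandleVirtualBaseBranch - Take the successor that skips virtual base
/// initializers when this constructor runs as a base-class subobject
/// constructor (the most derived class handles them), and the successor that
/// runs them otherwise.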
void CoreEngine::HandleVirtualBaseBranch(const CFGBlock *B,
                                         ExplodedNode *Pred) {
  const LocationContext *LCtx = Pred->getLocationContext();
  if (const auto *CallerCtor = dyn_cast_or_null<CXXConstructExpr>(
          LCtx->getStackFrame()->getCallSite())) {
    switch (CallerCtor->getConstructionKind()) {
    case CXXConstructExpr::CK_NonVirtualBase:
    case CXXConstructExpr::CK_VirtualBase: {
      BlockEdge Loc(B, *B->succ_begin(), LCtx);
      HandleBlockEdge(Loc, Pred);
      return;
    }
    default:
      break;
    }
  }

  // We either don't see a parent stack frame because we're in the top frame,
  // or the parent stack frame doesn't initialize our virtual bases.
  BlockEdge Loc(B, *(B->succ_begin() + 1), LCtx);
  HandleBlockEdge(Loc, Pred);
}

/// generateNode - Utility method to generate nodes, hook up successors,
///  and add nodes to the worklist.
void CoreEngine::generateNode(const ProgramPoint &Loc,
                              ProgramStateRef State,
                              ExplodedNode *Pred) {
  bool IsNew;
  ExplodedNode *Node = G.getNode(Loc, State, false, &IsNew);

  if (Pred)
    Node->addPredecessor(Pred, G); // Link 'Node' with its predecessor.
  else {
    assert(IsNew);
    G.addRoot(Node); // 'Node' has no predecessor. Make it a root.
  }

  // Only add 'Node' to the worklist if it was freshly generated.
  if (IsNew)
    WList->enqueue(Node);
}

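/// enqueueStmtNode - Put a node produced for a CFG element back on the
/// worklist, creating an intermediate PostStmt node where needed so that
/// processing resumes at the right element index.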
void CoreEngine::enqueueStmtNode(ExplodedNode *N,
                                 const CFGBlock *Block, unsigned Idx) {
  assert(Block);
  assert(!N->isSink());

  // Check if this node entered a callee.
  if (N->getLocation().getAs<CallEnter>()) {
    // Still use the index of the CallExpr. It's needed to create the callee
    // StackFrameContext.
    WList->enqueue(N, Block, Idx);
    return;
  }

  // Do not create extra nodes. Move to the next CFG element.
  if (N->getLocation().getAs<PostInitializer>() ||
      N->getLocation().getAs<PostImplicitCall>() ||
      N->getLocation().getAs<LoopExit>()) {
    WList->enqueue(N, Block, Idx + 1);
    return;
  }

  if (N->getLocation().getAs<EpsilonPoint>()) {
    WList->enqueue(N, Block, Idx);
    return;
  }

  if ((*Block)[Idx].getKind() == CFGElement::NewAllocator) {
    WList->enqueue(N, Block, Idx + 1);
    return;
  }

  // At this point, we know we're processing a normal statement.
  CFGStmt CS = (*Block)[Idx].castAs<CFGStmt>();
  PostStmt Loc(CS.getStmt(), N->getLocationContext());

  if (Loc == N->getLocation().withTag(nullptr)) {
    // Note: 'N' should be a fresh node because otherwise it shouldn't be
    // a member of Deferred.
    WList->enqueue(N, Block, Idx + 1);
    return;
  }

  bool IsNew;
  ExplodedNode *Succ = G.getNode(Loc, N->getState(), false, &IsNew);
  Succ->addPredecessor(N, G);

  if (IsNew)
    WList->enqueue(Succ, Block, Idx + 1);
}

ExplodedNode *CoreEngine::generateCallExitBeginNode(ExplodedNode *N,
                                                    const ReturnStmt *RS) {
  // Create a CallExitBegin node and enqueue it.
  const auto *LocCtx = cast<StackFrameContext>(N->getLocationContext());

  // Use the callee location context.
  CallExitBegin Loc(LocCtx, RS);

  bool isNew;
  ExplodedNode *Node = G.getNode(Loc, N->getState(), false, &isNew);
  Node->addPredecessor(N, G);
  return isNew ? Node : nullptr;
}

void CoreEngine::enqueue(ExplodedNodeSet &Set) {
  for (const auto I : Set)
    WList->enqueue(I);
}

void CoreEngine::enqueue(ExplodedNodeSet &Set,
                         const CFGBlock *Block, unsigned Idx) {
  for (const auto I : Set)
    enqueueStmtNode(I, Block, Idx);
}

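/// enqueueEndOfFunction - Handle nodes that leave a function: inlined calls
/// get a CallExitBegin node enqueued, while paths in the top frame are
/// recorded as end-of-path nodes.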
void CoreEngine::enqueueEndOfFunction(ExplodedNodeSet &Set,
                                      const ReturnStmt *RS) {
  for (auto *I : Set) {
    // If we are in an inlined call, generate CallExitBegin node.
    if (I->getLocationContext()->getParent()) {
      I = generateCallExitBeginNode(I, RS);
      if (I)
        WList->enqueue(I);
    } else {
      // TODO: We should run remove dead bindings here.
      G.addEndOfPath(I);
      NumPathsExplored++;
    }
  }
}

void NodeBuilder::anchor() {}

ExplodedNode *NodeBuilder::generateNodeImpl(const ProgramPoint &Loc,
                                            ProgramStateRef State,
                                            ExplodedNode *FromN,
                                            bool MarkAsSink) {
  HasGeneratedNodes = true;
  bool IsNew;
  ExplodedNode *N = C.Eng.G.getNode(Loc, State, MarkAsSink, &IsNew);
  N->addPredecessor(FromN, C.Eng.G);
  Frontier.erase(FromN);

  if (!IsNew)
    return nullptr;

  if (!MarkAsSink)
    Frontier.Add(N);

  return N;
}

void NodeBuilderWithSinks::anchor() {}

StmtNodeBuilder::~StmtNodeBuilder() {
  if (EnclosingBldr)
    for (const auto I : Frontier)
      EnclosingBldr->addNodes(I);
}

void BranchNodeBuilder::anchor() {}

ExplodedNode *BranchNodeBuilder::generateNode(ProgramStateRef State,
                                              bool branch,
                                              ExplodedNode *NodePred) {
  // If the branch has been marked infeasible we should not generate a node.
  if (!isFeasible(branch))
    return nullptr;

  ProgramPoint Loc = BlockEdge(C.Block, branch ? DstT : DstF,
                               NodePred->getLocationContext());
  ExplodedNode *Succ = generateNodeImpl(Loc, State, NodePred);
  return Succ;
}

ExplodedNode *
IndirectGotoNodeBuilder::generateNode(const iterator &I,
                                      ProgramStateRef St,
                                      bool IsSink) {
  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}

ExplodedNode *
SwitchNodeBuilder::generateCaseStmtNode(const iterator &I,
                                        ProgramStateRef St) {
  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, false, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);
  if (!IsNew)
    return nullptr;

  Eng.WList->enqueue(Succ);
  return Succ;
}

ExplodedNode *
SwitchNodeBuilder::generateDefaultCaseNode(ProgramStateRef St,
                                           bool IsSink) {
  // Get the block for the default case.
  assert(Src->succ_rbegin() != Src->succ_rend());
  CFGBlock *DefaultBlock = *Src->succ_rbegin();

  // Basic correctness check for default blocks that are unreachable and not
  // caught by earlier stages.
  if (!DefaultBlock)
    return nullptr;

  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, DefaultBlock, Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}