//===- CoreEngine.cpp - Path-Sensitive Dataflow Engine -------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
//  This file defines a generic engine for intraprocedural, path-sensitive,
//  dataflow analysis via a graph reachability engine.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/Stmt.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Analysis/AnalysisDeclContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/ProgramPoint.h"
#include "clang/Basic/LLVM.h"
#include "clang/StaticAnalyzer/Core/AnalyzerOptions.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/BlockCounter.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExplodedGraph.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/FunctionSummary.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/WorkList.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/ErrorHandling.h"
#include <algorithm>
#include <cassert>
#include <memory>
#include <optional>
#include <utility>

using namespace clang;
using namespace ento;

#define DEBUG_TYPE "CoreEngine"

STATISTIC(NumSteps,
            "The # of steps executed.");
STATISTIC(NumSTUSteps, "The # of STU steps executed.");
STATISTIC(NumCTUSteps, "The # of CTU steps executed.");
STATISTIC(NumReachedMaxSteps,
            "The # of times we reached the max number of steps.");
STATISTIC(NumPathsExplored,
            "The # of paths explored by the analyzer.");

//===----------------------------------------------------------------------===//
// Core analysis engine.
//===----------------------------------------------------------------------===//

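// Construct the worklist that determines the order in which exploded nodes
// are dequeued, based on the exploration strategy selected in AnalyzerOptions.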
static std::unique_ptr<WorkList> generateWorkList(AnalyzerOptions &Opts) {
  switch (Opts.getExplorationStrategy()) {
  case ExplorationStrategyKind::DFS:
    return WorkList::makeDFS();
  case ExplorationStrategyKind::BFS:
    return WorkList::makeBFS();
  case ExplorationStrategyKind::BFSBlockDFSContents:
    return WorkList::makeBFSBlockDFSContents();
  case ExplorationStrategyKind::UnexploredFirst:
    return WorkList::makeUnexploredFirst();
  case ExplorationStrategyKind::UnexploredFirstQueue:
    return WorkList::makeUnexploredFirstPriorityQueue();
  case ExplorationStrategyKind::UnexploredFirstLocationQueue:
    return WorkList::makeUnexploredFirstPriorityLocationQueue();
  }
  llvm_unreachable("Unknown AnalyzerOptions::ExplorationStrategyKind");
}

CoreEngine::CoreEngine(ExprEngine &exprengine, FunctionSummariesTy *FS,
                       AnalyzerOptions &Opts)
    : ExprEng(exprengine), WList(generateWorkList(Opts)),
      CTUWList(Opts.IsNaiveCTUEnabled ? generateWorkList(Opts) : nullptr),
      BCounterFactory(G.getAllocator()), FunctionSummaries(FS) {}

void CoreEngine::setBlockCounter(BlockCounter C) {
  WList->setBlockCounter(C);
  if (CTUWList)
    CTUWList->setBlockCounter(C);
}

/// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps.
bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned MaxSteps,
                                 ProgramStateRef InitState) {
  if (G.num_roots() == 0) { // Initialize the analysis by constructing
                            // the root if none exists.

    const CFGBlock *Entry = &(L->getCFG()->getEntry());

    assert(Entry->empty() && "Entry block must be empty.");

    assert(Entry->succ_size() == 1 && "Entry block must have 1 successor.");

    // Mark the entry block as visited.
    FunctionSummaries->markVisitedBasicBlock(Entry->getBlockID(),
                                             L->getDecl(),
                                             L->getCFG()->getNumBlockIDs());

    // Get the solitary successor.
    const CFGBlock *Succ = *(Entry->succ_begin());

    // Construct an edge representing the
    // starting location in the function.
    BlockEdge StartLoc(Entry, Succ, L);

    // Set the current block counter to be empty.
    setBlockCounter(BCounterFactory.GetEmptyCounter());

    if (!InitState)
      InitState = ExprEng.getInitialState(L);

    bool IsNew;
    ExplodedNode *Node = G.getNode(StartLoc, InitState, false, &IsNew);
    assert(IsNew);
    G.addRoot(Node);

    NodeBuilderContext BuilderCtx(*this, StartLoc.getDst(), Node);
    ExplodedNodeSet DstBegin;
    ExprEng.processBeginOfFunction(BuilderCtx, Node, DstBegin, StartLoc);

    enqueue(DstBegin);
  }

  // Check if we have a steps limit.
  bool UnlimitedSteps = MaxSteps == 0;
  // Cap our pre-reservation in the event that the user specifies
  // a very large number of maximum steps.
  const unsigned PreReservationCap = 4000000;
  if (!UnlimitedSteps)
    G.reserve(std::min(MaxSteps, PreReservationCap));

  auto ProcessWList = [this, UnlimitedSteps](unsigned MaxSteps) {
    unsigned Steps = MaxSteps;
    while (WList->hasWork()) {
      if (!UnlimitedSteps) {
        if (Steps == 0) {
          NumReachedMaxSteps++;
          break;
        }
        --Steps;
      }

      NumSteps++;

      const WorkListUnit &WU = WList->dequeue();

      // Set the current block counter.
      setBlockCounter(WU.getBlockCounter());

      // Retrieve the node.
      ExplodedNode *Node = WU.getNode();

      dispatchWorkItem(Node, Node->getLocation(), WU);
    }
    return MaxSteps - Steps;
  };
  const unsigned STUSteps = ProcessWList(MaxSteps);

  if (CTUWList) {
    NumSTUSteps += STUSteps;
    const unsigned MinCTUSteps =
        this->ExprEng.getAnalysisManager().options.CTUMaxNodesMin;
    const unsigned Pct =
        this->ExprEng.getAnalysisManager().options.CTUMaxNodesPercentage;
    unsigned MaxCTUSteps = std::max(STUSteps * Pct / 100, MinCTUSteps);

    WList = std::move(CTUWList);
    const unsigned CTUSteps = ProcessWList(MaxCTUSteps);
    NumCTUSteps += CTUSteps;
  }

  ExprEng.processEndWorklist();
  return WList->hasWork();
}

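// Dispatch a dequeued worklist unit to the handler matching its program point
// kind (block edge, block entrance, call enter/exit, statement, etc.).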
void CoreEngine::dispatchWorkItem(ExplodedNode *Pred, ProgramPoint Loc,
                                  const WorkListUnit &WU) {
  // Dispatch on the location type.
  switch (Loc.getKind()) {
    case ProgramPoint::BlockEdgeKind:
      HandleBlockEdge(Loc.castAs<BlockEdge>(), Pred);
      break;

    case ProgramPoint::BlockEntranceKind:
      HandleBlockEntrance(Loc.castAs<BlockEntrance>(), Pred);
      break;

    case ProgramPoint::BlockExitKind:
      assert(false && "BlockExit locations never occur in forward analysis.");
      break;

    case ProgramPoint::CallEnterKind:
      HandleCallEnter(Loc.castAs<CallEnter>(), Pred);
      break;

    case ProgramPoint::CallExitBeginKind:
      ExprEng.processCallExit(Pred);
      break;

    case ProgramPoint::EpsilonKind: {
      assert(Pred->hasSinglePred() &&
             "Assume epsilon has exactly one predecessor by construction");
      ExplodedNode *PNode = Pred->getFirstPred();
      dispatchWorkItem(Pred, PNode->getLocation(), WU);
      break;
    }
    default:
      assert(Loc.getAs<PostStmt>() ||
             Loc.getAs<PostInitializer>() ||
             Loc.getAs<PostImplicitCall>() ||
             Loc.getAs<CallExitEnd>() ||
             Loc.getAs<LoopExit>() ||
             Loc.getAs<PostAllocatorCall>());
      HandlePostStmt(WU.getBlock(), WU.getIndex(), Pred);
      break;
  }
}

void CoreEngine::HandleBlockEdge(const BlockEdge &L, ExplodedNode *Pred) {
  const CFGBlock *Blk = L.getDst();
  NodeBuilderContext BuilderCtx(*this, Blk, Pred);

  // Mark this block as visited.
  const LocationContext *LC = Pred->getLocationContext();
  FunctionSummaries->markVisitedBasicBlock(Blk->getBlockID(),
                                           LC->getDecl(),
                                           LC->getCFG()->getNumBlockIDs());

  // Display a prunable path note to the user if it's a virtual bases branch
  // and we're taking the path that skips virtual base constructors.
  if (L.getSrc()->getTerminator().isVirtualBaseBranch() &&
      L.getDst() == *L.getSrc()->succ_begin()) {
    ProgramPoint P = L.withTag(getDataTags().make<NoteTag>(
        [](BugReporterContext &, PathSensitiveBugReport &) -> std::string {
          // TODO: Just call out the name of the most derived class
          // when we know it.
          return "Virtual base initialization skipped because "
                 "it has already been handled by the most derived class";
        },
        /*IsPrunable=*/true));
    // Perform the transition.
    ExplodedNodeSet Dst;
    NodeBuilder Bldr(Pred, Dst, BuilderCtx);
    Pred = Bldr.generateNode(P, Pred->getState(), Pred);
    if (!Pred)
      return;
  }

  // Check if we are entering the EXIT block.
  if (Blk == &(L.getLocationContext()->getCFG()->getExit())) {
    assert(L.getLocationContext()->getCFG()->getExit().empty() &&
           "EXIT block cannot contain Stmts.");

    // Get the return statement.
    const ReturnStmt *RS = nullptr;
    if (!L.getSrc()->empty()) {
      CFGElement LastElement = L.getSrc()->back();
      if (std::optional<CFGStmt> LastStmt = LastElement.getAs<CFGStmt>()) {
        RS = dyn_cast<ReturnStmt>(LastStmt->getStmt());
      } else if (std::optional<CFGAutomaticObjDtor> AutoDtor =
                     LastElement.getAs<CFGAutomaticObjDtor>()) {
        RS = dyn_cast<ReturnStmt>(AutoDtor->getTriggerStmt());
      }
    }

    // Process the final state transition.
    ExprEng.processEndOfFunction(BuilderCtx, Pred, RS);

    // This path is done. Don't enqueue any more nodes.
    return;
  }

  // Call into the ExprEngine to process entering the CFGBlock.
  ExplodedNodeSet dstNodes;
  BlockEntrance BE(Blk, Pred->getLocationContext());
  NodeBuilderWithSinks nodeBuilder(Pred, dstNodes, BuilderCtx, BE);
  ExprEng.processCFGBlockEntrance(L, nodeBuilder, Pred);

  // Auto-generate a node.
  if (!nodeBuilder.hasGeneratedNodes()) {
    nodeBuilder.generateNode(Pred->State, Pred);
  }

  // Enqueue nodes onto the worklist.
  enqueue(dstNodes);
}

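// Record another visit to this block on the current path via the block
// counter (used to bound repeated block visits), then process the block's
// first element, or its exit if the block is empty.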
void CoreEngine::HandleBlockEntrance(const BlockEntrance &L,
                                     ExplodedNode *Pred) {
  // Increment the block counter.
  const LocationContext *LC = Pred->getLocationContext();
  unsigned BlockId = L.getBlock()->getBlockID();
  BlockCounter Counter = WList->getBlockCounter();
  Counter = BCounterFactory.IncrementCount(Counter, LC->getStackFrame(),
                                           BlockId);
  setBlockCounter(Counter);

  // Process the entrance of the block.
  if (std::optional<CFGElement> E = L.getFirstElement()) {
    NodeBuilderContext Ctx(*this, L.getBlock(), Pred);
    ExprEng.processCFGElement(*E, Pred, 0, &Ctx);
  } else
    HandleBlockExit(L.getBlock(), Pred);
}

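// Process a block's terminator, dispatching to the appropriate branch, switch,
// or goto handler; blocks without a terminator flow into their single
// successor (or take the virtual-base branch).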
void CoreEngine::HandleBlockExit(const CFGBlock *B, ExplodedNode *Pred) {
  if (const Stmt *Term = B->getTerminatorStmt()) {
    switch (Term->getStmtClass()) {
      default:
        llvm_unreachable("Analysis for this terminator not implemented.");

      case Stmt::CXXBindTemporaryExprClass:
        HandleCleanupTemporaryBranch(
            cast<CXXBindTemporaryExpr>(Term), B, Pred);
        return;

      // Model static initializers.
      case Stmt::DeclStmtClass:
        HandleStaticInit(cast<DeclStmt>(Term), B, Pred);
        return;

      case Stmt::BinaryOperatorClass: // '&&' and '||'
        HandleBranch(cast<BinaryOperator>(Term)->getLHS(), Term, B, Pred);
        return;

      case Stmt::BinaryConditionalOperatorClass:
      case Stmt::ConditionalOperatorClass:
        HandleBranch(cast<AbstractConditionalOperator>(Term)->getCond(),
                     Term, B, Pred);
        return;

      // FIXME: Use constant-folding in CFG construction to simplify this
      // case.

      case Stmt::ChooseExprClass:
        HandleBranch(cast<ChooseExpr>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::CXXTryStmtClass:
        // Generate a node for each of the successors.
        // Our logic for EH analysis can certainly be improved.
        for (CFGBlock::const_succ_iterator it = B->succ_begin(),
             et = B->succ_end(); it != et; ++it) {
          if (const CFGBlock *succ = *it) {
            generateNode(BlockEdge(B, succ, Pred->getLocationContext()),
                         Pred->State, Pred);
          }
        }
        return;

      case Stmt::DoStmtClass:
        HandleBranch(cast<DoStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::CXXForRangeStmtClass:
        HandleBranch(cast<CXXForRangeStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::ForStmtClass:
        HandleBranch(cast<ForStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::SEHLeaveStmtClass:
      case Stmt::ContinueStmtClass:
      case Stmt::BreakStmtClass:
      case Stmt::GotoStmtClass:
        break;

      case Stmt::IfStmtClass:
        HandleBranch(cast<IfStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::IndirectGotoStmtClass: {
        // Only 1 successor: the indirect goto dispatch block.
        assert(B->succ_size() == 1);

        IndirectGotoNodeBuilder
            builder(Pred, B, cast<IndirectGotoStmt>(Term)->getTarget(),
                    *(B->succ_begin()), this);

        ExprEng.processIndirectGoto(builder);
        return;
      }

      case Stmt::ObjCForCollectionStmtClass:
        // In the case of ObjCForCollectionStmt, it appears twice in a CFG:
        //
        //  (1) inside a basic block, which represents the binding of the
        //      'element' variable to a value.
        //  (2) in a terminator, which represents the branch.
        //
        // For (1), ExprEngine will bind a value (i.e., 0 or 1) indicating
        // whether or not the collection contains any more elements. We cannot
        // just test to see if the element is nil because a container can
        // contain nil elements.
        HandleBranch(Term, Term, B, Pred);
        return;

      case Stmt::SwitchStmtClass: {
        SwitchNodeBuilder builder(Pred, B, cast<SwitchStmt>(Term)->getCond(),
                                  this);

        ExprEng.processSwitch(builder);
        return;
      }

      case Stmt::WhileStmtClass:
        HandleBranch(cast<WhileStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::GCCAsmStmtClass:
        assert(cast<GCCAsmStmt>(Term)->isAsmGoto() &&
               "Encountered GCCAsmStmt without labels");
        // TODO: Handle jumping to labels
        return;
    }
  }

  if (B->getTerminator().isVirtualBaseBranch()) {
    HandleVirtualBaseBranch(B, Pred);
    return;
  }

  assert(B->succ_size() == 1 &&
         "Blocks with no terminator should have exactly 1 successor.");

  generateNode(BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()),
               Pred->State, Pred);
}

void CoreEngine::HandleCallEnter(const CallEnter &CE, ExplodedNode *Pred) {
  NodeBuilderContext BuilderCtx(*this, CE.getEntry(), Pred);
  ExprEng.processCallEnter(BuilderCtx, CE, Pred);
}

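// Handle a two-way branch terminator: the ExprEngine evaluates the condition
// and populates the frontier along the true and false successor edges.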
void CoreEngine::HandleBranch(const Stmt *Cond, const Stmt *Term,
                              const CFGBlock *B, ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  ExprEng.processBranch(Cond, Ctx, Pred, Dst, *(B->succ_begin()),
                        *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandleCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
                                              const CFGBlock *B,
                                              ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  ExprEng.processCleanupTemporaryBranch(BTE, Ctx, Pred, Dst, *(B->succ_begin()),
                                        *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandleStaticInit(const DeclStmt *DS, const CFGBlock *B,
                                  ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  ExprEng.processStaticInitializer(DS, Ctx, Pred, Dst,
                                   *(B->succ_begin()), *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandlePostStmt(const CFGBlock *B, unsigned StmtIdx,
                                ExplodedNode *Pred) {
  assert(B);
  assert(!B->empty());

  if (StmtIdx == B->size())
    HandleBlockExit(B, Pred);
  else {
    NodeBuilderContext Ctx(*this, B, Pred);
    ExprEng.processCFGElement((*B)[StmtIdx], Pred, StmtIdx, &Ctx);
  }
}

void CoreEngine::HandleVirtualBaseBranch(const CFGBlock *B,
                                         ExplodedNode *Pred) {
  const LocationContext *LCtx = Pred->getLocationContext();
  if (const auto *CallerCtor = dyn_cast_or_null<CXXConstructExpr>(
          LCtx->getStackFrame()->getCallSite())) {
    switch (CallerCtor->getConstructionKind()) {
    case CXXConstructionKind::NonVirtualBase:
    case CXXConstructionKind::VirtualBase: {
      BlockEdge Loc(B, *B->succ_begin(), LCtx);
      HandleBlockEdge(Loc, Pred);
      return;
    }
    default:
      break;
    }
  }

  // We either don't see a parent stack frame because we're in the top frame,
  // or the parent stack frame doesn't initialize our virtual bases.
  BlockEdge Loc(B, *(B->succ_begin() + 1), LCtx);
  HandleBlockEdge(Loc, Pred);
}

/// generateNode - Utility method to generate nodes, hook up successors,
/// and add nodes to the worklist.
void CoreEngine::generateNode(const ProgramPoint &Loc,
                              ProgramStateRef State,
                              ExplodedNode *Pred) {
  bool IsNew;
  ExplodedNode *Node = G.getNode(Loc, State, false, &IsNew);

  if (Pred)
    Node->addPredecessor(Pred, G); // Link 'Node' with its predecessor.
  else {
    assert(IsNew);
    G.addRoot(Node); // 'Node' has no predecessor. Make it a root.
  }

  // Only add 'Node' to the worklist if it was freshly generated.
  if (IsNew) WList->enqueue(Node);
}

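// Enqueue a node produced while processing a CFG element, normally advancing
// the work item to the next element of the same block.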
void CoreEngine::enqueueStmtNode(ExplodedNode *N,
                                 const CFGBlock *Block, unsigned Idx) {
  assert(Block);
  assert(!N->isSink());

  // Check if this node entered a callee.
  if (N->getLocation().getAs<CallEnter>()) {
    // Still use the index of the CallExpr. It's needed to create the callee
    // StackFrameContext.
    WList->enqueue(N, Block, Idx);
    return;
  }

  // Do not create extra nodes. Move to the next CFG element.
  if (N->getLocation().getAs<PostInitializer>() ||
      N->getLocation().getAs<PostImplicitCall>() ||
      N->getLocation().getAs<LoopExit>()) {
    WList->enqueue(N, Block, Idx + 1);
    return;
  }

  if (N->getLocation().getAs<EpsilonPoint>()) {
    WList->enqueue(N, Block, Idx);
    return;
  }

  if ((*Block)[Idx].getKind() == CFGElement::NewAllocator) {
    WList->enqueue(N, Block, Idx + 1);
    return;
  }

  // At this point, we know we're processing a normal statement.
  CFGStmt CS = (*Block)[Idx].castAs<CFGStmt>();
  PostStmt Loc(CS.getStmt(), N->getLocationContext());

  if (Loc == N->getLocation().withTag(nullptr)) {
    // Note: 'N' should be a fresh node because otherwise it shouldn't be
    // a member of Deferred.
    WList->enqueue(N, Block, Idx + 1);
    return;
  }

  bool IsNew;
  ExplodedNode *Succ = G.getNode(Loc, N->getState(), false, &IsNew);
  Succ->addPredecessor(N, G);

  if (IsNew)
    WList->enqueue(Succ, Block, Idx + 1);
}

ExplodedNode *CoreEngine::generateCallExitBeginNode(ExplodedNode *N,
                                                    const ReturnStmt *RS) {
  // Create a CallExitBegin node and enqueue it.
  const auto *LocCtx = cast<StackFrameContext>(N->getLocationContext());

  // Use the callee location context.
  CallExitBegin Loc(LocCtx, RS);

  bool isNew;
  ExplodedNode *Node = G.getNode(Loc, N->getState(), false, &isNew);
  Node->addPredecessor(N, G);
  return isNew ? Node : nullptr;
}

void CoreEngine::enqueue(ExplodedNodeSet &Set) {
  for (const auto I : Set)
    WList->enqueue(I);
}

void CoreEngine::enqueue(ExplodedNodeSet &Set,
                         const CFGBlock *Block, unsigned Idx) {
  for (const auto I : Set)
    enqueueStmtNode(I, Block, Idx);
}

void CoreEngine::enqueueEndOfFunction(ExplodedNodeSet &Set,
                                      const ReturnStmt *RS) {
  for (auto *I : Set) {
    // If we are in an inlined call, generate a CallExitBegin node.
    if (I->getLocationContext()->getParent()) {
      I = generateCallExitBeginNode(I, RS);
      if (I)
        WList->enqueue(I);
    } else {
      // TODO: We should run remove-dead-bindings here.
      G.addEndOfPath(I);
      NumPathsExplored++;
    }
  }
}

void NodeBuilder::anchor() {}

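// Create (or find) the exploded node for (Loc, State), link it to FromN, and
// update the builder's frontier; returns null if the node already existed.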
ExplodedNode *NodeBuilder::generateNodeImpl(const ProgramPoint &Loc,
                                            ProgramStateRef State,
                                            ExplodedNode *FromN,
                                            bool MarkAsSink) {
  HasGeneratedNodes = true;
  bool IsNew;
  ExplodedNode *N = C.getEngine().G.getNode(Loc, State, MarkAsSink, &IsNew);
  N->addPredecessor(FromN, C.getEngine().G);
  Frontier.erase(FromN);

  if (!IsNew)
    return nullptr;

  if (!MarkAsSink)
    Frontier.Add(N);

  return N;
}

void NodeBuilderWithSinks::anchor() {}

StmtNodeBuilder::~StmtNodeBuilder() {
  if (EnclosingBldr)
    for (const auto I : Frontier)
      EnclosingBldr->addNodes(I);
}

void BranchNodeBuilder::anchor() {}

ExplodedNode *BranchNodeBuilder::generateNode(ProgramStateRef State,
                                              bool Branch,
                                              ExplodedNode *NodePred) {
  const CFGBlock *Dst = Branch ? DstT : DstF;

  if (!Dst)
    return nullptr;

  ProgramPoint Loc =
      BlockEdge(C.getBlock(), Dst, NodePred->getLocationContext());
  ExplodedNode *Succ = generateNodeImpl(Loc, State, NodePred);
  return Succ;
}

ExplodedNode *
IndirectGotoNodeBuilder::generateNode(const iterator &I,
                                      ProgramStateRef St,
                                      bool IsSink) {
  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}

ExplodedNode *
SwitchNodeBuilder::generateCaseStmtNode(const iterator &I,
                                        ProgramStateRef St) {
  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, false, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);
  if (!IsNew)
    return nullptr;

  Eng.WList->enqueue(Succ);
  return Succ;
}

ExplodedNode *
SwitchNodeBuilder::generateDefaultCaseNode(ProgramStateRef St,
                                           bool IsSink) {
  // Get the block for the default case.
  assert(Src->succ_rbegin() != Src->succ_rend());
  CFGBlock *DefaultBlock = *Src->succ_rbegin();

  // Basic correctness check for default blocks that are unreachable and not
  // caught by earlier stages.
  if (!DefaultBlock)
    return nullptr;

  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, DefaultBlock, Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}