//===- TypeErasedDataflowAnalysis.cpp -------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines type-erased base types and functions for building dataflow
// analyses that run over Control-Flow Graphs (CFGs).
//
//===----------------------------------------------------------------------===//

#include <deque>
#include <optional>
#include <system_error>
#include <utility>
#include <vector>

#include "clang/AST/ASTDumper.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/OperationKinds.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtVisitor.h"
#include "clang/Analysis/Analyses/PostOrderCFGView.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/FlowSensitive/DataflowEnvironment.h"
#include "clang/Analysis/FlowSensitive/DataflowLattice.h"
#include "clang/Analysis/FlowSensitive/DataflowWorklist.h"
#include "clang/Analysis/FlowSensitive/RecordOps.h"
#include "clang/Analysis/FlowSensitive/Transfer.h"
#include "clang/Analysis/FlowSensitive/TypeErasedDataflowAnalysis.h"
#include "clang/Analysis/FlowSensitive/Value.h"
#include "clang/Support/Compiler.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/Error.h"

#define DEBUG_TYPE "clang-dataflow"

namespace clang {
namespace dataflow {
class NoopLattice;
} // namespace dataflow
} // namespace clang

namespace llvm {
// This needs to be exported for ClangAnalysisFlowSensitiveTests so that
// any_cast uses the correct address of Any::TypeId from the clang shared
// library instead of creating one in the test executable when building with
// CLANG_LINK_CLANG_DYLIB.
template struct CLANG_EXPORT_TEMPLATE Any::TypeId<clang::dataflow::NoopLattice>;
} // namespace llvm

namespace clang {
namespace dataflow {

/// Returns the index of `Block` in the successors of `Pred`.
static int blockIndexInPredecessor(const CFGBlock &Pred,
                                   const CFGBlock &Block) {
  auto BlockPos = llvm::find_if(
      Pred.succs(), [&Block](const CFGBlock::AdjacentBlock &Succ) {
        return Succ && Succ->getBlockID() == Block.getBlockID();
      });
  return BlockPos - Pred.succ_begin();
}

// A "backedge" node is a block introduced in the CFG exclusively to indicate a
// loop backedge. They are exactly identified by the presence of a non-null
// pointer to the entry block of the loop condition. Note that this is not
// necessarily the block with the loop statement as terminator, because
// short-circuit operators will result in multiple blocks encoding the loop
// condition, only one of which will contain the loop statement as terminator.
static bool isBackedgeNode(const CFGBlock &B) {
  return B.getLoopTarget() != nullptr;
}

namespace {

/// Extracts the terminator's condition expression.
class TerminatorVisitor
    : public ConstStmtVisitor<TerminatorVisitor, const Expr *> {
public:
  TerminatorVisitor() = default;
  const Expr *VisitIfStmt(const IfStmt *S) { return S->getCond(); }
  const Expr *VisitWhileStmt(const WhileStmt *S) { return S->getCond(); }
  const Expr *VisitDoStmt(const DoStmt *S) { return S->getCond(); }
  const Expr *VisitForStmt(const ForStmt *S) { return S->getCond(); }
  const Expr *VisitCXXForRangeStmt(const CXXForRangeStmt *) {
    // Don't do anything special for CXXForRangeStmt, because the condition
    // (being implicitly generated) isn't visible from the loop body.
    return nullptr;
  }
  const Expr *VisitBinaryOperator(const BinaryOperator *S) {
    assert(S->getOpcode() == BO_LAnd || S->getOpcode() == BO_LOr);
    return S->getLHS();
  }
  const Expr *VisitConditionalOperator(const ConditionalOperator *S) {
    return S->getCond();
  }
};

/// Holds data structures required for running dataflow analysis.
struct AnalysisContext {
  AnalysisContext(const AdornedCFG &ACFG, TypeErasedDataflowAnalysis &Analysis,
                  const Environment &InitEnv,
                  llvm::ArrayRef<std::optional<TypeErasedDataflowAnalysisState>>
                      BlockStates)
      : ACFG(ACFG), Analysis(Analysis), InitEnv(InitEnv),
        Log(*InitEnv.getDataflowAnalysisContext().getOptions().Log),
        BlockStates(BlockStates) {
    Log.beginAnalysis(ACFG, Analysis);
  }
  ~AnalysisContext() { Log.endAnalysis(); }

  /// Contains the CFG being analyzed.
  const AdornedCFG &ACFG;
  /// The analysis to be run.
  TypeErasedDataflowAnalysis &Analysis;
  /// Initial state to start the analysis.
  const Environment &InitEnv;
  Logger &Log;
  /// Stores the state of a CFG block if it has been evaluated by the analysis.
  /// The indices correspond to the block IDs.
  llvm::ArrayRef<std::optional<TypeErasedDataflowAnalysisState>> BlockStates;
};

class PrettyStackTraceAnalysis : public llvm::PrettyStackTraceEntry {
public:
  PrettyStackTraceAnalysis(const AdornedCFG &ACFG, const char *Message)
      : ACFG(ACFG), Message(Message) {}

  void print(raw_ostream &OS) const override {
    OS << Message << "\n";
    OS << "Decl:\n";
    ACFG.getDecl().dump(OS);
    OS << "CFG:\n";
    ACFG.getCFG().print(OS, LangOptions(), false);
  }

private:
  const AdornedCFG &ACFG;
  const char *Message;
};

class PrettyStackTraceCFGElement : public llvm::PrettyStackTraceEntry {
public:
  PrettyStackTraceCFGElement(const CFGElement &Element, int BlockIdx,
                             int ElementIdx, const char *Message)
      : Element(Element), BlockIdx(BlockIdx), ElementIdx(ElementIdx),
        Message(Message) {}

  void print(raw_ostream &OS) const override {
    OS << Message << ": Element [B" << BlockIdx << "." << ElementIdx << "]\n";
    if (auto Stmt = Element.getAs<CFGStmt>()) {
      OS << "Stmt:\n";
      ASTDumper Dumper(OS, false);
      Dumper.Visit(Stmt->getStmt());
    }
  }

private:
  const CFGElement &Element;
  int BlockIdx;
  int ElementIdx;
  const char *Message;
};

// Builds a joined TypeErasedDataflowAnalysisState from 0 or more sources,
// each of which may be owned (built as part of the join) or external (a
// reference to an Environment that will outlive the builder).
// Avoids unnecessary copies of the environment.
class JoinedStateBuilder {
  AnalysisContext &AC;
  Environment::ExprJoinBehavior JoinBehavior;
  std::vector<const TypeErasedDataflowAnalysisState *> All;
  std::deque<TypeErasedDataflowAnalysisState> Owned;

  TypeErasedDataflowAnalysisState
  join(const TypeErasedDataflowAnalysisState &L,
       const TypeErasedDataflowAnalysisState &R) {
    return {AC.Analysis.joinTypeErased(L.Lattice, R.Lattice),
            Environment::join(L.Env, R.Env, AC.Analysis, JoinBehavior)};
  }

public:
  JoinedStateBuilder(AnalysisContext &AC,
                     Environment::ExprJoinBehavior JoinBehavior)
      : AC(AC), JoinBehavior(JoinBehavior) {}

  void addOwned(TypeErasedDataflowAnalysisState State) {
    Owned.push_back(std::move(State));
    All.push_back(&Owned.back());
  }
  void addUnowned(const TypeErasedDataflowAnalysisState &State) {
    All.push_back(&State);
  }
  TypeErasedDataflowAnalysisState take() && {
    if (All.empty())
      // FIXME: Consider passing `Block` to Analysis.typeErasedInitialElement
      // to enable building analyses like computation of dominators that
      // initialize the state of each basic block differently.
      return {AC.Analysis.typeErasedInitialElement(), AC.InitEnv.fork()};
    if (All.size() == 1)
      // Join the environment with itself so that we discard expression state if
      // desired.
      // FIXME: We could consider writing special-case code for this that only
      // does the discarding, but it's not clear if this is worth it.
      return {All[0]->Lattice, Environment::join(All[0]->Env, All[0]->Env,
                                                 AC.Analysis, JoinBehavior)};

    auto Result = join(*All[0], *All[1]);
    for (unsigned I = 2; I < All.size(); ++I)
      Result = join(Result, *All[I]);
    return Result;
  }
};

} // namespace
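
/// Returns the condition expression of `TerminatorStmt` as extracted by
/// `TerminatorVisitor`, or null if there is no terminator statement or it has
/// no condition that is relevant to the analysis.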
static const Expr *getTerminatorCondition(const Stmt *TerminatorStmt) {
  return TerminatorStmt == nullptr ? nullptr
                                   : TerminatorVisitor().Visit(TerminatorStmt);
}

/// Computes the input state for a given basic block by joining the output
/// states of its predecessors.
///
/// Requirements:
///
///   All predecessors of `Block` except those with loop back edges must have
///   already been transferred. States in `AC.BlockStates` that are set to
///   `std::nullopt` represent basic blocks that are not evaluated yet.
static TypeErasedDataflowAnalysisState
computeBlockInputState(const CFGBlock &Block, AnalysisContext &AC) {
  std::vector<const CFGBlock *> Preds(Block.pred_begin(), Block.pred_end());
  if (Block.getTerminator().isTemporaryDtorsBranch()) {
    // This handles a special case where the code that produced the CFG includes
    // a conditional operator with a branch that constructs a temporary and
    // calls a destructor annotated as noreturn. The CFG models this as follows:
    //
    // B1 (contains the condition of the conditional operator) - succs: B2, B3
    // B2 (contains code that does not call a noreturn destructor) - succs: B4
    // B3 (contains code that calls a noreturn destructor) - succs: B4
    // B4 (has temporary destructor terminator) - succs: B5, B6
    // B5 (noreturn block that is associated with the noreturn destructor call)
    // B6 (contains code that follows the conditional operator statement)
    //
    // The first successor (B5 above) of a basic block with a temporary
    // destructor terminator (B4 above) is the block that evaluates the
    // destructor. If that block has a noreturn element then the predecessor
    // block that constructed the temporary object (B3 above) is effectively a
    // noreturn block and its state should not be used as input for the state
    // of the block that has a temporary destructor terminator (B4 above). This
    // holds regardless of which branch of the ternary operator calls the
    // noreturn destructor. However, it doesn't handle cases where a nested
    // ternary operator includes a branch that contains a noreturn destructor
    // call.
    //
    // See `NoreturnDestructorTest` for concrete examples.
    if (Block.succ_begin()->getReachableBlock() != nullptr &&
        Block.succ_begin()->getReachableBlock()->hasNoReturnElement()) {
      const CFGBlock *StmtBlock = nullptr;
      if (const Stmt *Terminator = Block.getTerminatorStmt())
        StmtBlock = AC.ACFG.blockForStmt(*Terminator);
      assert(StmtBlock != nullptr);
      llvm::erase(Preds, StmtBlock);
    }
  }

  // If any of the predecessor blocks contains an expression consumed in a
  // different block, we need to keep expression state.
  // Note that in this case, we keep expression state for all predecessors,
  // rather than only those predecessors that actually contain an expression
  // consumed in a different block. While this is potentially suboptimal, it's
  // actually likely, if we have control flow within a full expression, that
  // all predecessors have expression state consumed in a different block.
  Environment::ExprJoinBehavior JoinBehavior = Environment::DiscardExprState;
  for (const CFGBlock *Pred : Preds) {
    if (Pred && AC.ACFG.containsExprConsumedInDifferentBlock(*Pred)) {
      JoinBehavior = Environment::KeepExprState;
      break;
    }
  }

  JoinedStateBuilder Builder(AC, JoinBehavior);
  for (const CFGBlock *Pred : Preds) {
    // Skip if the `Block` is unreachable or control flow cannot get past it.
    if (!Pred || Pred->hasNoReturnElement())
      continue;

    // Skip if `Pred` was not evaluated yet. This could happen if `Pred` has a
    // loop back edge to `Block`.
    const std::optional<TypeErasedDataflowAnalysisState> &MaybePredState =
        AC.BlockStates[Pred->getBlockID()];
    if (!MaybePredState)
      continue;

    const TypeErasedDataflowAnalysisState &PredState = *MaybePredState;
    const Expr *Cond = getTerminatorCondition(Pred->getTerminatorStmt());
    if (Cond == nullptr) {
      Builder.addUnowned(PredState);
      continue;
    }
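
    // In Clang's CFG, the first successor of a block with a conditional
    // terminator is the branch taken when the condition evaluates to true, so
    // `Block` being successor 0 of `Pred` means we reached it via the "true"
    // branch.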
    bool BranchVal = blockIndexInPredecessor(*Pred, Block) == 0;

    // `transferBranch` may need to mutate the environment to describe the
    // dynamic effect of the terminator for a given branch. Copy now.
    TypeErasedDataflowAnalysisState Copy = MaybePredState->fork();
    if (AC.Analysis.builtinOptions()) {
      auto *CondVal = Copy.Env.get<BoolValue>(*Cond);
      // In transferCFGBlock(), we ensure that we always have a `Value`
      // for the terminator condition, so assert this. We consciously
      // assert ourselves instead of asserting via `cast()` so that we get
      // a more meaningful line number if the assertion fails.
      assert(CondVal != nullptr);
      BoolValue *AssertedVal =
          BranchVal ? CondVal : &Copy.Env.makeNot(*CondVal);
      Copy.Env.assume(AssertedVal->formula());
    }
    AC.Analysis.transferBranchTypeErased(BranchVal, Cond, Copy.Lattice,
                                         Copy.Env);
    Builder.addOwned(std::move(Copy));
  }
  return std::move(Builder).take();
}

/// Built-in transfer function for `CFGStmt`.
static void
builtinTransferStatement(unsigned CurBlockID, const CFGStmt &Elt,
                         TypeErasedDataflowAnalysisState &InputState,
                         AnalysisContext &AC) {
  const Stmt *S = Elt.getStmt();
  assert(S != nullptr);
  transfer(StmtToEnvMap(AC.ACFG, AC.BlockStates, CurBlockID, InputState), *S,
           InputState.Env, AC.Analysis);
}

/// Built-in transfer function for `CFGInitializer`.
static void
builtinTransferInitializer(const CFGInitializer &Elt,
                           TypeErasedDataflowAnalysisState &InputState) {
  const CXXCtorInitializer *Init = Elt.getInitializer();
  assert(Init != nullptr);

  auto &Env = InputState.Env;
  auto &ThisLoc = *Env.getThisPointeeStorageLocation();

  if (!Init->isAnyMemberInitializer())
    // FIXME: Handle base initialization
    return;

  auto *InitExpr = Init->getInit();
  assert(InitExpr != nullptr);

  const FieldDecl *Member = nullptr;
  RecordStorageLocation *ParentLoc = &ThisLoc;
  StorageLocation *MemberLoc = nullptr;
  if (Init->isMemberInitializer()) {
    Member = Init->getMember();
    MemberLoc = ThisLoc.getChild(*Member);
  } else {
    IndirectFieldDecl *IndirectField = Init->getIndirectMember();
    assert(IndirectField != nullptr);
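    // An indirect member initializer initializes a field of an anonymous
    // struct or union. Walk the chain of indirect fields down to the innermost
    // named field, resolving its storage location relative to `this`.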
    MemberLoc = &ThisLoc;
    for (const auto *I : IndirectField->chain()) {
      Member = cast<FieldDecl>(I);
      ParentLoc = cast<RecordStorageLocation>(MemberLoc);
      MemberLoc = ParentLoc->getChild(*Member);
    }
  }
  assert(Member != nullptr);

  // FIXME: Instead of these case distinctions, we would ideally want to be able
  // to simply use `Environment::createObject()` here, the same way that we do
  // this in `TransferVisitor::VisitInitListExpr()`. However, this would require
  // us to be able to build a list of fields that we then use to initialize a
  // `RecordStorageLocation` -- and the problem is that, when we get here, the
  // `RecordStorageLocation` already exists. We should explore if there's
  // anything that we can do to change this.
  if (Member->getType()->isReferenceType()) {
    auto *InitExprLoc = Env.getStorageLocation(*InitExpr);
    if (InitExprLoc == nullptr)
      return;

    ParentLoc->setChild(*Member, InitExprLoc);
    // Record-type initializers construct themselves directly into the result
    // object, so there is no need to handle them here.
  } else if (!Member->getType()->isRecordType()) {
    assert(MemberLoc != nullptr);
    if (auto *InitExprVal = Env.getValue(*InitExpr))
      Env.setValue(*MemberLoc, *InitExprVal);
  }
}
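
/// Built-in transfer function for a single `CFGElement`: dispatches on the
/// element kind and applies the matching built-in transfer, if any.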
static void builtinTransfer(unsigned CurBlockID, const CFGElement &Elt,
                            TypeErasedDataflowAnalysisState &State,
                            AnalysisContext &AC) {
  switch (Elt.getKind()) {
  case CFGElement::Statement:
    builtinTransferStatement(CurBlockID, Elt.castAs<CFGStmt>(), State, AC);
    break;
  case CFGElement::Initializer:
    builtinTransferInitializer(Elt.castAs<CFGInitializer>(), State);
    break;
  case CFGElement::LifetimeEnds:
    // Removing declarations when their lifetime ends serves two purposes:
    // - Eliminate unnecessary clutter from `Environment::DeclToLoc`
    // - Allow us to assert that, when joining two `Environment`s, the two
    //   `DeclToLoc` maps never contain entries that map the same declaration to
    //   different storage locations.
    if (const ValueDecl *VD = Elt.castAs<CFGLifetimeEnds>().getVarDecl())
      State.Env.removeDecl(*VD);
    break;
  default:
    // FIXME: Evaluate other kinds of `CFGElement`
    break;
  }
}

/// Transfers `State` by evaluating each element in the `Block` based on the
/// `AC.Analysis` specified.
///
/// Built-in transfer functions (if the option for `ApplyBuiltinTransfer` is set
/// by the analysis) will be applied to the element before evaluation by the
/// user-specified analysis.
/// The callbacks in `PostAnalysisCallbacks` (if provided) will be applied to
/// the element before and after evaluation by the user-specified analysis.
static TypeErasedDataflowAnalysisState
transferCFGBlock(const CFGBlock &Block, AnalysisContext &AC,
                 const CFGEltCallbacksTypeErased &PostAnalysisCallbacks = {}) {
  AC.Log.enterBlock(Block, PostAnalysisCallbacks.Before != nullptr ||
                               PostAnalysisCallbacks.After != nullptr);
  auto State = computeBlockInputState(Block, AC);
  AC.Log.recordState(State);
  int ElementIdx = 1;
  for (const auto &Element : Block) {
    PrettyStackTraceCFGElement CrashInfo(Element, Block.getBlockID(),
                                         ElementIdx++, "transferCFGBlock");

    AC.Log.enterElement(Element);

    if (PostAnalysisCallbacks.Before) {
      PostAnalysisCallbacks.Before(Element, State);
    }

    // Built-in analysis
    if (AC.Analysis.builtinOptions()) {
      builtinTransfer(Block.getBlockID(), Element, State, AC);
    }

    // User-provided analysis
    AC.Analysis.transferTypeErased(Element, State.Lattice, State.Env);

    if (PostAnalysisCallbacks.After) {
      PostAnalysisCallbacks.After(Element, State);
    }

    AC.Log.recordState(State);
  }

  // If we have a terminator, evaluate its condition.
  // This `Expr` may not appear as a `CFGElement` anywhere else, and it's
  // important that we evaluate it here (rather than while processing the
  // terminator) so that we put the corresponding value in the right
  // environment.
  if (const Expr *TerminatorCond =
          dyn_cast_or_null<Expr>(Block.getTerminatorCondition())) {
    if (State.Env.getValue(*TerminatorCond) == nullptr)
      // FIXME: This only runs the builtin transfer, not the analysis-specific
      // transfer. Fixing this isn't trivial, as the analysis-specific transfer
      // takes a `CFGElement` as input, but some expressions only show up as a
      // terminator condition, but not as a `CFGElement`. The condition of an if
      // statement is one such example.
      transfer(StmtToEnvMap(AC.ACFG, AC.BlockStates, Block.getBlockID(), State),
               *TerminatorCond, State.Env, AC.Analysis);

    // If the transfer function didn't produce a value, create an atom so that
    // we have *some* value for the condition expression. This ensures that
    // when we extend the flow condition, it actually changes.
    if (State.Env.getValue(*TerminatorCond) == nullptr)
      State.Env.setValue(*TerminatorCond, State.Env.makeAtomicBoolValue());
    AC.Log.recordState(State);
  }

  return State;
}
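
// Note: most clients do not call this type-erased entry point directly; they
// go through the templated `runDataflowAnalysis` wrapper declared in
// DataflowAnalysis.h, which forwards here. As a rough, illustrative sketch
// (`MyAnalysis`, `FuncDecl`, and `DACtx` are placeholders, not names defined
// in this file):
//
//   llvm::Expected<AdornedCFG> ACFG = AdornedCFG::build(FuncDecl);
//   Environment InitEnv(DACtx, FuncDecl);
//   MyAnalysis Analysis(...);  // derived from TypeErasedDataflowAnalysis
//   auto BlockStates = runTypeErasedDataflowAnalysis(
//       *ACFG, Analysis, InitEnv, /*PostAnalysisCallbacks=*/{},
//       /*MaxBlockVisits=*/...);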
llvm::Expected<std::vector<std::optional<TypeErasedDataflowAnalysisState>>>
runTypeErasedDataflowAnalysis(
    const AdornedCFG &ACFG, TypeErasedDataflowAnalysis &Analysis,
    const Environment &InitEnv,
    const CFGEltCallbacksTypeErased &PostAnalysisCallbacks,
    std::int32_t MaxBlockVisits) {
  PrettyStackTraceAnalysis CrashInfo(ACFG, "runTypeErasedDataflowAnalysis");

  std::optional<Environment> MaybeStartingEnv;
  if (InitEnv.callStackSize() == 0) {
    MaybeStartingEnv = InitEnv.fork();
    MaybeStartingEnv->initialize();
  }
  const Environment &StartingEnv =
      MaybeStartingEnv ? *MaybeStartingEnv : InitEnv;

  const clang::CFG &CFG = ACFG.getCFG();
  PostOrderCFGView POV(&CFG);
  ForwardDataflowWorklist Worklist(CFG, &POV);

  std::vector<std::optional<TypeErasedDataflowAnalysisState>> BlockStates(
      CFG.size());

  // The entry basic block doesn't contain statements so it can be skipped.
  const CFGBlock &Entry = CFG.getEntry();
  BlockStates[Entry.getBlockID()] = {Analysis.typeErasedInitialElement(),
                                     StartingEnv.fork()};
  Worklist.enqueueSuccessors(&Entry);

  AnalysisContext AC(ACFG, Analysis, StartingEnv, BlockStates);
  std::int32_t BlockVisits = 0;
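  // Iterate over the worklist until a fixpoint is reached: each dequeued block
  // is transferred, and its successors are enqueued again unless the block's
  // state has converged. On loop backedges, the old and new states are widened
  // rather than compared for equality.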
  while (const CFGBlock *Block = Worklist.dequeue()) {
    LLVM_DEBUG(llvm::dbgs()
               << "Processing Block " << Block->getBlockID() << "\n");
    if (++BlockVisits > MaxBlockVisits) {
      return llvm::createStringError(std::errc::timed_out,
                                     "maximum number of blocks processed");
    }

    const std::optional<TypeErasedDataflowAnalysisState> &OldBlockState =
        BlockStates[Block->getBlockID()];
    TypeErasedDataflowAnalysisState NewBlockState =
        transferCFGBlock(*Block, AC);
    LLVM_DEBUG({
      llvm::errs() << "New Env:\n";
      NewBlockState.Env.dump();
    });

    if (OldBlockState) {
      LLVM_DEBUG({
        llvm::errs() << "Old Env:\n";
        OldBlockState->Env.dump();
      });
      if (isBackedgeNode(*Block)) {
        LatticeJoinEffect Effect1 = Analysis.widenTypeErased(
            NewBlockState.Lattice, OldBlockState->Lattice);
        LatticeJoinEffect Effect2 =
            NewBlockState.Env.widen(OldBlockState->Env, Analysis);
        if (Effect1 == LatticeJoinEffect::Unchanged &&
            Effect2 == LatticeJoinEffect::Unchanged) {
          // The state of `Block` didn't change from widening so there's no
          // need to revisit its successors.
          AC.Log.blockConverged();
          continue;
        }
      } else if (Analysis.isEqualTypeErased(OldBlockState->Lattice,
                                            NewBlockState.Lattice) &&
                 OldBlockState->Env.equivalentTo(NewBlockState.Env, Analysis)) {
        // The state of `Block` didn't change after transfer so there's no need
        // to revisit its successors.
        AC.Log.blockConverged();
        continue;
      }
    }

    BlockStates[Block->getBlockID()] = std::move(NewBlockState);

    // Do not add unreachable successor blocks to `Worklist`.
    if (Block->hasNoReturnElement())
      continue;

    Worklist.enqueueSuccessors(Block);
  }
  // FIXME: Consider evaluating unreachable basic blocks (those that have a
  // state set to `std::nullopt` at this point) to also analyze dead code.

  if (PostAnalysisCallbacks.Before || PostAnalysisCallbacks.After) {
    for (const CFGBlock *Block : ACFG.getCFG()) {
      // Skip blocks that were not evaluated.
      if (!BlockStates[Block->getBlockID()])
        continue;
      transferCFGBlock(*Block, AC, PostAnalysisCallbacks);
    }
  }

  return std::move(BlockStates);
}

} // namespace dataflow
} // namespace clang