[Flang] remove whole-archive option for AIX linker (#76039)
[llvm-project.git] / clang / lib / Analysis / FlowSensitive / TypeErasedDataflowAnalysis.cpp
blobfaf83a8920d4ead30bf62922df79fd6c6835c000
1 //===- TypeErasedDataflowAnalysis.cpp -------------------------------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file defines type-erased base types and functions for building dataflow
10 // analyses that run over Control-Flow Graphs (CFGs).
12 //===----------------------------------------------------------------------===//
14 #include <algorithm>
15 #include <optional>
16 #include <system_error>
17 #include <utility>
18 #include <vector>
20 #include "clang/AST/ASTDumper.h"
21 #include "clang/AST/DeclCXX.h"
22 #include "clang/AST/OperationKinds.h"
23 #include "clang/AST/StmtCXX.h"
24 #include "clang/AST/StmtVisitor.h"
25 #include "clang/Analysis/Analyses/PostOrderCFGView.h"
26 #include "clang/Analysis/CFG.h"
27 #include "clang/Analysis/FlowSensitive/DataflowEnvironment.h"
28 #include "clang/Analysis/FlowSensitive/DataflowLattice.h"
29 #include "clang/Analysis/FlowSensitive/DataflowWorklist.h"
30 #include "clang/Analysis/FlowSensitive/RecordOps.h"
31 #include "clang/Analysis/FlowSensitive/Transfer.h"
32 #include "clang/Analysis/FlowSensitive/TypeErasedDataflowAnalysis.h"
33 #include "clang/Analysis/FlowSensitive/Value.h"
34 #include "llvm/ADT/ArrayRef.h"
35 #include "llvm/ADT/STLExtras.h"
36 #include "llvm/ADT/SmallBitVector.h"
37 #include "llvm/Support/Debug.h"
38 #include "llvm/Support/Error.h"
40 #define DEBUG_TYPE "clang-dataflow"
42 namespace clang {
43 namespace dataflow {
45 /// Returns the index of `Block` in the successors of `Pred`.
46 static int blockIndexInPredecessor(const CFGBlock &Pred,
47 const CFGBlock &Block) {
48 auto BlockPos = llvm::find_if(
49 Pred.succs(), [&Block](const CFGBlock::AdjacentBlock &Succ) {
50 return Succ && Succ->getBlockID() == Block.getBlockID();
51 });
52 return BlockPos - Pred.succ_begin();
55 // A "backedge" node is a block introduced in the CFG exclusively to indicate a
56 // loop backedge. They are exactly identified by the presence of a non-null
57 // pointer to the entry block of the loop condition. Note that this is not
58 // necessarily the block with the loop statement as terminator, because
59 // short-circuit operators will result in multiple blocks encoding the loop
60 // condition, only one of which will contain the loop statement as terminator.
61 static bool isBackedgeNode(const CFGBlock &B) {
62 return B.getLoopTarget() != nullptr;
65 namespace {
67 // The return type of the visit functions in TerminatorVisitor. The first
68 // element represents the terminator expression (that is the conditional
69 // expression in case of a path split in the CFG). The second element
70 // represents whether the condition was true or false.
71 using TerminatorVisitorRetTy = std::pair<const Expr *, bool>;
73 /// Extends the flow condition of an environment based on a terminator
74 /// statement.
75 class TerminatorVisitor
76 : public ConstStmtVisitor<TerminatorVisitor, TerminatorVisitorRetTy> {
77 public:
78 TerminatorVisitor(const StmtToEnvMap &StmtToEnv, Environment &Env,
79 int BlockSuccIdx)
80 : StmtToEnv(StmtToEnv), Env(Env), BlockSuccIdx(BlockSuccIdx) {}
82 TerminatorVisitorRetTy VisitIfStmt(const IfStmt *S) {
83 auto *Cond = S->getCond();
84 assert(Cond != nullptr);
85 return extendFlowCondition(*Cond);
88 TerminatorVisitorRetTy VisitWhileStmt(const WhileStmt *S) {
89 auto *Cond = S->getCond();
90 assert(Cond != nullptr);
91 return extendFlowCondition(*Cond);
94 TerminatorVisitorRetTy VisitDoStmt(const DoStmt *S) {
95 auto *Cond = S->getCond();
96 assert(Cond != nullptr);
97 return extendFlowCondition(*Cond);
100 TerminatorVisitorRetTy VisitForStmt(const ForStmt *S) {
101 auto *Cond = S->getCond();
102 if (Cond != nullptr)
103 return extendFlowCondition(*Cond);
104 return {nullptr, false};
107 TerminatorVisitorRetTy VisitCXXForRangeStmt(const CXXForRangeStmt *) {
108 // Don't do anything special for CXXForRangeStmt, because the condition
109 // (being implicitly generated) isn't visible from the loop body.
110 return {nullptr, false};
113 TerminatorVisitorRetTy VisitBinaryOperator(const BinaryOperator *S) {
114 assert(S->getOpcode() == BO_LAnd || S->getOpcode() == BO_LOr);
115 auto *LHS = S->getLHS();
116 assert(LHS != nullptr);
117 return extendFlowCondition(*LHS);
120 TerminatorVisitorRetTy
121 VisitConditionalOperator(const ConditionalOperator *S) {
122 auto *Cond = S->getCond();
123 assert(Cond != nullptr);
124 return extendFlowCondition(*Cond);
127 private:
128 TerminatorVisitorRetTy extendFlowCondition(const Expr &Cond) {
129 // The terminator sub-expression might not be evaluated.
130 if (Env.getValue(Cond) == nullptr)
131 transfer(StmtToEnv, Cond, Env);
133 auto *Val = Env.get<BoolValue>(Cond);
134 // Value merging depends on flow conditions from different environments
135 // being mutually exclusive -- that is, they cannot both be true in their
136 // entirety (even if they may share some clauses). So, we need *some* value
137 // for the condition expression, even if just an atom.
138 if (Val == nullptr) {
139 Val = &Env.makeAtomicBoolValue();
140 Env.setValue(Cond, *Val);
143 bool ConditionValue = true;
144 // The condition must be inverted for the successor that encompasses the
145 // "else" branch, if such exists.
146 if (BlockSuccIdx == 1) {
147 Val = &Env.makeNot(*Val);
148 ConditionValue = false;
151 Env.assume(Val->formula());
152 return {&Cond, ConditionValue};
155 const StmtToEnvMap &StmtToEnv;
156 Environment &Env;
157 int BlockSuccIdx;
/// Holds data structures required for running dataflow analysis.
struct AnalysisContext {
  /// All references must outlive this context. Emits a `beginAnalysis` event
  /// to the logger configured on `InitEnv`'s `DataflowAnalysisContext`; the
  /// matching `endAnalysis` event is emitted by the destructor.
  AnalysisContext(const ControlFlowContext &CFCtx,
                  TypeErasedDataflowAnalysis &Analysis,
                  const Environment &InitEnv,
                  llvm::ArrayRef<std::optional<TypeErasedDataflowAnalysisState>>
                      BlockStates)
      : CFCtx(CFCtx), Analysis(Analysis), InitEnv(InitEnv),
        Log(*InitEnv.getDataflowAnalysisContext().getOptions().Log),
        BlockStates(BlockStates) {
    Log.beginAnalysis(CFCtx, Analysis);
  }
  ~AnalysisContext() { Log.endAnalysis(); }

  /// Contains the CFG being analyzed.
  const ControlFlowContext &CFCtx;
  /// The analysis to be run.
  TypeErasedDataflowAnalysis &Analysis;
  /// Initial state to start the analysis.
  const Environment &InitEnv;
  /// Logger taken from the analysis options; receives begin/end and
  /// per-block/per-element trace events.
  Logger &Log;
  /// Stores the state of a CFG block if it has been evaluated by the analysis.
  /// The indices correspond to the block IDs.
  llvm::ArrayRef<std::optional<TypeErasedDataflowAnalysisState>> BlockStates;
};
186 class PrettyStackTraceAnalysis : public llvm::PrettyStackTraceEntry {
187 public:
188 PrettyStackTraceAnalysis(const ControlFlowContext &CFCtx, const char *Message)
189 : CFCtx(CFCtx), Message(Message) {}
191 void print(raw_ostream &OS) const override {
192 OS << Message << "\n";
193 OS << "Decl:\n";
194 CFCtx.getDecl().dump(OS);
195 OS << "CFG:\n";
196 CFCtx.getCFG().print(OS, LangOptions(), false);
199 private:
200 const ControlFlowContext &CFCtx;
201 const char *Message;
204 class PrettyStackTraceCFGElement : public llvm::PrettyStackTraceEntry {
205 public:
206 PrettyStackTraceCFGElement(const CFGElement &Element, int BlockIdx,
207 int ElementIdx, const char *Message)
208 : Element(Element), BlockIdx(BlockIdx), ElementIdx(ElementIdx),
209 Message(Message) {}
211 void print(raw_ostream &OS) const override {
212 OS << Message << ": Element [B" << BlockIdx << "." << ElementIdx << "]\n";
213 if (auto Stmt = Element.getAs<CFGStmt>()) {
214 OS << "Stmt:\n";
215 ASTDumper Dumper(OS, false);
216 Dumper.Visit(Stmt->getStmt());
220 private:
221 const CFGElement &Element;
222 int BlockIdx;
223 int ElementIdx;
224 const char *Message;
227 // Builds a joined TypeErasedDataflowAnalysisState from 0 or more sources,
228 // each of which may be owned (built as part of the join) or external (a
229 // reference to an Environment that will outlive the builder).
230 // Avoids unneccesary copies of the environment.
231 class JoinedStateBuilder {
232 AnalysisContext &AC;
233 std::vector<const TypeErasedDataflowAnalysisState *> All;
234 std::deque<TypeErasedDataflowAnalysisState> Owned;
236 TypeErasedDataflowAnalysisState
237 join(const TypeErasedDataflowAnalysisState &L,
238 const TypeErasedDataflowAnalysisState &R) {
239 return {AC.Analysis.joinTypeErased(L.Lattice, R.Lattice),
240 Environment::join(L.Env, R.Env, AC.Analysis)};
243 public:
244 JoinedStateBuilder(AnalysisContext &AC) : AC(AC) {}
246 void addOwned(TypeErasedDataflowAnalysisState State) {
247 Owned.push_back(std::move(State));
248 All.push_back(&Owned.back());
250 void addUnowned(const TypeErasedDataflowAnalysisState &State) {
251 All.push_back(&State);
253 TypeErasedDataflowAnalysisState take() && {
254 if (All.empty())
255 // FIXME: Consider passing `Block` to Analysis.typeErasedInitialElement
256 // to enable building analyses like computation of dominators that
257 // initialize the state of each basic block differently.
258 return {AC.Analysis.typeErasedInitialElement(), AC.InitEnv.fork()};
259 if (All.size() == 1)
260 // Join the environment with itself so that we discard the entries from
261 // `ExprToLoc` and `ExprToVal`.
262 // FIXME: We could consider writing special-case code for this that only
263 // does the discarding, but it's not clear if this is worth it.
264 return {All[0]->Lattice,
265 Environment::join(All[0]->Env, All[0]->Env, AC.Analysis)};
267 auto Result = join(*All[0], *All[1]);
268 for (unsigned I = 2; I < All.size(); ++I)
269 Result = join(Result, *All[I]);
270 return Result;
274 } // namespace
/// Computes the input state for a given basic block by joining the output
/// states of its predecessors.
///
/// Requirements:
///
///  All predecessors of `Block` except those with loop back edges must have
///  already been transferred. States in `AC.BlockStates` that are set to
///  `std::nullopt` represent basic blocks that are not evaluated yet.
static TypeErasedDataflowAnalysisState
computeBlockInputState(const CFGBlock &Block, AnalysisContext &AC) {
  std::vector<const CFGBlock *> Preds(Block.pred_begin(), Block.pred_end());
  if (Block.getTerminator().isTemporaryDtorsBranch()) {
    // This handles a special case where the code that produced the CFG includes
    // a conditional operator with a branch that constructs a temporary and
    // calls a destructor annotated as noreturn. The CFG models this as follows:
    //
    // B1 (contains the condition of the conditional operator) - succs: B2, B3
    // B2 (contains code that does not call a noreturn destructor) - succs: B4
    // B3 (contains code that calls a noreturn destructor) - succs: B4
    // B4 (has temporary destructor terminator) - succs: B5, B6
    // B5 (noreturn block that is associated with the noreturn destructor call)
    // B6 (contains code that follows the conditional operator statement)
    //
    // The first successor (B5 above) of a basic block with a temporary
    // destructor terminator (B4 above) is the block that evaluates the
    // destructor. If that block has a noreturn element then the predecessor
    // block that constructed the temporary object (B3 above) is effectively a
    // noreturn block and its state should not be used as input for the state
    // of the block that has a temporary destructor terminator (B4 above). This
    // holds regardless of which branch of the ternary operator calls the
    // noreturn destructor. However, it doesn't handle cases where a nested
    // ternary operator includes a branch that contains a noreturn destructor
    // call.
    //
    // See `NoreturnDestructorTest` for concrete examples.
    if (Block.succ_begin()->getReachableBlock() != nullptr &&
        Block.succ_begin()->getReachableBlock()->hasNoReturnElement()) {
      // Look up the block that evaluated the terminator's condition and drop
      // it from the predecessor list, since control cannot actually reach
      // `Block` from there.
      auto &StmtToBlock = AC.CFCtx.getStmtToBlock();
      auto StmtBlock = StmtToBlock.find(Block.getTerminatorStmt());
      assert(StmtBlock != StmtToBlock.end());
      llvm::erase(Preds, StmtBlock->getSecond());
    }
  }

  JoinedStateBuilder Builder(AC);
  for (const CFGBlock *Pred : Preds) {
    // Skip if the `Block` is unreachable or control flow cannot get past it.
    if (!Pred || Pred->hasNoReturnElement())
      continue;

    // Skip if `Pred` was not evaluated yet. This could happen if `Pred` has a
    // loop back edge to `Block`.
    const std::optional<TypeErasedDataflowAnalysisState> &MaybePredState =
        AC.BlockStates[Pred->getBlockID()];
    if (!MaybePredState)
      continue;

    if (AC.Analysis.builtinOptions()) {
      if (const Stmt *PredTerminatorStmt = Pred->getTerminatorStmt()) {
        // We have a terminator: we need to mutate an environment to describe
        // when the terminator is taken. Copy now.
        TypeErasedDataflowAnalysisState Copy = MaybePredState->fork();

        // Extend the copy's flow condition with the constraint implied by
        // taking the edge from `Pred` to `Block`.
        const StmtToEnvMap StmtToEnv(AC.CFCtx, AC.BlockStates);
        auto [Cond, CondValue] =
            TerminatorVisitor(StmtToEnv, Copy.Env,
                              blockIndexInPredecessor(*Pred, Block))
                .Visit(PredTerminatorStmt);
        if (Cond != nullptr)
          // FIXME: Call transferBranchTypeErased even if BuiltinTransferOpts
          // are not set.
          AC.Analysis.transferBranchTypeErased(CondValue, Cond, Copy.Lattice,
                                               Copy.Env);
        Builder.addOwned(std::move(Copy));
        continue;
      }
    }
    // No terminator to model: the predecessor's state can be joined as-is,
    // without copying.
    Builder.addUnowned(*MaybePredState);
  }
  return std::move(Builder).take();
}
357 /// Built-in transfer function for `CFGStmt`.
358 static void
359 builtinTransferStatement(const CFGStmt &Elt,
360 TypeErasedDataflowAnalysisState &InputState,
361 AnalysisContext &AC) {
362 const Stmt *S = Elt.getStmt();
363 assert(S != nullptr);
364 transfer(StmtToEnvMap(AC.CFCtx, AC.BlockStates), *S, InputState.Env);
/// Built-in transfer function for `CFGInitializer`: models the effect of a
/// constructor member initializer on the object pointed to by `this`.
static void
builtinTransferInitializer(const CFGInitializer &Elt,
                           TypeErasedDataflowAnalysisState &InputState) {
  const CXXCtorInitializer *Init = Elt.getInitializer();
  assert(Init != nullptr);

  auto &Env = InputState.Env;
  auto &ThisLoc = *Env.getThisPointeeStorageLocation();

  if (!Init->isAnyMemberInitializer())
    // FIXME: Handle base initialization
    return;

  auto *InitExpr = Init->getInit();
  assert(InitExpr != nullptr);

  // Resolve the field being initialized and its storage location. For a
  // direct member initializer this is a child of `ThisLoc`; for an indirect
  // member (a field of an anonymous struct/union) we walk the declaration
  // chain, descending one record level per link, so that `Member`/`MemberLoc`
  // end up referring to the innermost field and `ParentLoc` to its immediate
  // enclosing record.
  const FieldDecl *Member = nullptr;
  RecordStorageLocation *ParentLoc = &ThisLoc;
  StorageLocation *MemberLoc = nullptr;
  if (Init->isMemberInitializer()) {
    Member = Init->getMember();
    MemberLoc = ThisLoc.getChild(*Member);
  } else {
    IndirectFieldDecl *IndirectField = Init->getIndirectMember();
    assert(IndirectField != nullptr);
    MemberLoc = &ThisLoc;
    for (const auto *I : IndirectField->chain()) {
      Member = cast<FieldDecl>(I);
      ParentLoc = cast<RecordStorageLocation>(MemberLoc);
      MemberLoc = ParentLoc->getChild(*Member);
    }
  }
  assert(Member != nullptr);
  assert(MemberLoc != nullptr);

  // FIXME: Instead of these case distinctions, we would ideally want to be able
  // to simply use `Environment::createObject()` here, the same way that we do
  // this in `TransferVisitor::VisitInitListExpr()`. However, this would require
  // us to be able to build a list of fields that we then use to initialize an
  // `RecordStorageLocation` -- and the problem is that, when we get here,
  // the `RecordStorageLocation` already exists. We should explore if there's
  // anything that we can do to change this.
  if (Member->getType()->isReferenceType()) {
    // Reference members are modeled by pointing the parent's child slot at
    // the initializer's storage location. If the initializer has no location
    // (presumably an unmodeled expression), leave the member unset.
    auto *InitExprLoc = Env.getStorageLocation(*InitExpr);
    if (InitExprLoc == nullptr)
      return;

    ParentLoc->setChild(*Member, InitExprLoc);
  } else if (auto *InitExprVal = Env.getValue(*InitExpr)) {
    if (Member->getType()->isRecordType()) {
      auto *InitValStruct = cast<RecordValue>(InitExprVal);
      // FIXME: Rather than performing a copy here, we should really be
      // initializing the field in place. This would require us to propagate the
      // storage location of the field to the AST node that creates the
      // `RecordValue`.
      copyRecord(InitValStruct->getLoc(),
                 *cast<RecordStorageLocation>(MemberLoc), Env);
    } else {
      Env.setValue(*MemberLoc, *InitExprVal);
    }
  }
}
431 static void builtinTransfer(const CFGElement &Elt,
432 TypeErasedDataflowAnalysisState &State,
433 AnalysisContext &AC) {
434 switch (Elt.getKind()) {
435 case CFGElement::Statement:
436 builtinTransferStatement(Elt.castAs<CFGStmt>(), State, AC);
437 break;
438 case CFGElement::Initializer:
439 builtinTransferInitializer(Elt.castAs<CFGInitializer>(), State);
440 break;
441 case CFGElement::LifetimeEnds:
442 // Removing declarations when their lifetime ends serves two purposes:
443 // - Eliminate unnecessary clutter from `Environment::DeclToLoc`
444 // - Allow us to assert that, when joining two `Environment`s, the two
445 // `DeclToLoc` maps never contain entries that map the same declaration to
446 // different storage locations.
447 if (const ValueDecl *VD = Elt.castAs<CFGLifetimeEnds>().getVarDecl())
448 State.Env.removeDecl(*VD);
449 break;
450 default:
451 // FIXME: Evaluate other kinds of `CFGElement`
452 break;
456 /// Transfers `State` by evaluating each element in the `Block` based on the
457 /// `AC.Analysis` specified.
459 /// Built-in transfer functions (if the option for `ApplyBuiltinTransfer` is set
460 /// by the analysis) will be applied to the element before evaluation by the
461 /// user-specified analysis.
462 /// `PostVisitCFG` (if provided) will be applied to the element after evaluation
463 /// by the user-specified analysis.
464 static TypeErasedDataflowAnalysisState
465 transferCFGBlock(const CFGBlock &Block, AnalysisContext &AC,
466 std::function<void(const CFGElement &,
467 const TypeErasedDataflowAnalysisState &)>
468 PostVisitCFG = nullptr) {
469 AC.Log.enterBlock(Block, PostVisitCFG != nullptr);
470 auto State = computeBlockInputState(Block, AC);
471 AC.Log.recordState(State);
472 int ElementIdx = 1;
473 for (const auto &Element : Block) {
474 PrettyStackTraceCFGElement CrashInfo(Element, Block.getBlockID(),
475 ElementIdx++, "transferCFGBlock");
477 AC.Log.enterElement(Element);
478 // Built-in analysis
479 if (AC.Analysis.builtinOptions()) {
480 builtinTransfer(Element, State, AC);
483 // User-provided analysis
484 AC.Analysis.transferTypeErased(Element, State.Lattice, State.Env);
486 // Post processing
487 if (PostVisitCFG) {
488 PostVisitCFG(Element, State);
490 AC.Log.recordState(State);
492 return State;
/// Runs `Analysis` to a fixpoint over the CFG in `CFCtx`, starting from
/// `InitEnv`. Returns one state per CFG block (indexed by block ID;
/// `std::nullopt` for blocks that were never reached), or an error if the
/// iteration limit is exceeded. If `PostVisitCFG` is provided, it is applied
/// to every element of every evaluated block after the fixpoint is reached.
llvm::Expected<std::vector<std::optional<TypeErasedDataflowAnalysisState>>>
runTypeErasedDataflowAnalysis(
    const ControlFlowContext &CFCtx, TypeErasedDataflowAnalysis &Analysis,
    const Environment &InitEnv,
    std::function<void(const CFGElement &,
                       const TypeErasedDataflowAnalysisState &)>
        PostVisitCFG) {
  PrettyStackTraceAnalysis CrashInfo(CFCtx, "runTypeErasedDataflowAnalysis");

  // Only fork-and-initialize the environment for the outermost call on the
  // call stack; for nested analyses, `InitEnv` is used as-is (presumably it
  // was already initialized by the caller — verify against call sites).
  std::optional<Environment> MaybeStartingEnv;
  if (InitEnv.callStackSize() == 1) {
    MaybeStartingEnv = InitEnv.fork();
    MaybeStartingEnv->initialize();
  }
  const Environment &StartingEnv =
      MaybeStartingEnv ? *MaybeStartingEnv : InitEnv;

  const clang::CFG &CFG = CFCtx.getCFG();
  PostOrderCFGView POV(&CFG);
  ForwardDataflowWorklist Worklist(CFG, &POV);

  // One slot per CFG block, indexed by block ID; unevaluated blocks stay
  // `std::nullopt`.
  std::vector<std::optional<TypeErasedDataflowAnalysisState>> BlockStates(
      CFG.size());

  // The entry basic block doesn't contain statements so it can be skipped.
  const CFGBlock &Entry = CFG.getEntry();
  BlockStates[Entry.getBlockID()] = {Analysis.typeErasedInitialElement(),
                                     StartingEnv.fork()};
  Worklist.enqueueSuccessors(&Entry);

  AnalysisContext AC(CFCtx, Analysis, StartingEnv, BlockStates);

  // Bugs in lattices and transfer functions can prevent the analysis from
  // converging. To limit the damage (infinite loops) that these bugs can cause,
  // limit the number of iterations.
  // FIXME: Consider making the maximum number of iterations configurable.
  // FIXME: Consider restricting the number of backedges followed, rather than
  // iterations.
  // FIXME: Set up statistics (see llvm/ADT/Statistic.h) to count average number
  // of iterations, number of functions that time out, etc.
  static constexpr uint32_t MaxAverageVisitsPerBlock = 4;
  static constexpr uint32_t AbsoluteMaxIterations = 1 << 16;
  const uint32_t RelativeMaxIterations =
      MaxAverageVisitsPerBlock * BlockStates.size();
  const uint32_t MaxIterations =
      std::min(RelativeMaxIterations, AbsoluteMaxIterations);
  uint32_t Iterations = 0;
  while (const CFGBlock *Block = Worklist.dequeue()) {
    LLVM_DEBUG(llvm::dbgs()
               << "Processing Block " << Block->getBlockID() << "\n");
    if (++Iterations > MaxIterations) {
      return llvm::createStringError(std::errc::timed_out,
                                     "maximum number of iterations reached");
    }

    const std::optional<TypeErasedDataflowAnalysisState> &OldBlockState =
        BlockStates[Block->getBlockID()];
    TypeErasedDataflowAnalysisState NewBlockState =
        transferCFGBlock(*Block, AC);
    LLVM_DEBUG({
      llvm::errs() << "New Env:\n";
      NewBlockState.Env.dump();
    });

    if (OldBlockState) {
      LLVM_DEBUG({
        llvm::errs() << "Old Env:\n";
        OldBlockState->Env.dump();
      });
      if (isBackedgeNode(*Block)) {
        // At loop backedges, widen instead of testing for equality so the
        // iteration is guaranteed to terminate.
        LatticeJoinEffect Effect1 = Analysis.widenTypeErased(
            NewBlockState.Lattice, OldBlockState->Lattice);
        LatticeJoinEffect Effect2 =
            NewBlockState.Env.widen(OldBlockState->Env, Analysis);
        if (Effect1 == LatticeJoinEffect::Unchanged &&
            Effect2 == LatticeJoinEffect::Unchanged) {
          // The state of `Block` didn't change from widening so there's no
          // need to revisit its successors.
          AC.Log.blockConverged();
          continue;
        }
      } else if (Analysis.isEqualTypeErased(OldBlockState->Lattice,
                                            NewBlockState.Lattice) &&
                 OldBlockState->Env.equivalentTo(NewBlockState.Env,
                                                 Analysis)) {
        // The state of `Block` didn't change after transfer so there's no
        // need to revisit its successors.
        AC.Log.blockConverged();
        continue;
      }
    }
    BlockStates[Block->getBlockID()] = std::move(NewBlockState);

    // Do not add unreachable successor blocks to `Worklist`.
    if (Block->hasNoReturnElement())
      continue;

    Worklist.enqueueSuccessors(Block);
  }
  // FIXME: Consider evaluating unreachable basic blocks (those that have a
  // state set to `std::nullopt` at this point) to also analyze dead code.

  if (PostVisitCFG) {
    for (const CFGBlock *Block : CFCtx.getCFG()) {
      // Skip blocks that were not evaluated.
      if (!BlockStates[Block->getBlockID()])
        continue;
      transferCFGBlock(*Block, AC, PostVisitCFG);
    }
  }

  return std::move(BlockStates);
}
609 } // namespace dataflow
610 } // namespace clang