1 //=== AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis ------===//
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
7 //===----------------------------------------------------------------------===//
9 // This file defines analysis_warnings::[Policy,Executor].
10 // Together they are used by Sema to issue warnings based on inexpensive
11 // static analysis algorithms in libAnalysis.
13 //===----------------------------------------------------------------------===//
15 #include "clang/Sema/AnalysisBasedWarnings.h"
16 #include "clang/AST/Decl.h"
17 #include "clang/AST/DeclCXX.h"
18 #include "clang/AST/DeclObjC.h"
19 #include "clang/AST/DynamicRecursiveASTVisitor.h"
20 #include "clang/AST/EvaluatedExprVisitor.h"
21 #include "clang/AST/Expr.h"
22 #include "clang/AST/ExprCXX.h"
23 #include "clang/AST/ExprObjC.h"
24 #include "clang/AST/OperationKinds.h"
25 #include "clang/AST/ParentMap.h"
26 #include "clang/AST/StmtCXX.h"
27 #include "clang/AST/StmtObjC.h"
28 #include "clang/AST/Type.h"
29 #include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h"
30 #include "clang/Analysis/Analyses/CalledOnceCheck.h"
31 #include "clang/Analysis/Analyses/Consumed.h"
32 #include "clang/Analysis/Analyses/ReachableCode.h"
33 #include "clang/Analysis/Analyses/ThreadSafety.h"
34 #include "clang/Analysis/Analyses/UninitializedValues.h"
35 #include "clang/Analysis/Analyses/UnsafeBufferUsage.h"
36 #include "clang/Analysis/AnalysisDeclContext.h"
37 #include "clang/Analysis/CFG.h"
38 #include "clang/Analysis/CFGStmtMap.h"
39 #include "clang/Basic/Diagnostic.h"
40 #include "clang/Basic/DiagnosticSema.h"
41 #include "clang/Basic/SourceLocation.h"
42 #include "clang/Basic/SourceManager.h"
43 #include "clang/Lex/Preprocessor.h"
44 #include "clang/Sema/ScopeInfo.h"
45 #include "clang/Sema/SemaInternal.h"
46 #include "llvm/ADT/ArrayRef.h"
47 #include "llvm/ADT/BitVector.h"
48 #include "llvm/ADT/MapVector.h"
49 #include "llvm/ADT/STLFunctionalExtras.h"
50 #include "llvm/ADT/SmallVector.h"
51 #include "llvm/ADT/StringRef.h"
57 using namespace clang
;
59 //===----------------------------------------------------------------------===//
60 // Unreachable code analysis.
61 //===----------------------------------------------------------------------===//
64 class UnreachableCodeHandler
: public reachable_code::Callback
{
66 SourceRange PreviousSilenceableCondVal
;
69 UnreachableCodeHandler(Sema
&s
) : S(s
) {}
71 void HandleUnreachable(reachable_code::UnreachableKind UK
, SourceLocation L
,
72 SourceRange SilenceableCondVal
, SourceRange R1
,
73 SourceRange R2
, bool HasFallThroughAttr
) override
{
74 // If the diagnosed code is `[[fallthrough]];` and
75 // `-Wunreachable-code-fallthrough` is enabled, suppress `code will never
76 // be executed` warning to avoid generating diagnostic twice
77 if (HasFallThroughAttr
&&
78 !S
.getDiagnostics().isIgnored(diag::warn_unreachable_fallthrough_attr
,
82 // Avoid reporting multiple unreachable code diagnostics that are
83 // triggered by the same conditional value.
84 if (PreviousSilenceableCondVal
.isValid() &&
85 SilenceableCondVal
.isValid() &&
86 PreviousSilenceableCondVal
== SilenceableCondVal
)
88 PreviousSilenceableCondVal
= SilenceableCondVal
;
90 unsigned diag
= diag::warn_unreachable
;
92 case reachable_code::UK_Break
:
93 diag
= diag::warn_unreachable_break
;
95 case reachable_code::UK_Return
:
96 diag
= diag::warn_unreachable_return
;
98 case reachable_code::UK_Loop_Increment
:
99 diag
= diag::warn_unreachable_loop_increment
;
101 case reachable_code::UK_Other
:
105 S
.Diag(L
, diag
) << R1
<< R2
;
107 SourceLocation Open
= SilenceableCondVal
.getBegin();
108 if (Open
.isValid()) {
109 SourceLocation Close
= SilenceableCondVal
.getEnd();
110 Close
= S
.getLocForEndOfToken(Close
);
111 if (Close
.isValid()) {
112 S
.Diag(Open
, diag::note_unreachable_silence
)
113 << FixItHint::CreateInsertion(Open
, "/* DISABLES CODE */ (")
114 << FixItHint::CreateInsertion(Close
, ")");
119 } // anonymous namespace
121 /// CheckUnreachable - Check for unreachable code.
122 static void CheckUnreachable(Sema
&S
, AnalysisDeclContext
&AC
) {
123 // As a heuristic prune all diagnostics not in the main file. Currently
124 // the majority of warnings in headers are false positives. These
125 // are largely caused by configuration state, e.g. preprocessor
126 // defined code, etc.
128 // Note that this is also a performance optimization. Analyzing
129 // headers many times can be expensive.
130 if (!S
.getSourceManager().isInMainFile(AC
.getDecl()->getBeginLoc()))
133 UnreachableCodeHandler
UC(S
);
134 reachable_code::FindUnreachableCode(AC
, S
.getPreprocessor(), UC
);
138 /// Warn on logical operator errors in CFGBuilder
139 class LogicalErrorHandler
: public CFGCallback
{
143 LogicalErrorHandler(Sema
&S
) : S(S
) {}
145 static bool HasMacroID(const Expr
*E
) {
146 if (E
->getExprLoc().isMacroID())
149 // Recurse to children.
150 for (const Stmt
*SubStmt
: E
->children())
151 if (const Expr
*SubExpr
= dyn_cast_or_null
<Expr
>(SubStmt
))
152 if (HasMacroID(SubExpr
))
158 void logicAlwaysTrue(const BinaryOperator
*B
, bool isAlwaysTrue
) override
{
162 unsigned DiagID
= isAlwaysTrue
163 ? diag::warn_tautological_negation_or_compare
164 : diag::warn_tautological_negation_and_compare
;
165 SourceRange DiagRange
= B
->getSourceRange();
166 S
.Diag(B
->getExprLoc(), DiagID
) << DiagRange
;
169 void compareAlwaysTrue(const BinaryOperator
*B
, bool isAlwaysTrue
) override
{
173 SourceRange DiagRange
= B
->getSourceRange();
174 S
.Diag(B
->getExprLoc(), diag::warn_tautological_overlap_comparison
)
175 << DiagRange
<< isAlwaysTrue
;
178 void compareBitwiseEquality(const BinaryOperator
*B
,
179 bool isAlwaysTrue
) override
{
183 SourceRange DiagRange
= B
->getSourceRange();
184 S
.Diag(B
->getExprLoc(), diag::warn_comparison_bitwise_always
)
185 << DiagRange
<< isAlwaysTrue
;
188 void compareBitwiseOr(const BinaryOperator
*B
) override
{
192 SourceRange DiagRange
= B
->getSourceRange();
193 S
.Diag(B
->getExprLoc(), diag::warn_comparison_bitwise_or
) << DiagRange
;
196 static bool hasActiveDiagnostics(DiagnosticsEngine
&Diags
,
197 SourceLocation Loc
) {
198 return !Diags
.isIgnored(diag::warn_tautological_overlap_comparison
, Loc
) ||
199 !Diags
.isIgnored(diag::warn_comparison_bitwise_or
, Loc
) ||
200 !Diags
.isIgnored(diag::warn_tautological_negation_and_compare
, Loc
);
203 } // anonymous namespace
205 //===----------------------------------------------------------------------===//
206 // Check for infinite self-recursion in functions
207 //===----------------------------------------------------------------------===//
209 // Returns true if the function is called anywhere within the CFGBlock.
210 // For member functions, the additional condition of being call from the
211 // this pointer is required.
212 static bool hasRecursiveCallInPath(const FunctionDecl
*FD
, CFGBlock
&Block
) {
213 // Process all the Stmt's in this block to find any calls to FD.
214 for (const auto &B
: Block
) {
215 if (B
.getKind() != CFGElement::Statement
)
218 const CallExpr
*CE
= dyn_cast
<CallExpr
>(B
.getAs
<CFGStmt
>()->getStmt());
219 if (!CE
|| !CE
->getCalleeDecl() ||
220 CE
->getCalleeDecl()->getCanonicalDecl() != FD
)
223 // Skip function calls which are qualified with a templated class.
224 if (const DeclRefExpr
*DRE
=
225 dyn_cast
<DeclRefExpr
>(CE
->getCallee()->IgnoreParenImpCasts())) {
226 if (NestedNameSpecifier
*NNS
= DRE
->getQualifier()) {
227 if (NNS
->getKind() == NestedNameSpecifier::TypeSpec
&&
228 isa
<TemplateSpecializationType
>(NNS
->getAsType())) {
234 const CXXMemberCallExpr
*MCE
= dyn_cast
<CXXMemberCallExpr
>(CE
);
235 if (!MCE
|| isa
<CXXThisExpr
>(MCE
->getImplicitObjectArgument()) ||
236 !MCE
->getMethodDecl()->isVirtual())
242 // Returns true if every path from the entry block passes through a call to FD.
243 static bool checkForRecursiveFunctionCall(const FunctionDecl
*FD
, CFG
*cfg
) {
244 llvm::SmallPtrSet
<CFGBlock
*, 16> Visited
;
245 llvm::SmallVector
<CFGBlock
*, 16> WorkList
;
246 // Keep track of whether we found at least one recursive path.
247 bool foundRecursion
= false;
249 const unsigned ExitID
= cfg
->getExit().getBlockID();
251 // Seed the work list with the entry block.
252 WorkList
.push_back(&cfg
->getEntry());
254 while (!WorkList
.empty()) {
255 CFGBlock
*Block
= WorkList
.pop_back_val();
257 for (auto I
= Block
->succ_begin(), E
= Block
->succ_end(); I
!= E
; ++I
) {
258 if (CFGBlock
*SuccBlock
= *I
) {
259 if (!Visited
.insert(SuccBlock
).second
)
262 // Found a path to the exit node without a recursive call.
263 if (ExitID
== SuccBlock
->getBlockID())
266 // If the successor block contains a recursive call, end analysis there.
267 if (hasRecursiveCallInPath(FD
, *SuccBlock
)) {
268 foundRecursion
= true;
272 WorkList
.push_back(SuccBlock
);
276 return foundRecursion
;
279 static void checkRecursiveFunction(Sema
&S
, const FunctionDecl
*FD
,
280 const Stmt
*Body
, AnalysisDeclContext
&AC
) {
281 FD
= FD
->getCanonicalDecl();
283 // Only run on non-templated functions and non-templated members of
284 // templated classes.
285 if (FD
->getTemplatedKind() != FunctionDecl::TK_NonTemplate
&&
286 FD
->getTemplatedKind() != FunctionDecl::TK_MemberSpecialization
)
289 CFG
*cfg
= AC
.getCFG();
292 // If the exit block is unreachable, skip processing the function.
293 if (cfg
->getExit().pred_empty())
296 // Emit diagnostic if a recursive function call is detected for all paths.
297 if (checkForRecursiveFunctionCall(FD
, cfg
))
298 S
.Diag(Body
->getBeginLoc(), diag::warn_infinite_recursive_function
);
301 //===----------------------------------------------------------------------===//
302 // Check for throw in a non-throwing function.
303 //===----------------------------------------------------------------------===//
305 /// Determine whether an exception thrown by E, unwinding from ThrowBlock,
306 /// can reach ExitBlock.
307 static bool throwEscapes(Sema
&S
, const CXXThrowExpr
*E
, CFGBlock
&ThrowBlock
,
309 SmallVector
<CFGBlock
*, 16> Stack
;
310 llvm::BitVector
Queued(Body
->getNumBlockIDs());
312 Stack
.push_back(&ThrowBlock
);
313 Queued
[ThrowBlock
.getBlockID()] = true;
315 while (!Stack
.empty()) {
316 CFGBlock
&UnwindBlock
= *Stack
.back();
319 for (auto &Succ
: UnwindBlock
.succs()) {
320 if (!Succ
.isReachable() || Queued
[Succ
->getBlockID()])
323 if (Succ
->getBlockID() == Body
->getExit().getBlockID())
327 dyn_cast_or_null
<CXXCatchStmt
>(Succ
->getLabel())) {
328 QualType Caught
= Catch
->getCaughtType();
329 if (Caught
.isNull() || // catch (...) catches everything
330 !E
->getSubExpr() || // throw; is considered cuaght by any handler
331 S
.handlerCanCatch(Caught
, E
->getSubExpr()->getType()))
332 // Exception doesn't escape via this path.
335 Stack
.push_back(Succ
);
336 Queued
[Succ
->getBlockID()] = true;
344 static void visitReachableThrows(
346 llvm::function_ref
<void(const CXXThrowExpr
*, CFGBlock
&)> Visit
) {
347 llvm::BitVector
Reachable(BodyCFG
->getNumBlockIDs());
348 clang::reachable_code::ScanReachableFromBlock(&BodyCFG
->getEntry(), Reachable
);
349 for (CFGBlock
*B
: *BodyCFG
) {
350 if (!Reachable
[B
->getBlockID()])
352 for (CFGElement
&E
: *B
) {
353 std::optional
<CFGStmt
> S
= E
.getAs
<CFGStmt
>();
356 if (auto *Throw
= dyn_cast
<CXXThrowExpr
>(S
->getStmt()))
362 static void EmitDiagForCXXThrowInNonThrowingFunc(Sema
&S
, SourceLocation OpLoc
,
363 const FunctionDecl
*FD
) {
364 if (!S
.getSourceManager().isInSystemHeader(OpLoc
) &&
365 FD
->getTypeSourceInfo()) {
366 S
.Diag(OpLoc
, diag::warn_throw_in_noexcept_func
) << FD
;
367 if (S
.getLangOpts().CPlusPlus11
&&
368 (isa
<CXXDestructorDecl
>(FD
) ||
369 FD
->getDeclName().getCXXOverloadedOperator() == OO_Delete
||
370 FD
->getDeclName().getCXXOverloadedOperator() == OO_Array_Delete
)) {
371 if (const auto *Ty
= FD
->getTypeSourceInfo()->getType()->
372 getAs
<FunctionProtoType
>())
373 S
.Diag(FD
->getLocation(), diag::note_throw_in_dtor
)
374 << !isa
<CXXDestructorDecl
>(FD
) << !Ty
->hasExceptionSpec()
375 << FD
->getExceptionSpecSourceRange();
377 S
.Diag(FD
->getLocation(), diag::note_throw_in_function
)
378 << FD
->getExceptionSpecSourceRange();
382 static void checkThrowInNonThrowingFunc(Sema
&S
, const FunctionDecl
*FD
,
383 AnalysisDeclContext
&AC
) {
384 CFG
*BodyCFG
= AC
.getCFG();
387 if (BodyCFG
->getExit().pred_empty())
389 visitReachableThrows(BodyCFG
, [&](const CXXThrowExpr
*Throw
, CFGBlock
&Block
) {
390 if (throwEscapes(S
, Throw
, Block
, BodyCFG
))
391 EmitDiagForCXXThrowInNonThrowingFunc(S
, Throw
->getThrowLoc(), FD
);
395 static bool isNoexcept(const FunctionDecl
*FD
) {
396 const auto *FPT
= FD
->getType()->castAs
<FunctionProtoType
>();
397 if (FPT
->isNothrow() || FD
->hasAttr
<NoThrowAttr
>())
402 //===----------------------------------------------------------------------===//
403 // Check for missing return value.
404 //===----------------------------------------------------------------------===//
406 enum ControlFlowKind
{
411 NeverFallThroughOrReturn
414 /// CheckFallThrough - Check that we don't fall off the end of a
415 /// Statement that should return a value.
417 /// \returns AlwaysFallThrough iff we always fall off the end of the statement,
418 /// MaybeFallThrough iff we might or might not fall off the end,
419 /// NeverFallThroughOrReturn iff we never fall off the end of the statement or
420 /// return. We assume NeverFallThrough iff we never fall off the end of the
421 /// statement but we may return. We assume that functions not marked noreturn
423 static ControlFlowKind
CheckFallThrough(AnalysisDeclContext
&AC
) {
424 CFG
*cfg
= AC
.getCFG();
425 if (!cfg
) return UnknownFallThrough
;
427 // The CFG leaves in dead things, and we don't want the dead code paths to
428 // confuse us, so we mark all live things first.
429 llvm::BitVector
live(cfg
->getNumBlockIDs());
430 unsigned count
= reachable_code::ScanReachableFromBlock(&cfg
->getEntry(),
433 bool AddEHEdges
= AC
.getAddEHEdges();
434 if (!AddEHEdges
&& count
!= cfg
->getNumBlockIDs())
435 // When there are things remaining dead, and we didn't add EH edges
436 // from CallExprs to the catch clauses, we have to go back and
437 // mark them as live.
438 for (const auto *B
: *cfg
) {
439 if (!live
[B
->getBlockID()]) {
440 if (B
->pred_begin() == B
->pred_end()) {
441 const Stmt
*Term
= B
->getTerminatorStmt();
442 if (isa_and_nonnull
<CXXTryStmt
>(Term
))
443 // When not adding EH edges from calls, catch clauses
444 // can otherwise seem dead. Avoid noting them as dead.
445 count
+= reachable_code::ScanReachableFromBlock(B
, live
);
451 // Now we know what is live, we check the live precessors of the exit block
452 // and look for fall through paths, being careful to ignore normal returns,
453 // and exceptional paths.
454 bool HasLiveReturn
= false;
455 bool HasFakeEdge
= false;
456 bool HasPlainEdge
= false;
457 bool HasAbnormalEdge
= false;
459 // Ignore default cases that aren't likely to be reachable because all
460 // enums in a switch(X) have explicit case statements.
461 CFGBlock::FilterOptions FO
;
462 FO
.IgnoreDefaultsWithCoveredEnums
= 1;
464 for (CFGBlock::filtered_pred_iterator I
=
465 cfg
->getExit().filtered_pred_start_end(FO
);
467 const CFGBlock
&B
= **I
;
468 if (!live
[B
.getBlockID()])
471 // Skip blocks which contain an element marked as no-return. They don't
472 // represent actually viable edges into the exit block, so mark them as
474 if (B
.hasNoReturnElement()) {
475 HasAbnormalEdge
= true;
479 // Destructors can appear after the 'return' in the CFG. This is
480 // normal. We need to look pass the destructors for the return
481 // statement (if it exists).
482 CFGBlock::const_reverse_iterator ri
= B
.rbegin(), re
= B
.rend();
484 for ( ; ri
!= re
; ++ri
)
485 if (ri
->getAs
<CFGStmt
>())
488 // No more CFGElements in the block?
490 const Stmt
*Term
= B
.getTerminatorStmt();
491 if (Term
&& (isa
<CXXTryStmt
>(Term
) || isa
<ObjCAtTryStmt
>(Term
))) {
492 HasAbnormalEdge
= true;
495 // A labeled empty statement, or the entry block...
500 CFGStmt CS
= ri
->castAs
<CFGStmt
>();
501 const Stmt
*S
= CS
.getStmt();
502 if (isa
<ReturnStmt
>(S
) || isa
<CoreturnStmt
>(S
)) {
503 HasLiveReturn
= true;
506 if (isa
<ObjCAtThrowStmt
>(S
)) {
510 if (isa
<CXXThrowExpr
>(S
)) {
514 if (isa
<MSAsmStmt
>(S
)) {
515 // TODO: Verify this is correct.
517 HasLiveReturn
= true;
520 if (isa
<CXXTryStmt
>(S
)) {
521 HasAbnormalEdge
= true;
524 if (!llvm::is_contained(B
.succs(), &cfg
->getExit())) {
525 HasAbnormalEdge
= true;
533 return NeverFallThrough
;
534 return NeverFallThroughOrReturn
;
536 if (HasAbnormalEdge
|| HasFakeEdge
|| HasLiveReturn
)
537 return MaybeFallThrough
;
538 // This says AlwaysFallThrough for calls to functions that are not marked
539 // noreturn, that don't return. If people would like this warning to be more
540 // accurate, such functions should be marked as noreturn.
541 return AlwaysFallThrough
;
546 struct CheckFallThroughDiagnostics
{
547 unsigned diag_MaybeFallThrough_HasNoReturn
;
548 unsigned diag_MaybeFallThrough_ReturnsNonVoid
;
549 unsigned diag_AlwaysFallThrough_HasNoReturn
;
550 unsigned diag_AlwaysFallThrough_ReturnsNonVoid
;
551 unsigned diag_NeverFallThroughOrReturn
;
552 enum { Function
, Block
, Lambda
, Coroutine
} funMode
;
553 SourceLocation FuncLoc
;
555 static CheckFallThroughDiagnostics
MakeForFunction(const Decl
*Func
) {
556 CheckFallThroughDiagnostics D
;
557 D
.FuncLoc
= Func
->getLocation();
558 D
.diag_MaybeFallThrough_HasNoReturn
=
559 diag::warn_falloff_noreturn_function
;
560 D
.diag_MaybeFallThrough_ReturnsNonVoid
=
561 diag::warn_maybe_falloff_nonvoid_function
;
562 D
.diag_AlwaysFallThrough_HasNoReturn
=
563 diag::warn_falloff_noreturn_function
;
564 D
.diag_AlwaysFallThrough_ReturnsNonVoid
=
565 diag::warn_falloff_nonvoid_function
;
567 // Don't suggest that virtual functions be marked "noreturn", since they
568 // might be overridden by non-noreturn functions.
569 bool isVirtualMethod
= false;
570 if (const CXXMethodDecl
*Method
= dyn_cast
<CXXMethodDecl
>(Func
))
571 isVirtualMethod
= Method
->isVirtual();
573 // Don't suggest that template instantiations be marked "noreturn"
574 bool isTemplateInstantiation
= false;
575 if (const FunctionDecl
*Function
= dyn_cast
<FunctionDecl
>(Func
))
576 isTemplateInstantiation
= Function
->isTemplateInstantiation();
578 if (!isVirtualMethod
&& !isTemplateInstantiation
)
579 D
.diag_NeverFallThroughOrReturn
=
580 diag::warn_suggest_noreturn_function
;
582 D
.diag_NeverFallThroughOrReturn
= 0;
584 D
.funMode
= Function
;
588 static CheckFallThroughDiagnostics
MakeForCoroutine(const Decl
*Func
) {
589 CheckFallThroughDiagnostics D
;
590 D
.FuncLoc
= Func
->getLocation();
591 D
.diag_MaybeFallThrough_HasNoReturn
= 0;
592 D
.diag_MaybeFallThrough_ReturnsNonVoid
=
593 diag::warn_maybe_falloff_nonvoid_coroutine
;
594 D
.diag_AlwaysFallThrough_HasNoReturn
= 0;
595 D
.diag_AlwaysFallThrough_ReturnsNonVoid
=
596 diag::warn_falloff_nonvoid_coroutine
;
597 D
.diag_NeverFallThroughOrReturn
= 0;
598 D
.funMode
= Coroutine
;
602 static CheckFallThroughDiagnostics
MakeForBlock() {
603 CheckFallThroughDiagnostics D
;
604 D
.diag_MaybeFallThrough_HasNoReturn
=
605 diag::err_noreturn_block_has_return_expr
;
606 D
.diag_MaybeFallThrough_ReturnsNonVoid
=
607 diag::err_maybe_falloff_nonvoid_block
;
608 D
.diag_AlwaysFallThrough_HasNoReturn
=
609 diag::err_noreturn_block_has_return_expr
;
610 D
.diag_AlwaysFallThrough_ReturnsNonVoid
=
611 diag::err_falloff_nonvoid_block
;
612 D
.diag_NeverFallThroughOrReturn
= 0;
617 static CheckFallThroughDiagnostics
MakeForLambda() {
618 CheckFallThroughDiagnostics D
;
619 D
.diag_MaybeFallThrough_HasNoReturn
=
620 diag::err_noreturn_lambda_has_return_expr
;
621 D
.diag_MaybeFallThrough_ReturnsNonVoid
=
622 diag::warn_maybe_falloff_nonvoid_lambda
;
623 D
.diag_AlwaysFallThrough_HasNoReturn
=
624 diag::err_noreturn_lambda_has_return_expr
;
625 D
.diag_AlwaysFallThrough_ReturnsNonVoid
=
626 diag::warn_falloff_nonvoid_lambda
;
627 D
.diag_NeverFallThroughOrReturn
= 0;
632 bool checkDiagnostics(DiagnosticsEngine
&D
, bool ReturnsVoid
,
633 bool HasNoReturn
) const {
634 if (funMode
== Function
) {
635 return (ReturnsVoid
||
636 D
.isIgnored(diag::warn_maybe_falloff_nonvoid_function
,
639 D
.isIgnored(diag::warn_noreturn_function_has_return_expr
,
642 D
.isIgnored(diag::warn_suggest_noreturn_block
, FuncLoc
));
644 if (funMode
== Coroutine
) {
645 return (ReturnsVoid
||
646 D
.isIgnored(diag::warn_maybe_falloff_nonvoid_function
, FuncLoc
) ||
647 D
.isIgnored(diag::warn_maybe_falloff_nonvoid_coroutine
,
651 // For blocks / lambdas.
652 return ReturnsVoid
&& !HasNoReturn
;
656 } // anonymous namespace
658 /// CheckFallThroughForBody - Check that we don't fall off the end of a
659 /// function that should return a value. Check that we don't fall off the end
660 /// of a noreturn function. We assume that functions and blocks not marked
661 /// noreturn will return.
662 static void CheckFallThroughForBody(Sema
&S
, const Decl
*D
, const Stmt
*Body
,
664 const CheckFallThroughDiagnostics
&CD
,
665 AnalysisDeclContext
&AC
,
666 sema::FunctionScopeInfo
*FSI
) {
668 bool ReturnsVoid
= false;
669 bool HasNoReturn
= false;
670 bool IsCoroutine
= FSI
->isCoroutine();
672 if (const auto *FD
= dyn_cast
<FunctionDecl
>(D
)) {
673 if (const auto *CBody
= dyn_cast
<CoroutineBodyStmt
>(Body
))
674 ReturnsVoid
= CBody
->getFallthroughHandler() != nullptr;
676 ReturnsVoid
= FD
->getReturnType()->isVoidType();
677 HasNoReturn
= FD
->isNoReturn();
679 else if (const auto *MD
= dyn_cast
<ObjCMethodDecl
>(D
)) {
680 ReturnsVoid
= MD
->getReturnType()->isVoidType();
681 HasNoReturn
= MD
->hasAttr
<NoReturnAttr
>();
683 else if (isa
<BlockDecl
>(D
)) {
684 if (const FunctionType
*FT
=
685 BlockType
->getPointeeType()->getAs
<FunctionType
>()) {
686 if (FT
->getReturnType()->isVoidType())
688 if (FT
->getNoReturnAttr())
693 DiagnosticsEngine
&Diags
= S
.getDiagnostics();
695 // Short circuit for compilation speed.
696 if (CD
.checkDiagnostics(Diags
, ReturnsVoid
, HasNoReturn
))
698 SourceLocation LBrace
= Body
->getBeginLoc(), RBrace
= Body
->getEndLoc();
699 auto EmitDiag
= [&](SourceLocation Loc
, unsigned DiagID
) {
701 S
.Diag(Loc
, DiagID
) << FSI
->CoroutinePromise
->getType();
706 // cpu_dispatch functions permit empty function bodies for ICC compatibility.
707 if (D
->getAsFunction() && D
->getAsFunction()->isCPUDispatchMultiVersion())
710 // Either in a function body compound statement, or a function-try-block.
711 switch (CheckFallThrough(AC
)) {
712 case UnknownFallThrough
:
715 case MaybeFallThrough
:
717 EmitDiag(RBrace
, CD
.diag_MaybeFallThrough_HasNoReturn
);
718 else if (!ReturnsVoid
)
719 EmitDiag(RBrace
, CD
.diag_MaybeFallThrough_ReturnsNonVoid
);
721 case AlwaysFallThrough
:
723 EmitDiag(RBrace
, CD
.diag_AlwaysFallThrough_HasNoReturn
);
724 else if (!ReturnsVoid
)
725 EmitDiag(RBrace
, CD
.diag_AlwaysFallThrough_ReturnsNonVoid
);
727 case NeverFallThroughOrReturn
:
728 if (ReturnsVoid
&& !HasNoReturn
&& CD
.diag_NeverFallThroughOrReturn
) {
729 if (const FunctionDecl
*FD
= dyn_cast
<FunctionDecl
>(D
)) {
730 S
.Diag(LBrace
, CD
.diag_NeverFallThroughOrReturn
) << 0 << FD
;
731 } else if (const ObjCMethodDecl
*MD
= dyn_cast
<ObjCMethodDecl
>(D
)) {
732 S
.Diag(LBrace
, CD
.diag_NeverFallThroughOrReturn
) << 1 << MD
;
734 S
.Diag(LBrace
, CD
.diag_NeverFallThroughOrReturn
);
738 case NeverFallThrough
:
743 //===----------------------------------------------------------------------===//
745 //===----------------------------------------------------------------------===//
748 /// ContainsReference - A visitor class to search for references to
749 /// a particular declaration (the needle) within any evaluated component of an
750 /// expression (recursively).
751 class ContainsReference
: public ConstEvaluatedExprVisitor
<ContainsReference
> {
753 const DeclRefExpr
*Needle
;
756 typedef ConstEvaluatedExprVisitor
<ContainsReference
> Inherited
;
758 ContainsReference(ASTContext
&Context
, const DeclRefExpr
*Needle
)
759 : Inherited(Context
), FoundReference(false), Needle(Needle
) {}
761 void VisitExpr(const Expr
*E
) {
762 // Stop evaluating if we already have a reference.
766 Inherited::VisitExpr(E
);
769 void VisitDeclRefExpr(const DeclRefExpr
*E
) {
771 FoundReference
= true;
773 Inherited::VisitDeclRefExpr(E
);
776 bool doesContainReference() const { return FoundReference
; }
778 } // anonymous namespace
780 static bool SuggestInitializationFixit(Sema
&S
, const VarDecl
*VD
) {
781 QualType VariableTy
= VD
->getType().getCanonicalType();
782 if (VariableTy
->isBlockPointerType() &&
783 !VD
->hasAttr
<BlocksAttr
>()) {
784 S
.Diag(VD
->getLocation(), diag::note_block_var_fixit_add_initialization
)
786 << FixItHint::CreateInsertion(VD
->getLocation(), "__block ");
790 // Don't issue a fixit if there is already an initializer.
794 // Don't suggest a fixit inside macros.
795 if (VD
->getEndLoc().isMacroID())
798 SourceLocation Loc
= S
.getLocForEndOfToken(VD
->getEndLoc());
800 // Suggest possible initialization (if any).
801 std::string Init
= S
.getFixItZeroInitializerForType(VariableTy
, Loc
);
805 S
.Diag(Loc
, diag::note_var_fixit_add_initialization
) << VD
->getDeclName()
806 << FixItHint::CreateInsertion(Loc
, Init
);
810 /// Create a fixit to remove an if-like statement, on the assumption that its
811 /// condition is CondVal.
812 static void CreateIfFixit(Sema
&S
, const Stmt
*If
, const Stmt
*Then
,
813 const Stmt
*Else
, bool CondVal
,
814 FixItHint
&Fixit1
, FixItHint
&Fixit2
) {
816 // If condition is always true, remove all but the 'then'.
817 Fixit1
= FixItHint::CreateRemoval(
818 CharSourceRange::getCharRange(If
->getBeginLoc(), Then
->getBeginLoc()));
820 SourceLocation ElseKwLoc
= S
.getLocForEndOfToken(Then
->getEndLoc());
822 FixItHint::CreateRemoval(SourceRange(ElseKwLoc
, Else
->getEndLoc()));
825 // If condition is always false, remove all but the 'else'.
827 Fixit1
= FixItHint::CreateRemoval(CharSourceRange::getCharRange(
828 If
->getBeginLoc(), Else
->getBeginLoc()));
830 Fixit1
= FixItHint::CreateRemoval(If
->getSourceRange());
834 /// DiagUninitUse -- Helper function to produce a diagnostic for an
835 /// uninitialized use of a variable.
836 static void DiagUninitUse(Sema
&S
, const VarDecl
*VD
, const UninitUse
&Use
,
837 bool IsCapturedByBlock
) {
838 bool Diagnosed
= false;
840 switch (Use
.getKind()) {
841 case UninitUse::Always
:
842 S
.Diag(Use
.getUser()->getBeginLoc(), diag::warn_uninit_var
)
843 << VD
->getDeclName() << IsCapturedByBlock
844 << Use
.getUser()->getSourceRange();
847 case UninitUse::AfterDecl
:
848 case UninitUse::AfterCall
:
849 S
.Diag(VD
->getLocation(), diag::warn_sometimes_uninit_var
)
850 << VD
->getDeclName() << IsCapturedByBlock
851 << (Use
.getKind() == UninitUse::AfterDecl
? 4 : 5)
852 << const_cast<DeclContext
*>(VD
->getLexicalDeclContext())
853 << VD
->getSourceRange();
854 S
.Diag(Use
.getUser()->getBeginLoc(), diag::note_uninit_var_use
)
855 << IsCapturedByBlock
<< Use
.getUser()->getSourceRange();
858 case UninitUse::Maybe
:
859 case UninitUse::Sometimes
:
860 // Carry on to report sometimes-uninitialized branches, if possible,
861 // or a 'may be used uninitialized' diagnostic otherwise.
865 // Diagnose each branch which leads to a sometimes-uninitialized use.
866 for (UninitUse::branch_iterator I
= Use
.branch_begin(), E
= Use
.branch_end();
868 assert(Use
.getKind() == UninitUse::Sometimes
);
870 const Expr
*User
= Use
.getUser();
871 const Stmt
*Term
= I
->Terminator
;
873 // Information used when building the diagnostic.
878 // FixIts to suppress the diagnostic by removing the dead condition.
879 // For all binary terminators, branch 0 is taken if the condition is true,
880 // and branch 1 is taken if the condition is false.
881 int RemoveDiagKind
= -1;
882 const char *FixitStr
=
883 S
.getLangOpts().CPlusPlus
? (I
->Output
? "true" : "false")
884 : (I
->Output
? "1" : "0");
885 FixItHint Fixit1
, Fixit2
;
887 switch (Term
? Term
->getStmtClass() : Stmt::DeclStmtClass
) {
889 // Don't know how to report this. Just fall back to 'may be used
890 // uninitialized'. FIXME: Can this happen?
893 // "condition is true / condition is false".
894 case Stmt::IfStmtClass
: {
895 const IfStmt
*IS
= cast
<IfStmt
>(Term
);
898 Range
= IS
->getCond()->getSourceRange();
900 CreateIfFixit(S
, IS
, IS
->getThen(), IS
->getElse(),
901 I
->Output
, Fixit1
, Fixit2
);
904 case Stmt::ConditionalOperatorClass
: {
905 const ConditionalOperator
*CO
= cast
<ConditionalOperator
>(Term
);
908 Range
= CO
->getCond()->getSourceRange();
910 CreateIfFixit(S
, CO
, CO
->getTrueExpr(), CO
->getFalseExpr(),
911 I
->Output
, Fixit1
, Fixit2
);
914 case Stmt::BinaryOperatorClass
: {
915 const BinaryOperator
*BO
= cast
<BinaryOperator
>(Term
);
916 if (!BO
->isLogicalOp())
919 Str
= BO
->getOpcodeStr();
920 Range
= BO
->getLHS()->getSourceRange();
922 if ((BO
->getOpcode() == BO_LAnd
&& I
->Output
) ||
923 (BO
->getOpcode() == BO_LOr
&& !I
->Output
))
924 // true && y -> y, false || y -> y.
925 Fixit1
= FixItHint::CreateRemoval(
926 SourceRange(BO
->getBeginLoc(), BO
->getOperatorLoc()));
928 // false && y -> false, true || y -> true.
929 Fixit1
= FixItHint::CreateReplacement(BO
->getSourceRange(), FixitStr
);
933 // "loop is entered / loop is exited".
934 case Stmt::WhileStmtClass
:
937 Range
= cast
<WhileStmt
>(Term
)->getCond()->getSourceRange();
939 Fixit1
= FixItHint::CreateReplacement(Range
, FixitStr
);
941 case Stmt::ForStmtClass
:
944 Range
= cast
<ForStmt
>(Term
)->getCond()->getSourceRange();
947 Fixit1
= FixItHint::CreateRemoval(Range
);
949 Fixit1
= FixItHint::CreateReplacement(Range
, FixitStr
);
951 case Stmt::CXXForRangeStmtClass
:
952 if (I
->Output
== 1) {
953 // The use occurs if a range-based for loop's body never executes.
954 // That may be impossible, and there's no syntactic fix for this,
955 // so treat it as a 'may be uninitialized' case.
960 Range
= cast
<CXXForRangeStmt
>(Term
)->getRangeInit()->getSourceRange();
963 // "condition is true / loop is exited".
964 case Stmt::DoStmtClass
:
967 Range
= cast
<DoStmt
>(Term
)->getCond()->getSourceRange();
969 Fixit1
= FixItHint::CreateReplacement(Range
, FixitStr
);
972 // "switch case is taken".
973 case Stmt::CaseStmtClass
:
976 Range
= cast
<CaseStmt
>(Term
)->getLHS()->getSourceRange();
978 case Stmt::DefaultStmtClass
:
981 Range
= cast
<DefaultStmt
>(Term
)->getDefaultLoc();
985 S
.Diag(Range
.getBegin(), diag::warn_sometimes_uninit_var
)
986 << VD
->getDeclName() << IsCapturedByBlock
<< DiagKind
987 << Str
<< I
->Output
<< Range
;
988 S
.Diag(User
->getBeginLoc(), diag::note_uninit_var_use
)
989 << IsCapturedByBlock
<< User
->getSourceRange();
990 if (RemoveDiagKind
!= -1)
991 S
.Diag(Fixit1
.RemoveRange
.getBegin(), diag::note_uninit_fixit_remove_cond
)
992 << RemoveDiagKind
<< Str
<< I
->Output
<< Fixit1
<< Fixit2
;
998 S
.Diag(Use
.getUser()->getBeginLoc(), diag::warn_maybe_uninit_var
)
999 << VD
->getDeclName() << IsCapturedByBlock
1000 << Use
.getUser()->getSourceRange();
1003 /// Diagnose uninitialized const reference usages.
1004 static bool DiagnoseUninitializedConstRefUse(Sema
&S
, const VarDecl
*VD
,
1005 const UninitUse
&Use
) {
1006 S
.Diag(Use
.getUser()->getBeginLoc(), diag::warn_uninit_const_reference
)
1007 << VD
->getDeclName() << Use
.getUser()->getSourceRange();
1011 /// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an
1012 /// uninitialized variable. This manages the different forms of diagnostic
1013 /// emitted for particular types of uses. Returns true if the use was diagnosed
1014 /// as a warning. If a particular use is one we omit warnings for, returns
1016 static bool DiagnoseUninitializedUse(Sema
&S
, const VarDecl
*VD
,
1017 const UninitUse
&Use
,
1018 bool alwaysReportSelfInit
= false) {
1019 if (const DeclRefExpr
*DRE
= dyn_cast
<DeclRefExpr
>(Use
.getUser())) {
1020 // Inspect the initializer of the variable declaration which is
1021 // being referenced prior to its initialization. We emit
1022 // specialized diagnostics for self-initialization, and we
1023 // specifically avoid warning about self references which take the
1028 // This is used to indicate to GCC that 'x' is intentionally left
1029 // uninitialized. Proven code paths which access 'x' in
1030 // an uninitialized state after this will still warn.
1031 if (const Expr
*Initializer
= VD
->getInit()) {
1032 if (!alwaysReportSelfInit
&& DRE
== Initializer
->IgnoreParenImpCasts())
1035 ContainsReference
CR(S
.Context
, DRE
);
1036 CR
.Visit(Initializer
);
1037 if (CR
.doesContainReference()) {
1038 S
.Diag(DRE
->getBeginLoc(), diag::warn_uninit_self_reference_in_init
)
1039 << VD
->getDeclName() << VD
->getLocation() << DRE
->getSourceRange();
1044 DiagUninitUse(S
, VD
, Use
, false);
1046 const BlockExpr
*BE
= cast
<BlockExpr
>(Use
.getUser());
1047 if (VD
->getType()->isBlockPointerType() && !VD
->hasAttr
<BlocksAttr
>())
1048 S
.Diag(BE
->getBeginLoc(),
1049 diag::warn_uninit_byref_blockvar_captured_by_block
)
1050 << VD
->getDeclName()
1051 << VD
->getType().getQualifiers().hasObjCLifetime();
1053 DiagUninitUse(S
, VD
, Use
, true);
1056 // Report where the variable was declared when the use wasn't within
1057 // the initializer of that declaration & we didn't already suggest
1058 // an initialization fixit.
1059 if (!SuggestInitializationFixit(S
, VD
))
1060 S
.Diag(VD
->getBeginLoc(), diag::note_var_declared_here
)
1061 << VD
->getDeclName();
1067 class FallthroughMapper
: public DynamicRecursiveASTVisitor
{
1069 FallthroughMapper(Sema
&S
) : FoundSwitchStatements(false), S(S
) {
1070 ShouldWalkTypesOfTypeLocs
= false;
1073 bool foundSwitchStatements() const { return FoundSwitchStatements
; }
1075 void markFallthroughVisited(const AttributedStmt
*Stmt
) {
1076 bool Found
= FallthroughStmts
.erase(Stmt
);
1081 typedef llvm::SmallPtrSet
<const AttributedStmt
*, 8> AttrStmts
;
1083 const AttrStmts
&getFallthroughStmts() const { return FallthroughStmts
; }
1085 void fillReachableBlocks(CFG
*Cfg
) {
1086 assert(ReachableBlocks
.empty() && "ReachableBlocks already filled");
1087 std::deque
<const CFGBlock
*> BlockQueue
;
1089 ReachableBlocks
.insert(&Cfg
->getEntry());
1090 BlockQueue
.push_back(&Cfg
->getEntry());
1091 // Mark all case blocks reachable to avoid problems with switching on
1092 // constants, covered enums, etc.
1093 // These blocks can contain fall-through annotations, and we don't want to
1094 // issue a warn_fallthrough_attr_unreachable for them.
1095 for (const auto *B
: *Cfg
) {
1096 const Stmt
*L
= B
->getLabel();
1097 if (isa_and_nonnull
<SwitchCase
>(L
) && ReachableBlocks
.insert(B
).second
)
1098 BlockQueue
.push_back(B
);
1101 while (!BlockQueue
.empty()) {
1102 const CFGBlock
*P
= BlockQueue
.front();
1103 BlockQueue
.pop_front();
1104 for (const CFGBlock
*B
: P
->succs()) {
1105 if (B
&& ReachableBlocks
.insert(B
).second
)
1106 BlockQueue
.push_back(B
);
1111 bool checkFallThroughIntoBlock(const CFGBlock
&B
, int &AnnotatedCnt
,
1112 bool IsTemplateInstantiation
) {
1113 assert(!ReachableBlocks
.empty() && "ReachableBlocks empty");
1115 int UnannotatedCnt
= 0;
1118 std::deque
<const CFGBlock
*> BlockQueue(B
.pred_begin(), B
.pred_end());
1119 while (!BlockQueue
.empty()) {
1120 const CFGBlock
*P
= BlockQueue
.front();
1121 BlockQueue
.pop_front();
1125 const Stmt
*Term
= P
->getTerminatorStmt();
1126 if (isa_and_nonnull
<SwitchStmt
>(Term
))
1127 continue; // Switch statement, good.
1129 const SwitchCase
*SW
= dyn_cast_or_null
<SwitchCase
>(P
->getLabel());
1130 if (SW
&& SW
->getSubStmt() == B
.getLabel() && P
->begin() == P
->end())
1131 continue; // Previous case label has no statements, good.
1133 const LabelStmt
*L
= dyn_cast_or_null
<LabelStmt
>(P
->getLabel());
1134 if (L
&& L
->getSubStmt() == B
.getLabel() && P
->begin() == P
->end())
1135 continue; // Case label is preceded with a normal label, good.
1137 if (!ReachableBlocks
.count(P
)) {
1138 for (const CFGElement
&Elem
: llvm::reverse(*P
)) {
1139 if (std::optional
<CFGStmt
> CS
= Elem
.getAs
<CFGStmt
>()) {
1140 if (const AttributedStmt
*AS
= asFallThroughAttr(CS
->getStmt())) {
1141 // Don't issue a warning for an unreachable fallthrough
1142 // attribute in template instantiations as it may not be
1143 // unreachable in all instantiations of the template.
1144 if (!IsTemplateInstantiation
)
1145 S
.Diag(AS
->getBeginLoc(),
1146 diag::warn_unreachable_fallthrough_attr
);
1147 markFallthroughVisited(AS
);
1151 // Don't care about other unreachable statements.
1154 // If there are no unreachable statements, this may be a special
1157 // A a; // A has a destructor.
1160 // // <<<< This place is represented by a 'hanging' CFG block.
1165 const Stmt
*LastStmt
= getLastStmt(*P
);
1166 if (const AttributedStmt
*AS
= asFallThroughAttr(LastStmt
)) {
1167 markFallthroughVisited(AS
);
1169 continue; // Fallthrough annotation, good.
1172 if (!LastStmt
) { // This block contains no executable statements.
1173 // Traverse its predecessors.
1174 std::copy(P
->pred_begin(), P
->pred_end(),
1175 std::back_inserter(BlockQueue
));
1181 return !!UnannotatedCnt
;
1184 bool VisitAttributedStmt(AttributedStmt
*S
) override
{
1185 if (asFallThroughAttr(S
))
1186 FallthroughStmts
.insert(S
);
1190 bool VisitSwitchStmt(SwitchStmt
*S
) override
{
1191 FoundSwitchStatements
= true;
1195 // We don't want to traverse local type declarations. We analyze their
1196 // methods separately.
1197 bool TraverseDecl(Decl
*D
) override
{ return true; }
1199 // We analyze lambda bodies separately. Skip them here.
1200 bool TraverseLambdaExpr(LambdaExpr
*LE
) override
{
1201 // Traverse the captures, but not the body.
1202 for (const auto C
: zip(LE
->captures(), LE
->capture_inits()))
1203 TraverseLambdaCapture(LE
, &std::get
<0>(C
), std::get
<1>(C
));
1209 static const AttributedStmt
*asFallThroughAttr(const Stmt
*S
) {
1210 if (const AttributedStmt
*AS
= dyn_cast_or_null
<AttributedStmt
>(S
)) {
1211 if (hasSpecificAttr
<FallThroughAttr
>(AS
->getAttrs()))
1217 static const Stmt
*getLastStmt(const CFGBlock
&B
) {
1218 if (const Stmt
*Term
= B
.getTerminatorStmt())
1220 for (const CFGElement
&Elem
: llvm::reverse(B
))
1221 if (std::optional
<CFGStmt
> CS
= Elem
.getAs
<CFGStmt
>())
1222 return CS
->getStmt();
1223 // Workaround to detect a statement thrown out by CFGBuilder:
1224 // case X: {} case Y:
1225 // case X: ; case Y:
1226 if (const SwitchCase
*SW
= dyn_cast_or_null
<SwitchCase
>(B
.getLabel()))
1227 if (!isa
<SwitchCase
>(SW
->getSubStmt()))
1228 return SW
->getSubStmt();
1233 bool FoundSwitchStatements
;
1234 AttrStmts FallthroughStmts
;
1236 llvm::SmallPtrSet
<const CFGBlock
*, 16> ReachableBlocks
;
1238 } // anonymous namespace
1240 static StringRef
getFallthroughAttrSpelling(Preprocessor
&PP
,
1241 SourceLocation Loc
) {
1242 TokenValue FallthroughTokens
[] = {
1243 tok::l_square
, tok::l_square
,
1244 PP
.getIdentifierInfo("fallthrough"),
1245 tok::r_square
, tok::r_square
1248 TokenValue ClangFallthroughTokens
[] = {
1249 tok::l_square
, tok::l_square
, PP
.getIdentifierInfo("clang"),
1250 tok::coloncolon
, PP
.getIdentifierInfo("fallthrough"),
1251 tok::r_square
, tok::r_square
1254 bool PreferClangAttr
= !PP
.getLangOpts().CPlusPlus17
&& !PP
.getLangOpts().C23
;
1256 StringRef MacroName
;
1257 if (PreferClangAttr
)
1258 MacroName
= PP
.getLastMacroWithSpelling(Loc
, ClangFallthroughTokens
);
1259 if (MacroName
.empty())
1260 MacroName
= PP
.getLastMacroWithSpelling(Loc
, FallthroughTokens
);
1261 if (MacroName
.empty() && !PreferClangAttr
)
1262 MacroName
= PP
.getLastMacroWithSpelling(Loc
, ClangFallthroughTokens
);
1263 if (MacroName
.empty()) {
1264 if (!PreferClangAttr
)
1265 MacroName
= "[[fallthrough]]";
1266 else if (PP
.getLangOpts().CPlusPlus
)
1267 MacroName
= "[[clang::fallthrough]]";
1269 MacroName
= "__attribute__((fallthrough))";
1274 static void DiagnoseSwitchLabelsFallthrough(Sema
&S
, AnalysisDeclContext
&AC
,
1276 FallthroughMapper
FM(S
);
1277 FM
.TraverseStmt(AC
.getBody());
1279 if (!FM
.foundSwitchStatements())
1282 if (PerFunction
&& FM
.getFallthroughStmts().empty())
1285 CFG
*Cfg
= AC
.getCFG();
1290 FM
.fillReachableBlocks(Cfg
);
1292 for (const CFGBlock
*B
: llvm::reverse(*Cfg
)) {
1293 const Stmt
*Label
= B
->getLabel();
1295 if (!isa_and_nonnull
<SwitchCase
>(Label
))
1300 bool IsTemplateInstantiation
= false;
1301 if (const FunctionDecl
*Function
= dyn_cast
<FunctionDecl
>(AC
.getDecl()))
1302 IsTemplateInstantiation
= Function
->isTemplateInstantiation();
1303 if (!FM
.checkFallThroughIntoBlock(*B
, AnnotatedCnt
,
1304 IsTemplateInstantiation
))
1307 S
.Diag(Label
->getBeginLoc(),
1308 PerFunction
? diag::warn_unannotated_fallthrough_per_function
1309 : diag::warn_unannotated_fallthrough
);
1311 if (!AnnotatedCnt
) {
1312 SourceLocation L
= Label
->getBeginLoc();
1316 const Stmt
*Term
= B
->getTerminatorStmt();
1317 // Skip empty cases.
1318 while (B
->empty() && !Term
&& B
->succ_size() == 1) {
1319 B
= *B
->succ_begin();
1320 Term
= B
->getTerminatorStmt();
1322 if (!(B
->empty() && isa_and_nonnull
<BreakStmt
>(Term
))) {
1323 Preprocessor
&PP
= S
.getPreprocessor();
1324 StringRef AnnotationSpelling
= getFallthroughAttrSpelling(PP
, L
);
1325 SmallString
<64> TextToInsert(AnnotationSpelling
);
1326 TextToInsert
+= "; ";
1327 S
.Diag(L
, diag::note_insert_fallthrough_fixit
)
1328 << AnnotationSpelling
1329 << FixItHint::CreateInsertion(L
, TextToInsert
);
1331 S
.Diag(L
, diag::note_insert_break_fixit
)
1332 << FixItHint::CreateInsertion(L
, "break; ");
1336 for (const auto *F
: FM
.getFallthroughStmts())
1337 S
.Diag(F
->getBeginLoc(), diag::err_fallthrough_attr_invalid_placement
);
1340 static bool isInLoop(const ASTContext
&Ctx
, const ParentMap
&PM
,
1345 switch (S
->getStmtClass()) {
1346 case Stmt::ForStmtClass
:
1347 case Stmt::WhileStmtClass
:
1348 case Stmt::CXXForRangeStmtClass
:
1349 case Stmt::ObjCForCollectionStmtClass
:
1351 case Stmt::DoStmtClass
: {
1352 Expr::EvalResult Result
;
1353 if (!cast
<DoStmt
>(S
)->getCond()->EvaluateAsInt(Result
, Ctx
))
1355 return Result
.Val
.getInt().getBoolValue();
1360 } while ((S
= PM
.getParent(S
)));
1365 static void diagnoseRepeatedUseOfWeak(Sema
&S
,
1366 const sema::FunctionScopeInfo
*CurFn
,
1368 const ParentMap
&PM
) {
1369 typedef sema::FunctionScopeInfo::WeakObjectProfileTy WeakObjectProfileTy
;
1370 typedef sema::FunctionScopeInfo::WeakObjectUseMap WeakObjectUseMap
;
1371 typedef sema::FunctionScopeInfo::WeakUseVector WeakUseVector
;
1372 typedef std::pair
<const Stmt
*, WeakObjectUseMap::const_iterator
>
1375 ASTContext
&Ctx
= S
.getASTContext();
1377 const WeakObjectUseMap
&WeakMap
= CurFn
->getWeakObjectUses();
1379 // Extract all weak objects that are referenced more than once.
1380 SmallVector
<StmtUsesPair
, 8> UsesByStmt
;
1381 for (WeakObjectUseMap::const_iterator I
= WeakMap
.begin(), E
= WeakMap
.end();
1383 const WeakUseVector
&Uses
= I
->second
;
1385 // Find the first read of the weak object.
1386 WeakUseVector::const_iterator UI
= Uses
.begin(), UE
= Uses
.end();
1387 for ( ; UI
!= UE
; ++UI
) {
1392 // If there were only writes to this object, don't warn.
1396 // If there was only one read, followed by any number of writes, and the
1397 // read is not within a loop, don't warn. Additionally, don't warn in a
1398 // loop if the base object is a local variable -- local variables are often
1399 // changed in loops.
1400 if (UI
== Uses
.begin()) {
1401 WeakUseVector::const_iterator UI2
= UI
;
1402 for (++UI2
; UI2
!= UE
; ++UI2
)
1403 if (UI2
->isUnsafe())
1407 if (!isInLoop(Ctx
, PM
, UI
->getUseExpr()))
1410 const WeakObjectProfileTy
&Profile
= I
->first
;
1411 if (!Profile
.isExactProfile())
1414 const NamedDecl
*Base
= Profile
.getBase();
1416 Base
= Profile
.getProperty();
1417 assert(Base
&& "A profile always has a base or property.");
1419 if (const VarDecl
*BaseVar
= dyn_cast
<VarDecl
>(Base
))
1420 if (BaseVar
->hasLocalStorage() && !isa
<ParmVarDecl
>(Base
))
1425 UsesByStmt
.push_back(StmtUsesPair(UI
->getUseExpr(), I
));
1428 if (UsesByStmt
.empty())
1431 // Sort by first use so that we emit the warnings in a deterministic order.
1432 SourceManager
&SM
= S
.getSourceManager();
1433 llvm::sort(UsesByStmt
,
1434 [&SM
](const StmtUsesPair
&LHS
, const StmtUsesPair
&RHS
) {
1435 return SM
.isBeforeInTranslationUnit(LHS
.first
->getBeginLoc(),
1436 RHS
.first
->getBeginLoc());
1439 // Classify the current code body for better warning text.
1440 // This enum should stay in sync with the cases in
1441 // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
1442 // FIXME: Should we use a common classification enum and the same set of
1443 // possibilities all throughout Sema?
1451 if (isa
<sema::BlockScopeInfo
>(CurFn
))
1452 FunctionKind
= Block
;
1453 else if (isa
<sema::LambdaScopeInfo
>(CurFn
))
1454 FunctionKind
= Lambda
;
1455 else if (isa
<ObjCMethodDecl
>(D
))
1456 FunctionKind
= Method
;
1458 FunctionKind
= Function
;
1460 // Iterate through the sorted problems and emit warnings for each.
1461 for (const auto &P
: UsesByStmt
) {
1462 const Stmt
*FirstRead
= P
.first
;
1463 const WeakObjectProfileTy
&Key
= P
.second
->first
;
1464 const WeakUseVector
&Uses
= P
.second
->second
;
1466 // For complicated expressions like 'a.b.c' and 'x.b.c', WeakObjectProfileTy
1467 // may not contain enough information to determine that these are different
1468 // properties. We can only be 100% sure of a repeated use in certain cases,
1469 // and we adjust the diagnostic kind accordingly so that the less certain
1470 // case can be turned off if it is too noisy.
1472 if (Key
.isExactProfile())
1473 DiagKind
= diag::warn_arc_repeated_use_of_weak
;
1475 DiagKind
= diag::warn_arc_possible_repeated_use_of_weak
;
1477 // Classify the weak object being accessed for better warning text.
1478 // This enum should stay in sync with the cases in
1479 // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
1487 const NamedDecl
*KeyProp
= Key
.getProperty();
1488 if (isa
<VarDecl
>(KeyProp
))
1489 ObjectKind
= Variable
;
1490 else if (isa
<ObjCPropertyDecl
>(KeyProp
))
1491 ObjectKind
= Property
;
1492 else if (isa
<ObjCMethodDecl
>(KeyProp
))
1493 ObjectKind
= ImplicitProperty
;
1494 else if (isa
<ObjCIvarDecl
>(KeyProp
))
1497 llvm_unreachable("Unexpected weak object kind!");
1499 // Do not warn about IBOutlet weak property receivers being set to null
1500 // since they are typically only used from the main thread.
1501 if (const ObjCPropertyDecl
*Prop
= dyn_cast
<ObjCPropertyDecl
>(KeyProp
))
1502 if (Prop
->hasAttr
<IBOutletAttr
>())
1505 // Show the first time the object was read.
1506 S
.Diag(FirstRead
->getBeginLoc(), DiagKind
)
1507 << int(ObjectKind
) << KeyProp
<< int(FunctionKind
)
1508 << FirstRead
->getSourceRange();
1510 // Print all the other accesses as notes.
1511 for (const auto &Use
: Uses
) {
1512 if (Use
.getUseExpr() == FirstRead
)
1514 S
.Diag(Use
.getUseExpr()->getBeginLoc(),
1515 diag::note_arc_weak_also_accessed_here
)
1516 << Use
.getUseExpr()->getSourceRange();
1523 typedef SmallVector
<PartialDiagnosticAt
, 1> OptionalNotes
;
1524 typedef std::pair
<PartialDiagnosticAt
, OptionalNotes
> DelayedDiag
;
1525 typedef std::list
<DelayedDiag
> DiagList
;
1527 struct SortDiagBySourceLocation
{
1529 SortDiagBySourceLocation(SourceManager
&SM
) : SM(SM
) {}
1531 bool operator()(const DelayedDiag
&left
, const DelayedDiag
&right
) {
1532 // Although this call will be slow, this is only called when outputting
1533 // multiple warnings.
1534 return SM
.isBeforeInTranslationUnit(left
.first
.first
, right
.first
.first
);
1537 } // anonymous namespace
1538 } // namespace clang
1541 class UninitValsDiagReporter
: public UninitVariablesHandler
{
1543 typedef SmallVector
<UninitUse
, 2> UsesVec
;
1544 typedef llvm::PointerIntPair
<UsesVec
*, 1, bool> MappedType
;
1545 // Prefer using MapVector to DenseMap, so that iteration order will be
1546 // the same as insertion order. This is needed to obtain a deterministic
1547 // order of diagnostics when calling flushDiagnostics().
1548 typedef llvm::MapVector
<const VarDecl
*, MappedType
> UsesMap
;
1550 UsesMap constRefUses
;
1553 UninitValsDiagReporter(Sema
&S
) : S(S
) {}
1554 ~UninitValsDiagReporter() override
{ flushDiagnostics(); }
1556 MappedType
&getUses(UsesMap
&um
, const VarDecl
*vd
) {
1557 MappedType
&V
= um
[vd
];
1558 if (!V
.getPointer())
1559 V
.setPointer(new UsesVec());
1563 void handleUseOfUninitVariable(const VarDecl
*vd
,
1564 const UninitUse
&use
) override
{
1565 getUses(uses
, vd
).getPointer()->push_back(use
);
1568 void handleConstRefUseOfUninitVariable(const VarDecl
*vd
,
1569 const UninitUse
&use
) override
{
1570 getUses(constRefUses
, vd
).getPointer()->push_back(use
);
1573 void handleSelfInit(const VarDecl
*vd
) override
{
1574 getUses(uses
, vd
).setInt(true);
1575 getUses(constRefUses
, vd
).setInt(true);
1578 void flushDiagnostics() {
1579 for (const auto &P
: uses
) {
1580 const VarDecl
*vd
= P
.first
;
1581 const MappedType
&V
= P
.second
;
1583 UsesVec
*vec
= V
.getPointer();
1584 bool hasSelfInit
= V
.getInt();
1586 // Specially handle the case where we have uses of an uninitialized
1587 // variable, but the root cause is an idiomatic self-init. We want
1588 // to report the diagnostic at the self-init since that is the root cause.
1589 if (!vec
->empty() && hasSelfInit
&& hasAlwaysUninitializedUse(vec
))
1590 DiagnoseUninitializedUse(S
, vd
,
1591 UninitUse(vd
->getInit()->IgnoreParenCasts(),
1592 /* isAlwaysUninit */ true),
1593 /* alwaysReportSelfInit */ true);
1595 // Sort the uses by their SourceLocations. While not strictly
1596 // guaranteed to produce them in line/column order, this will provide
1597 // a stable ordering.
1598 llvm::sort(*vec
, [](const UninitUse
&a
, const UninitUse
&b
) {
1599 // Prefer a more confident report over a less confident one.
1600 if (a
.getKind() != b
.getKind())
1601 return a
.getKind() > b
.getKind();
1602 return a
.getUser()->getBeginLoc() < b
.getUser()->getBeginLoc();
1605 for (const auto &U
: *vec
) {
1606 // If we have self-init, downgrade all uses to 'may be uninitialized'.
1607 UninitUse Use
= hasSelfInit
? UninitUse(U
.getUser(), false) : U
;
1609 if (DiagnoseUninitializedUse(S
, vd
, Use
))
1610 // Skip further diagnostics for this variable. We try to warn only
1611 // on the first point at which a variable is used uninitialized.
1616 // Release the uses vector.
1622 // Flush all const reference uses diags.
1623 for (const auto &P
: constRefUses
) {
1624 const VarDecl
*vd
= P
.first
;
1625 const MappedType
&V
= P
.second
;
1627 UsesVec
*vec
= V
.getPointer();
1628 bool hasSelfInit
= V
.getInt();
1630 if (!vec
->empty() && hasSelfInit
&& hasAlwaysUninitializedUse(vec
))
1631 DiagnoseUninitializedUse(S
, vd
,
1632 UninitUse(vd
->getInit()->IgnoreParenCasts(),
1633 /* isAlwaysUninit */ true),
1634 /* alwaysReportSelfInit */ true);
1636 for (const auto &U
: *vec
) {
1637 if (DiagnoseUninitializedConstRefUse(S
, vd
, U
))
1642 // Release the uses vector.
1646 constRefUses
.clear();
1650 static bool hasAlwaysUninitializedUse(const UsesVec
* vec
) {
1651 return llvm::any_of(*vec
, [](const UninitUse
&U
) {
1652 return U
.getKind() == UninitUse::Always
||
1653 U
.getKind() == UninitUse::AfterCall
||
1654 U
.getKind() == UninitUse::AfterDecl
;
1659 /// Inter-procedural data for the called-once checker.
1660 class CalledOnceInterProceduralData
{
1662 // Add the delayed warning for the given block.
1663 void addDelayedWarning(const BlockDecl
*Block
,
1664 PartialDiagnosticAt
&&Warning
) {
1665 DelayedBlockWarnings
[Block
].emplace_back(std::move(Warning
));
1667 // Report all of the warnings we've gathered for the given block.
1668 void flushWarnings(const BlockDecl
*Block
, Sema
&S
) {
1669 for (const PartialDiagnosticAt
&Delayed
: DelayedBlockWarnings
[Block
])
1670 S
.Diag(Delayed
.first
, Delayed
.second
);
1672 discardWarnings(Block
);
1674 // Discard all of the warnings we've gathered for the given block.
1675 void discardWarnings(const BlockDecl
*Block
) {
1676 DelayedBlockWarnings
.erase(Block
);
1680 using DelayedDiagnostics
= SmallVector
<PartialDiagnosticAt
, 2>;
1681 llvm::DenseMap
<const BlockDecl
*, DelayedDiagnostics
> DelayedBlockWarnings
;
1684 class CalledOnceCheckReporter
: public CalledOnceCheckHandler
{
1686 CalledOnceCheckReporter(Sema
&S
, CalledOnceInterProceduralData
&Data
)
1687 : S(S
), Data(Data
) {}
1688 void handleDoubleCall(const ParmVarDecl
*Parameter
, const Expr
*Call
,
1689 const Expr
*PrevCall
, bool IsCompletionHandler
,
1690 bool Poised
) override
{
1691 auto DiagToReport
= IsCompletionHandler
1692 ? diag::warn_completion_handler_called_twice
1693 : diag::warn_called_once_gets_called_twice
;
1694 S
.Diag(Call
->getBeginLoc(), DiagToReport
) << Parameter
;
1695 S
.Diag(PrevCall
->getBeginLoc(), diag::note_called_once_gets_called_twice
)
1699 void handleNeverCalled(const ParmVarDecl
*Parameter
,
1700 bool IsCompletionHandler
) override
{
1701 auto DiagToReport
= IsCompletionHandler
1702 ? diag::warn_completion_handler_never_called
1703 : diag::warn_called_once_never_called
;
1704 S
.Diag(Parameter
->getBeginLoc(), DiagToReport
)
1705 << Parameter
<< /* Captured */ false;
1708 void handleNeverCalled(const ParmVarDecl
*Parameter
, const Decl
*Function
,
1709 const Stmt
*Where
, NeverCalledReason Reason
,
1710 bool IsCalledDirectly
,
1711 bool IsCompletionHandler
) override
{
1712 auto DiagToReport
= IsCompletionHandler
1713 ? diag::warn_completion_handler_never_called_when
1714 : diag::warn_called_once_never_called_when
;
1715 PartialDiagnosticAt
Warning(Where
->getBeginLoc(), S
.PDiag(DiagToReport
)
1718 << (unsigned)Reason
);
1720 if (const auto *Block
= dyn_cast
<BlockDecl
>(Function
)) {
1721 // We shouldn't report these warnings on blocks immediately
1722 Data
.addDelayedWarning(Block
, std::move(Warning
));
1724 S
.Diag(Warning
.first
, Warning
.second
);
1728 void handleCapturedNeverCalled(const ParmVarDecl
*Parameter
,
1730 bool IsCompletionHandler
) override
{
1731 auto DiagToReport
= IsCompletionHandler
1732 ? diag::warn_completion_handler_never_called
1733 : diag::warn_called_once_never_called
;
1734 S
.Diag(Where
->getBeginLoc(), DiagToReport
)
1735 << Parameter
<< /* Captured */ true;
1739 handleBlockThatIsGuaranteedToBeCalledOnce(const BlockDecl
*Block
) override
{
1740 Data
.flushWarnings(Block
, S
);
1743 void handleBlockWithNoGuarantees(const BlockDecl
*Block
) override
{
1744 Data
.discardWarnings(Block
);
1749 CalledOnceInterProceduralData
&Data
;
1752 constexpr unsigned CalledOnceWarnings
[] = {
1753 diag::warn_called_once_never_called
,
1754 diag::warn_called_once_never_called_when
,
1755 diag::warn_called_once_gets_called_twice
};
1757 constexpr unsigned CompletionHandlerWarnings
[]{
1758 diag::warn_completion_handler_never_called
,
1759 diag::warn_completion_handler_never_called_when
,
1760 diag::warn_completion_handler_called_twice
};
1762 bool shouldAnalyzeCalledOnceImpl(llvm::ArrayRef
<unsigned> DiagIDs
,
1763 const DiagnosticsEngine
&Diags
,
1764 SourceLocation At
) {
1765 return llvm::any_of(DiagIDs
, [&Diags
, At
](unsigned DiagID
) {
1766 return !Diags
.isIgnored(DiagID
, At
);
1770 bool shouldAnalyzeCalledOnceConventions(const DiagnosticsEngine
&Diags
,
1771 SourceLocation At
) {
1772 return shouldAnalyzeCalledOnceImpl(CompletionHandlerWarnings
, Diags
, At
);
1775 bool shouldAnalyzeCalledOnceParameters(const DiagnosticsEngine
&Diags
,
1776 SourceLocation At
) {
1777 return shouldAnalyzeCalledOnceImpl(CalledOnceWarnings
, Diags
, At
) ||
1778 shouldAnalyzeCalledOnceConventions(Diags
, At
);
1780 } // anonymous namespace
1782 //===----------------------------------------------------------------------===//
1784 //===----------------------------------------------------------------------===//
namespace clang {
namespace threadSafety {
namespace {
class ThreadSafetyReporter : public clang::threadSafety::ThreadSafetyHandler {
  Sema &S;
  DiagList Warnings;
  SourceLocation FunLocation, FunEndLocation;

  const FunctionDecl *CurrentFunction;
  bool Verbose;

  OptionalNotes getNotes() const {
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      return OptionalNotes(1, FNote);
    }
    return OptionalNotes();
  }

  OptionalNotes getNotes(const PartialDiagnosticAt &Note) const {
    OptionalNotes ONS(1, Note);
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      ONS.push_back(std::move(FNote));
    }
    return ONS;
  }

  OptionalNotes getNotes(const PartialDiagnosticAt &Note1,
                         const PartialDiagnosticAt &Note2) const {
    OptionalNotes ONS;
    ONS.push_back(Note1);
    ONS.push_back(Note2);
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      ONS.push_back(std::move(FNote));
    }
    return ONS;
  }

  OptionalNotes makeLockedHereNote(SourceLocation LocLocked, StringRef Kind) {
    return LocLocked.isValid()
               ? getNotes(PartialDiagnosticAt(
                     LocLocked, S.PDiag(diag::note_locked_here) << Kind))
               : getNotes();
  }

  OptionalNotes makeUnlockedHereNote(SourceLocation LocUnlocked,
                                     StringRef Kind) {
    return LocUnlocked.isValid()
               ? getNotes(PartialDiagnosticAt(
                     LocUnlocked, S.PDiag(diag::note_unlocked_here) << Kind))
               : getNotes();
  }

public:
  ThreadSafetyReporter(Sema &S, SourceLocation FL, SourceLocation FEL)
      : S(S), FunLocation(FL), FunEndLocation(FEL),
        CurrentFunction(nullptr), Verbose(false) {}

  void setVerbose(bool b) { Verbose = b; }
  /// Emit all buffered diagnostics in order of source location.
  /// We need to output diagnostics produced while iterating through
  /// the lockset in deterministic order, so this function orders diagnostics
  /// and outputs them.
  void emitDiagnostics() {
    Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
    for (const auto &Diag : Warnings) {
      S.Diag(Diag.first.first, Diag.first.second);
      for (const auto &Note : Diag.second)
        S.Diag(Note.first, Note.second);
    }
  }
  void handleInvalidLockExp(SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_cannot_resolve_lock)
                                         << Loc);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void handleUnmatchedUnlock(StringRef Kind, Name LockName, SourceLocation Loc,
                             SourceLocation LocPreviousUnlock) override {
    if (Loc.isInvalid())
      Loc = FunLocation;
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_unlock_but_no_lock)
                                         << Kind << LockName);
    Warnings.emplace_back(std::move(Warning),
                          makeUnlockedHereNote(LocPreviousUnlock, Kind));
  }

  void handleIncorrectUnlockKind(StringRef Kind, Name LockName,
                                 LockKind Expected, LockKind Received,
                                 SourceLocation LocLocked,
                                 SourceLocation LocUnlock) override {
    if (LocUnlock.isInvalid())
      LocUnlock = FunLocation;
    PartialDiagnosticAt Warning(
        LocUnlock, S.PDiag(diag::warn_unlock_kind_mismatch)
                       << Kind << LockName << Received << Expected);
    Warnings.emplace_back(std::move(Warning),
                          makeLockedHereNote(LocLocked, Kind));
  }

  void handleDoubleLock(StringRef Kind, Name LockName, SourceLocation LocLocked,
                        SourceLocation LocDoubleLock) override {
    if (LocDoubleLock.isInvalid())
      LocDoubleLock = FunLocation;
    PartialDiagnosticAt Warning(LocDoubleLock, S.PDiag(diag::warn_double_lock)
                                                   << Kind << LockName);
    Warnings.emplace_back(std::move(Warning),
                          makeLockedHereNote(LocLocked, Kind));
  }

  void handleMutexHeldEndOfScope(StringRef Kind, Name LockName,
                                 SourceLocation LocLocked,
                                 SourceLocation LocEndOfScope,
                                 LockErrorKind LEK) override {
    unsigned DiagID = 0;
    switch (LEK) {
    case LEK_LockedSomePredecessors:
      DiagID = diag::warn_lock_some_predecessors;
      break;
    case LEK_LockedSomeLoopIterations:
      DiagID = diag::warn_expecting_lock_held_on_loop;
      break;
    case LEK_LockedAtEndOfFunction:
      DiagID = diag::warn_no_unlock;
      break;
    case LEK_NotLockedAtEndOfFunction:
      DiagID = diag::warn_expecting_locked;
      break;
    }
    if (LocEndOfScope.isInvalid())
      LocEndOfScope = FunEndLocation;

    PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID) << Kind
                                                               << LockName);
    Warnings.emplace_back(std::move(Warning),
                          makeLockedHereNote(LocLocked, Kind));
  }
  void handleExclusiveAndShared(StringRef Kind, Name LockName,
                                SourceLocation Loc1,
                                SourceLocation Loc2) override {
    PartialDiagnosticAt Warning(Loc1,
                                S.PDiag(diag::warn_lock_exclusive_and_shared)
                                    << Kind << LockName);
    PartialDiagnosticAt Note(Loc2, S.PDiag(diag::note_lock_exclusive_and_shared)
                                       << Kind << LockName);
    Warnings.emplace_back(std::move(Warning), getNotes(Note));
  }

  void handleNoMutexHeld(const NamedDecl *D, ProtectedOperationKind POK,
                         AccessKind AK, SourceLocation Loc) override {
    assert((POK == POK_VarAccess || POK == POK_VarDereference) &&
           "Only works for variables");
    unsigned DiagID = POK == POK_VarAccess
                          ? diag::warn_variable_requires_any_lock
                          : diag::warn_var_deref_requires_any_lock;
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
                                         << D << getLockKindFromAccessKind(AK));
    Warnings.emplace_back(std::move(Warning), getNotes());
  }
  void handleMutexNotHeld(StringRef Kind, const NamedDecl *D,
                          ProtectedOperationKind POK, Name LockName,
                          LockKind LK, SourceLocation Loc,
                          Name *PossibleMatch) override {
    unsigned DiagID = 0;
    if (PossibleMatch) {
      switch (POK) {
      case POK_VarAccess:
        DiagID = diag::warn_variable_requires_lock_precise;
        break;
      case POK_VarDereference:
        DiagID = diag::warn_var_deref_requires_lock_precise;
        break;
      case POK_FunctionCall:
        DiagID = diag::warn_fun_requires_lock_precise;
        break;
      case POK_PassByRef:
        DiagID = diag::warn_guarded_pass_by_reference;
        break;
      case POK_PtPassByRef:
        DiagID = diag::warn_pt_guarded_pass_by_reference;
        break;
      case POK_ReturnByRef:
        DiagID = diag::warn_guarded_return_by_reference;
        break;
      case POK_PtReturnByRef:
        DiagID = diag::warn_pt_guarded_return_by_reference;
        break;
      }
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind << D
                                                       << LockName << LK);
      PartialDiagnosticAt Note(Loc, S.PDiag(diag::note_found_mutex_near_match)
                                        << *PossibleMatch);
      if (Verbose && POK == POK_VarAccess) {
        PartialDiagnosticAt VNote(D->getLocation(),
                                  S.PDiag(diag::note_guarded_by_declared_here)
                                      << D->getDeclName());
        Warnings.emplace_back(std::move(Warning), getNotes(Note, VNote));
      } else
        Warnings.emplace_back(std::move(Warning), getNotes(Note));
    } else {
      switch (POK) {
      case POK_VarAccess:
        DiagID = diag::warn_variable_requires_lock;
        break;
      case POK_VarDereference:
        DiagID = diag::warn_var_deref_requires_lock;
        break;
      case POK_FunctionCall:
        DiagID = diag::warn_fun_requires_lock;
        break;
      case POK_PassByRef:
        DiagID = diag::warn_guarded_pass_by_reference;
        break;
      case POK_PtPassByRef:
        DiagID = diag::warn_pt_guarded_pass_by_reference;
        break;
      case POK_ReturnByRef:
        DiagID = diag::warn_guarded_return_by_reference;
        break;
      case POK_PtReturnByRef:
        DiagID = diag::warn_pt_guarded_return_by_reference;
        break;
      }
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind << D
                                                       << LockName << LK);
      if (Verbose && POK == POK_VarAccess) {
        PartialDiagnosticAt Note(D->getLocation(),
                                 S.PDiag(diag::note_guarded_by_declared_here));
        Warnings.emplace_back(std::move(Warning), getNotes(Note));
      } else
        Warnings.emplace_back(std::move(Warning), getNotes());
    }
  }
  void handleNegativeNotHeld(StringRef Kind, Name LockName, Name Neg,
                             SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc,
                                S.PDiag(diag::warn_acquire_requires_negative_cap)
                                    << Kind << LockName << Neg);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void handleNegativeNotHeld(const NamedDecl *D, Name LockName,
                             SourceLocation Loc) override {
    PartialDiagnosticAt Warning(
        Loc, S.PDiag(diag::warn_fun_requires_negative_cap) << D << LockName);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void handleFunExcludesLock(StringRef Kind, Name FunName, Name LockName,
                             SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_fun_excludes_mutex)
                                         << Kind << FunName << LockName);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void handleLockAcquiredBefore(StringRef Kind, Name L1Name, Name L2Name,
                                SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc,
                                S.PDiag(diag::warn_acquired_before)
                                    << Kind << L1Name << L2Name);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void handleBeforeAfterCycle(Name L1Name, SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc,
                                S.PDiag(diag::warn_acquired_before_after_cycle)
                                    << L1Name);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void enterFunction(const FunctionDecl *FD) override {
    CurrentFunction = FD;
  }

  void leaveFunction(const FunctionDecl *FD) override {
    CurrentFunction = nullptr;
  }
};
} // anonymous namespace
} // namespace threadSafety
} // namespace clang
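
// As a rough illustration (not a test case from this file), the reporter
// above turns facts computed by the thread-safety analysis into buffered
// diagnostics for code such as:
//
//   class __attribute__((capability("mutex"))) Mutex {
//   public:
//     void lock() __attribute__((acquire_capability()));
//     void unlock() __attribute__((release_capability()));
//   };
//
//   Mutex M;
//   void f() {
//     M.lock();
//     M.lock();    // handleDoubleLock -> -Wthread-safety-analysis
//   }              // handleMutexHeldEndOfScope: 'M' is still held at the end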
//===----------------------------------------------------------------------===//
// Consumed analysis.
//===----------------------------------------------------------------------===//
namespace clang {
namespace consumed {
namespace {
class ConsumedWarningsHandler : public ConsumedWarningsHandlerBase {
  Sema &S;
  DiagList Warnings;

public:
  ConsumedWarningsHandler(Sema &S) : S(S) {}

  void emitDiagnostics() override {
    Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
    for (const auto &Diag : Warnings) {
      S.Diag(Diag.first.first, Diag.first.second);
      for (const auto &Note : Diag.second)
        S.Diag(Note.first, Note.second);
    }
  }
  void warnLoopStateMismatch(SourceLocation Loc,
                             StringRef VariableName) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_loop_state_mismatch)
                                         << VariableName);

    Warnings.emplace_back(std::move(Warning), OptionalNotes());
  }

  void warnParamReturnTypestateMismatch(SourceLocation Loc,
                                        StringRef VariableName,
                                        StringRef ExpectedState,
                                        StringRef ObservedState) override {
    PartialDiagnosticAt Warning(
        Loc, S.PDiag(diag::warn_param_return_typestate_mismatch)
                 << VariableName << ExpectedState << ObservedState);

    Warnings.emplace_back(std::move(Warning), OptionalNotes());
  }

  void warnParamTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
                                  StringRef ObservedState) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_param_typestate_mismatch)
                                         << ExpectedState << ObservedState);

    Warnings.emplace_back(std::move(Warning), OptionalNotes());
  }

  void warnReturnTypestateForUnconsumableType(SourceLocation Loc,
                                              StringRef TypeName) override {
    PartialDiagnosticAt Warning(
        Loc, S.PDiag(diag::warn_return_typestate_for_unconsumable_type)
                 << TypeName);

    Warnings.emplace_back(std::move(Warning), OptionalNotes());
  }

  void warnReturnTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
                                   StringRef ObservedState) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_return_typestate_mismatch)
                                         << ExpectedState << ObservedState);

    Warnings.emplace_back(std::move(Warning), OptionalNotes());
  }

  void warnUseOfTempInInvalidState(StringRef MethodName, StringRef State,
                                   SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_use_of_temp_in_invalid_state)
                                         << MethodName << State);

    Warnings.emplace_back(std::move(Warning), OptionalNotes());
  }

  void warnUseInInvalidState(StringRef MethodName, StringRef VariableName,
                             StringRef State, SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_use_in_invalid_state)
                                         << MethodName << VariableName << State);

    Warnings.emplace_back(std::move(Warning), OptionalNotes());
  }
};
} // anonymous namespace
} // namespace consumed
} // namespace clang
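
// For orientation only: the consumed analysis drives these callbacks for
// types carrying the consumable attributes, e.g. a sketch like
//
//   class __attribute__((consumable(unconsumed))) File {
//   public:
//     void close() __attribute__((set_typestate(consumed)));
//     int read() __attribute__((callable_when("unconsumed")));
//   };
//
//   void f(File &F) {
//     F.close();
//     F.read();   // warnUseInInvalidState -> -Wconsumed
//   }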
//===----------------------------------------------------------------------===//
// Unsafe buffer usage analysis.
//===----------------------------------------------------------------------===//
namespace {
class UnsafeBufferUsageReporter : public UnsafeBufferUsageHandler {
  Sema &S;
  bool SuggestSuggestions; // Recommend -fsafe-buffer-usage-suggestions?

  // Lists as a string the names of variables in `VarGroupForVD` except for
  // `VD` itself:
  std::string listVariableGroupAsString(
      const VarDecl *VD, const ArrayRef<const VarDecl *> &VarGroupForVD) const {
    if (VarGroupForVD.size() <= 1)
      return "";

    std::vector<StringRef> VarNames;
    auto PutInQuotes = [](StringRef S) -> std::string {
      return "'" + S.str() + "'";
    };

    for (auto *V : VarGroupForVD) {
      if (V == VD)
        continue;
      VarNames.push_back(V->getName());
    }
    if (VarNames.size() == 1) {
      return PutInQuotes(VarNames[0]);
    }
    if (VarNames.size() == 2) {
      return PutInQuotes(VarNames[0]) + " and " + PutInQuotes(VarNames[1]);
    }
    assert(VarGroupForVD.size() > 3);
    const unsigned N = VarNames.size() -
                       2; // need to print the last two names as "..., X, and Y"
    std::string AllVars = "";

    for (unsigned I = 0; I < N; ++I)
      AllVars.append(PutInQuotes(VarNames[I]) + ", ");
    AllVars.append(PutInQuotes(VarNames[N]) + ", and " +
                   PutInQuotes(VarNames[N + 1]));
    return AllVars;
  }

public:
  UnsafeBufferUsageReporter(Sema &S, bool SuggestSuggestions)
      : S(S), SuggestSuggestions(SuggestSuggestions) {}
  void handleUnsafeOperation(const Stmt *Operation, bool IsRelatedToDecl,
                             ASTContext &Ctx) override {
    SourceLocation Loc;
    SourceRange Range;
    unsigned MsgParam = 0;
    NamedDecl *D = nullptr;
    if (const auto *ASE = dyn_cast<ArraySubscriptExpr>(Operation)) {
      Loc = ASE->getBase()->getExprLoc();
      Range = ASE->getBase()->getSourceRange();
    } else if (const auto *BO = dyn_cast<BinaryOperator>(Operation)) {
      BinaryOperator::Opcode Op = BO->getOpcode();
      if (Op == BO_Add || Op == BO_AddAssign || Op == BO_Sub ||
          Op == BO_SubAssign) {
        if (BO->getRHS()->getType()->isIntegerType()) {
          Loc = BO->getLHS()->getExprLoc();
          Range = BO->getLHS()->getSourceRange();
        } else {
          Loc = BO->getRHS()->getExprLoc();
          Range = BO->getRHS()->getSourceRange();
        }
      }
    } else if (const auto *UO = dyn_cast<UnaryOperator>(Operation)) {
      UnaryOperator::Opcode Op = UO->getOpcode();
      if (Op == UO_PreInc || Op == UO_PreDec || Op == UO_PostInc ||
          Op == UO_PostDec) {
        Loc = UO->getSubExpr()->getExprLoc();
        Range = UO->getSubExpr()->getSourceRange();
      }
    } else {
      if (isa<CallExpr>(Operation) || isa<CXXConstructExpr>(Operation)) {
        // note_unsafe_buffer_operation doesn't have this mode yet.
        assert(!IsRelatedToDecl && "Not implemented yet!");
      } else if (isa<MemberExpr>(Operation)) {
        // note_unsafe_buffer_operation doesn't have this mode yet.
        assert(!IsRelatedToDecl && "Not implemented yet!");
        auto *ME = cast<MemberExpr>(Operation);
        D = ME->getMemberDecl();
      } else if (const auto *ECE = dyn_cast<ExplicitCastExpr>(Operation)) {
        QualType destType = ECE->getType();
        bool destTypeComplete = true;

        if (!isa<PointerType>(destType))
          return;
        destType = destType.getTypePtr()->getPointeeType();
        if (const auto *D = destType->getAsTagDecl())
          destTypeComplete = D->isCompleteDefinition();

        // If destination type is incomplete, it is unsafe to cast to anyway, no
        // need to check its type:
        if (destTypeComplete) {
          const uint64_t dSize = Ctx.getTypeSize(destType);
          QualType srcType = ECE->getSubExpr()->getType();

          assert(srcType->isPointerType());

          const uint64_t sSize =
              Ctx.getTypeSize(srcType.getTypePtr()->getPointeeType());

          if (sSize >= dSize)
            return;
        }
        if (const auto *CE = dyn_cast<CXXMemberCallExpr>(
                ECE->getSubExpr()->IgnoreParens())) {
          D = CE->getMethodDecl();
        }
      }
      Loc = Operation->getBeginLoc();
      Range = Operation->getSourceRange();
    }
    if (IsRelatedToDecl) {
      assert(!SuggestSuggestions &&
             "Variables blamed for unsafe buffer usage without suggestions!");
      S.Diag(Loc, diag::note_unsafe_buffer_operation) << MsgParam << Range;
    } else {
      if (D) {
        S.Diag(Loc, diag::warn_unsafe_buffer_operation)
            << MsgParam << D << Range;
      } else {
        S.Diag(Loc, diag::warn_unsafe_buffer_operation) << MsgParam << Range;
      }
      if (SuggestSuggestions) {
        S.Diag(Loc, diag::note_safe_buffer_usage_suggestions_disabled);
      }
    }
  }
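
  // A rough sketch of what this handler reports (illustrative, not a test
  // from this file): with -Wunsafe-buffer-usage enabled,
  //
  //   void f(int *p, unsigned n) {
  //     p[5] = 0;   // warn: unsafe buffer access (subscript on a raw pointer)
  //     p += n;     // warn: unsafe pointer arithmetic
  //   }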
  void handleUnsafeLibcCall(const CallExpr *Call, unsigned PrintfInfo,
                            ASTContext &Ctx,
                            const Expr *UnsafeArg = nullptr) override {
    S.Diag(Call->getBeginLoc(), diag::warn_unsafe_buffer_libc_call)
        << Call->getDirectCallee() // We've checked there is a direct callee
        << Call->getSourceRange();
    if (PrintfInfo > 0) {
      SourceRange R =
          UnsafeArg ? UnsafeArg->getSourceRange() : Call->getSourceRange();
      S.Diag(R.getBegin(), diag::note_unsafe_buffer_printf_call)
          << PrintfInfo << R;
    }
  }
  void handleUnsafeOperationInContainer(const Stmt *Operation,
                                        bool IsRelatedToDecl,
                                        ASTContext &Ctx) override {
    SourceLocation Loc;
    SourceRange Range;
    unsigned MsgParam = 0;

    // This function only handles SpanTwoParamConstructorGadget so far, which
    // always gives a CXXConstructExpr.
    const auto *CtorExpr = cast<CXXConstructExpr>(Operation);
    Loc = CtorExpr->getLocation();
    Range = CtorExpr->getSourceRange();

    S.Diag(Loc, diag::warn_unsafe_buffer_usage_in_container);
    if (IsRelatedToDecl) {
      assert(!SuggestSuggestions &&
             "Variables blamed for unsafe buffer usage without suggestions!");
      S.Diag(Loc, diag::note_unsafe_buffer_operation) << MsgParam << Range;
    }
  }
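
  // Concretely, the container check above is aimed at initializations along
  // the lines of (sketch only):
  //
  //   void g(int *p, size_t n) {
  //     std::span<int> s{p, n};   // warn: the two-parameter span constructor
  //                               // is unsafe unless the size is known to
  //                               // match the buffer
  //   }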
  void handleUnsafeVariableGroup(const VarDecl *Variable,
                                 const VariableGroupsManager &VarGrpMgr,
                                 FixItList &&Fixes, const Decl *D,
                                 const FixitStrategy &VarTargetTypes) override {
    assert(!SuggestSuggestions &&
           "Unsafe buffer usage fixits displayed without suggestions!");
    S.Diag(Variable->getLocation(), diag::warn_unsafe_buffer_variable)
        << Variable << (Variable->getType()->isPointerType() ? 0 : 1)
        << Variable->getSourceRange();
    if (!Fixes.empty()) {
      assert(isa<NamedDecl>(D) &&
             "Fix-its are generated only for `NamedDecl`s");
      const NamedDecl *ND = cast<NamedDecl>(D);
      bool BriefMsg = false;
      // If the variable group involves parameters, the diagnostic message will
      // NOT explain how the variables are grouped as the reason is non-trivial
      // and irrelevant to users' experience:
      const auto VarGroupForVD = VarGrpMgr.getGroupOfVar(Variable, &BriefMsg);
      unsigned FixItStrategy = 0;
      switch (VarTargetTypes.lookup(Variable)) {
      case clang::FixitStrategy::Kind::Span:
        FixItStrategy = 0;
        break;
      case clang::FixitStrategy::Kind::Array:
        FixItStrategy = 1;
        break;
      default:
        assert(false && "We support only std::span and std::array");
      }

      const auto &FD =
          S.Diag(Variable->getLocation(),
                 BriefMsg ? diag::note_unsafe_buffer_variable_fixit_together
                          : diag::note_unsafe_buffer_variable_fixit_group);

      FD << Variable << FixItStrategy;
      FD << listVariableGroupAsString(Variable, VarGroupForVD)
         << (VarGroupForVD.size() > 1) << ND;
      for (const auto &F : Fixes) {
        FD << F;
      }
    }

#ifndef NDEBUG
    if (areDebugNotesRequested())
      for (const DebugNote &Note : DebugNotesByVar[Variable])
        S.Diag(Note.first, diag::note_safe_buffer_debug_mode) << Note.second;
#endif
  }
  bool isSafeBufferOptOut(const SourceLocation &Loc) const override {
    return S.PP.isSafeBufferOptOut(S.getSourceManager(), Loc);
  }

  bool ignoreUnsafeBufferInContainer(const SourceLocation &Loc) const override {
    return S.Diags.isIgnored(diag::warn_unsafe_buffer_usage_in_container, Loc);
  }

  bool ignoreUnsafeBufferInLibcCall(const SourceLocation &Loc) const override {
    return S.Diags.isIgnored(diag::warn_unsafe_buffer_libc_call, Loc);
  }
  // Returns the text representation of the clang::unsafe_buffer_usage
  // attribute. `WSSuffix` holds a customized trailing "white-space", e.g., a
  // newline or a space:
  std::string
  getUnsafeBufferUsageAttributeTextAt(SourceLocation Loc,
                                      StringRef WSSuffix = "") const override {
    Preprocessor &PP = S.getPreprocessor();
    TokenValue ClangUnsafeBufferUsageTokens[] = {
        tok::l_square,
        tok::l_square,
        PP.getIdentifierInfo("clang"),
        tok::coloncolon,
        PP.getIdentifierInfo("unsafe_buffer_usage"),
        tok::r_square,
        tok::r_square};

    StringRef MacroName;

    // The returned macro name, if any, is guaranteed not to be function-like:
    MacroName = PP.getLastMacroWithSpelling(Loc, ClangUnsafeBufferUsageTokens);
    if (MacroName.empty())
      MacroName = "[[clang::unsafe_buffer_usage]]";
    return MacroName.str() + WSSuffix.str();
  }
};
} // anonymous namespace
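
// In practice the helper above lets fix-its reuse a project's own spelling of
// the opt-out attribute. For example (sketch), if a TU defines
//
//   #define UNSAFE_BUFFER_USAGE [[clang::unsafe_buffer_usage]]
//
// then fix-its emitted from this point prefer inserting "UNSAFE_BUFFER_USAGE"
// over the raw "[[clang::unsafe_buffer_usage]]" spelling.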
//===----------------------------------------------------------------------===//
// AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
// warnings on a function, method, or block.
//===----------------------------------------------------------------------===//
sema::AnalysisBasedWarnings::Policy::Policy() {
  enableCheckFallThrough = 1;
  enableCheckUnreachable = 0;
  enableThreadSafetyAnalysis = 0;
  enableConsumedAnalysis = 0;
}
/// InterProceduralData aims to be a storage of whatever data should be passed
/// between analyses of different functions.
///
/// At the moment, its primary goal is to make the information gathered during
/// the analysis of the blocks available during the analysis of the enclosing
/// function. This is important due to the fact that blocks are analyzed before
/// the enclosing function is even parsed fully, so it is not viable to access
/// anything in the outer scope while analyzing the block. On the other hand,
/// re-building the CFG for blocks and re-analyzing them when we do have all the
/// information (i.e. during the analysis of the enclosing function) seems to be
/// too expensive.
class sema::AnalysisBasedWarnings::InterProceduralData {
public:
  // It is important to analyze blocks within functions because it's a very
  // common pattern to capture completion handler parameters by blocks.
  CalledOnceInterProceduralData CalledOnceData;
};
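
// The typical shape that motivates this (an illustrative Objective-C sketch,
// not code from this file):
//
//   - (void)fetchWithCompletion:(void (^)(void))completionHandler {
//     dispatch_async(self.queue, ^{
//       completionHandler();   // the block is analyzed before the enclosing
//     });                      // method is fully parsed, so its verdict is
//   }                          // stashed in CalledOnceData and replayed when
//                              // the method itself is analyzed.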

static unsigned isEnabled(DiagnosticsEngine &D, unsigned diag) {
  return (unsigned)!D.isIgnored(diag, SourceLocation());
}

sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
    : S(s), IPData(std::make_unique<InterProceduralData>()),
      NumFunctionsAnalyzed(0), NumFunctionsWithBadCFGs(0), NumCFGBlocks(0),
      MaxCFGBlocksPerFunction(0), NumUninitAnalysisFunctions(0),
      NumUninitAnalysisVariables(0), MaxUninitAnalysisVariablesPerFunction(0),
      NumUninitAnalysisBlockVisits(0),
      MaxUninitAnalysisBlockVisitsPerFunction(0) {

  using namespace diag;
  DiagnosticsEngine &D = S.getDiagnostics();

  DefaultPolicy.enableCheckUnreachable =
      isEnabled(D, warn_unreachable) || isEnabled(D, warn_unreachable_break) ||
      isEnabled(D, warn_unreachable_return) ||
      isEnabled(D, warn_unreachable_loop_increment);

  DefaultPolicy.enableThreadSafetyAnalysis = isEnabled(D, warn_double_lock);

  DefaultPolicy.enableConsumedAnalysis =
      isEnabled(D, warn_use_in_invalid_state);
}

// We need this here for unique_ptr with a forward-declared class.
sema::AnalysisBasedWarnings::~AnalysisBasedWarnings() = default;

static void flushDiagnostics(Sema &S, const sema::FunctionScopeInfo *fscope) {
  for (const auto &D : fscope->PossiblyUnreachableDiags)
    S.Diag(D.Loc, D.PD);
}

// An AST Visitor that calls a callback function on each callable DEFINITION
// that is NOT in a dependent context:
class CallableVisitor : public DynamicRecursiveASTVisitor {
private:
  llvm::function_ref<void(const Decl *)> Callback;

public:
  CallableVisitor(llvm::function_ref<void(const Decl *)> Callback)
      : Callback(Callback) {
    ShouldVisitTemplateInstantiations = true;
    ShouldVisitImplicitCode = false;
  }

  bool VisitFunctionDecl(FunctionDecl *Node) override {
    if (cast<DeclContext>(Node)->isDependentContext())
      return true; // Not to analyze dependent decl
    // `FunctionDecl->hasBody()` returns true if the function has a body
    // somewhere defined. But we want to know if this `Node` has a body
    // child. So we use `doesThisDeclarationHaveABody`:
    if (Node->doesThisDeclarationHaveABody())
      Callback(Node);
    return true;
  }

  bool VisitBlockDecl(BlockDecl *Node) override {
    if (cast<DeclContext>(Node)->isDependentContext())
      return true; // Not to analyze dependent decl
    Callback(Node);
    return true;
  }

  bool VisitObjCMethodDecl(ObjCMethodDecl *Node) override {
    if (cast<DeclContext>(Node)->isDependentContext())
      return true; // Not to analyze dependent decl
    if (Node->hasBody())
      Callback(Node);
    return true;
  }

  bool VisitLambdaExpr(LambdaExpr *Node) override {
    return VisitFunctionDecl(Node->getCallOperator());
  }
};

void clang::sema::AnalysisBasedWarnings::IssueWarnings(
    TranslationUnitDecl *TU) {
  if (!TU)
    return; // This is unexpected, give up quietly.

  DiagnosticsEngine &Diags = S.getDiagnostics();

  if (S.hasUncompilableErrorOccurred() || Diags.getIgnoreAllWarnings())
    // Exit if there are uncompilable errors or if all warnings are ignored:
    return;

  DiagnosticOptions &DiagOpts = Diags.getDiagnosticOptions();

  // UnsafeBufferUsage analysis settings.
  bool UnsafeBufferUsageCanEmitSuggestions = S.getLangOpts().CPlusPlus20;
  bool UnsafeBufferUsageShouldEmitSuggestions = // Should != Can.
      UnsafeBufferUsageCanEmitSuggestions &&
      DiagOpts.ShowSafeBufferUsageSuggestions;
  bool UnsafeBufferUsageShouldSuggestSuggestions =
      UnsafeBufferUsageCanEmitSuggestions &&
      !DiagOpts.ShowSafeBufferUsageSuggestions;
  UnsafeBufferUsageReporter R(S, UnsafeBufferUsageShouldSuggestSuggestions);

  // The Callback function that performs analyses:
  auto CallAnalyzers = [&](const Decl *Node) -> void {
    // Perform unsafe buffer usage analysis:
    if (!Diags.isIgnored(diag::warn_unsafe_buffer_operation,
                         Node->getBeginLoc()) ||
        !Diags.isIgnored(diag::warn_unsafe_buffer_variable,
                         Node->getBeginLoc()) ||
        !Diags.isIgnored(diag::warn_unsafe_buffer_usage_in_container,
                         Node->getBeginLoc()) ||
        !Diags.isIgnored(diag::warn_unsafe_buffer_libc_call,
                         Node->getBeginLoc())) {
      clang::checkUnsafeBufferUsage(Node, R,
                                    UnsafeBufferUsageShouldEmitSuggestions);
    }

    // More analysis ...
  };

  // Emit per-function analysis-based warnings that require whole-TU
  // reasoning. Check if any of them is enabled at all before scanning the AST:
  if (!Diags.isIgnored(diag::warn_unsafe_buffer_operation, SourceLocation()) ||
      !Diags.isIgnored(diag::warn_unsafe_buffer_variable, SourceLocation()) ||
      !Diags.isIgnored(diag::warn_unsafe_buffer_usage_in_container,
                       SourceLocation()) ||
      (!Diags.isIgnored(diag::warn_unsafe_buffer_libc_call, SourceLocation()) &&
       S.getLangOpts().CPlusPlus /* only warn about libc calls in C++ */)) {
    CallableVisitor(CallAnalyzers).TraverseTranslationUnitDecl(TU);
  }
}
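
// Informal usage sketch of the flags consulted above:
//
//   clang++ -std=c++20 -Wunsafe-buffer-usage a.cpp
//       warnings only; the reporter recommends -fsafe-buffer-usage-suggestions
//   clang++ -std=c++20 -Wunsafe-buffer-usage -fsafe-buffer-usage-suggestions a.cpp
//       warnings plus fix-it suggestions (ShowSafeBufferUsageSuggestions).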

void clang::sema::AnalysisBasedWarnings::IssueWarnings(
    sema::AnalysisBasedWarnings::Policy P, sema::FunctionScopeInfo *fscope,
    const Decl *D, QualType BlockType) {

  // We avoid doing analysis-based warnings when there are errors for
  // two reasons:
  // (1) The CFGs often can't be constructed (if the body is invalid), so
  //     don't bother trying.
  // (2) The code already has problems; running the analysis just takes more
  //     time.
  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Do not do any analysis if we are going to just ignore them.
  if (Diags.getIgnoreAllWarnings() ||
      (Diags.getSuppressSystemWarnings() &&
       S.SourceMgr.isInSystemHeader(D->getLocation())))
    return;

  // For code in dependent contexts, we'll do this at instantiation time.
  if (cast<DeclContext>(D)->isDependentContext())
    return;

  if (S.hasUncompilableErrorOccurred()) {
    // Flush out any possibly unreachable diagnostics.
    flushDiagnostics(S, fscope);
    return;
  }

  const Stmt *Body = D->getBody();
  assert(Body);

  // Construct the analysis context with the specified CFG build options.
  AnalysisDeclContext AC(/* AnalysisDeclContextManager */ nullptr, D);

  // Don't generate EH edges for CallExprs as we'd like to avoid the n^2
  // explosion for destructors that can result and the compile time hit.
  AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
  AC.getCFGBuildOptions().AddEHEdges = false;
  AC.getCFGBuildOptions().AddInitializers = true;
  AC.getCFGBuildOptions().AddImplicitDtors = true;
  AC.getCFGBuildOptions().AddTemporaryDtors = true;
  AC.getCFGBuildOptions().AddCXXNewAllocator = false;
  AC.getCFGBuildOptions().AddCXXDefaultInitExprInCtors = true;

  // Force that certain expressions appear as CFGElements in the CFG. This
  // is used to speed up various analyses.
  // FIXME: This isn't the right factoring. This is here for initial
  // prototyping, but we need a way for analyses to say what expressions they
  // expect to always be CFGElements and then fill in the BuildOptions
  // appropriately. This is essentially a layering violation.
  if (P.enableCheckUnreachable || P.enableThreadSafetyAnalysis ||
      P.enableConsumedAnalysis) {
    // Unreachable code analysis and thread safety require a linearized CFG.
    AC.getCFGBuildOptions().setAllAlwaysAdd();
  } else {
    AC.getCFGBuildOptions()
        .setAlwaysAdd(Stmt::BinaryOperatorClass)
        .setAlwaysAdd(Stmt::CompoundAssignOperatorClass)
        .setAlwaysAdd(Stmt::BlockExprClass)
        .setAlwaysAdd(Stmt::CStyleCastExprClass)
        .setAlwaysAdd(Stmt::DeclRefExprClass)
        .setAlwaysAdd(Stmt::ImplicitCastExprClass)
        .setAlwaysAdd(Stmt::UnaryOperatorClass);
  }

  // Install the logical handler.
  std::optional<LogicalErrorHandler> LEH;
  if (LogicalErrorHandler::hasActiveDiagnostics(Diags, D->getBeginLoc())) {
    LEH.emplace(S);
    AC.getCFGBuildOptions().Observer = &*LEH;
  }

  // Emit delayed diagnostics.
  if (!fscope->PossiblyUnreachableDiags.empty()) {
    bool analyzed = false;

    // Register the expressions with the CFGBuilder.
    for (const auto &D : fscope->PossiblyUnreachableDiags) {
      for (const Stmt *S : D.Stmts)
        AC.registerForcedBlockExpression(S);
    }

    if (AC.getCFG()) {
      analyzed = true;
      for (const auto &D : fscope->PossiblyUnreachableDiags) {
        bool AllReachable = true;
        for (const Stmt *S : D.Stmts) {
          const CFGBlock *block = AC.getBlockForRegisteredExpression(S);
          CFGReverseBlockReachabilityAnalysis *cra =
              AC.getCFGReachablityAnalysis();
          // FIXME: We should be able to assert that block is non-null, but
          // the CFG analysis can skip potentially-evaluated expressions in
          // edge cases; see test/Sema/vla-2.c.
          if (block && cra) {
            // Can this block be reached from the entrance?
            if (!cra->isReachable(&AC.getCFG()->getEntry(), block)) {
              AllReachable = false;
              break;
            }
          }
          // If we cannot map to a basic block, assume the statement is
          // reachable.
        }

        if (AllReachable)
          S.Diag(D.Loc, D.PD);
      }
    }

    if (!analyzed)
      flushDiagnostics(S, fscope);
  }

  // Warning: check missing 'return'
  if (P.enableCheckFallThrough) {
    const CheckFallThroughDiagnostics &CD =
        (isa<BlockDecl>(D)
             ? CheckFallThroughDiagnostics::MakeForBlock()
             : (isa<CXXMethodDecl>(D) &&
                cast<CXXMethodDecl>(D)->getOverloadedOperator() == OO_Call &&
                cast<CXXMethodDecl>(D)->getParent()->isLambda())
                   ? CheckFallThroughDiagnostics::MakeForLambda()
                   : (fscope->isCoroutine()
                          ? CheckFallThroughDiagnostics::MakeForCoroutine(D)
                          : CheckFallThroughDiagnostics::MakeForFunction(D)));
    CheckFallThroughForBody(S, D, Body, BlockType, CD, AC, fscope);
  }

  // Warning: check for unreachable code
  if (P.enableCheckUnreachable) {
    // Only check for unreachable code on non-template instantiations.
    // Different template instantiations can effectively change the control-flow
    // and it is very difficult to prove that a snippet of code in a template
    // is unreachable for all instantiations.
    bool isTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(D))
      isTemplateInstantiation = Function->isTemplateInstantiation();
    if (!isTemplateInstantiation)
      CheckUnreachable(S, AC);
  }

  // Check for thread safety violations
  if (P.enableThreadSafetyAnalysis) {
    SourceLocation FL = AC.getDecl()->getLocation();
    SourceLocation FEL = AC.getDecl()->getEndLoc();
    threadSafety::ThreadSafetyReporter Reporter(S, FL, FEL);
    if (!Diags.isIgnored(diag::warn_thread_safety_beta, D->getBeginLoc()))
      Reporter.setIssueBetaWarnings(true);
    if (!Diags.isIgnored(diag::warn_thread_safety_verbose, D->getBeginLoc()))
      Reporter.setVerbose(true);

    threadSafety::runThreadSafetyAnalysis(AC, Reporter,
                                          &S.ThreadSafetyDeclCache);
    Reporter.emitDiagnostics();
  }

  // Check for violations of consumed properties.
  if (P.enableConsumedAnalysis) {
    consumed::ConsumedWarningsHandler WarningHandler(S);
    consumed::ConsumedAnalyzer Analyzer(WarningHandler);
    Analyzer.run(AC);
  }

  if (!Diags.isIgnored(diag::warn_uninit_var, D->getBeginLoc()) ||
      !Diags.isIgnored(diag::warn_sometimes_uninit_var, D->getBeginLoc()) ||
      !Diags.isIgnored(diag::warn_maybe_uninit_var, D->getBeginLoc()) ||
      !Diags.isIgnored(diag::warn_uninit_const_reference, D->getBeginLoc())) {
    if (CFG *cfg = AC.getCFG()) {
      UninitValsDiagReporter reporter(S);
      UninitVariablesAnalysisStats stats;
      std::memset(&stats, 0, sizeof(UninitVariablesAnalysisStats));
      runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC,
                                        reporter, stats);

      if (S.CollectStats && stats.NumVariablesAnalyzed > 0) {
        ++NumUninitAnalysisFunctions;
        NumUninitAnalysisVariables += stats.NumVariablesAnalyzed;
        NumUninitAnalysisBlockVisits += stats.NumBlockVisits;
        MaxUninitAnalysisVariablesPerFunction =
            std::max(MaxUninitAnalysisVariablesPerFunction,
                     stats.NumVariablesAnalyzed);
        MaxUninitAnalysisBlockVisitsPerFunction =
            std::max(MaxUninitAnalysisBlockVisitsPerFunction,
                     stats.NumBlockVisits);
      }
    }
  }
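
  // For example (sketch), the uninitialized-value analysis above reports
  // code such as:
  //
  //   int f(bool b) {
  //     int x;
  //     if (b)
  //       x = 1;
  //     return x;   // -Wsometimes-uninitialized: 'x' may be uninitialized
  //   }             //   when 'b' is false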

  // Check for violations of "called once" parameter properties.
  if (S.getLangOpts().ObjC && !S.getLangOpts().CPlusPlus &&
      shouldAnalyzeCalledOnceParameters(Diags, D->getBeginLoc())) {
    if (AC.getCFG()) {
      CalledOnceCheckReporter Reporter(S, IPData->CalledOnceData);
      checkCalledOnceParameters(
          AC, Reporter,
          shouldAnalyzeCalledOnceConventions(Diags, D->getBeginLoc()));
    }
  }

  bool FallThroughDiagFull =
      !Diags.isIgnored(diag::warn_unannotated_fallthrough, D->getBeginLoc());
  bool FallThroughDiagPerFunction = !Diags.isIgnored(
      diag::warn_unannotated_fallthrough_per_function, D->getBeginLoc());
  if (FallThroughDiagFull || FallThroughDiagPerFunction ||
      fscope->HasFallthroughStmt) {
    DiagnoseSwitchLabelsFallthrough(S, AC, !FallThroughDiagFull);
  }
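
  // Roughly, the check above flags implicit fall-through between switch
  // labels (illustrative only):
  //
  //   switch (n) {
  //   case 0:
  //     step();      // warn: unannotated fall-through between switch labels
  //   case 1:        //       (-Wimplicit-fallthrough); silence it with
  //     finish();    //       [[fallthrough]]; before 'case 1:'
  //     break;
  //   }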

  if (S.getLangOpts().ObjCWeak &&
      !Diags.isIgnored(diag::warn_arc_repeated_use_of_weak, D->getBeginLoc()))
    diagnoseRepeatedUseOfWeak(S, fscope, D, AC.getParentMap());

  // Check for infinite self-recursion in functions
  if (!Diags.isIgnored(diag::warn_infinite_recursive_function,
                       D->getBeginLoc())) {
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
      checkRecursiveFunction(S, FD, Body, AC);
    }
  }

  // Check for throw out of non-throwing function.
  if (!Diags.isIgnored(diag::warn_throw_in_noexcept_func, D->getBeginLoc()))
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D))
      if (S.getLangOpts().CPlusPlus && !fscope->isCoroutine() && isNoexcept(FD))
        checkThrowInNonThrowingFunc(S, FD, AC);

  // If none of the previous checks caused a CFG build, trigger one here
  // for the logical error handler.
  if (LogicalErrorHandler::hasActiveDiagnostics(Diags, D->getBeginLoc())) {
    AC.getCFG();
  }

  // Collect statistics about the CFG if it was built.
  if (S.CollectStats && AC.isCFGBuilt()) {
    ++NumFunctionsAnalyzed;
    if (CFG *cfg = AC.getCFG()) {
      // If we successfully built a CFG for this context, record some more
      // detail information about it.
      NumCFGBlocks += cfg->getNumBlockIDs();
      MaxCFGBlocksPerFunction = std::max(MaxCFGBlocksPerFunction,
                                         cfg->getNumBlockIDs());
    } else {
      ++NumFunctionsWithBadCFGs;
    }
  }
}

void clang::sema::AnalysisBasedWarnings::PrintStats() const {
  llvm::errs() << "\n*** Analysis Based Warnings Stats:\n";

  unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs;
  unsigned AvgCFGBlocksPerFunction =
      !NumCFGsBuilt ? 0 : NumCFGBlocks / NumCFGsBuilt;
  llvm::errs() << NumFunctionsAnalyzed << " functions analyzed ("
               << NumFunctionsWithBadCFGs << " w/o CFGs).\n"
               << "  " << NumCFGBlocks << " CFG blocks built.\n"
               << "  " << AvgCFGBlocksPerFunction
               << " average CFG blocks per function.\n"
               << "  " << MaxCFGBlocksPerFunction
               << " max CFG blocks per function.\n";

  unsigned AvgUninitVariablesPerFunction =
      !NumUninitAnalysisFunctions
          ? 0
          : NumUninitAnalysisVariables / NumUninitAnalysisFunctions;
  unsigned AvgUninitBlockVisitsPerFunction =
      !NumUninitAnalysisFunctions
          ? 0
          : NumUninitAnalysisBlockVisits / NumUninitAnalysisFunctions;
  llvm::errs() << NumUninitAnalysisFunctions
               << " functions analyzed for uninitialized variables\n"
               << "  " << NumUninitAnalysisVariables << " variables analyzed.\n"
               << "  " << AvgUninitVariablesPerFunction
               << " average variables per function.\n"
               << "  " << MaxUninitAnalysisVariablesPerFunction
               << " max variables per function.\n"
               << "  " << NumUninitAnalysisBlockVisits << " block visits.\n"
               << "  " << AvgUninitBlockVisitsPerFunction
               << " average block visits per function.\n"
               << "  " << MaxUninitAnalysisBlockVisitsPerFunction
               << " max block visits per function.\n";