//===--- JumpDiagnostics.cpp - Protected scope jump analysis ------*- C++ -*-=//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the JumpScopeChecker class, which is used to diagnose
// jumps that enter a protected scope in an invalid way.
//
//===----------------------------------------------------------------------===//

#include "clang/AST/DeclCXX.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtObjC.h"
#include "clang/AST/StmtOpenMP.h"
#include "clang/Basic/SourceLocation.h"
#include "clang/Sema/SemaInternal.h"
#include "llvm/ADT/BitVector.h"
using namespace clang;

namespace {

/// JumpScopeChecker - This object is used by Sema to diagnose invalid jumps
/// into VLA and other protected scopes.  For example, this rejects:
///    goto L;
///    int a[n];
///  L:
///
/// We also detect jumps out of protected scopes when it's not possible to do
/// cleanups properly. Indirect jumps and ASM jumps can't do cleanups because
/// the target is unknown. Return statements with \c [[clang::musttail]] cannot
/// handle any cleanups due to the nature of a tail call.
class JumpScopeChecker {
  Sema &S;

  /// Permissive - True when recovering from errors, in which case precautions
  /// are taken to handle incomplete scope information.
  const bool Permissive;

  /// GotoScope - This is a record that we use to keep track of all of the
  /// scopes that are introduced by VLAs and other things that scope jumps like
  /// gotos.  This scope tree has nothing to do with the source scope tree,
  /// because you can have multiple VLA scopes per compound statement, and most
  /// compound statements don't introduce any scopes.
  struct GotoScope {
    /// ParentScope - The index in ScopeMap of the parent scope.  This is 0 if
    /// the parent scope is the function body.
    unsigned ParentScope;

    /// InDiag - The note to emit if there is a jump into this scope.
    unsigned InDiag;

    /// OutDiag - The note to emit if there is an indirect jump out
    /// of this scope.  Direct jumps always clean up their current scope
    /// in an orderly way.
    unsigned OutDiag;

    /// Loc - Location to emit the diagnostic.
    SourceLocation Loc;

    GotoScope(unsigned parentScope, unsigned InDiag, unsigned OutDiag,
              SourceLocation L)
        : ParentScope(parentScope), InDiag(InDiag), OutDiag(OutDiag), Loc(L) {}
  };

  SmallVector<GotoScope, 48> Scopes;
  llvm::DenseMap<Stmt*, unsigned> LabelAndGotoScopes;
  SmallVector<Stmt*, 16> Jumps;

  SmallVector<Stmt*, 4> IndirectJumps;
  SmallVector<LabelDecl *, 4> IndirectJumpTargets;
  SmallVector<AttributedStmt *, 4> MustTailStmts;

public:
  JumpScopeChecker(Stmt *Body, Sema &S);
private:
  void BuildScopeInformation(Decl *D, unsigned &ParentScope);
  void BuildScopeInformation(VarDecl *D, const BlockDecl *BDecl,
                             unsigned &ParentScope);
  void BuildScopeInformation(CompoundLiteralExpr *CLE, unsigned &ParentScope);
  void BuildScopeInformation(Stmt *S, unsigned &origParentScope);

  void VerifyJumps();
  void VerifyIndirectJumps();
  void VerifyMustTailStmts();
  void NoteJumpIntoScopes(ArrayRef<unsigned> ToScopes);
  void DiagnoseIndirectOrAsmJump(Stmt *IG, unsigned IGScope, LabelDecl *Target,
                                 unsigned TargetScope);
  void CheckJump(Stmt *From, Stmt *To, SourceLocation DiagLoc,
                 unsigned JumpDiag, unsigned JumpDiagWarning,
                 unsigned JumpDiagCXX98Compat);
  void CheckGotoStmt(GotoStmt *GS);
  const Attr *GetMustTailAttr(AttributedStmt *AS);

  unsigned GetDeepestCommonScope(unsigned A, unsigned B);
};
} // end anonymous namespace

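// CHECK_PERMISSIVE(x) - In a function with no unrecoverable errors this
// asserts that the condition 'x' never holds; when recovering from errors
// (Permissive mode) it instead evaluates to 'x', so callers can skip work
// that depends on scope information that was never built.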
#define CHECK_PERMISSIVE(x) (assert(Permissive || !(x)), (Permissive && (x)))

JumpScopeChecker::JumpScopeChecker(Stmt *Body, Sema &s)
    : S(s), Permissive(s.hasAnyUnrecoverableErrorsInThisFunction()) {
  // Add a scope entry for function scope.
  Scopes.push_back(GotoScope(~0U, ~0U, ~0U, SourceLocation()));

  // Build information for the top level compound statement, so that we have a
  // defined scope record for every "goto" and label.
  unsigned BodyParentScope = 0;
  BuildScopeInformation(Body, BodyParentScope);

  // Check that all jumps we saw are kosher.
  VerifyJumps();
  VerifyIndirectJumps();
  VerifyMustTailStmts();
}

/// GetDeepestCommonScope - Finds the innermost scope enclosing the
/// two scopes.
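///
/// For example (illustrative): if Scopes[1].ParentScope == 0,
/// Scopes[2].ParentScope == 1 and Scopes[3].ParentScope == 0, then the
/// deepest common scope of 2 and 3 is 0, while that of 2 and 1 is 1.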
unsigned JumpScopeChecker::GetDeepestCommonScope(unsigned A, unsigned B) {
  while (A != B) {
    // Inner scopes are created after outer scopes and therefore have
    // higher indices.
    if (A < B) {
      assert(Scopes[B].ParentScope < B);
      B = Scopes[B].ParentScope;
    } else {
      assert(Scopes[A].ParentScope < A);
      A = Scopes[A].ParentScope;
    }
  }
  return A;
}

typedef std::pair<unsigned,unsigned> ScopePair;

/// GetDiagForGotoScopeDecl - If this decl induces a new goto scope, return a
/// diagnostic that should be emitted if control goes over it. If not, return 0.
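///
/// Illustrative examples (not exhaustive): a local VLA such as 'int a[n];'
/// yields only an in-diagnostic (jumping past it is invalid, but jumping out
/// of its scope needs no cleanup), whereas a '__block' variable yields both
/// an in- and an out-diagnostic, since leaving its scope requires cleanup.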
static ScopePair GetDiagForGotoScopeDecl(Sema &S, const Decl *D) {
  if (const VarDecl *VD = dyn_cast<VarDecl>(D)) {
    unsigned InDiag = 0;
    unsigned OutDiag = 0;

    if (VD->getType()->isVariablyModifiedType())
      InDiag = diag::note_protected_by_vla;

    if (VD->hasAttr<BlocksAttr>())
      return ScopePair(diag::note_protected_by___block,
                       diag::note_exits___block);

    if (VD->hasAttr<CleanupAttr>())
      return ScopePair(diag::note_protected_by_cleanup,
                       diag::note_exits_cleanup);

    if (VD->hasLocalStorage()) {
      switch (VD->getType().isDestructedType()) {
      case QualType::DK_objc_strong_lifetime:
        return ScopePair(diag::note_protected_by_objc_strong_init,
                         diag::note_exits_objc_strong);

      case QualType::DK_objc_weak_lifetime:
        return ScopePair(diag::note_protected_by_objc_weak_init,
                         diag::note_exits_objc_weak);

      case QualType::DK_nontrivial_c_struct:
        return ScopePair(diag::note_protected_by_non_trivial_c_struct_init,
                         diag::note_exits_dtor);

      case QualType::DK_cxx_destructor:
        OutDiag = diag::note_exits_dtor;
        break;

      case QualType::DK_none:
        break;
      }
    }

    const Expr *Init = VD->getInit();
    if (S.Context.getLangOpts().CPlusPlus && VD->hasLocalStorage() && Init) {
      // C++11 [stmt.dcl]p3:
      //   A program that jumps from a point where a variable with automatic
      //   storage duration is not in scope to a point where it is in scope
      //   is ill-formed unless the variable has scalar type, class type with
      //   a trivial default constructor and a trivial destructor, a
      //   cv-qualified version of one of these types, or an array of one of
      //   the preceding types and is declared without an initializer.
      //
      // C++03 [stmt.dcl]p3:
      //   A program that jumps from a point where a local variable
      //   with automatic storage duration is not in scope to a point
      //   where it is in scope is ill-formed unless the variable has
      //   POD type and is declared without an initializer.
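      //
      // Illustrative consequence (not from the original comments): jumping
      // over 'int i = 0;' is diagnosed because of its initializer, while
      // jumping over a plain 'int i;' is fine; a class-type variable with a
      // non-trivial destructor is diagnosed even without a written
      // initializer, since its implicit constructor call counts as one.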

      InDiag = diag::note_protected_by_variable_init;

      // For a variable of (array of) class type declared without an
      // initializer, we will have call-style initialization and the initializer
      // will be the CXXConstructExpr with no intervening nodes.
      if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(Init)) {
        const CXXConstructorDecl *Ctor = CCE->getConstructor();
        if (Ctor->isTrivial() && Ctor->isDefaultConstructor() &&
            VD->getInitStyle() == VarDecl::CallInit) {
          if (OutDiag)
            InDiag = diag::note_protected_by_variable_nontriv_destructor;
          else if (!Ctor->getParent()->isPOD())
            InDiag = diag::note_protected_by_variable_non_pod;
          else
            InDiag = 0;
        }
      }
    }

    return ScopePair(InDiag, OutDiag);
  }

  if (const TypedefNameDecl *TD = dyn_cast<TypedefNameDecl>(D)) {
    if (TD->getUnderlyingType()->isVariablyModifiedType())
      return ScopePair(isa<TypedefDecl>(TD)
                           ? diag::note_protected_by_vla_typedef
                           : diag::note_protected_by_vla_type_alias,
                       0);
  }

  return ScopePair(0U, 0U);
}

/// Build scope information for a declaration that is part of a DeclStmt.
void JumpScopeChecker::BuildScopeInformation(Decl *D, unsigned &ParentScope) {
  // If this decl causes a new scope, push and switch to it.
  std::pair<unsigned,unsigned> Diags = GetDiagForGotoScopeDecl(S, D);
  if (Diags.first || Diags.second) {
    Scopes.push_back(GotoScope(ParentScope, Diags.first, Diags.second,
                               D->getLocation()));
    ParentScope = Scopes.size()-1;
  }

  // If the decl has an initializer, walk it with the potentially new
  // scope we just installed.
  if (VarDecl *VD = dyn_cast<VarDecl>(D))
    if (Expr *Init = VD->getInit())
      BuildScopeInformation(Init, ParentScope);
}

/// Build scope information for captured block literal variables.
void JumpScopeChecker::BuildScopeInformation(VarDecl *D,
                                             const BlockDecl *BDecl,
                                             unsigned &ParentScope) {
  // Exclude captured __block variables; there's no destructor
  // associated with the block literal for them.
  if (D->hasAttr<BlocksAttr>())
    return;
  QualType T = D->getType();
  QualType::DestructionKind destructKind = T.isDestructedType();
  if (destructKind != QualType::DK_none) {
    std::pair<unsigned,unsigned> Diags;
    switch (destructKind) {
      case QualType::DK_cxx_destructor:
        Diags = ScopePair(diag::note_enters_block_captures_cxx_obj,
                          diag::note_exits_block_captures_cxx_obj);
        break;
      case QualType::DK_objc_strong_lifetime:
        Diags = ScopePair(diag::note_enters_block_captures_strong,
                          diag::note_exits_block_captures_strong);
        break;
      case QualType::DK_objc_weak_lifetime:
        Diags = ScopePair(diag::note_enters_block_captures_weak,
                          diag::note_exits_block_captures_weak);
        break;
      case QualType::DK_nontrivial_c_struct:
        Diags = ScopePair(diag::note_enters_block_captures_non_trivial_c_struct,
                          diag::note_exits_block_captures_non_trivial_c_struct);
        break;
      case QualType::DK_none:
        llvm_unreachable("non-lifetime captured variable");
    }
    SourceLocation Loc = D->getLocation();
    if (Loc.isInvalid())
      Loc = BDecl->getLocation();
    Scopes.push_back(GotoScope(ParentScope,
                               Diags.first, Diags.second, Loc));
    ParentScope = Scopes.size()-1;
  }
}

/// Build scope information for compound literals of C struct types that are
/// non-trivial to destruct.
void JumpScopeChecker::BuildScopeInformation(CompoundLiteralExpr *CLE,
                                             unsigned &ParentScope) {
  unsigned InDiag = diag::note_enters_compound_literal_scope;
  unsigned OutDiag = diag::note_exits_compound_literal_scope;
  Scopes.push_back(GotoScope(ParentScope, InDiag, OutDiag, CLE->getExprLoc()));
  ParentScope = Scopes.size() - 1;
}

/// BuildScopeInformation - The subtree rooted at S is known to form a
/// coherent VLA scope with a specified parent node.  Walk through the
/// statements, adding any labels or gotos to LabelAndGotoScopes and
/// recursively walking the AST as needed.
void JumpScopeChecker::BuildScopeInformation(Stmt *S,
                                             unsigned &origParentScope) {
  // If this is a statement, rather than an expression, scopes within it don't
  // propagate out into the enclosing scope.  Otherwise we have to worry
  // about block literals, which have the lifetime of their enclosing statement.
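  // (Illustrative example, not from the original comment: a block literal
  // appearing in an 'if' condition captures variables that must live until
  // the end of the enclosing statement, so any scope it creates is handed
  // back to the caller through origParentScope.)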
  unsigned independentParentScope = origParentScope;
  unsigned &ParentScope = ((isa<Expr>(S) && !isa<StmtExpr>(S))
                            ? origParentScope : independentParentScope);

  unsigned StmtsToSkip = 0u;

  // If we found a label, remember that it is in ParentScope scope.
  switch (S->getStmtClass()) {
  case Stmt::AddrLabelExprClass:
    IndirectJumpTargets.push_back(cast<AddrLabelExpr>(S)->getLabel());
    break;

  case Stmt::ObjCForCollectionStmtClass: {
    auto *CS = cast<ObjCForCollectionStmt>(S);
    unsigned Diag = diag::note_protected_by_objc_fast_enumeration;
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope, Diag, 0, S->getBeginLoc()));
    BuildScopeInformation(CS->getBody(), NewParentScope);
    return;
  }

  case Stmt::IndirectGotoStmtClass:
    // "goto *&&lbl;" is a special case which we treat as equivalent
    // to a normal goto.  In addition, we don't calculate scope in the
    // operand (to avoid recording the address-of-label use), which
    // works only because of the restricted set of expressions which
    // we detect as constant targets.
    if (cast<IndirectGotoStmt>(S)->getConstantTarget())
      goto RecordJumpScope;

    LabelAndGotoScopes[S] = ParentScope;
    IndirectJumps.push_back(S);
    break;

  case Stmt::SwitchStmtClass:
    // Evaluate the C++17 init stmt and condition variable
    // before entering the scope of the switch statement.
    if (Stmt *Init = cast<SwitchStmt>(S)->getInit()) {
      BuildScopeInformation(Init, ParentScope);
      ++StmtsToSkip;
    }
    if (VarDecl *Var = cast<SwitchStmt>(S)->getConditionVariable()) {
      BuildScopeInformation(Var, ParentScope);
      ++StmtsToSkip;
    }
    goto RecordJumpScope;

  case Stmt::GCCAsmStmtClass:
    if (!cast<GCCAsmStmt>(S)->isAsmGoto())
      break;
    [[fallthrough]];

  case Stmt::GotoStmtClass:
  RecordJumpScope:
    // Remember both what scope a goto is in as well as the fact that we have
    // it.  This makes the second scan not have to walk the AST again.
    LabelAndGotoScopes[S] = ParentScope;
    Jumps.push_back(S);
    break;

  case Stmt::IfStmtClass: {
    IfStmt *IS = cast<IfStmt>(S);
    if (!(IS->isConstexpr() || IS->isConsteval() ||
          IS->isObjCAvailabilityCheck()))
      break;

    unsigned Diag = diag::note_protected_by_if_available;
    if (IS->isConstexpr())
      Diag = diag::note_protected_by_constexpr_if;
    else if (IS->isConsteval())
      Diag = diag::note_protected_by_consteval_if;

    if (VarDecl *Var = IS->getConditionVariable())
      BuildScopeInformation(Var, ParentScope);

    // Cannot jump into the middle of the condition.
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope, Diag, 0, IS->getBeginLoc()));

    if (!IS->isConsteval())
      BuildScopeInformation(IS->getCond(), NewParentScope);

    // Jumps into either arm of an 'if constexpr' are not allowed.
    NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope, Diag, 0, IS->getBeginLoc()));
    BuildScopeInformation(IS->getThen(), NewParentScope);
    if (Stmt *Else = IS->getElse()) {
      NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope, Diag, 0, IS->getBeginLoc()));
      BuildScopeInformation(Else, NewParentScope);
    }
    return;
  }

  case Stmt::CXXTryStmtClass: {
    CXXTryStmt *TS = cast<CXXTryStmt>(S);
    {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_cxx_try,
                                 diag::note_exits_cxx_try,
                                 TS->getSourceRange().getBegin()));
      if (Stmt *TryBlock = TS->getTryBlock())
        BuildScopeInformation(TryBlock, NewParentScope);
    }

    // Jumping from a catch handler into the try block is not allowed either.
    for (unsigned I = 0, E = TS->getNumHandlers(); I != E; ++I) {
      CXXCatchStmt *CS = TS->getHandler(I);
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_cxx_catch,
                                 diag::note_exits_cxx_catch,
                                 CS->getSourceRange().getBegin()));
      BuildScopeInformation(CS->getHandlerBlock(), NewParentScope);
    }
    return;
  }

  case Stmt::SEHTryStmtClass: {
    SEHTryStmt *TS = cast<SEHTryStmt>(S);
    {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_seh_try,
                                 diag::note_exits_seh_try,
                                 TS->getSourceRange().getBegin()));
      if (Stmt *TryBlock = TS->getTryBlock())
        BuildScopeInformation(TryBlock, NewParentScope);
    }

    // Jumps from __except or __finally into the __try are not allowed either.
    if (SEHExceptStmt *Except = TS->getExceptHandler()) {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_seh_except,
                                 diag::note_exits_seh_except,
                                 Except->getSourceRange().getBegin()));
      BuildScopeInformation(Except->getBlock(), NewParentScope);
    } else if (SEHFinallyStmt *Finally = TS->getFinallyHandler()) {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_seh_finally,
                                 diag::note_exits_seh_finally,
                                 Finally->getSourceRange().getBegin()));
      BuildScopeInformation(Finally->getBlock(), NewParentScope);
    }

    return;
  }

  case Stmt::DeclStmtClass: {
    // If this is a declstmt with a VLA definition, it defines a scope from here
    // to the end of the containing context.
    DeclStmt *DS = cast<DeclStmt>(S);
    // The decl statement creates a scope if any of the decls in it are VLAs
    // or have the cleanup attribute.
    for (auto *I : DS->decls())
      BuildScopeInformation(I, origParentScope);
    return;
  }

  case Stmt::StmtExprClass: {
    // [GNU]
    // Jumping into a statement expression with goto or using
    // a switch statement outside the statement expression with
    // a case or default label inside the statement expression is not permitted.
    // Jumping out of a statement expression is permitted.
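    //
    // Illustrative example (not from the original comment):
    //   goto lab;                     // jumping in is diagnosed
    //   int x = ({ lab: 1; });
    //   int y = ({ goto out; 0; });   // jumping out is permitted
    // out: ;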
    StmtExpr *SE = cast<StmtExpr>(S);
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope,
                               diag::note_enters_statement_expression,
                               /*OutDiag=*/0, SE->getBeginLoc()));
    BuildScopeInformation(SE->getSubStmt(), NewParentScope);
    return;
  }

  case Stmt::ObjCAtTryStmtClass: {
    // Disallow jumps into any part of an @try statement by pushing a scope and
    // walking all sub-stmts in that scope.
    ObjCAtTryStmt *AT = cast<ObjCAtTryStmt>(S);
    // Recursively walk the AST for the @try part.
    {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_try,
                                 diag::note_exits_objc_try,
                                 AT->getAtTryLoc()));
      if (Stmt *TryPart = AT->getTryBody())
        BuildScopeInformation(TryPart, NewParentScope);
    }

    // Jumping from a @catch block to the @finally or @try block is not valid.
    for (ObjCAtCatchStmt *AC : AT->catch_stmts()) {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_catch,
                                 diag::note_exits_objc_catch,
                                 AC->getAtCatchLoc()));
      // Recursively walk the AST for the @catch part.
      BuildScopeInformation(AC->getCatchBody(), NewParentScope);
    }

    // Jumping from the @finally block to the @try or @catch blocks is not valid.
    if (ObjCAtFinallyStmt *AF = AT->getFinallyStmt()) {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_finally,
                                 diag::note_exits_objc_finally,
                                 AF->getAtFinallyLoc()));
      BuildScopeInformation(AF, NewParentScope);
    }

    return;
  }

  case Stmt::ObjCAtSynchronizedStmtClass: {
    // Disallow jumps into the protected statement of an @synchronized, but
    // allow jumps into the object expression it protects.
    ObjCAtSynchronizedStmt *AS = cast<ObjCAtSynchronizedStmt>(S);
    // Recursively walk the AST for the @synchronized object expr, it is
    // evaluated in the normal scope.
    BuildScopeInformation(AS->getSynchExpr(), ParentScope);

    // Recursively walk the AST for the @synchronized part, protected by a new
    // scope.
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope,
                               diag::note_protected_by_objc_synchronized,
                               diag::note_exits_objc_synchronized,
                               AS->getAtSynchronizedLoc()));
    BuildScopeInformation(AS->getSynchBody(), NewParentScope);
    return;
  }

  case Stmt::ObjCAutoreleasePoolStmtClass: {
    // Disallow jumps into the protected statement of an @autoreleasepool.
    ObjCAutoreleasePoolStmt *AS = cast<ObjCAutoreleasePoolStmt>(S);
    // Recursively walk the AST for the @autoreleasepool part, protected by a
    // new scope.
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope,
                               diag::note_protected_by_objc_autoreleasepool,
                               diag::note_exits_objc_autoreleasepool,
                               AS->getAtLoc()));
    BuildScopeInformation(AS->getSubStmt(), NewParentScope);
    return;
  }

  case Stmt::ExprWithCleanupsClass: {
    // Disallow jumps past full-expressions that use blocks with
    // non-trivial cleanups of their captures.  This is theoretically
    // implementable but a lot of work which we haven't felt up to doing.
    ExprWithCleanups *EWC = cast<ExprWithCleanups>(S);
    for (unsigned i = 0, e = EWC->getNumObjects(); i != e; ++i) {
      if (auto *BDecl = EWC->getObject(i).dyn_cast<BlockDecl *>())
        for (const auto &CI : BDecl->captures()) {
          VarDecl *variable = CI.getVariable();
          BuildScopeInformation(variable, BDecl, origParentScope);
        }
      else if (auto *CLE = EWC->getObject(i).dyn_cast<CompoundLiteralExpr *>())
        BuildScopeInformation(CLE, origParentScope);
      else
        llvm_unreachable("unexpected cleanup object type");
    }
    break;
  }

  case Stmt::MaterializeTemporaryExprClass: {
    // Disallow jumps out of scopes containing temporaries lifetime-extended to
    // automatic storage duration.
    MaterializeTemporaryExpr *MTE = cast<MaterializeTemporaryExpr>(S);
    if (MTE->getStorageDuration() == SD_Automatic) {
      SmallVector<const Expr *, 4> CommaLHS;
      SmallVector<SubobjectAdjustment, 4> Adjustments;
      const Expr *ExtendedObject =
          MTE->getSubExpr()->skipRValueSubobjectAdjustments(CommaLHS,
                                                            Adjustments);
      if (ExtendedObject->getType().isDestructedType()) {
        Scopes.push_back(GotoScope(ParentScope, 0,
                                   diag::note_exits_temporary_dtor,
                                   ExtendedObject->getExprLoc()));
        origParentScope = Scopes.size()-1;
      }
    }
    break;
  }

  case Stmt::CaseStmtClass:
  case Stmt::DefaultStmtClass:
  case Stmt::LabelStmtClass:
    LabelAndGotoScopes[S] = ParentScope;
    break;

  case Stmt::AttributedStmtClass: {
    AttributedStmt *AS = cast<AttributedStmt>(S);
    if (GetMustTailAttr(AS)) {
      LabelAndGotoScopes[AS] = ParentScope;
      MustTailStmts.push_back(AS);
    }
    break;
  }

  default:
    if (auto *ED = dyn_cast<OMPExecutableDirective>(S)) {
      if (!ED->isStandaloneDirective()) {
        unsigned NewParentScope = Scopes.size();
        Scopes.emplace_back(ParentScope,
                            diag::note_omp_protected_structured_block,
                            diag::note_omp_exits_structured_block,
                            ED->getStructuredBlock()->getBeginLoc());
        BuildScopeInformation(ED->getStructuredBlock(), NewParentScope);
        return;
      }
    }
    break;
  }

  for (Stmt *SubStmt : S->children()) {
    if (!SubStmt)
      continue;
    if (StmtsToSkip) {
      --StmtsToSkip;
      continue;
    }

    // Cases, labels, and defaults aren't "scope parents".  It's also
    // important to handle these iteratively instead of recursively in
    // order to avoid blowing out the stack.
    while (true) {
      Stmt *Next;
      if (SwitchCase *SC = dyn_cast<SwitchCase>(SubStmt))
        Next = SC->getSubStmt();
      else if (LabelStmt *LS = dyn_cast<LabelStmt>(SubStmt))
        Next = LS->getSubStmt();
      else
        break;

      LabelAndGotoScopes[SubStmt] = ParentScope;
      SubStmt = Next;
    }

    // Recursively walk the AST.
    BuildScopeInformation(SubStmt, ParentScope);
  }
}

/// VerifyJumps - Verify each element of the Jumps array to see if they are
/// valid, emitting diagnostics if not.
void JumpScopeChecker::VerifyJumps() {
  while (!Jumps.empty()) {
    Stmt *Jump = Jumps.pop_back_val();

    // Direct gotos are checked against their target label's scope.
    if (GotoStmt *GS = dyn_cast<GotoStmt>(Jump)) {
      // The label may not have a statement if it's coming from inline MS ASM.
      if (GS->getLabel()->getStmt()) {
        CheckJump(GS, GS->getLabel()->getStmt(), GS->getGotoLoc(),
                  diag::err_goto_into_protected_scope,
                  diag::ext_goto_into_protected_scope,
                  diag::warn_cxx98_compat_goto_into_protected_scope);
      }
      CheckGotoStmt(GS);
      continue;
    }

    // If an asm goto jumps to a different scope, things like destructors or
    // initializers might not be run which may be surprising to users. Perhaps
    // this behavior can be changed in the future, but today Clang will not
    // generate such code. Produce a diagnostic instead. See also the
    // discussion here: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=110728.
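    //
    // Illustrative example (not from the original comment) of a rejected
    // asm goto:
    //   {
    //     SomeClassWithDtor guard;          // hypothetical type with a dtor
    //     asm goto("jmp %l0" :::: out);     // would skip ~SomeClassWithDtor()
    //   }
    // out: ;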
    if (auto *G = dyn_cast<GCCAsmStmt>(Jump)) {
      for (AddrLabelExpr *L : G->labels()) {
        LabelDecl *LD = L->getLabel();
        unsigned JumpScope = LabelAndGotoScopes[G];
        unsigned TargetScope = LabelAndGotoScopes[LD->getStmt()];
        if (JumpScope != TargetScope)
          DiagnoseIndirectOrAsmJump(G, JumpScope, LD, TargetScope);
      }
      continue;
    }

    // We only get indirect gotos here when they have a constant target.
    if (IndirectGotoStmt *IGS = dyn_cast<IndirectGotoStmt>(Jump)) {
      LabelDecl *Target = IGS->getConstantTarget();
      CheckJump(IGS, Target->getStmt(), IGS->getGotoLoc(),
                diag::err_goto_into_protected_scope,
                diag::ext_goto_into_protected_scope,
                diag::warn_cxx98_compat_goto_into_protected_scope);
      continue;
    }

    SwitchStmt *SS = cast<SwitchStmt>(Jump);
    for (SwitchCase *SC = SS->getSwitchCaseList(); SC;
         SC = SC->getNextSwitchCase()) {
      if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(SC)))
        continue;
      SourceLocation Loc;
      if (CaseStmt *CS = dyn_cast<CaseStmt>(SC))
        Loc = CS->getBeginLoc();
      else if (DefaultStmt *DS = dyn_cast<DefaultStmt>(SC))
        Loc = DS->getBeginLoc();
      else
        Loc = SC->getBeginLoc();
      CheckJump(SS, SC, Loc, diag::err_switch_into_protected_scope, 0,
                diag::warn_cxx98_compat_switch_into_protected_scope);
    }
  }
}

/// VerifyIndirectJumps - Verify whether any possible indirect goto jump might
/// cross a protection boundary.  Unlike direct jumps, indirect goto jumps
/// count cleanups as protection boundaries: since there's no way to know where
/// the jump is going, we can't implicitly run the right cleanups the way we
/// can with direct jumps.  Thus, an indirect/asm jump is "trivial" if it
/// bypasses no initializations and no teardowns.  More formally, an
/// indirect/asm jump from A to B is trivial if the path out from A to DCA(A,B)
/// is trivial and the path in from DCA(A,B) to B is trivial, where DCA(A,B) is
/// the deepest common ancestor of A and B.  Jump-triviality is transitive but
/// asymmetric.
///
/// A path in is trivial if none of the entered scopes have an InDiag.
/// A path out is trivial if none of the exited scopes have an OutDiag.
///
/// Under these definitions, this function checks that the indirect
/// jump between A and B is trivial for every indirect goto statement A
/// and every label B whose address was taken in the function.
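///
/// Illustrative example (not from the original comment) of a non-trivial
/// indirect jump:
///   void *p = &&inside;
///   goto *p;           // the jump site is outside the VLA scope
///   {
///     int vla[n];      // entering this scope has an InDiag
///   inside: ;          // the target label lives inside it
///   }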
void JumpScopeChecker::VerifyIndirectJumps() {
  if (IndirectJumps.empty())
    return;
  // If there aren't any address-of-label expressions in this function,
  // complain about the first indirect goto.
  if (IndirectJumpTargets.empty()) {
    S.Diag(IndirectJumps[0]->getBeginLoc(),
           diag::err_indirect_goto_without_addrlabel);
    return;
  }
  // Collect a single representative of every scope containing an indirect
  // goto.  For most code bases, this substantially cuts down on the number of
  // jump sites we'll have to consider later.
  using JumpScope = std::pair<unsigned, Stmt *>;
  SmallVector<JumpScope, 32> JumpScopes;
  {
    llvm::DenseMap<unsigned, Stmt*> JumpScopesMap;
    for (Stmt *IG : IndirectJumps) {
      if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(IG)))
        continue;
      unsigned IGScope = LabelAndGotoScopes[IG];
      if (!JumpScopesMap.contains(IGScope))
        JumpScopesMap[IGScope] = IG;
    }
    JumpScopes.reserve(JumpScopesMap.size());
    for (auto &Pair : JumpScopesMap)
      JumpScopes.emplace_back(Pair);
  }

  // Collect a single representative of every scope containing a
  // label whose address was taken somewhere in the function.
  // For most code bases, there will be only one such scope.
  llvm::DenseMap<unsigned, LabelDecl*> TargetScopes;
  for (LabelDecl *TheLabel : IndirectJumpTargets) {
    if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(TheLabel->getStmt())))
      continue;
    unsigned LabelScope = LabelAndGotoScopes[TheLabel->getStmt()];
    if (!TargetScopes.contains(LabelScope))
      TargetScopes[LabelScope] = TheLabel;
  }

  // For each target scope, make sure it's trivially reachable from
  // every scope containing a jump site.
  //
  // A path between scopes always consists of exiting zero or more
  // scopes, then entering zero or more scopes.  We build a set
  // of scopes S from which the target scope can be trivially
  // entered, then verify that every jump scope can be trivially
  // exited to reach a scope in S.
  llvm::BitVector Reachable(Scopes.size(), false);
  for (auto [TargetScope, TargetLabel] : TargetScopes) {
    Reachable.reset();

    // Mark all the enclosing scopes from which you can safely jump
    // into the target scope.  'Min' will end up being the index of
    // the shallowest such scope.
    unsigned Min = TargetScope;
    while (true) {
      Reachable.set(Min);

      // Don't go beyond the outermost scope.
      if (Min == 0) break;

      // Stop if we can't trivially enter the current scope.
      if (Scopes[Min].InDiag) break;

      Min = Scopes[Min].ParentScope;
    }

    // Walk through all the jump sites, checking that they can trivially
    // reach this label scope.
    for (auto [JumpScope, JumpStmt] : JumpScopes) {
      unsigned Scope = JumpScope;
      // Walk out the "scope chain" for this scope, looking for a scope
      // we've marked reachable.  For well-formed code this amortizes
      // to O(JumpScopes.size() / Scopes.size()):  we only iterate
      // when we see something unmarked, and in well-formed code we
      // mark everything we iterate past.
      bool IsReachable = false;
      while (true) {
        if (Reachable.test(Scope)) {
          // If we find something reachable, mark all the scopes we just
          // walked through as reachable.
          for (unsigned S = JumpScope; S != Scope; S = Scopes[S].ParentScope)
            Reachable.set(S);
          IsReachable = true;
          break;
        }

        // Don't walk out if we've reached the top-level scope or we've
        // gotten shallower than the shallowest reachable scope.
        if (Scope == 0 || Scope < Min) break;

        // Don't walk out through an out-diagnostic.
        if (Scopes[Scope].OutDiag) break;

        Scope = Scopes[Scope].ParentScope;
      }

      // Only diagnose if we didn't find something.
      if (IsReachable) continue;

      DiagnoseIndirectOrAsmJump(JumpStmt, JumpScope, TargetLabel, TargetScope);
    }
  }
}

/// Return true if a particular error+note combination must be downgraded to a
/// warning in Microsoft mode.
static bool IsMicrosoftJumpWarning(unsigned JumpDiag, unsigned InDiagNote) {
  return (JumpDiag == diag::err_goto_into_protected_scope &&
          (InDiagNote == diag::note_protected_by_variable_init ||
           InDiagNote == diag::note_protected_by_variable_nontriv_destructor));
}

/// Return true if a particular note should be downgraded to a compatibility
/// warning in C++11 mode.
static bool IsCXX98CompatWarning(Sema &S, unsigned InDiagNote) {
  return S.getLangOpts().CPlusPlus11 &&
         InDiagNote == diag::note_protected_by_variable_non_pod;
}

/// Produce primary diagnostic for an indirect jump statement.
static void DiagnoseIndirectOrAsmJumpStmt(Sema &S, Stmt *Jump,
                                          LabelDecl *Target, bool &Diagnosed) {
  if (Diagnosed)
    return;
  bool IsAsmGoto = isa<GCCAsmStmt>(Jump);
  S.Diag(Jump->getBeginLoc(), diag::err_indirect_goto_in_protected_scope)
      << IsAsmGoto;
  S.Diag(Target->getStmt()->getIdentLoc(), diag::note_indirect_goto_target)
      << IsAsmGoto;
  Diagnosed = true;
}

/// Produce note diagnostics for a jump into a protected scope.
void JumpScopeChecker::NoteJumpIntoScopes(ArrayRef<unsigned> ToScopes) {
  if (CHECK_PERMISSIVE(ToScopes.empty()))
    return;
  for (unsigned I = 0, E = ToScopes.size(); I != E; ++I)
    if (Scopes[ToScopes[I]].InDiag)
      S.Diag(Scopes[ToScopes[I]].Loc, Scopes[ToScopes[I]].InDiag);
}

/// Diagnose an indirect jump which is known to cross scopes.
void JumpScopeChecker::DiagnoseIndirectOrAsmJump(Stmt *Jump, unsigned JumpScope,
                                                 LabelDecl *Target,
                                                 unsigned TargetScope) {
  if (CHECK_PERMISSIVE(JumpScope == TargetScope))
    return;

  unsigned Common = GetDeepestCommonScope(JumpScope, TargetScope);
  bool Diagnosed = false;

  // Walk out the scope chain until we reach the common ancestor.
  for (unsigned I = JumpScope; I != Common; I = Scopes[I].ParentScope)
    if (Scopes[I].OutDiag) {
      DiagnoseIndirectOrAsmJumpStmt(S, Jump, Target, Diagnosed);
      S.Diag(Scopes[I].Loc, Scopes[I].OutDiag);
    }

  SmallVector<unsigned, 10> ToScopesCXX98Compat;

  // Now walk into the scopes containing the label whose address was taken.
  for (unsigned I = TargetScope; I != Common; I = Scopes[I].ParentScope)
    if (IsCXX98CompatWarning(S, Scopes[I].InDiag))
      ToScopesCXX98Compat.push_back(I);
    else if (Scopes[I].InDiag) {
      DiagnoseIndirectOrAsmJumpStmt(S, Jump, Target, Diagnosed);
      S.Diag(Scopes[I].Loc, Scopes[I].InDiag);
    }

  // Diagnose this jump if it would be ill-formed in C++98.
  if (!Diagnosed && !ToScopesCXX98Compat.empty()) {
    bool IsAsmGoto = isa<GCCAsmStmt>(Jump);
    S.Diag(Jump->getBeginLoc(),
           diag::warn_cxx98_compat_indirect_goto_in_protected_scope)
        << IsAsmGoto;
    S.Diag(Target->getStmt()->getIdentLoc(), diag::note_indirect_goto_target)
        << IsAsmGoto;
    NoteJumpIntoScopes(ToScopesCXX98Compat);
  }
}

/// CheckJump - Validate that the specified jump statement is valid: that it is
/// jumping within or out of its current scope, not into a deeper one.
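///
/// Illustrative example (not from the original comment):
///   goto L;          // From: the function scope
///   {
///     int vla[n];    // protected scope with an InDiag
///   L: ;             // To: deeper than the common scope, so diagnosed
///   }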
void JumpScopeChecker::CheckJump(Stmt *From, Stmt *To, SourceLocation DiagLoc,
                                 unsigned JumpDiagError, unsigned JumpDiagWarning,
                                 unsigned JumpDiagCXX98Compat) {
  if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(From)))
    return;
  if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(To)))
    return;

  unsigned FromScope = LabelAndGotoScopes[From];
  unsigned ToScope = LabelAndGotoScopes[To];

  // Common case: exactly the same scope, which is fine.
  if (FromScope == ToScope) return;

  // Warn on gotos out of __finally blocks.
  if (isa<GotoStmt>(From) || isa<IndirectGotoStmt>(From)) {
    // If FromScope > ToScope, FromScope is more nested and the jump goes to a
    // less nested scope.  Check if it crosses a __finally along the way.
    for (unsigned I = FromScope; I > ToScope; I = Scopes[I].ParentScope) {
      if (Scopes[I].InDiag == diag::note_protected_by_seh_finally) {
        S.Diag(From->getBeginLoc(), diag::warn_jump_out_of_seh_finally);
        break;
      }
      if (Scopes[I].InDiag == diag::note_omp_protected_structured_block) {
        S.Diag(From->getBeginLoc(), diag::err_goto_into_protected_scope);
        S.Diag(To->getBeginLoc(), diag::note_omp_exits_structured_block);
        break;
      }
    }
  }

  unsigned CommonScope = GetDeepestCommonScope(FromScope, ToScope);

  // It's okay to jump out from a nested scope.
  if (CommonScope == ToScope) return;

  // Pull out (and reverse) any scopes we might need to diagnose skipping.
  SmallVector<unsigned, 10> ToScopesCXX98Compat;
  SmallVector<unsigned, 10> ToScopesError;
  SmallVector<unsigned, 10> ToScopesWarning;
  for (unsigned I = ToScope; I != CommonScope; I = Scopes[I].ParentScope) {
    if (S.getLangOpts().MSVCCompat && JumpDiagWarning != 0 &&
        IsMicrosoftJumpWarning(JumpDiagError, Scopes[I].InDiag))
      ToScopesWarning.push_back(I);
    else if (IsCXX98CompatWarning(S, Scopes[I].InDiag))
      ToScopesCXX98Compat.push_back(I);
    else if (Scopes[I].InDiag)
      ToScopesError.push_back(I);
  }

  // Handle warnings.
  if (!ToScopesWarning.empty()) {
    S.Diag(DiagLoc, JumpDiagWarning);
    NoteJumpIntoScopes(ToScopesWarning);
    assert(isa<LabelStmt>(To));
    LabelStmt *Label = cast<LabelStmt>(To);
    Label->setSideEntry(true);
  }

  // Handle errors.
  if (!ToScopesError.empty()) {
    S.Diag(DiagLoc, JumpDiagError);
    NoteJumpIntoScopes(ToScopesError);
  }

  // Handle -Wc++98-compat warnings if the jump is well-formed.
  if (ToScopesError.empty() && !ToScopesCXX98Compat.empty()) {
    S.Diag(DiagLoc, JumpDiagCXX98Compat);
    NoteJumpIntoScopes(ToScopesCXX98Compat);
  }
}

void JumpScopeChecker::CheckGotoStmt(GotoStmt *GS) {
  if (GS->getLabel()->isMSAsmLabel()) {
    S.Diag(GS->getGotoLoc(), diag::err_goto_ms_asm_label)
        << GS->getLabel()->getIdentifier();
    S.Diag(GS->getLabel()->getLocation(), diag::note_goto_ms_asm_label)
        << GS->getLabel()->getIdentifier();
  }
}

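// Illustrative example (not from the original source) of a musttail return
// rejected below: returning with [[clang::musttail]] from a scope that still
// has a cleanup to run, e.g.
//   SomeClassWithDtor guard;            // hypothetical type with a dtor
//   [[clang::musttail]] return g(x);    // err_musttail_scope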
void JumpScopeChecker::VerifyMustTailStmts() {
  for (AttributedStmt *AS : MustTailStmts) {
    for (unsigned I = LabelAndGotoScopes[AS]; I; I = Scopes[I].ParentScope) {
      if (Scopes[I].OutDiag) {
        S.Diag(AS->getBeginLoc(), diag::err_musttail_scope);
        S.Diag(Scopes[I].Loc, Scopes[I].OutDiag);
      }
    }
  }
}

const Attr *JumpScopeChecker::GetMustTailAttr(AttributedStmt *AS) {
  ArrayRef<const Attr *> Attrs = AS->getAttrs();
  const auto *Iter =
      llvm::find_if(Attrs, [](const Attr *A) { return isa<MustTailAttr>(A); });
  return Iter != Attrs.end() ? *Iter : nullptr;
}

void Sema::DiagnoseInvalidJumps(Stmt *Body) {
  (void)JumpScopeChecker(Body, *this);
}