//===- ExprEngineCXX.cpp - ExprEngine support for C++ -----------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines the C++ expression evaluation engine.
//
//===----------------------------------------------------------------------===//
#include "clang/AST/DeclCXX.h"
#include "clang/AST/ParentMap.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Analysis/ConstructionContext.h"
#include "clang/Basic/PrettyStackTrace.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/SVals.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Sequence.h"
#include <optional>

using namespace clang;
using namespace ento;
void ExprEngine::CreateCXXTemporaryObject(const MaterializeTemporaryExpr *ME,
                                          ExplodedNode *Pred,
                                          ExplodedNodeSet &Dst) {
  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  const Expr *tempExpr = ME->getSubExpr()->IgnoreParens();
  ProgramStateRef state = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();

  state = createTemporaryRegionIfNeeded(state, LCtx, tempExpr, ME);
  Bldr.generateNode(ME, Pred, state);
}

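// Models a call to a trivial copy/move constructor or a trivial copy/move
// assignment operator without inlining it: the source argument's value is
// loaded and bound directly to the destination object, and the call
// expression is bound to the appropriate return value.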
// FIXME: This is the sort of code that should eventually live in a Core
// checker rather than as a special case in ExprEngine.
void ExprEngine::performTrivialCopy(NodeBuilder &Bldr, ExplodedNode *Pred,
                                    const CallEvent &Call) {
  SVal ThisVal;
  bool AlwaysReturnsLValue;
  [[maybe_unused]] const CXXRecordDecl *ThisRD = nullptr;
  if (const CXXConstructorCall *Ctor = dyn_cast<CXXConstructorCall>(&Call)) {
    assert(Ctor->getDecl()->isTrivial());
    assert(Ctor->getDecl()->isCopyOrMoveConstructor());
    ThisVal = Ctor->getCXXThisVal();
    ThisRD = Ctor->getDecl()->getParent();
    AlwaysReturnsLValue = false;
  } else {
    assert(cast<CXXMethodDecl>(Call.getDecl())->isTrivial());
    assert(cast<CXXMethodDecl>(Call.getDecl())->getOverloadedOperator() ==
           OO_Equal);
    ThisVal = cast<CXXInstanceCall>(Call).getCXXThisVal();
    ThisRD = cast<CXXMethodDecl>(Call.getDecl())->getParent();
    AlwaysReturnsLValue = true;
  }

  const LocationContext *LCtx = Pred->getLocationContext();
  const Expr *CallExpr = Call.getOriginExpr();

  ExplodedNodeSet Dst;
  Bldr.takeNodes(Pred);

  assert(ThisRD);
  SVal V = Call.getArgSVal(0);

  // If the value being copied is not unknown, load from its location to get
  // an aggregate rvalue.
  if (std::optional<Loc> L = V.getAs<Loc>())
    V = Pred->getState()->getSVal(*L);
  else
    assert(V.isUnknownOrUndef());
  evalBind(Dst, CallExpr, Pred, ThisVal, V, true);

  PostStmt PS(CallExpr, LCtx);
  for (ExplodedNode *N : Dst) {
    ProgramStateRef State = N->getState();
    if (AlwaysReturnsLValue)
      State = State->BindExpr(CallExpr, LCtx, ThisVal);
    else
      State = bindReturnValue(Call, LCtx, State);
    Bldr.generateNode(PS, State, N);
  }
}

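// When an array of objects is being constructed, returns the lvalue of the
// element at index Idx, strips Ty down to the innermost element type, and
// sets IsArray accordingly.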
SVal ExprEngine::makeElementRegion(ProgramStateRef State, SVal LValue,
                                   QualType &Ty, bool &IsArray, unsigned Idx) {
  SValBuilder &SVB = State->getStateManager().getSValBuilder();
  ASTContext &Ctx = SVB.getContext();

  if (const ArrayType *AT = Ctx.getAsArrayType(Ty)) {
    while (AT) {
      Ty = AT->getElementType();
      AT = dyn_cast<ArrayType>(AT->getElementType());
    }
    LValue = State->getLValue(Ty, SVB.makeArrayIndex(Idx), LValue);
    IsArray = true;
  }

  return LValue;
}

// When the prvalue is returned from the function (the kind is one of
// SimpleReturnedValueKind, CXX17ElidedCopyReturnedValueKind), its
// materialization happens in the context of the caller.
// We pass BldrCtx explicitly, as currBldrCtx always refers to the callee's
// context.
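// A minimal illustration of such a context (hypothetical example, not taken
// from the test suite):
//
//   struct S { int x; };
//   S make() { return S{42}; }   // The returned prvalue is constructed
//   void use() { S s = make(); } // directly into 's' in the caller's frame.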
SVal ExprEngine::computeObjectUnderConstruction(
    const Expr *E, ProgramStateRef State, const NodeBuilderContext *BldrCtx,
    const LocationContext *LCtx, const ConstructionContext *CC,
    EvalCallOptions &CallOpts, unsigned Idx) {

  SValBuilder &SVB = getSValBuilder();
  MemRegionManager &MRMgr = SVB.getRegionManager();
  ASTContext &ACtx = SVB.getContext();

  // Compute the target region by exploring the construction context.
  if (CC) {
    switch (CC->getKind()) {
    case ConstructionContext::CXX17ElidedCopyVariableKind:
    case ConstructionContext::SimpleVariableKind: {
      const auto *DSCC = cast<VariableConstructionContext>(CC);
      const auto *DS = DSCC->getDeclStmt();
      const auto *Var = cast<VarDecl>(DS->getSingleDecl());
      QualType Ty = Var->getType();
      return makeElementRegion(State, State->getLValue(Var, LCtx), Ty,
                               CallOpts.IsArrayCtorOrDtor, Idx);
    }
    case ConstructionContext::CXX17ElidedCopyConstructorInitializerKind:
    case ConstructionContext::SimpleConstructorInitializerKind: {
      const auto *ICC = cast<ConstructorInitializerConstructionContext>(CC);
      const auto *Init = ICC->getCXXCtorInitializer();
      const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl());
      Loc ThisPtr = SVB.getCXXThis(CurCtor, LCtx->getStackFrame());
      SVal ThisVal = State->getSVal(ThisPtr);
      if (Init->isBaseInitializer()) {
        const auto *ThisReg = cast<SubRegion>(ThisVal.getAsRegion());
        const CXXRecordDecl *BaseClass =
            Init->getBaseClass()->getAsCXXRecordDecl();
        const auto *BaseReg =
            MRMgr.getCXXBaseObjectRegion(BaseClass, ThisReg,
                                         Init->isBaseVirtual());
        return SVB.makeLoc(BaseReg);
      }
      if (Init->isDelegatingInitializer())
        return ThisVal;

      const ValueDecl *Field;
      SVal FieldVal;
      if (Init->isIndirectMemberInitializer()) {
        Field = Init->getIndirectMember();
        FieldVal = State->getLValue(Init->getIndirectMember(), ThisVal);
      } else {
        Field = Init->getMember();
        FieldVal = State->getLValue(Init->getMember(), ThisVal);
      }

      QualType Ty = Field->getType();
      return makeElementRegion(State, FieldVal, Ty, CallOpts.IsArrayCtorOrDtor,
                               Idx);
    }
    case ConstructionContext::NewAllocatedObjectKind: {
      if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
        const auto *NECC = cast<NewAllocatedObjectConstructionContext>(CC);
        const auto *NE = NECC->getCXXNewExpr();
        SVal V = *getObjectUnderConstruction(State, NE, LCtx);
        if (const SubRegion *MR =
                dyn_cast_or_null<SubRegion>(V.getAsRegion())) {
          if (NE->isArray()) {
            CallOpts.IsArrayCtorOrDtor = true;

            auto Ty = NE->getType()->getPointeeType();
            while (const auto *AT = getContext().getAsArrayType(Ty))
              Ty = AT->getElementType();

            auto R = MRMgr.getElementRegion(Ty, svalBuilder.makeArrayIndex(Idx),
                                            MR, SVB.getContext());

            return loc::MemRegionVal(R);
          }
          return V;
        }
        // TODO: Detect when the allocator returns a null pointer.
        // Constructor shall not be called in this case.
      }
      break;
    }
    case ConstructionContext::SimpleReturnedValueKind:
    case ConstructionContext::CXX17ElidedCopyReturnedValueKind: {
      // The temporary is to be managed by the parent stack frame.
      // So build it in the parent stack frame if we're not in the
      // top frame of the analysis.
      const StackFrameContext *SFC = LCtx->getStackFrame();
      if (const LocationContext *CallerLCtx = SFC->getParent()) {
        auto RTC = (*SFC->getCallSiteBlock())[SFC->getIndex()]
                       .getAs<CFGCXXRecordTypedCall>();
        if (!RTC) {
          // We were unable to find the correct construction context for the
          // call in the parent stack frame. This is equivalent to not being
          // able to find a construction context at all.
          break;
        }
        if (isa<BlockInvocationContext>(CallerLCtx)) {
          // Unwrap block invocation contexts. They're mostly part of
          // the current stack frame.
          CallerLCtx = CallerLCtx->getParent();
          assert(!isa<BlockInvocationContext>(CallerLCtx));
        }

        NodeBuilderContext CallerBldrCtx(getCoreEngine(),
                                         SFC->getCallSiteBlock(), CallerLCtx);
        return computeObjectUnderConstruction(
            cast<Expr>(SFC->getCallSite()), State, &CallerBldrCtx, CallerLCtx,
            RTC->getConstructionContext(), CallOpts);
      } else {
        // We are in the top frame of the analysis. We do not know where the
        // object is returned to. Conjure a symbolic region for the return
        // value.
        // TODO: We probably need a new MemRegion kind to represent the storage
        // of that SymbolicRegion, so that we could produce a fancy symbol
        // instead of an anonymous conjured symbol.
        // TODO: Do we need to track the region to avoid having it die
        // too early? It does die too early, at least in C++17, but because
        // putting anything into a SymbolicRegion causes an immediate escape,
        // it doesn't cause any leak false positives.
        const auto *RCC = cast<ReturnedValueConstructionContext>(CC);
        // Make sure that this doesn't coincide with any other symbol
        // conjured for the returned expression.
        static const int TopLevelSymRegionTag = 0;
        const Expr *RetE = RCC->getReturnStmt()->getRetValue();
        assert(RetE && "Void returns should not have a construction context");
        QualType ReturnTy = RetE->getType();
        QualType RegionTy = ACtx.getPointerType(ReturnTy);
        return SVB.conjureSymbolVal(&TopLevelSymRegionTag, RetE, SFC, RegionTy,
                                    currBldrCtx->blockCount());
      }
      llvm_unreachable("Unhandled return value construction context!");
    }
    case ConstructionContext::ElidedTemporaryObjectKind: {
      assert(AMgr.getAnalyzerOptions().ShouldElideConstructors);
      const auto *TCC = cast<ElidedTemporaryObjectConstructionContext>(CC);

      // Support pre-C++17 copy elision. We'll have the elidable copy
      // constructor in the AST and in the CFG, but we'll skip it
      // and construct directly into the final object. This call
      // also sets the CallOpts flags for us.
      // If the elided copy/move constructor is not supported, there's still
      // benefit in trying to model the non-elided constructor.
      // Stash our state before trying to elide, as it'll get overwritten.
      ProgramStateRef PreElideState = State;
      EvalCallOptions PreElideCallOpts = CallOpts;

      SVal V = computeObjectUnderConstruction(
          TCC->getConstructorAfterElision(), State, BldrCtx, LCtx,
          TCC->getConstructionContextAfterElision(), CallOpts);

      // FIXME: This definition of "copy elision has not failed" is unreliable.
      // It doesn't indicate that the constructor will actually be inlined
      // later; this is still up to evalCall() to decide.
      if (!CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion)
        return V;

      // Copy elision failed. Revert the changes and proceed as if we have
      // a simple temporary.
      CallOpts = PreElideCallOpts;
      CallOpts.IsElidableCtorThatHasNotBeenElided = true;
      [[fallthrough]];
    }
    case ConstructionContext::SimpleTemporaryObjectKind: {
      const auto *TCC = cast<TemporaryObjectConstructionContext>(CC);
      const MaterializeTemporaryExpr *MTE = TCC->getMaterializedTemporaryExpr();

      CallOpts.IsTemporaryCtorOrDtor = true;
      if (MTE) {
        if (const ValueDecl *VD = MTE->getExtendingDecl()) {
          StorageDuration SD = MTE->getStorageDuration();
          assert(SD != SD_FullExpression);
          if (!VD->getType()->isReferenceType()) {
            // We're lifetime-extended by a surrounding aggregate.
            // Automatic destructors aren't quite working in this case
            // on the CFG side. We should warn the caller about that.
            // FIXME: Is there a better way to retrieve this information from
            // the MaterializeTemporaryExpr?
            CallOpts.IsTemporaryLifetimeExtendedViaAggregate = true;
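            // Illustrative (hypothetical) example of this situation: the
            // temporary S() below is lifetime-extended through the aggregate
            // 'w', whose declared type is not a reference type:
            //
            //   struct Wrapper { const S &ref; };
            //   Wrapper w{S()};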
          }

          if (SD == SD_Static || SD == SD_Thread)
            return loc::MemRegionVal(
                MRMgr.getCXXStaticLifetimeExtendedObjectRegion(E, VD));

          return loc::MemRegionVal(
              MRMgr.getCXXLifetimeExtendedObjectRegion(E, VD, LCtx));
        }
        assert(MTE->getStorageDuration() == SD_FullExpression);
      }

      return loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));
    }
    case ConstructionContext::LambdaCaptureKind: {
      CallOpts.IsTemporaryCtorOrDtor = true;

      const auto *LCC = cast<LambdaCaptureConstructionContext>(CC);

      SVal Base = loc::MemRegionVal(
          MRMgr.getCXXTempObjectRegion(LCC->getInitializer(), LCtx));

      const auto *CE = dyn_cast_or_null<CXXConstructExpr>(E);
      if (getIndexOfElementToConstruct(State, CE, LCtx)) {
        CallOpts.IsArrayCtorOrDtor = true;
        Base = State->getLValue(E->getType(), svalBuilder.makeArrayIndex(Idx),
                                Base);
      }

      return Base;
    }
    case ConstructionContext::ArgumentKind: {
      // Arguments are technically temporaries.
      CallOpts.IsTemporaryCtorOrDtor = true;

      const auto *ACC = cast<ArgumentConstructionContext>(CC);
      const Expr *E = ACC->getCallLikeExpr();
      unsigned Idx = ACC->getIndex();

      CallEventManager &CEMgr = getStateManager().getCallEventManager();
      auto getArgLoc = [&](CallEventRef<> Caller) -> std::optional<SVal> {
        const LocationContext *FutureSFC =
            Caller->getCalleeStackFrame(BldrCtx->blockCount());
        // Return early if we are unable to reliably foresee
        // the future stack frame.
        if (!FutureSFC)
          return std::nullopt;

        // This should be equivalent to Caller->getDecl() for now, but
        // FutureSFC->getDecl() is likely to support better stuff (like
        // virtual functions) earlier.
        const Decl *CalleeD = FutureSFC->getDecl();

        // FIXME: Support for variadic arguments is not implemented here yet.
        if (CallEvent::isVariadic(CalleeD))
          return std::nullopt;

        // Operator arguments do not correspond to operator parameters
        // because this-argument is implemented as a normal argument in
        // operator call expressions but not in operator declarations.
        const TypedValueRegion *TVR = Caller->getParameterLocation(
            *Caller->getAdjustedParameterIndex(Idx), BldrCtx->blockCount());
        if (!TVR)
          return std::nullopt;

        return loc::MemRegionVal(TVR);
      };

      if (const auto *CE = dyn_cast<CallExpr>(E)) {
        CallEventRef<> Caller =
            CEMgr.getSimpleCall(CE, State, LCtx, getCFGElementRef());
        if (std::optional<SVal> V = getArgLoc(Caller))
          return *V;
        else
          break;
      } else if (const auto *CCE = dyn_cast<CXXConstructExpr>(E)) {
        // Don't bother figuring out the target region for the future
        // constructor because we won't need it.
        CallEventRef<> Caller = CEMgr.getCXXConstructorCall(
            CCE, /*Target=*/nullptr, State, LCtx, getCFGElementRef());
        if (std::optional<SVal> V = getArgLoc(Caller))
          return *V;
        else
          break;
      } else if (const auto *ME = dyn_cast<ObjCMessageExpr>(E)) {
        CallEventRef<> Caller =
            CEMgr.getObjCMethodCall(ME, State, LCtx, getCFGElementRef());
        if (std::optional<SVal> V = getArgLoc(Caller))
          return *V;
        else
          break;
      }
    }
    } // switch (CC->getKind())
  }

  // If we couldn't find an existing region to construct into, assume we're
  // constructing a temporary. Notify the caller of our failure.
  CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
  return loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));
}

ProgramStateRef ExprEngine::updateObjectsUnderConstruction(
    SVal V, const Expr *E, ProgramStateRef State, const LocationContext *LCtx,
    const ConstructionContext *CC, const EvalCallOptions &CallOpts) {
  if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion) {
    // Sounds like we failed to find the target region and therefore
    // copy elision failed. There's nothing we can do about it here.
    return State;
  }

  // See if we're constructing an existing region by looking at the
  // current construction context.
  assert(CC && "Computed target region without construction context?");
  switch (CC->getKind()) {
  case ConstructionContext::CXX17ElidedCopyVariableKind:
  case ConstructionContext::SimpleVariableKind: {
    const auto *DSCC = cast<VariableConstructionContext>(CC);
    return addObjectUnderConstruction(State, DSCC->getDeclStmt(), LCtx, V);
  }
  case ConstructionContext::CXX17ElidedCopyConstructorInitializerKind:
  case ConstructionContext::SimpleConstructorInitializerKind: {
    const auto *ICC = cast<ConstructorInitializerConstructionContext>(CC);
    const auto *Init = ICC->getCXXCtorInitializer();
    // Base and delegating initializers handled above
    assert(Init->isAnyMemberInitializer() &&
           "Base and delegating initializers should have been handled by "
           "computeObjectUnderConstruction()");
    return addObjectUnderConstruction(State, Init, LCtx, V);
  }
  case ConstructionContext::NewAllocatedObjectKind: {
    return State;
  }
  case ConstructionContext::SimpleReturnedValueKind:
  case ConstructionContext::CXX17ElidedCopyReturnedValueKind: {
    const StackFrameContext *SFC = LCtx->getStackFrame();
    const LocationContext *CallerLCtx = SFC->getParent();
    if (!CallerLCtx) {
      // No extra work is necessary in the top frame.
      return State;
    }

    auto RTC = (*SFC->getCallSiteBlock())[SFC->getIndex()]
                   .getAs<CFGCXXRecordTypedCall>();
    assert(RTC && "Could not have had a target region without it");
    if (isa<BlockInvocationContext>(CallerLCtx)) {
      // Unwrap block invocation contexts. They're mostly part of
      // the current stack frame.
      CallerLCtx = CallerLCtx->getParent();
      assert(!isa<BlockInvocationContext>(CallerLCtx));
    }

    return updateObjectsUnderConstruction(V,
        cast<Expr>(SFC->getCallSite()), State, CallerLCtx,
        RTC->getConstructionContext(), CallOpts);
  }
  case ConstructionContext::ElidedTemporaryObjectKind: {
    assert(AMgr.getAnalyzerOptions().ShouldElideConstructors);
    if (!CallOpts.IsElidableCtorThatHasNotBeenElided) {
      const auto *TCC = cast<ElidedTemporaryObjectConstructionContext>(CC);
      State = updateObjectsUnderConstruction(
          V, TCC->getConstructorAfterElision(), State, LCtx,
          TCC->getConstructionContextAfterElision(), CallOpts);

      // Remember that we've elided the constructor.
      State = addObjectUnderConstruction(
          State, TCC->getConstructorAfterElision(), LCtx, V);

      // Remember that we've elided the destructor.
      if (const auto *BTE = TCC->getCXXBindTemporaryExpr())
        State = elideDestructor(State, BTE, LCtx);

      // Instead of materialization, shamelessly return
      // the final object destination.
      if (const auto *MTE = TCC->getMaterializedTemporaryExpr())
        State = addObjectUnderConstruction(State, MTE, LCtx, V);

      return State;
    }
    // If we decided not to elide the constructor, proceed as if
    // it's a simple temporary.
    [[fallthrough]];
  }
  case ConstructionContext::SimpleTemporaryObjectKind: {
    const auto *TCC = cast<TemporaryObjectConstructionContext>(CC);
    if (const auto *BTE = TCC->getCXXBindTemporaryExpr())
      State = addObjectUnderConstruction(State, BTE, LCtx, V);

    if (const auto *MTE = TCC->getMaterializedTemporaryExpr())
      State = addObjectUnderConstruction(State, MTE, LCtx, V);

    return State;
  }
  case ConstructionContext::LambdaCaptureKind: {
    const auto *LCC = cast<LambdaCaptureConstructionContext>(CC);

    // If we capture an array, we want to store the super region, not a
    // sub-region.
    if (const auto *EL = dyn_cast_or_null<ElementRegion>(V.getAsRegion()))
      V = loc::MemRegionVal(EL->getSuperRegion());

    return addObjectUnderConstruction(
        State, {LCC->getLambdaExpr(), LCC->getIndex()}, LCtx, V);
  }
  case ConstructionContext::ArgumentKind: {
    const auto *ACC = cast<ArgumentConstructionContext>(CC);
    if (const auto *BTE = ACC->getCXXBindTemporaryExpr())
      State = addObjectUnderConstruction(State, BTE, LCtx, V);

    return addObjectUnderConstruction(
        State, {ACC->getCallLikeExpr(), ACC->getIndex()}, LCtx, V);
  }
  }
  llvm_unreachable("Unhandled construction context!");
}

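// An ArrayInitLoopExpr describes per-element initialization of one array from
// another, e.g. when a lambda captures an array by value or when an implicit
// copy/move constructor copies an array member (illustrative, hypothetical
// example):
//
//   struct S { int x; };
//   void f(S (&arr)[4]) {
//     auto lam = [arr] {}; // Copying 'arr' into the closure is modeled with
//                          // an ArrayInitLoopExpr in the AST.
//   }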
static ProgramStateRef
bindRequiredArrayElementToEnvironment(ProgramStateRef State,
                                      const ArrayInitLoopExpr *AILE,
                                      const LocationContext *LCtx, NonLoc Idx) {
  SValBuilder &SVB = State->getStateManager().getSValBuilder();
  MemRegionManager &MRMgr = SVB.getRegionManager();
  ASTContext &Ctx = SVB.getContext();

  // HACK: There is no way we can put the index of the array element into the
  // CFG unless we unroll the loop, so we manually select and bind the required
  // parameter to the environment.
  const Expr *SourceArray = AILE->getCommonExpr()->getSourceExpr();
  const auto *Ctor =
      cast<CXXConstructExpr>(extractElementInitializerFromNestedAILE(AILE));

  const auto *SourceArrayRegion =
      cast<SubRegion>(State->getSVal(SourceArray, LCtx).getAsRegion());
  const ElementRegion *ElementRegion =
      MRMgr.getElementRegion(Ctor->getType(), Idx, SourceArrayRegion, Ctx);

  return State->BindExpr(Ctor->getArg(0), LCtx,
                         loc::MemRegionVal(ElementRegion));
}

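// Shared implementation for CXXConstructExpr and CXXInheritedCtorInitExpr:
// computes the target region from the construction context, prepares the
// program state, and runs the pre-call, evaluation and post-call checker
// callbacks for the constructor call.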
void ExprEngine::handleConstructor(const Expr *E,
                                   ExplodedNode *Pred,
                                   ExplodedNodeSet &destNodes) {
  const auto *CE = dyn_cast<CXXConstructExpr>(E);
  const auto *CIE = dyn_cast<CXXInheritedCtorInitExpr>(E);
  assert(CE || CIE);

  const LocationContext *LCtx = Pred->getLocationContext();
  ProgramStateRef State = Pred->getState();

  SVal Target = UnknownVal();

  if (CE) {
    if (std::optional<SVal> ElidedTarget =
            getObjectUnderConstruction(State, CE, LCtx)) {
      // We've previously modeled an elidable constructor by pretending that
      // it in fact constructs into the correct target. This constructor can
      // therefore be skipped.
      Target = *ElidedTarget;
      StmtNodeBuilder Bldr(Pred, destNodes, *currBldrCtx);
      State = finishObjectConstruction(State, CE, LCtx);
      if (auto L = Target.getAs<Loc>())
        State = State->BindExpr(CE, LCtx, State->getSVal(*L, CE->getType()));
      Bldr.generateNode(CE, Pred, State);
      return;
    }
  }

  EvalCallOptions CallOpts;
  auto C = getCurrentCFGElement().getAs<CFGConstructor>();
  assert(C || getCurrentCFGElement().getAs<CFGStmt>());
  const ConstructionContext *CC = C ? C->getConstructionContext() : nullptr;

  const CXXConstructionKind CK =
      CE ? CE->getConstructionKind() : CIE->getConstructionKind();
  switch (CK) {
  case CXXConstructionKind::Complete: {
    // Inherited constructors are always base class constructors.
    assert(CE && !CIE && "A complete constructor is inherited?!");

    // If the ctor is part of an ArrayInitLoopExpr, we want to handle it
    // differently.
    auto *AILE = CC ? CC->getArrayInitLoop() : nullptr;

    unsigned Idx = 0;
    if (CE->getType()->isArrayType() || AILE) {

      auto isZeroSizeArray = [&] {
        uint64_t Size = 1;

        if (const auto *CAT = dyn_cast<ConstantArrayType>(CE->getType()))
          Size = getContext().getConstantArrayElementCount(CAT);
        else if (AILE)
          Size = getContext().getArrayInitLoopExprElementCount(AILE);

        return Size == 0;
      };

      // No element construction will happen in a 0 size array.
      if (isZeroSizeArray()) {
        StmtNodeBuilder Bldr(Pred, destNodes, *currBldrCtx);
        static SimpleProgramPointTag T{"ExprEngine",
                                       "Skipping 0 size array construction"};
        Bldr.generateNode(CE, Pred, State, &T);
        return;
      }

      Idx = getIndexOfElementToConstruct(State, CE, LCtx).value_or(0u);
      State = setIndexOfElementToConstruct(State, CE, LCtx, Idx + 1);
    }

    if (AILE) {
      // Only set this once even though we loop through it multiple times.
      if (!getPendingInitLoop(State, CE, LCtx))
        State = setPendingInitLoop(
            State, CE, LCtx,
            getContext().getArrayInitLoopExprElementCount(AILE));

      State = bindRequiredArrayElementToEnvironment(
          State, AILE, LCtx, svalBuilder.makeArrayIndex(Idx));
    }

    // The target region is found from construction context.
    std::tie(State, Target) = handleConstructionContext(
        CE, State, currBldrCtx, LCtx, CC, CallOpts, Idx);
    break;
  }
  case CXXConstructionKind::VirtualBase: {
    // Make sure we are not calling virtual base class initializers twice.
    // Only the most-derived object should initialize virtual base classes.
    const auto *OuterCtor = dyn_cast_or_null<CXXConstructExpr>(
        LCtx->getStackFrame()->getCallSite());
    assert(
        (!OuterCtor ||
         OuterCtor->getConstructionKind() == CXXConstructionKind::Complete ||
         OuterCtor->getConstructionKind() == CXXConstructionKind::Delegating) &&
        ("This virtual base should have already been initialized by "
         "the most derived class!"));
    (void)OuterCtor;
    [[fallthrough]];
  }
  case CXXConstructionKind::NonVirtualBase:
    // In C++17, classes with non-virtual bases may be aggregates, so they would
    // be initialized as aggregates without a constructor call, so we may have
    // a base class constructed directly into an initializer list without
    // having the derived-class constructor call on the previous stack frame.
    // Initializer lists may be nested into more initializer lists that
    // correspond to surrounding aggregate initializations.
    // FIXME: For now this code essentially bails out. We need to find the
    // correct target region and set it.
    // FIXME: Instead of relying on the ParentMap, we should have the
    // trigger-statement (InitListExpr in this case) passed down from CFG or
    // otherwise always available during construction.
    if (isa_and_nonnull<InitListExpr>(LCtx->getParentMap().getParent(E))) {
      MemRegionManager &MRMgr = getSValBuilder().getRegionManager();
      Target = loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));
      CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
      break;
    }
    [[fallthrough]];
  case CXXConstructionKind::Delegating: {
    const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl());
    Loc ThisPtr = getSValBuilder().getCXXThis(CurCtor,
                                              LCtx->getStackFrame());
    SVal ThisVal = State->getSVal(ThisPtr);

    if (CK == CXXConstructionKind::Delegating) {
      Target = ThisVal;
    } else {
      // Cast to the base type.
      bool IsVirtual = (CK == CXXConstructionKind::VirtualBase);
      SVal BaseVal =
          getStoreManager().evalDerivedToBase(ThisVal, E->getType(), IsVirtual);
      Target = BaseVal;
    }
    break;
  }
  }

  if (State != Pred->getState()) {
    static SimpleProgramPointTag T("ExprEngine",
                                   "Prepare for object construction");
    ExplodedNodeSet DstPrepare;
    StmtNodeBuilder BldrPrepare(Pred, DstPrepare, *currBldrCtx);
    BldrPrepare.generateNode(E, Pred, State, &T, ProgramPoint::PreStmtKind);
    assert(DstPrepare.size() <= 1);
    if (DstPrepare.size() == 0)
      return;
    Pred = *BldrPrepare.begin();
  }

  const MemRegion *TargetRegion = Target.getAsRegion();
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<> Call =
      CIE ? (CallEventRef<>)CEMgr.getCXXInheritedConstructorCall(
                CIE, TargetRegion, State, LCtx, getCFGElementRef())
          : (CallEventRef<>)CEMgr.getCXXConstructorCall(
                CE, TargetRegion, State, LCtx, getCFGElementRef());

  ExplodedNodeSet DstPreVisit;
  getCheckerManager().runCheckersForPreStmt(DstPreVisit, Pred, E, *this);

  ExplodedNodeSet PreInitialized;
  if (CE) {
    // FIXME: Is it possible and/or useful to do this before PreStmt?
    StmtNodeBuilder Bldr(DstPreVisit, PreInitialized, *currBldrCtx);
    for (ExplodedNode *N : DstPreVisit) {
      ProgramStateRef State = N->getState();
      if (CE->requiresZeroInitialization()) {
        // FIXME: Once we properly handle constructors in new-expressions, we'll
        // need to invalidate the region before setting a default value, to make
        // sure there aren't any lingering bindings around. This probably needs
        // to happen regardless of whether or not the object is zero-initialized
        // to handle random fields of a placement-initialized object picking up
        // old bindings. We might only want to do it when we need to, though.
        // FIXME: This isn't actually correct for arrays -- we need to zero-
        // initialize the entire array, not just the first element -- but our
        // handling of arrays everywhere else is weak as well, so this shouldn't
        // actually make things worse. Placement new makes this tricky as well,
        // since it's then possible to be initializing one part of a multi-
        // dimensional array.
        State = State->bindDefaultZero(Target, LCtx);
      }

      Bldr.generateNode(CE, N, State, /*tag=*/nullptr,
                        ProgramPoint::PreStmtKind);
    }
  } else {
    PreInitialized = DstPreVisit;
  }

  ExplodedNodeSet DstPreCall;
  getCheckerManager().runCheckersForPreCall(DstPreCall, PreInitialized,
                                            *Call, *this);

  ExplodedNodeSet DstEvaluated;

  if (CE && CE->getConstructor()->isTrivial() &&
      CE->getConstructor()->isCopyOrMoveConstructor() &&
      !CallOpts.IsArrayCtorOrDtor) {
    StmtNodeBuilder Bldr(DstPreCall, DstEvaluated, *currBldrCtx);
    // FIXME: Handle other kinds of trivial constructors as well.
    for (ExplodedNode *N : DstPreCall)
      performTrivialCopy(Bldr, N, *Call);

  } else {
    for (ExplodedNode *N : DstPreCall)
      getCheckerManager().runCheckersForEvalCall(DstEvaluated, N, *Call, *this,
                                                 CallOpts);
  }

  // If the CFG was constructed without elements for temporary destructors
  // and the just-called constructor created a temporary object then
  // stop exploration if the temporary object has a noreturn destructor.
  // This can lose coverage because the destructor, if it were present
  // in the CFG, would be called at the end of the full expression or
  // later (for life-time extended temporaries) -- but avoids infeasible
  // paths when no-return temporary destructors are used for assertions.
  ExplodedNodeSet DstEvaluatedPostProcessed;
  StmtNodeBuilder Bldr(DstEvaluated, DstEvaluatedPostProcessed, *currBldrCtx);
  const AnalysisDeclContext *ADC = LCtx->getAnalysisDeclContext();
  if (!ADC->getCFGBuildOptions().AddTemporaryDtors) {
    if (llvm::isa_and_nonnull<CXXTempObjectRegion,
                              CXXLifetimeExtendedObjectRegion>(TargetRegion) &&
        cast<CXXConstructorDecl>(Call->getDecl())
            ->getParent()
            ->isAnyDestructorNoReturn()) {

      // If we've inlined the constructor, then DstEvaluated would be empty.
      // In this case we still want a sink, which could be implemented
      // in processCallExit. But we don't have that implemented at the moment,
      // so if you hit this assertion, see if you can avoid inlining
      // the respective constructor when analyzer-config cfg-temporary-dtors
      // is set to false.
      // Otherwise there's nothing wrong with inlining such a constructor.
      assert(!DstEvaluated.empty() &&
             "We should not have inlined this constructor!");

      for (ExplodedNode *N : DstEvaluated) {
        Bldr.generateSink(E, N, N->getState());
      }

      // There is no need to run the PostCall and PostStmt checker
      // callbacks because we just generated sinks on all nodes in the
      // frontier.
      return;
    }
  }

  ExplodedNodeSet DstPostArgumentCleanup;
  for (ExplodedNode *I : DstEvaluatedPostProcessed)
    finishArgumentConstruction(DstPostArgumentCleanup, I, *Call);

  // If there were other constructors called for object-type arguments
  // of this constructor, clean them up.
  ExplodedNodeSet DstPostCall;
  getCheckerManager().runCheckersForPostCall(DstPostCall,
                                             DstPostArgumentCleanup,
                                             *Call, *this);
  getCheckerManager().runCheckersForPostStmt(destNodes, DstPostCall, E, *this);
}

void ExprEngine::VisitCXXConstructExpr(const CXXConstructExpr *CE,
                                       ExplodedNode *Pred,
                                       ExplodedNodeSet &Dst) {
  handleConstructor(CE, Pred, Dst);
}

void ExprEngine::VisitCXXInheritedCtorInitExpr(
    const CXXInheritedCtorInitExpr *CE, ExplodedNode *Pred,
    ExplodedNodeSet &Dst) {
  handleConstructor(CE, Pred, Dst);
}

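// Models the destructor call for an object of type ObjectType located in
// Dest; S is the statement that triggered the destruction and is used to
// build the program points for the call.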
void ExprEngine::VisitCXXDestructor(QualType ObjectType,
                                    const MemRegion *Dest,
                                    const Stmt *S,
                                    bool IsBaseDtor,
                                    ExplodedNode *Pred,
                                    ExplodedNodeSet &Dst,
                                    EvalCallOptions &CallOpts) {
  assert(S && "A destructor without a trigger!");
  const LocationContext *LCtx = Pred->getLocationContext();
  ProgramStateRef State = Pred->getState();

  const CXXRecordDecl *RecordDecl = ObjectType->getAsCXXRecordDecl();
  assert(RecordDecl && "Only CXXRecordDecls should have destructors");
  const CXXDestructorDecl *DtorDecl = RecordDecl->getDestructor();
  // FIXME: There should always be a Decl, otherwise the destructor call
  // shouldn't have been added to the CFG in the first place.
  if (!DtorDecl) {
    // Skip the invalid destructor. We cannot simply return because
    // it would interrupt the analysis instead.
    static SimpleProgramPointTag T("ExprEngine", "SkipInvalidDestructor");
    // FIXME: PostImplicitCall with a null decl may crash elsewhere anyway.
    PostImplicitCall PP(/*Decl=*/nullptr, S->getEndLoc(), LCtx,
                        getCFGElementRef(), &T);
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
    return;
  }

  if (!Dest) {
    // We're trying to destroy something that is not a region. This may happen
    // for a variety of reasons (unknown target region, concrete integer instead
    // of target region, etc.). The current code makes an attempt to recover.
    // FIXME: We probably don't really need to recover when we're dealing
    // with concrete integers specifically.
    CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
    if (const Expr *E = dyn_cast_or_null<Expr>(S)) {
      Dest = MRMgr.getCXXTempObjectRegion(E, Pred->getLocationContext());
    } else {
      static SimpleProgramPointTag T("ExprEngine", "SkipInvalidDestructor");
      NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
      Bldr.generateSink(Pred->getLocation().withTag(&T),
                        Pred->getState(), Pred);
      return;
    }
  }

  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXDestructorCall> Call = CEMgr.getCXXDestructorCall(
      DtorDecl, S, Dest, IsBaseDtor, State, LCtx, getCFGElementRef());

  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                Call->getSourceRange().getBegin(),
                                "Error evaluating destructor");

  ExplodedNodeSet DstPreCall;
  getCheckerManager().runCheckersForPreCall(DstPreCall, Pred,
                                            *Call, *this);

  ExplodedNodeSet DstInvalidated;
  StmtNodeBuilder Bldr(DstPreCall, DstInvalidated, *currBldrCtx);
  for (ExplodedNode *N : DstPreCall)
    defaultEvalCall(Bldr, N, *Call, CallOpts);

  getCheckerManager().runCheckersForPostCall(Dst, DstInvalidated,
                                             *Call, *this);
}

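// Models the call to the allocation function (operator new) of a CXXNewExpr,
// which happens before the constructor of the new-expression is evaluated.
// The conjured return value of the allocator is stashed as an object under
// construction until the CXXNewExpr itself gets processed.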
void ExprEngine::VisitCXXNewAllocatorCall(const CXXNewExpr *CNE,
                                          ExplodedNode *Pred,
                                          ExplodedNodeSet &Dst) {
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                CNE->getBeginLoc(),
                                "Error evaluating New Allocator Call");
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXAllocatorCall> Call =
      CEMgr.getCXXAllocatorCall(CNE, State, LCtx, getCFGElementRef());

  ExplodedNodeSet DstPreCall;
  getCheckerManager().runCheckersForPreCall(DstPreCall, Pred,
                                            *Call, *this);

  ExplodedNodeSet DstPostCall;
  StmtNodeBuilder CallBldr(DstPreCall, DstPostCall, *currBldrCtx);
  for (ExplodedNode *I : DstPreCall) {
    // FIXME: Provide evalCall for checkers?
    defaultEvalCall(CallBldr, I, *Call);
  }
  // If the call is inlined, DstPostCall will be empty and we bail out now.

  // Store return value of operator new() for future use, until the actual
  // CXXNewExpr gets processed.
  ExplodedNodeSet DstPostValue;
  StmtNodeBuilder ValueBldr(DstPostCall, DstPostValue, *currBldrCtx);
  for (ExplodedNode *I : DstPostCall) {
    // FIXME: Because CNE serves as the "call site" for the allocator (due to
    // lack of a better expression in the AST), the conjured return value symbol
    // is going to be of the same type (C++ object pointer type). Technically
    // this is not correct because the operator new's prototype always says that
    // it returns a 'void *'. So we should change the type of the symbol,
    // and then evaluate the cast over the symbolic pointer from 'void *' to
    // the object pointer type. But without changing the symbol's type it
    // is breaking too much to evaluate the no-op symbolic cast over it, so we
    // skip it for now.
    ProgramStateRef State = I->getState();
    SVal RetVal = State->getSVal(CNE, LCtx);
    // [basic.stc.dynamic.allocation] (on the return value of an allocation
    // function):
    // "The order, contiguity, and initial value of storage allocated by
    // successive calls to an allocation function are unspecified."
    State = State->bindDefaultInitial(RetVal, UndefinedVal{}, LCtx);

    // If this allocation function is not declared as non-throwing, failures
    // /must/ be signalled by exceptions, and thus the return value will never
    // be NULL. -fno-exceptions does not influence this semantics.
    // FIXME: GCC has a -fcheck-new option, which forces it to consider the case
    // where new can return NULL. If we end up supporting that option, we can
    // consider adding a check for it here.
    // C++11 [basic.stc.dynamic.allocation]p3.
    if (const FunctionDecl *FD = CNE->getOperatorNew()) {
      QualType Ty = FD->getType();
      if (const auto *ProtoType = Ty->getAs<FunctionProtoType>())
        if (!ProtoType->isNothrow())
          State = State->assume(RetVal.castAs<DefinedOrUnknownSVal>(), true);
    }

    ValueBldr.generateNode(
        CNE, I, addObjectUnderConstruction(State, CNE, LCtx, RetVal));
  }

  ExplodedNodeSet DstPostPostCallCallback;
  getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback,
                                             DstPostValue, *Call, *this);
  for (ExplodedNode *I : DstPostPostCallCallback) {
    getCheckerManager().runCheckersForNewAllocator(*Call, Dst, I, *this);
  }
}

void ExprEngine::VisitCXXNewExpr(const CXXNewExpr *CNE, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {
  // FIXME: Much of this should eventually migrate to CXXAllocatorCall.
  // Also, we need to decide how allocators actually work -- they're not
  // really part of the CXXNewExpr because they happen BEFORE the
  // CXXConstructExpr subexpression. See PR12014 for some discussion.

  unsigned blockCount = currBldrCtx->blockCount();
  const LocationContext *LCtx = Pred->getLocationContext();
  SVal symVal = UnknownVal();
  FunctionDecl *FD = CNE->getOperatorNew();

  bool IsStandardGlobalOpNewFunction =
      FD->isReplaceableGlobalAllocationFunction();

  ProgramStateRef State = Pred->getState();

  // Retrieve the stored operator new() return value.
  if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
    symVal = *getObjectUnderConstruction(State, CNE, LCtx);
    State = finishObjectConstruction(State, CNE, LCtx);
  }

  // We assume all standard global 'operator new' functions allocate memory in
  // the heap. We realize this is an approximation that might not correctly
  // model a custom global allocator.
  if (symVal.isUnknown()) {
    if (IsStandardGlobalOpNewFunction)
      symVal = svalBuilder.getConjuredHeapSymbolVal(CNE, LCtx, blockCount);
    else
      symVal = svalBuilder.conjureSymbolVal(nullptr, CNE, LCtx, CNE->getType(),
                                            blockCount);
  }

  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXAllocatorCall> Call =
      CEMgr.getCXXAllocatorCall(CNE, State, LCtx, getCFGElementRef());

  if (!AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
    // Invalidate placement args.
    // FIXME: Once we figure out how we want allocators to work,
    // we should be using the usual pre-/(default-)eval-/post-call checkers
    // here.
    State = Call->invalidateRegions(blockCount);
    if (!State)
      return;

    // If this allocation function is not declared as non-throwing, failures
    // /must/ be signalled by exceptions, and thus the return value will never
    // be NULL. -fno-exceptions does not influence this semantics.
    // FIXME: GCC has a -fcheck-new option, which forces it to consider the case
    // where new can return NULL. If we end up supporting that option, we can
    // consider adding a check for it here.
    // C++11 [basic.stc.dynamic.allocation]p3.
    if (const auto *ProtoType = FD->getType()->getAs<FunctionProtoType>())
      if (!ProtoType->isNothrow())
        if (auto dSymVal = symVal.getAs<DefinedOrUnknownSVal>())
          State = State->assume(*dSymVal, true);
  }

  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);

  SVal Result = symVal;

  if (CNE->isArray()) {

    if (const auto *NewReg = cast_or_null<SubRegion>(symVal.getAsRegion())) {
      // If each element is initialized by its default constructor, the field
      // values are properly placed inside the required region, however if an
      // initializer list is used, this doesn't happen automatically.
      auto *Init = CNE->getInitializer();
      bool isInitList = isa_and_nonnull<InitListExpr>(Init);

      QualType ObjTy =
          isInitList ? Init->getType() : CNE->getType()->getPointeeType();
      const ElementRegion *EleReg =
          MRMgr.getElementRegion(ObjTy, svalBuilder.makeArrayIndex(0), NewReg,
                                 svalBuilder.getContext());
      Result = loc::MemRegionVal(EleReg);

      // If the array is list initialized, we bind the initializer list to the
      // memory region here, otherwise we would lose it.
      if (isInitList) {
        Bldr.takeNodes(Pred);
        Pred = Bldr.generateNode(CNE, Pred, State);

        SVal V = State->getSVal(Init, LCtx);
        ExplodedNodeSet evaluated;
        evalBind(evaluated, CNE, Pred, Result, V, true);

        Bldr.takeNodes(Pred);
        Bldr.addNodes(evaluated);

        Pred = *evaluated.begin();
        State = Pred->getState();
      }
    }

    State = State->BindExpr(CNE, Pred->getLocationContext(), Result);
    Bldr.generateNode(CNE, Pred, State);
    return;
  }

  // FIXME: Once we have proper support for CXXConstructExprs inside
  // CXXNewExpr, we need to make sure that the constructed object is not
  // immediately invalidated here. (The placement call should happen before
  // the constructor call anyway.)
  if (FD->isReservedGlobalPlacementOperator()) {
    // Non-array placement new should always return the placement location.
    SVal PlacementLoc = State->getSVal(CNE->getPlacementArg(0), LCtx);
    Result = svalBuilder.evalCast(PlacementLoc, CNE->getType(),
                                  CNE->getPlacementArg(0)->getType());
  }

  // Bind the address of the object, then check to see if we cached out.
  State = State->BindExpr(CNE, LCtx, Result);
  ExplodedNode *NewN = Bldr.generateNode(CNE, Pred, State);
  if (!NewN)
    return;

  // If the type is not a record, we won't have a CXXConstructExpr as an
  // initializer. Copy the value over.
  if (const Expr *Init = CNE->getInitializer()) {
    if (!isa<CXXConstructExpr>(Init)) {
      assert(Bldr.getResults().size() == 1);
      Bldr.takeNodes(NewN);
      evalBind(Dst, CNE, NewN, Result, State->getSVal(Init, LCtx),
               /*FirstInit=*/IsStandardGlobalOpNewFunction);
    }
  }
}

void ExprEngine::VisitCXXDeleteExpr(const CXXDeleteExpr *CDE,
                                    ExplodedNode *Pred, ExplodedNodeSet &Dst) {

  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXDeallocatorCall> Call = CEMgr.getCXXDeallocatorCall(
      CDE, Pred->getState(), Pred->getLocationContext(), getCFGElementRef());

  ExplodedNodeSet DstPreCall;
  getCheckerManager().runCheckersForPreCall(DstPreCall, Pred, *Call, *this);
  ExplodedNodeSet DstPostCall;

  if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
    StmtNodeBuilder Bldr(DstPreCall, DstPostCall, *currBldrCtx);
    for (ExplodedNode *I : DstPreCall) {
      defaultEvalCall(Bldr, I, *Call);
    }
  } else {
    DstPostCall = DstPreCall;
  }
  getCheckerManager().runCheckersForPostCall(Dst, DstPostCall, *Call, *this);
}

void ExprEngine::VisitCXXCatchStmt(const CXXCatchStmt *CS, ExplodedNode *Pred,
                                   ExplodedNodeSet &Dst) {
  const VarDecl *VD = CS->getExceptionDecl();
  if (!VD) {
    Dst.Add(Pred);
    return;
  }

  const LocationContext *LCtx = Pred->getLocationContext();
  SVal V = svalBuilder.conjureSymbolVal(CS, LCtx, VD->getType(),
                                        currBldrCtx->blockCount());
  ProgramStateRef state = Pred->getState();
  state = state->bindLoc(state->getLValue(VD, LCtx), V, LCtx);

  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  Bldr.generateNode(CS, Pred, state);
}

void ExprEngine::VisitCXXThisExpr(const CXXThisExpr *TE, ExplodedNode *Pred,
                                  ExplodedNodeSet &Dst) {
  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);

  // Get the this object region from StoreManager.
  const LocationContext *LCtx = Pred->getLocationContext();
  const MemRegion *R =
      svalBuilder.getRegionManager().getCXXThisRegion(
          getContext().getCanonicalType(TE->getType()),
          LCtx);

  ProgramStateRef state = Pred->getState();
  SVal V = state->getSVal(loc::MemRegionVal(R));
  Bldr.generateNode(TE, Pred, state->BindExpr(TE, LCtx, V));
}

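// Models a LambdaExpr: creates a temporary region for the closure object and
// explicitly binds each capture (or the size of a captured variable-length
// array) to the corresponding field of the closure before binding the lambda
// expression itself.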
void ExprEngine::VisitLambdaExpr(const LambdaExpr *LE, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {
  const LocationContext *LocCtxt = Pred->getLocationContext();

  // Get the region of the lambda itself.
  const MemRegion *R = svalBuilder.getRegionManager().getCXXTempObjectRegion(
      LE, LocCtxt);
  SVal V = loc::MemRegionVal(R);

  ProgramStateRef State = Pred->getState();

  // If we created a new MemRegion for the lambda, we should explicitly bind
  // the captures.
  for (auto const [Idx, FieldForCapture, InitExpr] :
       llvm::zip(llvm::seq<unsigned>(0, -1), LE->getLambdaClass()->fields(),
                 LE->capture_inits())) {
    SVal FieldLoc = State->getLValue(FieldForCapture, V);

    SVal InitVal;
    if (!FieldForCapture->hasCapturedVLAType()) {
      assert(InitExpr && "Capture missing initialization expression");

      // Capturing a 0 length array is a no-op, so we ignore it to get a more
      // accurate analysis. If it's not ignored, it would set the default
      // binding of the lambda to 'Unknown', which can lead to falsely detecting
      // 'Uninitialized' values as 'Unknown' and not reporting a warning.
      const auto FTy = FieldForCapture->getType();
      if (FTy->isConstantArrayType() &&
          getContext().getConstantArrayElementCount(
              getContext().getAsConstantArrayType(FTy)) == 0)
        continue;

      // With C++17 copy elision the InitExpr can be anything, so instead of
      // pattern matching all cases, we simply check whether the current field
      // is under construction or not, regardless of what its InitExpr is.
      if (const auto OUC =
              getObjectUnderConstruction(State, {LE, Idx}, LocCtxt)) {
        InitVal = State->getSVal(OUC->getAsRegion());

        State = finishObjectConstruction(State, {LE, Idx}, LocCtxt);
      } else
        InitVal = State->getSVal(InitExpr, LocCtxt);

    } else {

      assert(!getObjectUnderConstruction(State, {LE, Idx}, LocCtxt) &&
             "VLA capture by value is a compile time error!");

      // The field stores the length of a captured variable-length array.
      // These captures don't have initialization expressions; instead we
      // get the length from the VLAType size expression.
      Expr *SizeExpr = FieldForCapture->getCapturedVLAType()->getSizeExpr();
      InitVal = State->getSVal(SizeExpr, LocCtxt);
    }

    State = State->bindLoc(FieldLoc, InitVal, LocCtxt);
  }

  // Decay the Loc into an RValue, because there might be a
  // MaterializeTemporaryExpr node above this one which expects the bound value
  // to be an RValue.
  SVal LambdaRVal = State->getSVal(R);

  ExplodedNodeSet Tmp;
  StmtNodeBuilder Bldr(Pred, Tmp, *currBldrCtx);
  // FIXME: is this the right program point kind?
  Bldr.generateNode(LE, Pred,
                    State->BindExpr(LE, LocCtxt, LambdaRVal),
                    nullptr, ProgramPoint::PostLValueKind);

  // FIXME: Move all post/pre visits to ::Visit().
  getCheckerManager().runCheckersForPostStmt(Dst, Tmp, LE, *this);
}