//===--- CGVTables.cpp - Emit LLVM Code for C++ vtables ------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of virtual tables.
//
//===----------------------------------------------------------------------===//

#include "CodeGenFunction.h"
#include "CodeGenModule.h"
#include "clang/AST/Attr.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/RecordLayout.h"
#include "clang/Basic/CodeGenOptions.h"
#include "clang/CodeGen/CGFunctionInfo.h"
#include "clang/CodeGen/ConstantInitBuilder.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/Support/Format.h"
#include "llvm/Transforms/Utils/Cloning.h"

using namespace clang;
using namespace CodeGen;

CodeGenVTables::CodeGenVTables(CodeGenModule &CGM)
    : CGM(CGM), VTContext(CGM.getContext().getVTableContext()) {}

llvm::Constant *CodeGenModule::GetAddrOfThunk(StringRef Name, llvm::Type *FnTy,
                                              GlobalDecl GD) {
  return GetOrCreateLLVMFunction(Name, FnTy, GD, /*ForVTable=*/true,
                                 /*DontDefer=*/true, /*IsThunk=*/true);
}

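// Apply the final properties (linkage, visibility, DLL storage class, and
// comdat) to a thunk once it has been created or emitted.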
static void setThunkProperties(CodeGenModule &CGM, const ThunkInfo &Thunk,
                               llvm::Function *ThunkFn, bool ForVTable,
                               GlobalDecl GD) {
  CGM.setFunctionLinkage(GD, ThunkFn);
  CGM.getCXXABI().setThunkLinkage(ThunkFn, ForVTable, GD,
                                  !Thunk.Return.isEmpty());

  // Set the right visibility.
  CGM.setGVProperties(ThunkFn, GD);

  if (!CGM.getCXXABI().exportThunk()) {
    ThunkFn->setDLLStorageClass(llvm::GlobalValue::DefaultStorageClass);
    ThunkFn->setDSOLocal(true);
  }

  if (CGM.supportsCOMDAT() && ThunkFn->isWeakForLinker())
    ThunkFn->setComdat(CGM.getModule().getOrInsertComdat(ThunkFn->getName()));
}

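// Used by the signature asserts below: two ABI argument slots are considered
// "similar" if their ABIArgInfo kinds match and the types are identical, both
// pointers, or both references.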
static bool similar(const ABIArgInfo &infoL, CanQualType typeL,
                    const ABIArgInfo &infoR, CanQualType typeR) {
  return (infoL.getKind() == infoR.getKind() &&
          (typeL == typeR ||
           (isa<PointerType>(typeL) && isa<PointerType>(typeR)) ||
           (isa<ReferenceType>(typeL) && isa<ReferenceType>(typeR))));
}

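// Adjust the value returned by the callee to the covariant return type that
// the thunk's caller expects. Pointer results are null-checked so that a null
// return value bypasses the adjustment.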
static RValue PerformReturnAdjustment(CodeGenFunction &CGF,
                                      QualType ResultType, RValue RV,
                                      const ThunkInfo &Thunk) {
  // Emit the return adjustment.
  bool NullCheckValue = !ResultType->isReferenceType();

  llvm::BasicBlock *AdjustNull = nullptr;
  llvm::BasicBlock *AdjustNotNull = nullptr;
  llvm::BasicBlock *AdjustEnd = nullptr;

  llvm::Value *ReturnValue = RV.getScalarVal();

  if (NullCheckValue) {
    AdjustNull = CGF.createBasicBlock("adjust.null");
    AdjustNotNull = CGF.createBasicBlock("adjust.notnull");
    AdjustEnd = CGF.createBasicBlock("adjust.end");

    llvm::Value *IsNull = CGF.Builder.CreateIsNull(ReturnValue);
    CGF.Builder.CreateCondBr(IsNull, AdjustNull, AdjustNotNull);
    CGF.EmitBlock(AdjustNotNull);
  }

  auto ClassDecl = ResultType->getPointeeType()->getAsCXXRecordDecl();
  auto ClassAlign = CGF.CGM.getClassPointerAlignment(ClassDecl);
  ReturnValue = CGF.CGM.getCXXABI().performReturnAdjustment(
      CGF,
      Address(ReturnValue, CGF.ConvertTypeForMem(ResultType->getPointeeType()),
              ClassAlign),
      Thunk.Return);

  if (NullCheckValue) {
    CGF.Builder.CreateBr(AdjustEnd);
    CGF.EmitBlock(AdjustNull);
    CGF.Builder.CreateBr(AdjustEnd);
    CGF.EmitBlock(AdjustEnd);

    llvm::PHINode *PHI = CGF.Builder.CreatePHI(ReturnValue->getType(), 2);
    PHI->addIncoming(ReturnValue, AdjustNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(ReturnValue->getType()),
                     AdjustNull);
    ReturnValue = PHI;
  }

  return RValue::get(ReturnValue);
}

/// This function clones a function's DISubprogram node and enters it into
/// a value map with the intent that the map can be utilized by the cloner
/// to short-circuit Metadata node mapping.
/// Furthermore, the function resolves any DILocalVariable nodes referenced
/// by dbg.value intrinsics so they can be properly mapped during cloning.
static void resolveTopLevelMetadata(llvm::Function *Fn,
                                    llvm::ValueToValueMapTy &VMap) {
  // Clone the DISubprogram node and put it into the Value map.
  auto *DIS = Fn->getSubprogram();
  if (!DIS)
    return;
  auto *NewDIS = DIS->replaceWithDistinct(DIS->clone());
  VMap.MD()[DIS].reset(NewDIS);

  // Find all llvm.dbg.declare intrinsics and resolve the DILocalVariable nodes
  // they are referencing.
  for (auto &BB : *Fn) {
    for (auto &I : BB) {
      if (auto *DII = dyn_cast<llvm::DbgVariableIntrinsic>(&I)) {
        auto *DILocal = DII->getVariable();
        if (!DILocal->isResolved())
          DILocal->resolve();
      }
    }
  }
}

// This function does roughly the same thing as GenerateThunk, but in a
// very different way, so that va_start and va_end work correctly.
// FIXME: This function assumes "this" is the first non-sret LLVM argument of
//        a function, and that there is an alloca built in the entry block
//        for all accesses to "this".
// FIXME: This function assumes there is only one "ret" statement per function.
// FIXME: Cloning isn't correct in the presence of indirect goto!
// FIXME: This implementation of thunks bloats codesize by duplicating the
//        function definition. There are alternatives:
//        1. Add some sort of stub support to LLVM for cases where we can
//           do a this adjustment, then a sibcall.
//        2. We could transform the definition to take a va_list instead of an
//           actual variable argument list, then have the thunks (including a
//           no-op thunk for the regular definition) call va_start/va_end.
//           There's a bit of per-call overhead for this solution, but it's
//           better for codesize if the definition is long.
llvm::Function *
CodeGenFunction::GenerateVarArgsThunk(llvm::Function *Fn,
                                      const CGFunctionInfo &FnInfo,
                                      GlobalDecl GD, const ThunkInfo &Thunk) {
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());
  const FunctionProtoType *FPT = MD->getType()->castAs<FunctionProtoType>();
  QualType ResultType = FPT->getReturnType();

  // Get the original function
  assert(FnInfo.isVariadic());
  llvm::Type *Ty = CGM.getTypes().GetFunctionType(FnInfo);
  llvm::Value *Callee = CGM.GetAddrOfFunction(GD, Ty, /*ForVTable=*/true);
  llvm::Function *BaseFn = cast<llvm::Function>(Callee);

  // Cloning can't work if we don't have a definition. The Microsoft ABI may
  // require thunks when a definition is not available. Emit an error in these
  // cases.
  if (!MD->isDefined()) {
    CGM.ErrorUnsupported(MD, "return-adjusting thunk with variadic arguments");
    return Fn;
  }
  assert(!BaseFn->isDeclaration() && "cannot clone undefined variadic method");

  // Clone to thunk.
  llvm::ValueToValueMapTy VMap;

  // We are cloning a function while some Metadata nodes are still unresolved.
  // Ensure that the value mapper does not encounter any of them.
  resolveTopLevelMetadata(BaseFn, VMap);
  llvm::Function *NewFn = llvm::CloneFunction(BaseFn, VMap);
  Fn->replaceAllUsesWith(NewFn);
  NewFn->takeName(Fn);
  Fn->eraseFromParent();
  Fn = NewFn;

193 // "Initialize" CGF (minimally).
196 // Get the "this" value
197 llvm::Function::arg_iterator AI
= Fn
->arg_begin();
198 if (CGM
.ReturnTypeUsesSRet(FnInfo
))
201 // Find the first store of "this", which will be to the alloca associated
204 Address(&*AI
, ConvertTypeForMem(MD
->getThisType()->getPointeeType()),
205 CGM
.getClassPointerAlignment(MD
->getParent()));
  llvm::BasicBlock *EntryBB = &Fn->front();
  llvm::BasicBlock::iterator ThisStore =
      llvm::find_if(*EntryBB, [&](llvm::Instruction &I) {
        return isa<llvm::StoreInst>(I) &&
               I.getOperand(0) == ThisPtr.getPointer();
      });
  assert(ThisStore != EntryBB->end() &&
         "Store of this should be in entry block?");
  // Adjust "this", if necessary.
  Builder.SetInsertPoint(&*ThisStore);
  llvm::Value *AdjustedThisPtr =
      CGM.getCXXABI().performThisAdjustment(*this, ThisPtr, Thunk.This);
  AdjustedThisPtr = Builder.CreateBitCast(AdjustedThisPtr,
                                          ThisStore->getOperand(0)->getType());
  ThisStore->setOperand(0, AdjustedThisPtr);

  if (!Thunk.Return.isEmpty()) {
    // Fix up the returned value, if necessary.
    for (llvm::BasicBlock &BB : *Fn) {
      llvm::Instruction *T = BB.getTerminator();
      if (isa<llvm::ReturnInst>(T)) {
        RValue RV = RValue::get(T->getOperand(0));
        T->eraseFromParent();
        Builder.SetInsertPoint(&BB);
        RV = PerformReturnAdjustment(*this, ResultType, RV, Thunk);
        Builder.CreateRet(RV.getScalarVal());
        break;
      }
    }
  }

  return Fn;
}

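// Set up the CodeGenFunction state for emitting a thunk body: build the
// implicit 'this' parameter (plus the prototyped parameters when available)
// and run the instance-method prologue so CXXThisValue is usable.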
void CodeGenFunction::StartThunk(llvm::Function *Fn, GlobalDecl GD,
                                 const CGFunctionInfo &FnInfo,
                                 bool IsUnprototyped) {
  assert(!CurGD.getDecl() && "CurGD was already set!");
  CurGD = GD;
  CurFuncIsThunk = true;

  // Build FunctionArgs.
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());
  QualType ThisType = MD->getThisType();
  QualType ResultType;
  if (IsUnprototyped)
    ResultType = CGM.getContext().VoidTy;
  else if (CGM.getCXXABI().HasThisReturn(GD))
    ResultType = ThisType;
  else if (CGM.getCXXABI().hasMostDerivedReturn(GD))
    ResultType = CGM.getContext().VoidPtrTy;
  else
    ResultType = MD->getType()->castAs<FunctionProtoType>()->getReturnType();
  FunctionArgList FunctionArgs;

  // Create the implicit 'this' parameter declaration.
  CGM.getCXXABI().buildThisParam(*this, FunctionArgs);

  // Add the rest of the parameters, if we have a prototype to work with.
  if (!IsUnprototyped) {
    FunctionArgs.append(MD->param_begin(), MD->param_end());

    if (isa<CXXDestructorDecl>(MD))
      CGM.getCXXABI().addImplicitStructorParams(*this, ResultType,
                                                FunctionArgs);
  }

  // Start defining the function.
  auto NL = ApplyDebugLocation::CreateEmpty(*this);
  StartFunction(GlobalDecl(), ResultType, Fn, FnInfo, FunctionArgs,
                MD->getLocation());
  // Create a scope with an artificial location for the body of this function.
  auto AL = ApplyDebugLocation::CreateArtificial(*this);

  // Since we didn't pass a GlobalDecl to StartFunction, do this ourselves.
  CGM.getCXXABI().EmitInstanceFunctionProlog(*this);
  CXXThisValue = CXXABIThisValue;
  CurCodeDecl = MD;
  CurFuncDecl = MD;
}

void CodeGenFunction::FinishThunk() {
  // Clear these to restore the invariants expected by
  // StartFunction/FinishFunction.
  CurCodeDecl = nullptr;
  CurFuncDecl = nullptr;

  FinishFunction();
}

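// Emit the forwarding call for a thunk: adjust 'this', forward the remaining
// arguments, and apply any return adjustment. Variadic, inalloca, and
// unprototyped cases are forwarded with a musttail call instead.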
void CodeGenFunction::EmitCallAndReturnForThunk(llvm::FunctionCallee Callee,
                                                const ThunkInfo *Thunk,
                                                bool IsUnprototyped) {
  assert(isa<CXXMethodDecl>(CurGD.getDecl()) &&
         "Please use a new CGF for this thunk");
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(CurGD.getDecl());

  // Adjust the 'this' pointer if necessary
  llvm::Value *AdjustedThisPtr =
      Thunk ? CGM.getCXXABI().performThisAdjustment(
                  *this, LoadCXXThisAddress(), Thunk->This)
            : LoadCXXThis();

  // If perfect forwarding is required for a variadic method, a method using
  // inalloca, or an unprototyped thunk, use musttail. Emit an error if this
  // thunk requires a return adjustment, since that is impossible with musttail.
  if (CurFnInfo->usesInAlloca() || CurFnInfo->isVariadic() || IsUnprototyped) {
    if (Thunk && !Thunk->Return.isEmpty()) {
      if (IsUnprototyped)
        CGM.ErrorUnsupported(
            MD, "return-adjusting thunk with incomplete parameter type");
      else if (CurFnInfo->isVariadic())
        llvm_unreachable("shouldn't try to emit musttail return-adjusting "
                         "thunks for variadic functions");
      else
        CGM.ErrorUnsupported(
            MD, "non-trivial argument copy for return-adjusting thunk");
    }
    EmitMustTailThunk(CurGD, AdjustedThisPtr, Callee);
    return;
  }

  // Start building CallArgs.
  CallArgList CallArgs;
  QualType ThisType = MD->getThisType();
  CallArgs.add(RValue::get(AdjustedThisPtr), ThisType);

  if (isa<CXXDestructorDecl>(MD))
    CGM.getCXXABI().adjustCallArgsForDestructorThunk(*this, CurGD, CallArgs);

  unsigned PrefixArgs = CallArgs.size() - 1;

  // Add the rest of the arguments.
  for (const ParmVarDecl *PD : MD->parameters())
    EmitDelegateCallArg(CallArgs, PD, SourceLocation());

  const FunctionProtoType *FPT = MD->getType()->castAs<FunctionProtoType>();

  const CGFunctionInfo &CallFnInfo = CGM.getTypes().arrangeCXXMethodCall(
      CallArgs, FPT, RequiredArgs::forPrototypePlus(FPT, 1), PrefixArgs);
  assert(CallFnInfo.getRegParm() == CurFnInfo->getRegParm() &&
         CallFnInfo.isNoReturn() == CurFnInfo->isNoReturn() &&
         CallFnInfo.getCallingConvention() == CurFnInfo->getCallingConvention());
  assert(isa<CXXDestructorDecl>(MD) || // ignore dtor return types
         similar(CallFnInfo.getReturnInfo(), CallFnInfo.getReturnType(),
                 CurFnInfo->getReturnInfo(), CurFnInfo->getReturnType()));
  assert(CallFnInfo.arg_size() == CurFnInfo->arg_size());
  for (unsigned i = 0, e = CurFnInfo->arg_size(); i != e; ++i)
    assert(similar(CallFnInfo.arg_begin()[i].info,
                   CallFnInfo.arg_begin()[i].type,
                   CurFnInfo->arg_begin()[i].info,
                   CurFnInfo->arg_begin()[i].type));

  // Determine whether we have a return value slot to use.
  QualType ResultType = CGM.getCXXABI().HasThisReturn(CurGD)
                            ? ThisType
                            : CGM.getCXXABI().hasMostDerivedReturn(CurGD)
                                  ? CGM.getContext().VoidPtrTy
                                  : FPT->getReturnType();
  ReturnValueSlot Slot;
  if (!ResultType->isVoidType() &&
      (CurFnInfo->getReturnInfo().getKind() == ABIArgInfo::Indirect ||
       hasAggregateEvaluationKind(ResultType)))
    Slot = ReturnValueSlot(ReturnValue, ResultType.isVolatileQualified(),
                           /*IsUnused=*/false, /*IsExternallyDestructed=*/true);

  // Now emit our call.
  llvm::CallBase *CallOrInvoke;
  RValue RV = EmitCall(*CurFnInfo, CGCallee::forDirect(Callee, CurGD), Slot,
                       CallArgs, &CallOrInvoke);

  // Consider return adjustment if we have ThunkInfo.
  if (Thunk && !Thunk->Return.isEmpty())
    RV = PerformReturnAdjustment(*this, ResultType, RV, *Thunk);
  else if (llvm::CallInst *Call = dyn_cast<llvm::CallInst>(CallOrInvoke))
    Call->setTailCallKind(llvm::CallInst::TCK_Tail);

  // Emit return.
  if (!ResultType->isVoidType() && Slot.isNull())
    CGM.getCXXABI().EmitReturnFromThunk(*this, RV, ResultType);

  // Disable the final ARC autorelease.
  AutoreleaseResult = false;

  FinishThunk();
}

void CodeGenFunction::EmitMustTailThunk(GlobalDecl GD,
                                        llvm::Value *AdjustedThisPtr,
                                        llvm::FunctionCallee Callee) {
  // Emitting a musttail call thunk doesn't use any of the CGCall.cpp machinery
  // to translate AST arguments into LLVM IR arguments. For thunks, we know
  // that the caller prototype more or less matches the callee prototype with
  // the exception of 'this'.
  SmallVector<llvm::Value *, 8> Args(llvm::make_pointer_range(CurFn->args()));

  // Set the adjusted 'this' pointer.
  const ABIArgInfo &ThisAI = CurFnInfo->arg_begin()->info;
  if (ThisAI.isDirect()) {
    const ABIArgInfo &RetAI = CurFnInfo->getReturnInfo();
    int ThisArgNo = RetAI.isIndirect() && !RetAI.isSRetAfterThis() ? 1 : 0;
    llvm::Type *ThisType = Args[ThisArgNo]->getType();
    if (ThisType != AdjustedThisPtr->getType())
      AdjustedThisPtr = Builder.CreateBitCast(AdjustedThisPtr, ThisType);
    Args[ThisArgNo] = AdjustedThisPtr;
  } else {
    assert(ThisAI.isInAlloca() && "this is passed directly or inalloca");
    Address ThisAddr = GetAddrOfLocalVar(CXXABIThisDecl);
    llvm::Type *ThisType = ThisAddr.getElementType();
    if (ThisType != AdjustedThisPtr->getType())
      AdjustedThisPtr = Builder.CreateBitCast(AdjustedThisPtr, ThisType);
    Builder.CreateStore(AdjustedThisPtr, ThisAddr);
  }

  // Emit the musttail call manually. Even if the prologue pushed cleanups, we
  // don't actually want to run them.
  llvm::CallInst *Call = Builder.CreateCall(Callee, Args);
  Call->setTailCallKind(llvm::CallInst::TCK_MustTail);

  // Apply the standard set of call attributes.
  unsigned CallingConv;
  llvm::AttributeList Attrs;
  CGM.ConstructAttributeList(Callee.getCallee()->getName(), *CurFnInfo, GD,
                             Attrs, CallingConv, /*AttrOnCallSite=*/true,
                             /*IsThunk=*/false);
  Call->setAttributes(Attrs);
  Call->setCallingConv(static_cast<llvm::CallingConv::ID>(CallingConv));

  if (Call->getType()->isVoidTy())
    Builder.CreateRetVoid();
  else
    Builder.CreateRet(Call);

  // Finish the function to maintain CodeGenFunction invariants.
  // FIXME: Don't emit unreachable code.
  EmitBlock(createBasicBlock());

  FinishThunk();
}

void CodeGenFunction::generateThunk(llvm::Function *Fn,
                                    const CGFunctionInfo &FnInfo, GlobalDecl GD,
                                    const ThunkInfo &Thunk,
                                    bool IsUnprototyped) {
  StartThunk(Fn, GD, FnInfo, IsUnprototyped);
  // Create a scope with an artificial location for the body of this function.
  auto AL = ApplyDebugLocation::CreateArtificial(*this);

  // Get our callee. Use a placeholder type if this method is unprototyped so
  // that CodeGenModule doesn't try to set attributes.
  llvm::Type *Ty;
  if (IsUnprototyped)
    Ty = llvm::StructType::get(getLLVMContext());
  else
    Ty = CGM.getTypes().GetFunctionType(FnInfo);

  llvm::Constant *Callee = CGM.GetAddrOfFunction(GD, Ty, /*ForVTable=*/true);

  // Fix up the function type for an unprototyped musttail call.
  if (IsUnprototyped)
    Callee = llvm::ConstantExpr::getBitCast(Callee, Fn->getType());

  // Make the call and return the result.
  EmitCallAndReturnForThunk(llvm::FunctionCallee(Fn->getFunctionType(), Callee),
                            &Thunk, IsUnprototyped);
}

static bool shouldEmitVTableThunk(CodeGenModule &CGM, const CXXMethodDecl *MD,
                                  bool IsUnprototyped, bool ForVTable) {
  // Always emit thunks in the MS C++ ABI. We cannot rely on other TUs to
  // provide thunks for us.
  if (CGM.getTarget().getCXXABI().isMicrosoft())
    return true;

  // In the Itanium C++ ABI, vtable thunks are provided by TUs that provide
  // definitions of the main method. Therefore, emitting thunks with the vtable
  // is purely an optimization. Emit the thunk if optimizations are enabled and
  // all of the parameter types are complete.
  if (ForVTable)
    return CGM.getCodeGenOpts().OptimizationLevel && !IsUnprototyped;

  // Always emit thunks along with the method definition.
  return true;
}

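// Get or create the thunk for GD described by TI and, when
// shouldEmitVTableThunk allows it, emit its definition as well. Returns the
// constant that should be placed in the vtable slot.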
llvm::Constant *CodeGenVTables::maybeEmitThunk(GlobalDecl GD,
                                               const ThunkInfo &TI,
                                               bool ForVTable) {
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());

  // First, get a declaration. Compute the mangled name. Don't worry about
  // getting the function prototype right, since we may only need this
  // declaration to fill in a vtable slot.
  SmallString<256> Name;
  MangleContext &MCtx = CGM.getCXXABI().getMangleContext();
  llvm::raw_svector_ostream Out(Name);
  if (const CXXDestructorDecl *DD = dyn_cast<CXXDestructorDecl>(MD))
    MCtx.mangleCXXDtorThunk(DD, GD.getDtorType(), TI.This, Out);
  else
    MCtx.mangleThunk(MD, TI, Out);
  llvm::Type *ThunkVTableTy = CGM.getTypes().GetFunctionTypeForVTable(GD);
  llvm::Constant *Thunk = CGM.GetAddrOfThunk(Name, ThunkVTableTy, GD);

  // If we don't need to emit a definition, return this declaration as is.
  bool IsUnprototyped = !CGM.getTypes().isFuncTypeConvertible(
      MD->getType()->castAs<FunctionType>());
  if (!shouldEmitVTableThunk(CGM, MD, IsUnprototyped, ForVTable))
    return Thunk;

  // Arrange a function prototype appropriate for a function definition. In some
  // cases in the MS ABI, we may need to build an unprototyped musttail thunk.
  const CGFunctionInfo &FnInfo =
      IsUnprototyped ? CGM.getTypes().arrangeUnprototypedMustTailThunk(MD)
                     : CGM.getTypes().arrangeGlobalDeclaration(GD);
  llvm::FunctionType *ThunkFnTy = CGM.getTypes().GetFunctionType(FnInfo);

  // If the type of the underlying GlobalValue is wrong, we'll have to replace
  // it. It should be a declaration.
  llvm::Function *ThunkFn = cast<llvm::Function>(Thunk->stripPointerCasts());
  if (ThunkFn->getFunctionType() != ThunkFnTy) {
    llvm::GlobalValue *OldThunkFn = ThunkFn;

    assert(OldThunkFn->isDeclaration() && "Shouldn't replace non-declaration");

    // Remove the name from the old thunk function and get a new thunk.
    OldThunkFn->setName(StringRef());
    ThunkFn = llvm::Function::Create(ThunkFnTy, llvm::Function::ExternalLinkage,
                                     Name.str(), &CGM.getModule());
    CGM.SetLLVMFunctionAttributes(MD, FnInfo, ThunkFn, /*IsThunk=*/false);

    // If needed, replace the old thunk with a bitcast.
    if (!OldThunkFn->use_empty()) {
      llvm::Constant *NewPtrForOldDecl =
          llvm::ConstantExpr::getBitCast(ThunkFn, OldThunkFn->getType());
      OldThunkFn->replaceAllUsesWith(NewPtrForOldDecl);
    }

    // Remove the old thunk.
    OldThunkFn->eraseFromParent();
  }

  bool ABIHasKeyFunctions = CGM.getTarget().getCXXABI().hasKeyFunctions();
  bool UseAvailableExternallyLinkage = ForVTable && ABIHasKeyFunctions;

  if (!ThunkFn->isDeclaration()) {
    if (!ABIHasKeyFunctions || UseAvailableExternallyLinkage) {
      // There is already a thunk emitted for this function, do nothing.
      return ThunkFn;
    }

    setThunkProperties(CGM, TI, ThunkFn, ForVTable, GD);
    return ThunkFn;
  }

  // If this will be unprototyped, add the "thunk" attribute so that LLVM knows
  // that the return type is meaningless. These thunks can be used to call
  // functions with differing return types, and the caller is required to cast
  // the prototype appropriately to extract the correct value.
  if (IsUnprototyped)
    ThunkFn->addFnAttr("thunk");

  CGM.SetLLVMFunctionAttributesForDefinition(GD.getDecl(), ThunkFn);

  // Thunks for variadic methods are special because in general variadic
  // arguments cannot be perfectly forwarded. In the general case, clang
  // implements such thunks by cloning the original function body. However, for
  // thunks with no return adjustment on targets that support musttail, we can
  // use musttail to perfectly forward the variadic arguments.
  bool ShouldCloneVarArgs = false;
  if (!IsUnprototyped && ThunkFn->isVarArg()) {
    ShouldCloneVarArgs = true;
    if (TI.Return.isEmpty()) {
      switch (CGM.getTriple().getArch()) {
      case llvm::Triple::x86_64:
      case llvm::Triple::x86:
      case llvm::Triple::aarch64:
        ShouldCloneVarArgs = false;
        break;
      default:
        break;
      }
    }
  }

  if (ShouldCloneVarArgs) {
    if (UseAvailableExternallyLinkage)
      return ThunkFn;
    ThunkFn =
        CodeGenFunction(CGM).GenerateVarArgsThunk(ThunkFn, FnInfo, GD, TI);
  } else {
    // Normal thunk body generation.
    CodeGenFunction(CGM).generateThunk(ThunkFn, FnInfo, GD, TI, IsUnprototyped);
  }

  setThunkProperties(CGM, TI, ThunkFn, ForVTable, GD);
  return ThunkFn;
}

void CodeGenVTables::EmitThunks(GlobalDecl GD) {
  const CXXMethodDecl *MD =
      cast<CXXMethodDecl>(GD.getDecl())->getCanonicalDecl();

  // We don't need to generate thunks for the base destructor.
  if (isa<CXXDestructorDecl>(MD) && GD.getDtorType() == Dtor_Base)
    return;

  const VTableContextBase::ThunkInfoVectorTy *ThunkInfoVector =
      VTContext->getThunkInfo(GD);

  if (!ThunkInfoVector)
    return;

  for (const ThunkInfo &Thunk : *ThunkInfoVector)
    maybeEmitThunk(GD, Thunk, /*ForVTable=*/false);
}

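// For the relative vtable layout, add a 32-bit offset from the vtable's
// address point to 'component'. Function components go through a
// dso_local_equivalent; the RTTI pointer goes through a hidden dso_local
// proxy variable so the offset is always a link-time constant.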
void CodeGenVTables::addRelativeComponent(ConstantArrayBuilder &builder,
                                          llvm::Constant *component,
                                          unsigned vtableAddressPoint,
                                          bool vtableHasLocalLinkage,
                                          bool isCompleteDtor) const {
  // No need to get the offset of a nullptr.
  if (component->isNullValue())
    return builder.add(llvm::ConstantInt::get(CGM.Int32Ty, 0));

  auto *globalVal =
      cast<llvm::GlobalValue>(component->stripPointerCastsAndAliases());
  llvm::Module &module = CGM.getModule();

  // We don't want to copy the linkage of the vtable exactly because we still
  // want the stub/proxy to be emitted for properly calculating the offset.
  // Examples where there would be no symbol emitted are available_externally
  // and private linkages.
  auto stubLinkage = vtableHasLocalLinkage ? llvm::GlobalValue::InternalLinkage
                                           : llvm::GlobalValue::ExternalLinkage;

  llvm::Constant *target;
  if (auto *func = dyn_cast<llvm::Function>(globalVal)) {
    target = llvm::DSOLocalEquivalent::get(func);
  } else {
    llvm::SmallString<16> rttiProxyName(globalVal->getName());
    rttiProxyName.append(".rtti_proxy");

    // The RTTI component may not always be emitted in the same linkage unit as
    // the vtable. As a general case, we can make a dso_local proxy to the RTTI
    // that points to the actual RTTI struct somewhere. This will result in a
    // GOTPCREL relocation when taking the relative offset to the proxy.
    llvm::GlobalVariable *proxy = module.getNamedGlobal(rttiProxyName);
    if (!proxy) {
      proxy = new llvm::GlobalVariable(module, globalVal->getType(),
                                       /*isConstant=*/true, stubLinkage,
                                       globalVal, rttiProxyName);
      proxy->setDSOLocal(true);
      proxy->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);
      if (!proxy->hasLocalLinkage()) {
        proxy->setVisibility(llvm::GlobalValue::HiddenVisibility);
        proxy->setComdat(module.getOrInsertComdat(rttiProxyName));
      }
      // Do not instrument the rtti proxies with hwasan to avoid a duplicate
      // symbol error. Aliases generated by hwasan will retain the same name but
      // the addresses they are set to may have different tags from different
      // compilation units. We don't run into this without hwasan because the
      // proxies are in comdat groups, but those aren't propagated to the alias.
      RemoveHwasanMetadata(proxy);
    }
    target = proxy;
  }

  builder.addRelativeOffsetToPosition(CGM.Int32Ty, target,
                                      /*position=*/vtableAddressPoint);
}

static bool UseRelativeLayout(const CodeGenModule &CGM) {
  return CGM.getTarget().getCXXABI().isItaniumFamily() &&
         CGM.getItaniumVTableContext().isRelativeLayout();
}

bool CodeGenVTables::useRelativeLayout() const {
  return UseRelativeLayout(CGM);
}

llvm::Type *CodeGenModule::getVTableComponentType() const {
  if (UseRelativeLayout(*this))
    return Int32Ty;
  return Int8PtrTy;
}

llvm::Type *CodeGenVTables::getVTableComponentType() const {
  return CGM.getVTableComponentType();
}

static void AddPointerLayoutOffset(const CodeGenModule &CGM,
                                   ConstantArrayBuilder &builder,
                                   CharUnits offset) {
  builder.add(llvm::ConstantExpr::getIntToPtr(
      llvm::ConstantInt::get(CGM.PtrDiffTy, offset.getQuantity()),
      CGM.Int8PtrTy));
}

static void AddRelativeLayoutOffset(const CodeGenModule &CGM,
                                    ConstantArrayBuilder &builder,
                                    CharUnits offset) {
  builder.add(llvm::ConstantInt::get(CGM.Int32Ty, offset.getQuantity()));
}

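// Translate a single VTableComponent from the layout into an initializer
// entry: offsets, the RTTI pointer, and virtual function pointers (emitting
// vtable thunks on demand as function-pointer slots are reached).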
void CodeGenVTables::addVTableComponent(ConstantArrayBuilder &builder,
                                        const VTableLayout &layout,
                                        unsigned componentIndex,
                                        llvm::Constant *rtti,
                                        unsigned &nextVTableThunkIndex,
                                        unsigned vtableAddressPoint,
                                        bool vtableHasLocalLinkage) {
  auto &component = layout.vtable_components()[componentIndex];

  auto addOffsetConstant =
      useRelativeLayout() ? AddRelativeLayoutOffset : AddPointerLayoutOffset;

  switch (component.getKind()) {
  case VTableComponent::CK_VCallOffset:
    return addOffsetConstant(CGM, builder, component.getVCallOffset());

  case VTableComponent::CK_VBaseOffset:
    return addOffsetConstant(CGM, builder, component.getVBaseOffset());

  case VTableComponent::CK_OffsetToTop:
    return addOffsetConstant(CGM, builder, component.getOffsetToTop());

  case VTableComponent::CK_RTTI:
    if (useRelativeLayout())
      return addRelativeComponent(builder, rtti, vtableAddressPoint,
                                  vtableHasLocalLinkage,
                                  /*isCompleteDtor=*/false);
    else
      return builder.add(llvm::ConstantExpr::getBitCast(rtti, CGM.Int8PtrTy));

  case VTableComponent::CK_FunctionPointer:
  case VTableComponent::CK_CompleteDtorPointer:
  case VTableComponent::CK_DeletingDtorPointer: {
    GlobalDecl GD = component.getGlobalDecl();

    if (CGM.getLangOpts().CUDA) {
      // Emit NULL for methods we can't codegen on this
      // side. Otherwise we'd end up with vtable with unresolved
      // references.
      const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());
      // OK on device side: functions w/ __device__ attribute
      // OK on host side: anything except __device__-only functions.
      bool CanEmitMethod =
          CGM.getLangOpts().CUDAIsDevice
              ? MD->hasAttr<CUDADeviceAttr>()
              : (MD->hasAttr<CUDAHostAttr>() || !MD->hasAttr<CUDADeviceAttr>());
      if (!CanEmitMethod)
        return builder.add(llvm::ConstantExpr::getNullValue(CGM.Int8PtrTy));
      // Method is acceptable, continue processing as usual.
    }

    auto getSpecialVirtualFn = [&](StringRef name) -> llvm::Constant * {
      // FIXME(PR43094): When merging comdat groups, lld can select a local
      // symbol as the signature symbol even though it cannot be accessed
      // outside that symbol's TU. The relative vtables ABI would make
      // __cxa_pure_virtual and __cxa_deleted_virtual local symbols, and
      // depending on link order, the comdat groups could resolve to the one
      // with the local symbol. As a temporary solution, fill these components
      // with zero. We shouldn't be calling these in the first place anyway.
      if (useRelativeLayout())
        return llvm::ConstantPointerNull::get(CGM.Int8PtrTy);

      // For NVPTX devices in OpenMP emit special functions as null pointers,
      // otherwise linking ends up with unresolved references.
      if (CGM.getLangOpts().OpenMP && CGM.getLangOpts().OpenMPIsDevice &&
          CGM.getTriple().isNVPTX())
        return llvm::ConstantPointerNull::get(CGM.Int8PtrTy);
      llvm::FunctionType *fnTy =
          llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
      llvm::Constant *fn = cast<llvm::Constant>(
          CGM.CreateRuntimeFunction(fnTy, name).getCallee());
      if (auto f = dyn_cast<llvm::Function>(fn))
        f->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);
      return llvm::ConstantExpr::getBitCast(fn, CGM.Int8PtrTy);
    };

    llvm::Constant *fnPtr;

    // Pure virtual member functions.
    if (cast<CXXMethodDecl>(GD.getDecl())->isPure()) {
      if (!PureVirtualFn)
        PureVirtualFn =
            getSpecialVirtualFn(CGM.getCXXABI().GetPureVirtualCallName());
      fnPtr = PureVirtualFn;

    // Deleted virtual member functions.
    } else if (cast<CXXMethodDecl>(GD.getDecl())->isDeleted()) {
      if (!DeletedVirtualFn)
        DeletedVirtualFn =
            getSpecialVirtualFn(CGM.getCXXABI().GetDeletedVirtualCallName());
      fnPtr = DeletedVirtualFn;

    // Thunks.
    } else if (nextVTableThunkIndex < layout.vtable_thunks().size() &&
               layout.vtable_thunks()[nextVTableThunkIndex].first ==
                   componentIndex) {
      auto &thunkInfo = layout.vtable_thunks()[nextVTableThunkIndex].second;

      nextVTableThunkIndex++;
      fnPtr = maybeEmitThunk(GD, thunkInfo, /*ForVTable=*/true);

    // Otherwise we can use the method definition directly.
    } else {
      llvm::Type *fnTy = CGM.getTypes().GetFunctionTypeForVTable(GD);
      fnPtr = CGM.GetAddrOfFunction(GD, fnTy, /*ForVTable=*/true);
    }

    if (useRelativeLayout()) {
      return addRelativeComponent(
          builder, fnPtr, vtableAddressPoint, vtableHasLocalLinkage,
          component.getKind() == VTableComponent::CK_CompleteDtorPointer);
    } else
      return builder.add(llvm::ConstantExpr::getBitCast(fnPtr, CGM.Int8PtrTy));
  }

  case VTableComponent::CK_UnusedFunctionPointer:
    if (useRelativeLayout())
      return builder.add(llvm::ConstantExpr::getNullValue(CGM.Int32Ty));
    else
      return builder.addNullPointer(CGM.Int8PtrTy);
  }

  llvm_unreachable("Unexpected vtable component kind");
}

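// The IR type of a vtable is a struct containing one array of components per
// sub-vtable in the layout.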
llvm::Type *CodeGenVTables::getVTableType(const VTableLayout &layout) {
  SmallVector<llvm::Type *, 4> tys;
  llvm::Type *componentType = getVTableComponentType();
  for (unsigned i = 0, e = layout.getNumVTables(); i != e; ++i)
    tys.push_back(llvm::ArrayType::get(componentType, layout.getVTableSize(i)));

  return llvm::StructType::get(CGM.getLLVMContext(), tys);
}

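// Fill in the outer struct builder with one component array per sub-vtable,
// delegating each slot to addVTableComponent.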
void CodeGenVTables::createVTableInitializer(ConstantStructBuilder &builder,
                                             const VTableLayout &layout,
                                             llvm::Constant *rtti,
                                             bool vtableHasLocalLinkage) {
  llvm::Type *componentType = getVTableComponentType();

  const auto &addressPoints = layout.getAddressPointIndices();
  unsigned nextVTableThunkIndex = 0;
  for (unsigned vtableIndex = 0, endIndex = layout.getNumVTables();
       vtableIndex != endIndex; ++vtableIndex) {
    auto vtableElem = builder.beginArray(componentType);

    size_t vtableStart = layout.getVTableOffset(vtableIndex);
    size_t vtableEnd = vtableStart + layout.getVTableSize(vtableIndex);
    for (size_t componentIndex = vtableStart; componentIndex < vtableEnd;
         ++componentIndex)
      addVTableComponent(vtableElem, layout, componentIndex, rtti,
                         nextVTableThunkIndex, addressPoints[vtableIndex],
                         vtableHasLocalLinkage);

    vtableElem.finishAndAddTo(builder);
  }
}

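// Emit the construction vtable used for the base subobject 'Base' while a
// complete object of type RD is being constructed or destroyed.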
llvm::GlobalVariable *CodeGenVTables::GenerateConstructionVTable(
    const CXXRecordDecl *RD, const BaseSubobject &Base, bool BaseIsVirtual,
    llvm::GlobalVariable::LinkageTypes Linkage,
    VTableAddressPointsMapTy &AddressPoints) {
  if (CGDebugInfo *DI = CGM.getModuleDebugInfo())
    DI->completeClassData(Base.getBase());

  std::unique_ptr<VTableLayout> VTLayout(
      getItaniumVTableContext().createConstructionVTableLayout(
          Base.getBase(), Base.getBaseOffset(), BaseIsVirtual, RD));

  // Add the address points.
  AddressPoints = VTLayout->getAddressPoints();

  // Get the mangled construction vtable name.
  SmallString<256> OutName;
  llvm::raw_svector_ostream Out(OutName);
  cast<ItaniumMangleContext>(CGM.getCXXABI().getMangleContext())
      .mangleCXXCtorVTable(RD, Base.getBaseOffset().getQuantity(),
                           Base.getBase(), Out);
  SmallString<256> Name(OutName);

  bool UsingRelativeLayout = getItaniumVTableContext().isRelativeLayout();
  bool VTableAliasExists =
      UsingRelativeLayout && CGM.getModule().getNamedAlias(Name);
  if (VTableAliasExists) {
    // We previously made the vtable hidden and changed its name.
    Name.append(".local");
  }

  llvm::Type *VTType = getVTableType(*VTLayout);

  // Construction vtable symbols are not part of the Itanium ABI, so we cannot
  // guarantee that they actually will be available externally. Instead, when
  // emitting an available_externally VTT, we provide references to an internal
  // linkage construction vtable. The ABI only requires complete-object vtables
  // to be the same for all instances of a type, not construction vtables.
  if (Linkage == llvm::GlobalVariable::AvailableExternallyLinkage)
    Linkage = llvm::GlobalVariable::InternalLinkage;

  llvm::Align Align = CGM.getDataLayout().getABITypeAlign(VTType);

  // Create the variable that will hold the construction vtable.
  llvm::GlobalVariable *VTable =
      CGM.CreateOrReplaceCXXRuntimeVariable(Name, VTType, Linkage, Align);

  // V-tables are always unnamed_addr.
  VTable->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);

  llvm::Constant *RTTI = CGM.GetAddrOfRTTIDescriptor(
      CGM.getContext().getTagDeclType(Base.getBase()));

  // Create and set the initializer.
  ConstantInitBuilder builder(CGM);
  auto components = builder.beginStruct();
  createVTableInitializer(components, *VTLayout, RTTI,
                          VTable->hasLocalLinkage());
  components.finishAndSetAsInitializer(VTable);

  // Set properties only after the initializer has been set to ensure that the
  // GV is treated as definition and not declaration.
  assert(!VTable->isDeclaration() && "Shouldn't set properties on declaration");
  CGM.setGVProperties(VTable, RD);

  CGM.EmitVTableTypeMetadata(RD, VTable, *VTLayout.get());

  if (UsingRelativeLayout) {
    RemoveHwasanMetadata(VTable);
    if (!VTable->isDSOLocal())
      GenerateRelativeVTableAlias(VTable, OutName);
  }

  return VTable;
}

// Ensure this vtable is not instrumented by hwasan. That is, a global alias is
// not generated for it. This is mainly used by the relative-vtables ABI where
// vtables instead contain 32-bit offsets between the vtable and function
// pointers. Hwasan is disabled for these vtables for now because the tag in a
// vtable pointer may fail the overflow check when resolving 32-bit PLT
// relocations. A future alternative for this would be finding which usages of
// the vtable can continue to use the untagged hwasan value without any loss of
// value.
void CodeGenVTables::RemoveHwasanMetadata(llvm::GlobalValue *GV) const {
  if (CGM.getLangOpts().Sanitize.has(SanitizerKind::HWAddress)) {
    llvm::GlobalValue::SanitizerMetadata Meta;
    if (GV->hasSanitizerMetadata())
      Meta = GV->getSanitizerMetadata();
    Meta.NoHWAddress = true;
    GV->setSanitizerMetadata(Meta);
  }
}

// If the VTable is not dso_local, then we will not be able to indicate that
// the VTable does not need a relocation and move into rodata. A frequent
// time this can occur is for classes that should be made public from a DSO
// (like in libc++). For cases like these, we can make the vtable hidden or
// private and create a public alias with the same visibility and linkage as
// the original vtable type.
void CodeGenVTables::GenerateRelativeVTableAlias(llvm::GlobalVariable *VTable,
                                                 llvm::StringRef AliasNameRef) {
  assert(getItaniumVTableContext().isRelativeLayout() &&
         "Can only use this if the relative vtable ABI is used");
  assert(!VTable->isDSOLocal() && "This should be called only if the vtable is "
                                  "not guaranteed to be dso_local");

  // If the vtable is available_externally, we shouldn't (nor need to) generate
  // an alias for it in the first place since the vtable won't actually be
  // emitted in this compilation unit.
  if (VTable->hasAvailableExternallyLinkage())
    return;

  // Create a new string in the event the alias is already the name of the
  // vtable. Using the reference directly could lead to use of an uninitialized
  // value in the module's StringMap.
  llvm::SmallString<256> AliasName(AliasNameRef);
  VTable->setName(AliasName + ".local");

  auto Linkage = VTable->getLinkage();
  assert(llvm::GlobalAlias::isValidLinkage(Linkage) &&
         "Invalid vtable alias linkage");

  llvm::GlobalAlias *VTableAlias = CGM.getModule().getNamedAlias(AliasName);
  if (!VTableAlias) {
    VTableAlias = llvm::GlobalAlias::create(VTable->getValueType(),
                                            VTable->getAddressSpace(), Linkage,
                                            AliasName, &CGM.getModule());
  } else {
    assert(VTableAlias->getValueType() == VTable->getValueType());
    assert(VTableAlias->getLinkage() == Linkage);
  }
  VTableAlias->setVisibility(VTable->getVisibility());
  VTableAlias->setUnnamedAddr(VTable->getUnnamedAddr());

  // Both of these imply dso_local for the vtable.
  if (!VTable->hasComdat()) {
    // If this is in a comdat, then we shouldn't make the linkage private due to
    // an issue in lld where private symbols can be used as the key symbol when
    // choosing the prevalent group. This leads to "relocation refers to a
    // symbol in a discarded section".
    VTable->setLinkage(llvm::GlobalValue::PrivateLinkage);
  } else {
    // We should at least make this hidden since we don't want to expose it.
    VTable->setVisibility(llvm::GlobalValue::HiddenVisibility);
  }

  VTableAlias->setAliasee(VTable);
}

static bool shouldEmitAvailableExternallyVTable(const CodeGenModule &CGM,
                                                const CXXRecordDecl *RD) {
  return CGM.getCodeGenOpts().OptimizationLevel > 0 &&
         CGM.getCXXABI().canSpeculativelyEmitVTable(RD);
}

/// Compute the required linkage of the vtable for the given class.
///
/// Note that we only call this at the end of the translation unit.
llvm::GlobalVariable::LinkageTypes
CodeGenModule::getVTableLinkage(const CXXRecordDecl *RD) {
  if (!RD->isExternallyVisible())
    return llvm::GlobalVariable::InternalLinkage;

  // We're at the end of the translation unit, so the current key
  // function is fully correct.
  const CXXMethodDecl *keyFunction = Context.getCurrentKeyFunction(RD);
  if (keyFunction && !RD->hasAttr<DLLImportAttr>()) {
    // If this class has a key function, use that to determine the
    // linkage of the vtable.
    const FunctionDecl *def = nullptr;
    if (keyFunction->hasBody(def))
      keyFunction = cast<CXXMethodDecl>(def);

    switch (keyFunction->getTemplateSpecializationKind()) {
    case TSK_Undeclared:
    case TSK_ExplicitSpecialization:
      assert((def || CodeGenOpts.OptimizationLevel > 0 ||
              CodeGenOpts.getDebugInfo() != codegenoptions::NoDebugInfo) &&
             "Shouldn't query vtable linkage without key function, "
             "optimizations, or debug info");
      if (!def && CodeGenOpts.OptimizationLevel > 0)
        return llvm::GlobalVariable::AvailableExternallyLinkage;

      if (keyFunction->isInlined())
        return !Context.getLangOpts().AppleKext ?
                 llvm::GlobalVariable::LinkOnceODRLinkage :
                 llvm::Function::InternalLinkage;

      return llvm::GlobalVariable::ExternalLinkage;

    case TSK_ImplicitInstantiation:
      return !Context.getLangOpts().AppleKext ?
               llvm::GlobalVariable::LinkOnceODRLinkage :
               llvm::Function::InternalLinkage;

    case TSK_ExplicitInstantiationDefinition:
      return !Context.getLangOpts().AppleKext ?
               llvm::GlobalVariable::WeakODRLinkage :
               llvm::Function::InternalLinkage;

    case TSK_ExplicitInstantiationDeclaration:
      llvm_unreachable("Should not have been asked to emit this");
    }
  }

  // -fapple-kext mode does not support weak linkage, so we must use
  // internal linkage.
  if (Context.getLangOpts().AppleKext)
    return llvm::Function::InternalLinkage;

  llvm::GlobalVariable::LinkageTypes DiscardableODRLinkage =
      llvm::GlobalValue::LinkOnceODRLinkage;
  llvm::GlobalVariable::LinkageTypes NonDiscardableODRLinkage =
      llvm::GlobalValue::WeakODRLinkage;
  if (RD->hasAttr<DLLExportAttr>()) {
    // Cannot discard exported vtables.
    DiscardableODRLinkage = NonDiscardableODRLinkage;
  } else if (RD->hasAttr<DLLImportAttr>()) {
    // Imported vtables are available externally.
    DiscardableODRLinkage = llvm::GlobalVariable::AvailableExternallyLinkage;
    NonDiscardableODRLinkage = llvm::GlobalVariable::AvailableExternallyLinkage;
  }

  switch (RD->getTemplateSpecializationKind()) {
  case TSK_Undeclared:
  case TSK_ExplicitSpecialization:
  case TSK_ImplicitInstantiation:
    return DiscardableODRLinkage;

  case TSK_ExplicitInstantiationDeclaration:
    // Explicit instantiations in MSVC do not provide vtables, so we must emit
    // everything we can.
    if (getTarget().getCXXABI().isMicrosoft())
      return DiscardableODRLinkage;
    return shouldEmitAvailableExternallyVTable(*this, RD)
               ? llvm::GlobalVariable::AvailableExternallyLinkage
               : llvm::GlobalVariable::ExternalLinkage;

  case TSK_ExplicitInstantiationDefinition:
    return NonDiscardableODRLinkage;
  }

  llvm_unreachable("Invalid TemplateSpecializationKind!");
}

/// This is a callback from Sema to tell us that a particular vtable is
/// required to be emitted in this translation unit.
///
/// This is only called for vtables that _must_ be emitted (mainly due to key
/// functions). For weak vtables, CodeGen tracks when they are needed and
/// emits them as-needed.
void CodeGenModule::EmitVTable(CXXRecordDecl *theClass) {
  VTables.GenerateClassData(theClass);
}

void
CodeGenVTables::GenerateClassData(const CXXRecordDecl *RD) {
  if (CGDebugInfo *DI = CGM.getModuleDebugInfo())
    DI->completeClassData(RD);

  if (RD->getNumVBases())
    CGM.getCXXABI().emitVirtualInheritanceTables(RD);

  CGM.getCXXABI().emitVTableDefinitions(*this, RD);
}

/// At this point in the translation unit, does it appear that we can
/// rely on the vtable being defined elsewhere in the program?
///
/// The response is really only definitive when called at the end of
/// the translation unit.
///
/// The only semantic restriction here is that the object file should
/// not contain a vtable definition when that vtable is defined
/// strongly elsewhere. Otherwise, we'd just like to avoid emitting
/// vtables when unnecessary.
bool CodeGenVTables::isVTableExternal(const CXXRecordDecl *RD) {
  assert(RD->isDynamicClass() && "Non-dynamic classes have no VTable.");

  // We always synthesize vtables if they are needed in the MS ABI. MSVC doesn't
  // emit them even if there is an explicit template instantiation.
  if (CGM.getTarget().getCXXABI().isMicrosoft())
    return false;

  // If we have an explicit instantiation declaration (and not a
  // definition), the vtable is defined elsewhere.
  TemplateSpecializationKind TSK = RD->getTemplateSpecializationKind();
  if (TSK == TSK_ExplicitInstantiationDeclaration)
    return true;

  // Otherwise, if the class is an instantiated template, the
  // vtable must be defined here.
  if (TSK == TSK_ImplicitInstantiation ||
      TSK == TSK_ExplicitInstantiationDefinition)
    return false;

  // Otherwise, if the class doesn't have a key function (possibly
  // anymore), the vtable must be defined here.
  const CXXMethodDecl *keyFunction = CGM.getContext().getCurrentKeyFunction(RD);
  if (!keyFunction)
    return false;

  // Otherwise, if we don't have a definition of the key function, the
  // vtable must be defined somewhere else.
  return !keyFunction->hasBody();
}

/// Given that we're currently at the end of the translation unit, and
/// we've emitted a reference to the vtable for this class, should
/// we define that vtable?
static bool shouldEmitVTableAtEndOfTranslationUnit(CodeGenModule &CGM,
                                                   const CXXRecordDecl *RD) {
  // If vtable is internal then it has to be done.
  if (!CGM.getVTables().isVTableExternal(RD))
    return true;

  // If it's external then maybe we will need it as available_externally.
  return shouldEmitAvailableExternallyVTable(CGM, RD);
}

/// Given that at some point we emitted a reference to one or more
/// vtables, and that we are now at the end of the translation unit,
/// decide whether we should emit them.
void CodeGenModule::EmitDeferredVTables() {
  // Remember the size of DeferredVTables, because we're going to assume
  // that this entire operation doesn't modify it.
  size_t savedSize = DeferredVTables.size();

  for (const CXXRecordDecl *RD : DeferredVTables)
    if (shouldEmitVTableAtEndOfTranslationUnit(*this, RD))
      VTables.GenerateClassData(RD);
    else if (shouldOpportunisticallyEmitVTables())
      OpportunisticVTables.push_back(RD);

  assert(savedSize == DeferredVTables.size() &&
         "deferred extra vtables during vtable emission?");
  DeferredVTables.clear();
}

bool CodeGenModule::AlwaysHasLTOVisibilityPublic(const CXXRecordDecl *RD) {
  if (RD->hasAttr<LTOVisibilityPublicAttr>() || RD->hasAttr<UuidAttr>())
    return true;

  if (!getCodeGenOpts().LTOVisibilityPublicStd)
    return false;

  const DeclContext *DC = RD;
  while (true) {
    auto *D = cast<Decl>(DC);
    DC = DC->getParent();
    if (isa<TranslationUnitDecl>(DC->getRedeclContext())) {
      if (auto *ND = dyn_cast<NamespaceDecl>(D))
        if (const IdentifierInfo *II = ND->getIdentifier())
          if (II->isStr("std") || II->isStr("stdext"))
            return true;
      break;
    }
  }

  return false;
}

bool CodeGenModule::HasHiddenLTOVisibility(const CXXRecordDecl *RD) {
  LinkageInfo LV = RD->getLinkageAndVisibility();
  if (!isExternallyVisible(LV.getLinkage()))
    return true;

  if (getTriple().isOSBinFormatCOFF()) {
    if (RD->hasAttr<DLLExportAttr>() || RD->hasAttr<DLLImportAttr>())
      return false;
  } else {
    if (LV.getVisibility() != HiddenVisibility)
      return false;
  }

  return !AlwaysHasLTOVisibilityPublic(RD);
}

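// Compute the !vcall_visibility level for RD's vtable: the minimum of RD's
// own visibility and that of all of its dynamic base classes.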
llvm::GlobalObject::VCallVisibility CodeGenModule::GetVCallVisibilityLevel(
    const CXXRecordDecl *RD, llvm::DenseSet<const CXXRecordDecl *> &Visited) {
  // If we have already visited this RD (which means this is a recursive call
  // since the initial call should have an empty Visited set), return the max
  // visibility. The recursive calls below compute the min between the result
  // of the recursive call and the current TypeVis, so returning the max here
  // ensures that it will have no effect on the current TypeVis.
  if (!Visited.insert(RD).second)
    return llvm::GlobalObject::VCallVisibilityTranslationUnit;

  LinkageInfo LV = RD->getLinkageAndVisibility();
  llvm::GlobalObject::VCallVisibility TypeVis;
  if (!isExternallyVisible(LV.getLinkage()))
    TypeVis = llvm::GlobalObject::VCallVisibilityTranslationUnit;
  else if (HasHiddenLTOVisibility(RD))
    TypeVis = llvm::GlobalObject::VCallVisibilityLinkageUnit;
  else
    TypeVis = llvm::GlobalObject::VCallVisibilityPublic;

  for (auto B : RD->bases())
    if (B.getType()->getAsCXXRecordDecl()->isDynamicClass())
      TypeVis = std::min(
          TypeVis,
          GetVCallVisibilityLevel(B.getType()->getAsCXXRecordDecl(), Visited));

  for (auto B : RD->vbases())
    if (B.getType()->getAsCXXRecordDecl()->isDynamicClass())
      TypeVis = std::min(
          TypeVis,
          GetVCallVisibilityLevel(B.getType()->getAsCXXRecordDecl(), Visited));

  return TypeVis;
}

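// Attach type metadata describing each address point of the vtable (used for
// CFI and whole-program devirtualization), plus member-function-pointer type
// metadata for every virtual function slot.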
void CodeGenModule::EmitVTableTypeMetadata(const CXXRecordDecl *RD,
                                           llvm::GlobalVariable *VTable,
                                           const VTableLayout &VTLayout) {
  if (!getCodeGenOpts().LTOUnit)
    return;

  CharUnits ComponentWidth = GetTargetTypeStoreSize(getVTableComponentType());

  typedef std::pair<const CXXRecordDecl *, unsigned> AddressPoint;
  std::vector<AddressPoint> AddressPoints;
  for (auto &&AP : VTLayout.getAddressPoints())
    AddressPoints.push_back(std::make_pair(
        AP.first.getBase(), VTLayout.getVTableOffset(AP.second.VTableIndex) +
                                AP.second.AddressPointIndex));

  // Sort the address points for determinism.
  llvm::sort(AddressPoints, [this](const AddressPoint &AP1,
                                   const AddressPoint &AP2) {
    if (&AP1 == &AP2)
      return false;

    std::string S1;
    llvm::raw_string_ostream O1(S1);
    getCXXABI().getMangleContext().mangleTypeName(
        QualType(AP1.first->getTypeForDecl(), 0), O1);
    O1.flush();

    std::string S2;
    llvm::raw_string_ostream O2(S2);
    getCXXABI().getMangleContext().mangleTypeName(
        QualType(AP2.first->getTypeForDecl(), 0), O2);
    O2.flush();

    if (S1 < S2)
      return true;
    if (S1 != S2)
      return false;

    return AP1.second < AP2.second;
  });

  ArrayRef<VTableComponent> Comps = VTLayout.vtable_components();
  for (auto AP : AddressPoints) {
    // Create type metadata for the address point.
    AddVTableTypeMetadata(VTable, ComponentWidth * AP.second, AP.first);

    // The class associated with each address point could also potentially be
    // used for indirect calls via a member function pointer, so we need to
    // annotate the address of each function pointer with the appropriate member
    // function pointer type.
    for (unsigned I = 0; I != Comps.size(); ++I) {
      if (Comps[I].getKind() != VTableComponent::CK_FunctionPointer)
        continue;
      llvm::Metadata *MD = CreateMetadataIdentifierForVirtualMemPtrType(
          Context.getMemberPointerType(
              Comps[I].getFunctionDecl()->getType(),
              Context.getRecordType(AP.first).getTypePtr()));
      VTable->addTypeMetadata((ComponentWidth * I).getQuantity(), MD);
    }
  }

  if (getCodeGenOpts().VirtualFunctionElimination ||
      getCodeGenOpts().WholeProgramVTables) {
    llvm::DenseSet<const CXXRecordDecl *> Visited;
    llvm::GlobalObject::VCallVisibility TypeVis =
        GetVCallVisibilityLevel(RD, Visited);
    if (TypeVis != llvm::GlobalObject::VCallVisibilityPublic)
      VTable->setVCallVisibilityMetadata(TypeVis);
  }
}