//==- CGObjCRuntime.cpp - Interface to Shared Objective-C Runtime Features ==//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This abstract class defines the interface for Objective-C runtime-specific
// code generation. It provides some concrete helper methods for functionality
// shared between all (or most) of the Objective-C runtimes supported by clang.
//
//===----------------------------------------------------------------------===//
#include "CGObjCRuntime.h"

#include "CGCleanup.h"
#include "CGRecordLayout.h"
#include "CodeGenFunction.h"
#include "CodeGenModule.h"
#include "clang/AST/RecordLayout.h"
#include "clang/AST/StmtObjC.h"
#include "clang/CodeGen/CGFunctionInfo.h"
#include "clang/CodeGen/CodeGenABITypes.h"
#include "llvm/IR/Instruction.h"
#include "llvm/Support/SaveAndRestore.h"
using namespace clang;
using namespace CodeGen;
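
// The offset helpers below are shared by all runtime implementations. Note
// that ASTContext::lookupFieldBitOffset reports an ivar's position in bits;
// the ComputeIvarBaseOffset overloads divide by the char width to produce a
// byte offset, while ComputeBitfieldBitOffset returns the raw bit offset.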
uint64_t CGObjCRuntime::ComputeIvarBaseOffset(CodeGen::CodeGenModule &CGM,
                                              const ObjCInterfaceDecl *OID,
                                              const ObjCIvarDecl *Ivar) {
  return CGM.getContext().lookupFieldBitOffset(OID, nullptr, Ivar) /
         CGM.getContext().getCharWidth();
}
uint64_t CGObjCRuntime::ComputeIvarBaseOffset(CodeGen::CodeGenModule &CGM,
                                              const ObjCImplementationDecl *OID,
                                              const ObjCIvarDecl *Ivar) {
  return CGM.getContext().lookupFieldBitOffset(OID->getClassInterface(), OID,
                                               Ivar) /
         CGM.getContext().getCharWidth();
}
unsigned CGObjCRuntime::ComputeBitfieldBitOffset(
    CodeGen::CodeGenModule &CGM,
    const ObjCInterfaceDecl *ID,
    const ObjCIvarDecl *Ivar) {
  return CGM.getContext().lookupFieldBitOffset(ID, ID->getImplementation(),
                                               Ivar);
}
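
// Build an l-value for an ivar that lives at a dynamically computed byte
// offset from an object pointer. Plain ivars simply get a naturally aligned
// l-value at BaseValue + Offset; bit-field ivars additionally need a
// synthesized CGBitFieldInfo describing their storage unit, built below.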
LValue CGObjCRuntime::EmitValueForIvarAtOffset(CodeGen::CodeGenFunction &CGF,
                                               const ObjCInterfaceDecl *OID,
                                               llvm::Value *BaseValue,
                                               const ObjCIvarDecl *Ivar,
                                               unsigned CVRQualifiers,
                                               llvm::Value *Offset) {
  // Compute (type*) ( (char *) BaseValue + Offset)
  QualType InterfaceTy{OID->getTypeForDecl(), 0};
  QualType ObjectPtrTy =
      CGF.CGM.getContext().getObjCObjectPointerType(InterfaceTy);
  QualType IvarTy =
      Ivar->getUsageType(ObjectPtrTy).withCVRQualifiers(CVRQualifiers);
  llvm::Value *V = BaseValue;
  V = CGF.Builder.CreateInBoundsGEP(CGF.Int8Ty, V, Offset, "add.ptr");

  if (!Ivar->isBitField()) {
    LValue LV = CGF.MakeNaturalAlignRawAddrLValue(V, IvarTy);
    return LV;
  }

  // We need to compute an access strategy for this bit-field. We are given the
  // offset to the first byte in the bit-field; the sub-byte offset is taken
  // from the original layout. We reuse the normal bit-field access strategy by
  // treating this as an access to a struct where the bit-field is in byte 0,
  // and adjust the containing type size as appropriate.
  //
  // FIXME: Note that currently we make a very conservative estimate of the
  // alignment of the bit-field, because (a) it is not clear what guarantees
  // the runtime makes to us, and (b) we don't have a way to specify that the
  // struct is at an alignment plus offset.
  //
  // Note, there is a subtle invariant here: we can only call this routine on
  // non-synthesized ivars but we may be called for synthesized ivars. However,
  // a synthesized ivar can never be a bit-field, so this is safe.
  uint64_t FieldBitOffset =
      CGF.CGM.getContext().lookupFieldBitOffset(OID, nullptr, Ivar);
  uint64_t BitOffset = FieldBitOffset % CGF.CGM.getContext().getCharWidth();
  uint64_t AlignmentBits = CGF.CGM.getTarget().getCharAlign();
  uint64_t BitFieldSize = Ivar->getBitWidthValue(CGF.getContext());
  CharUnits StorageSize = CGF.CGM.getContext().toCharUnitsFromBits(
      llvm::alignTo(BitOffset + BitFieldSize, AlignmentBits));
  CharUnits Alignment = CGF.CGM.getContext().toCharUnitsFromBits(AlignmentBits);

  // Allocate a new CGBitFieldInfo object to describe this access.
  //
  // FIXME: This is incredibly wasteful, these should be uniqued or part of
  // some layout object. However, this is blocked on other cleanups to the
  // Objective-C code, so for now we just live with allocating a bunch of these
  // objects.
  CGBitFieldInfo *Info = new (CGF.CGM.getContext()) CGBitFieldInfo(
      CGBitFieldInfo::MakeInfo(CGF.CGM.getTypes(), Ivar, BitOffset,
                               BitFieldSize,
                               CGF.CGM.getContext().toBits(StorageSize),
                               CharUnits::fromQuantity(0)));

  Address Addr =
      Address(V, llvm::Type::getIntNTy(CGF.getLLVMContext(), Info->StorageSize),
              Alignment);
  return LValue::MakeBitfield(Addr, *Info, IvarTy,
                              LValueBaseInfo(AlignmentSource::Decl),
                              TBAAAccessInfo());
}
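
// Helpers used by EmitTryCatchStmt below: CatchHandler records the
// per-@catch-clause bookkeeping gathered before an EHCatchScope is pushed,
// and CallObjCEndCatch is the cleanup that calls the runtime's end-catch
// entry point when a catch body is left.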
namespace {
struct CatchHandler {
  const VarDecl *Variable;
  const Stmt *Body;
  llvm::BasicBlock *Block;
  llvm::Constant *TypeInfo;
  /// Flags used to differentiate cleanups and catchalls in Windows SEH
  unsigned Flags;
};

struct CallObjCEndCatch final : EHScopeStack::Cleanup {
  CallObjCEndCatch(bool MightThrow, llvm::FunctionCallee Fn)
      : MightThrow(MightThrow), Fn(Fn) {}
  bool MightThrow;
  llvm::FunctionCallee Fn;

  void Emit(CodeGenFunction &CGF, Flags flags) override {
    if (MightThrow)
      CGF.EmitRuntimeCallOrInvoke(Fn);
    else
      CGF.EmitNounwindRuntimeCall(Fn);
  }
};
} // namespace
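
// Shared lowering for @try/@catch/@finally. With landingpad-based
// personalities the @finally block is entered and exited through FinallyInfo;
// with funclet-based personalities (e.g. the MSVC environments) it is instead
// outlined into an SEH-style __finally helper and pushed as an SEH cleanup.
// The runtime-specific begin-catch, end-catch, and rethrow entry points are
// supplied by the concrete runtime through the function parameters.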
void CGObjCRuntime::EmitTryCatchStmt(CodeGenFunction &CGF,
                                     const ObjCAtTryStmt &S,
                                     llvm::FunctionCallee beginCatchFn,
                                     llvm::FunctionCallee endCatchFn,
                                     llvm::FunctionCallee exceptionRethrowFn) {
  // Jump destination for falling out of catch bodies.
  CodeGenFunction::JumpDest Cont;
  if (S.getNumCatchStmts())
    Cont = CGF.getJumpDestInCurrentScope("eh.cont");

  bool useFunclets = EHPersonality::get(CGF).usesFuncletPads();

  CodeGenFunction::FinallyInfo FinallyInfo;
  if (!useFunclets)
    if (const ObjCAtFinallyStmt *Finally = S.getFinallyStmt())
      FinallyInfo.enter(CGF, Finally->getFinallyBody(), beginCatchFn,
                        endCatchFn, exceptionRethrowFn);

  SmallVector<CatchHandler, 8> Handlers;

  // Enter the catch, if there is one.
  if (S.getNumCatchStmts()) {
    for (const ObjCAtCatchStmt *CatchStmt : S.catch_stmts()) {
      const VarDecl *CatchDecl = CatchStmt->getCatchParamDecl();

      Handlers.push_back(CatchHandler());
      CatchHandler &Handler = Handlers.back();
      Handler.Variable = CatchDecl;
      Handler.Body = CatchStmt->getCatchBody();
      Handler.Block = CGF.createBasicBlock("catch");
      Handler.Flags = 0;

      // @catch(...) always matches.
      if (!CatchDecl) {
        auto catchAll = getCatchAllTypeInfo();
        Handler.TypeInfo = catchAll.RTTI;
        Handler.Flags = catchAll.Flags;
        // Don't consider any other catches.
        break;
      }

      Handler.TypeInfo = GetEHType(CatchDecl->getType());
    }

    EHCatchScope *Catch = CGF.EHStack.pushCatch(Handlers.size());
    for (unsigned I = 0, E = Handlers.size(); I != E; ++I)
      Catch->setHandler(I, {Handlers[I].TypeInfo, Handlers[I].Flags},
                        Handlers[I].Block);
  }
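
  // For funclet-based EH personalities, outline the @finally body into a
  // separate helper function and register it as an SEH cleanup; the
  // non-funclet path already entered FinallyInfo above instead.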
  if (useFunclets)
    if (const ObjCAtFinallyStmt *Finally = S.getFinallyStmt()) {
      CodeGenFunction HelperCGF(CGM, /*suppressNewContext=*/true);
      if (!CGF.CurSEHParent)
        CGF.CurSEHParent = cast<NamedDecl>(CGF.CurFuncDecl);
      // Outline the finally block.
      const Stmt *FinallyBlock = Finally->getFinallyBody();
      HelperCGF.startOutlinedSEHHelper(CGF, /*isFilter*/ false, FinallyBlock);

      // Emit the outlined finally body and finish the helper function.
      HelperCGF.EmitStmt(FinallyBlock);
      HelperCGF.FinishFunction(FinallyBlock->getEndLoc());

      llvm::Function *FinallyFunc = HelperCGF.CurFn;

      // Push a cleanup for __finally blocks.
      CGF.pushSEHCleanup(NormalAndEHCleanup, FinallyFunc);
    }

  // Emit the try body.
  CGF.EmitStmt(S.getTryBody());

  // Leave the try.
  if (S.getNumCatchStmts())
    CGF.popCatchScope();

  // Remember where we were.
  CGBuilderTy::InsertPoint SavedIP = CGF.Builder.saveAndClearIP();

  // Emit the handlers.
  for (unsigned I = 0, E = Handlers.size(); I != E; ++I) {
    CatchHandler &Handler = Handlers[I];

    CGF.EmitBlock(Handler.Block);

    CodeGenFunction::LexicalScope Cleanups(CGF, Handler.Body->getSourceRange());
    SaveAndRestore RevertAfterScope(CGF.CurrentFuncletPad);
    if (useFunclets) {
      llvm::Instruction *CPICandidate = Handler.Block->getFirstNonPHI();
      if (auto *CPI = dyn_cast_or_null<llvm::CatchPadInst>(CPICandidate)) {
        CGF.CurrentFuncletPad = CPI;
        CPI->setOperand(2, CGF.getExceptionSlot().emitRawPointer(CGF));
        CGF.EHStack.pushCleanup<CatchRetScope>(NormalCleanup, CPI);
      }
    }

    llvm::Value *RawExn = CGF.getExceptionFromSlot();

    // Enter the catch.
    llvm::Value *Exn = RawExn;
    if (beginCatchFn)
      Exn = CGF.EmitNounwindRuntimeCall(beginCatchFn, RawExn, "exn.adjusted");

    if (endCatchFn) {
      // Add a cleanup to leave the catch.
      bool EndCatchMightThrow = (Handler.Variable == nullptr);

      CGF.EHStack.pushCleanup<CallObjCEndCatch>(NormalAndEHCleanup,
                                                EndCatchMightThrow, endCatchFn);
    }

    // Bind the catch parameter if it exists.
    if (const VarDecl *CatchParam = Handler.Variable) {
      llvm::Type *CatchType = CGF.ConvertType(CatchParam->getType());
      llvm::Value *CastExn = CGF.Builder.CreateBitCast(Exn, CatchType);

      CGF.EmitAutoVarDecl(*CatchParam);
      EmitInitOfCatchParam(CGF, CastExn, CatchParam);
    }

    CGF.ObjCEHValueStack.push_back(Exn);
    CGF.EmitStmt(Handler.Body);
    CGF.ObjCEHValueStack.pop_back();

    // Leave any cleanups associated with the catch.
    Cleanups.ForceCleanup();

    CGF.EmitBranchThroughCleanup(Cont);
  }

  // Go back to the try-statement fallthrough.
  CGF.Builder.restoreIP(SavedIP);

  // Pop out of the finally.
  if (!useFunclets && S.getFinallyStmt())
    FinallyInfo.exit(CGF);

  if (Cont.isValid())
    CGF.EmitBlock(Cont.getBlock());
}
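
// Initialize the @catch parameter from the (possibly adjusted) exception
// object, honoring its ARC ownership qualifier: __strong parameters retain
// the exception before the store, __weak parameters are initialized through
// the weak-reference entry point, and the remaining lifetimes get a plain
// store.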
void CGObjCRuntime::EmitInitOfCatchParam(CodeGenFunction &CGF,
                                         llvm::Value *exn,
                                         const VarDecl *paramDecl) {
  Address paramAddr = CGF.GetAddrOfLocalVar(paramDecl);

  switch (paramDecl->getType().getQualifiers().getObjCLifetime()) {
  case Qualifiers::OCL_Strong:
    exn = CGF.EmitARCRetainNonBlock(exn);
    [[fallthrough]];

  case Qualifiers::OCL_None:
  case Qualifiers::OCL_ExplicitNone:
  case Qualifiers::OCL_Autoreleasing:
    CGF.Builder.CreateStore(exn, paramAddr);
    return;

  case Qualifiers::OCL_Weak:
    CGF.EmitARCInitWeak(paramAddr, exn);
    return;
  }
  llvm_unreachable("invalid ownership qualifier");
}

namespace {
struct CallSyncExit final : EHScopeStack::Cleanup {
  llvm::FunctionCallee SyncExitFn;
  llvm::Value *SyncArg;
  CallSyncExit(llvm::FunctionCallee SyncExitFn, llvm::Value *SyncArg)
      : SyncExitFn(SyncExitFn), SyncArg(SyncArg) {}

  void Emit(CodeGenFunction &CGF, Flags flags) override {
    CGF.EmitNounwindRuntimeCall(SyncExitFn, SyncArg);
  }
};
} // namespace
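
// Lower @synchronized. The enter/exit entry points are supplied by the
// concrete runtime (on the Darwin runtimes they are typically
// objc_sync_enter/objc_sync_exit). In essence,
//
//   @synchronized (obj) { body }
//
// is emitted as:
//
//   id lock = obj;         // retained and consumed under ARC
//   syncEnterFn(lock);     // marked nounwind
//   body                   // with an all-paths cleanup that calls
//                          // syncExitFn(lock) on normal and EH exits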
void CGObjCRuntime::EmitAtSynchronizedStmt(CodeGenFunction &CGF,
                                           const ObjCAtSynchronizedStmt &S,
                                           llvm::FunctionCallee syncEnterFn,
                                           llvm::FunctionCallee syncExitFn) {
  CodeGenFunction::RunCleanupsScope cleanups(CGF);

  // Evaluate the lock operand. This is guaranteed to dominate the
  // ARC release and lock-release cleanups.
  const Expr *lockExpr = S.getSynchExpr();
  llvm::Value *lock;
  if (CGF.getLangOpts().ObjCAutoRefCount) {
    lock = CGF.EmitARCRetainScalarExpr(lockExpr);
    lock = CGF.EmitObjCConsumeObject(lockExpr->getType(), lock);
  } else {
    lock = CGF.EmitScalarExpr(lockExpr);
  }
  lock = CGF.Builder.CreateBitCast(lock, CGF.VoidPtrTy);

  // Acquire the lock.
  CGF.Builder.CreateCall(syncEnterFn, lock)->setDoesNotThrow();

  // Register an all-paths cleanup to release the lock.
  CGF.EHStack.pushCleanup<CallSyncExit>(NormalAndEHCleanup, syncExitFn, lock);

  // Emit the body of the statement.
  CGF.EmitStmt(S.getSynchBody());
}

/// Compute the pointer-to-function type to which a message send
/// should be cast in order to correctly call the given method
/// with the given arguments.
///
/// \param method - may be null
/// \param resultType - the result type to use if there's no method
/// \param callArgs - the actual arguments, including implicit ones
CGObjCRuntime::MessageSendInfo
CGObjCRuntime::getMessageSendInfo(const ObjCMethodDecl *method,
                                  QualType resultType,
                                  CallArgList &callArgs) {
  unsigned ProgramAS = CGM.getDataLayout().getProgramAddressSpace();

  llvm::PointerType *signatureType =
      llvm::PointerType::get(CGM.getLLVMContext(), ProgramAS);

  // If there's a method, use information from that.
  if (method) {
    const CGFunctionInfo &signature =
        CGM.getTypes().arrangeObjCMessageSendSignature(method, callArgs[0].Ty);

    const CGFunctionInfo &signatureForCall =
        CGM.getTypes().arrangeCall(signature, callArgs);

    return MessageSendInfo(signatureForCall, signatureType);
  }

  // There's no method; just use a default CC.
  const CGFunctionInfo &argsInfo =
      CGM.getTypes().arrangeUnprototypedObjCMessageSend(resultType, callArgs);

  return MessageSendInfo(argsInfo, signatureType);
}
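
// Decide whether a message send needs a null-receiver check. The receiver is
// known to be non-null for super dispatch, for a class-method send to a
// statically known class unless that class (or an ancestor) is weak-linked,
// and for a load of an immutable 'self' inside an Objective-C method under
// ARC; everything else conservatively returns true.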
bool CGObjCRuntime::canMessageReceiverBeNull(CodeGenFunction &CGF,
                                             const ObjCMethodDecl *method,
                                             bool isSuper,
                                             const ObjCInterfaceDecl *classReceiver,
                                             llvm::Value *receiver) {
  // Super dispatch assumes that self is non-null; even the messenger
  // doesn't have a null check internally.
  if (isSuper)
    return false;

  // If this is a direct dispatch of a class method, check whether the class,
  // or anything in its hierarchy, was weak-linked.
  if (classReceiver && method && method->isClassMethod())
    return isWeakLinkedClass(classReceiver);

  // If we're emitting a method, and self is const (meaning just ARC, for now),
  // and the receiver is a load of self, then self is a valid object.
  if (auto curMethod = dyn_cast_or_null<ObjCMethodDecl>(CGF.CurCodeDecl)) {
    auto self = curMethod->getSelfDecl();
    if (self->getType().isConstQualified()) {
      if (auto LI = dyn_cast<llvm::LoadInst>(receiver->stripPointerCasts())) {
        llvm::Value *selfAddr = CGF.GetAddrOfLocalVar(self).emitRawPointer(CGF);
        if (selfAddr == LI->getPointerOperand()) {
          return false;
        }
      }
    }
  }

  // Otherwise, assume it can be null.
  return true;
}
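
// A class is considered weak-linked if it, or any class in its superclass
// chain, is weak-imported.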
bool CGObjCRuntime::isWeakLinkedClass(const ObjCInterfaceDecl *ID) {
  do {
    if (ID->isWeakImported())
      return true;
  } while ((ID = ID->getSuperClass()));

  return false;
}
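
// When a message send is skipped (most notably on the nil-receiver path), the
// callee never runs, so arguments it would normally destroy must be destroyed
// here in the caller: ns_consumed object arguments are released, and record
// arguments that are destroyed in the callee are destructed according to
// their destruction kind.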
void CGObjCRuntime::destroyCalleeDestroyedArguments(CodeGenFunction &CGF,
                                                    const ObjCMethodDecl *method,
                                                    const CallArgList &callArgs) {
  CallArgList::const_iterator I = callArgs.begin();
  for (auto i = method->param_begin(), e = method->param_end();
       i != e; ++i, ++I) {
    const ParmVarDecl *param = (*i);
    if (param->hasAttr<NSConsumedAttr>()) {
      RValue RV = I->getRValue(CGF);
      assert(RV.isScalar() &&
             "NullReturnState::complete - arg not on object");
      CGF.EmitARCRelease(RV.getScalarVal(), ARCImpreciseLifetime);
    } else {
      QualType QT = param->getType();
      auto *RT = QT->getAs<RecordType>();
      if (RT && RT->getDecl()->isParamDestroyedInCallee()) {
        RValue RV = I->getRValue(CGF);
        QualType::DestructionKind DtorKind = QT.isDestructedType();
        switch (DtorKind) {
        case QualType::DK_cxx_destructor:
          CGF.destroyCXXObject(CGF, RV.getAggregateAddress(), QT);
          break;
        case QualType::DK_nontrivial_c_struct:
          CGF.destroyNonTrivialCStruct(CGF, RV.getAggregateAddress(), QT);
          break;
        default:
          llvm_unreachable("unexpected dtor kind");
        }
      }
    }
  }
}
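
// CodeGenABITypes entry point: returns the global protocol object for the
// given protocol, emitting it first if necessary, via the configured
// Objective-C runtime.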
llvm::Constant *
clang::CodeGen::emitObjCProtocolObject(CodeGenModule &CGM,
                                       const ObjCProtocolDecl *protocol) {
  return CGM.getObjCRuntime().GetOrEmitProtocol(protocol);
}
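
// Produce the symbol name the runtimes use for an Objective-C method body,
// for instance a name of the form "-[Class(Category) selector:]" with a
// leading prefix byte; includeCategoryName controls whether the category is
// part of the name.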
std::string CGObjCRuntime::getSymbolNameForMethod(const ObjCMethodDecl *OMD,
                                                  bool includeCategoryName) {
  std::string buffer;
  llvm::raw_string_ostream out(buffer);
  CGM.getCXXABI().getMangleContext().mangleObjCMethodName(OMD, out,
                                                          /*includePrefixByte=*/true,
                                                          includeCategoryName);
  return buffer;
}
);