1 //===----- CGCoroutine.cpp - Emit LLVM Code for C++ coroutines ------------===//
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
7 //===----------------------------------------------------------------------===//
9 // This contains code dealing with C++ code generation of coroutines.
11 //===----------------------------------------------------------------------===//
13 #include "CGCleanup.h"
14 #include "CodeGenFunction.h"
15 #include "llvm/ADT/ScopeExit.h"
16 #include "clang/AST/StmtCXX.h"
17 #include "clang/AST/StmtVisitor.h"
19 using namespace clang
;
20 using namespace CodeGen
;
23 using llvm::BasicBlock
;
// Kind of a suspend point; used to synthesize pretty labels for the basic
// blocks generated around each co_await / co_yield expression.
enum class AwaitKind { Init, Normal, Yield, Final };
27 static constexpr llvm::StringLiteral AwaitKindStr
[] = {"init", "await", "yield",
31 struct clang::CodeGen::CGCoroData
{
32 // What is the current await expression kind and how many
33 // await/yield expressions were encountered so far.
34 // These are used to generate pretty labels for await expressions in LLVM IR.
35 AwaitKind CurrentAwaitKind
= AwaitKind::Init
;
36 unsigned AwaitNum
= 0;
37 unsigned YieldNum
= 0;
39 // How many co_return statements are in the coroutine. Used to decide whether
40 // we need to add co_return; equivalent at the end of the user authored body.
41 unsigned CoreturnCount
= 0;
43 // A branch to this block is emitted when coroutine needs to suspend.
44 llvm::BasicBlock
*SuspendBB
= nullptr;
46 // The promise type's 'unhandled_exception' handler, if it defines one.
47 Stmt
*ExceptionHandler
= nullptr;
49 // A temporary i1 alloca that stores whether 'await_resume' threw an
50 // exception. If it did, 'true' is stored in this variable, and the coroutine
51 // body must be skipped. If the promise type does not define an exception
52 // handler, this is null.
53 llvm::Value
*ResumeEHVar
= nullptr;
55 // Stores the jump destination just before the coroutine memory is freed.
56 // This is the destination that every suspend point jumps to for the cleanup
58 CodeGenFunction::JumpDest CleanupJD
;
60 // Stores the jump destination just before the final suspend. The co_return
61 // statements jumps to this point after calling return_xxx promise member.
62 CodeGenFunction::JumpDest FinalJD
;
64 // Stores the llvm.coro.id emitted in the function so that we can supply it
65 // as the first argument to coro.begin, coro.alloc and coro.free intrinsics.
66 // Note: llvm.coro.id returns a token that cannot be directly expressed in a
68 llvm::CallInst
*CoroId
= nullptr;
70 // Stores the llvm.coro.begin emitted in the function so that we can replace
71 // all coro.frame intrinsics with direct SSA value of coro.begin that returns
72 // the address of the coroutine frame of the current coroutine.
73 llvm::CallInst
*CoroBegin
= nullptr;
75 // Stores the last emitted coro.free for the deallocate expressions, we use it
76 // to wrap dealloc code with if(auto mem = coro.free) dealloc(mem).
77 llvm::CallInst
*LastCoroFree
= nullptr;
79 // If coro.id came from the builtin, remember the expression to give better
80 // diagnostic. If CoroIdExpr is nullptr, the coro.id was created by
82 CallExpr
const *CoroIdExpr
= nullptr;
85 // Defining these here allows to keep CGCoroData private to this file.
86 clang::CodeGen::CodeGenFunction::CGCoroInfo::CGCoroInfo() {}
87 CodeGenFunction::CGCoroInfo::~CGCoroInfo() {}
89 static void createCoroData(CodeGenFunction
&CGF
,
90 CodeGenFunction::CGCoroInfo
&CurCoro
,
91 llvm::CallInst
*CoroId
,
92 CallExpr
const *CoroIdExpr
= nullptr) {
94 if (CurCoro
.Data
->CoroIdExpr
)
95 CGF
.CGM
.Error(CoroIdExpr
->getBeginLoc(),
96 "only one __builtin_coro_id can be used in a function");
98 CGF
.CGM
.Error(CoroIdExpr
->getBeginLoc(),
99 "__builtin_coro_id shall not be used in a C++ coroutine");
101 llvm_unreachable("EmitCoroutineBodyStatement called twice?");
106 CurCoro
.Data
= std::unique_ptr
<CGCoroData
>(new CGCoroData
);
107 CurCoro
.Data
->CoroId
= CoroId
;
108 CurCoro
.Data
->CoroIdExpr
= CoroIdExpr
;
111 // Synthesize a pretty name for a suspend point.
112 static SmallString
<32> buildSuspendPrefixStr(CGCoroData
&Coro
, AwaitKind Kind
) {
115 case AwaitKind::Init
:
116 case AwaitKind::Final
:
118 case AwaitKind::Normal
:
119 No
= ++Coro
.AwaitNum
;
121 case AwaitKind::Yield
:
122 No
= ++Coro
.YieldNum
;
125 SmallString
<32> Prefix(AwaitKindStr
[static_cast<unsigned>(Kind
)]);
127 Twine(No
).toVector(Prefix
);
132 static bool memberCallExpressionCanThrow(const Expr
*E
) {
133 if (const auto *CE
= dyn_cast
<CXXMemberCallExpr
>(E
))
134 if (const auto *Proto
=
135 CE
->getMethodDecl()->getType()->getAs
<FunctionProtoType
>())
136 if (isNoexceptExceptionSpec(Proto
->getExceptionSpecType()) &&
137 Proto
->canThrow() == CT_Cannot
)
142 /// Return true when the coroutine handle may escape from the await-suspend
143 /// (`awaiter.await_suspend(std::coroutine_handle)` expression).
144 /// Return false only when the coroutine wouldn't escape in the await-suspend
147 /// While it is always safe to return true, return falses can bring better
150 /// See https://github.com/llvm/llvm-project/issues/56301 and
151 /// https://reviews.llvm.org/D157070 for the example and the full discussion.
153 /// FIXME: It will be much better to perform such analysis in the middle end.
154 /// See the comments in `CodeGenFunction::EmitCall` for example.
155 static bool MayCoroHandleEscape(CoroutineSuspendExpr
const &S
) {
156 CXXRecordDecl
*Awaiter
=
157 S
.getCommonExpr()->getType().getNonReferenceType()->getAsCXXRecordDecl();
159 // Return true conservatively if the awaiter type is not a record type.
163 // In case the awaiter type is empty, the suspend wouldn't leak the coroutine
166 // TODO: We can improve this by looking into the implementation of
167 // await-suspend and see if the coroutine handle is passed to foreign
169 return !Awaiter
->field_empty();
// Emit suspend expression which roughly looks like:
//
//   auto && x = CommonExpr();
//   if (!x.await_ready()) {
//      llvm_coro_save();
//      x.await_suspend(...);     (*)
//      llvm_coro_suspend();      (**)
//   }
//   x.await_resume();
//
// where the result of the entire expression is the result of x.await_resume()
//
//   (*) If x.await_suspend return type is bool, it allows to veto a suspend:
//      if (x.await_suspend(...))
//        llvm_coro_suspend();
//
//  (**) llvm_coro_suspend() encodes three possible continuations as
//       a switch instruction:
//
//  %where-to = call i8 @llvm.coro.suspend(...)
//  switch i8 %where-to, label %coro.ret [ ; jump to epilogue to suspend
//    i8 0, label %yield.ready   ; go here when resumed
//    i8 1, label %yield.cleanup ; go here when destroyed
//  ]
//
//  See llvm's docs/Coroutines.rst for more details.
//
200 struct LValueOrRValue
{
205 static LValueOrRValue
emitSuspendExpression(CodeGenFunction
&CGF
, CGCoroData
&Coro
,
206 CoroutineSuspendExpr
const &S
,
207 AwaitKind Kind
, AggValueSlot aggSlot
,
208 bool ignoreResult
, bool forLValue
) {
209 auto *E
= S
.getCommonExpr();
212 CodeGenFunction::OpaqueValueMappingData::bind(CGF
, S
.getOpaqueValue(), E
);
213 auto UnbindOnExit
= llvm::make_scope_exit([&] { Binder
.unbind(CGF
); });
215 auto Prefix
= buildSuspendPrefixStr(Coro
, Kind
);
216 BasicBlock
*ReadyBlock
= CGF
.createBasicBlock(Prefix
+ Twine(".ready"));
217 BasicBlock
*SuspendBlock
= CGF
.createBasicBlock(Prefix
+ Twine(".suspend"));
218 BasicBlock
*CleanupBlock
= CGF
.createBasicBlock(Prefix
+ Twine(".cleanup"));
220 // If expression is ready, no need to suspend.
221 CGF
.EmitBranchOnBoolExpr(S
.getReadyExpr(), ReadyBlock
, SuspendBlock
, 0);
223 // Otherwise, emit suspend logic.
224 CGF
.EmitBlock(SuspendBlock
);
226 auto &Builder
= CGF
.Builder
;
227 llvm::Function
*CoroSave
= CGF
.CGM
.getIntrinsic(llvm::Intrinsic::coro_save
);
228 auto *NullPtr
= llvm::ConstantPointerNull::get(CGF
.CGM
.Int8PtrTy
);
229 auto *SaveCall
= Builder
.CreateCall(CoroSave
, {NullPtr
});
231 CGF
.CurCoro
.InSuspendBlock
= true;
232 CGF
.CurCoro
.MayCoroHandleEscape
= MayCoroHandleEscape(S
);
233 auto *SuspendRet
= CGF
.EmitScalarExpr(S
.getSuspendExpr());
234 CGF
.CurCoro
.InSuspendBlock
= false;
235 CGF
.CurCoro
.MayCoroHandleEscape
= false;
237 if (SuspendRet
!= nullptr && SuspendRet
->getType()->isIntegerTy(1)) {
238 // Veto suspension if requested by bool returning await_suspend.
239 BasicBlock
*RealSuspendBlock
=
240 CGF
.createBasicBlock(Prefix
+ Twine(".suspend.bool"));
241 CGF
.Builder
.CreateCondBr(SuspendRet
, RealSuspendBlock
, ReadyBlock
);
242 CGF
.EmitBlock(RealSuspendBlock
);
245 // Emit the suspend point.
246 const bool IsFinalSuspend
= (Kind
== AwaitKind::Final
);
247 llvm::Function
*CoroSuspend
=
248 CGF
.CGM
.getIntrinsic(llvm::Intrinsic::coro_suspend
);
249 auto *SuspendResult
= Builder
.CreateCall(
250 CoroSuspend
, {SaveCall
, Builder
.getInt1(IsFinalSuspend
)});
252 // Create a switch capturing three possible continuations.
253 auto *Switch
= Builder
.CreateSwitch(SuspendResult
, Coro
.SuspendBB
, 2);
254 Switch
->addCase(Builder
.getInt8(0), ReadyBlock
);
255 Switch
->addCase(Builder
.getInt8(1), CleanupBlock
);
257 // Emit cleanup for this suspend point.
258 CGF
.EmitBlock(CleanupBlock
);
259 CGF
.EmitBranchThroughCleanup(Coro
.CleanupJD
);
261 // Emit await_resume expression.
262 CGF
.EmitBlock(ReadyBlock
);
264 // Exception handling requires additional IR. If the 'await_resume' function
265 // is marked as 'noexcept', we avoid generating this additional IR.
266 CXXTryStmt
*TryStmt
= nullptr;
267 if (Coro
.ExceptionHandler
&& Kind
== AwaitKind::Init
&&
268 memberCallExpressionCanThrow(S
.getResumeExpr())) {
270 CGF
.CreateTempAlloca(Builder
.getInt1Ty(), Prefix
+ Twine("resume.eh"));
271 Builder
.CreateFlagStore(true, Coro
.ResumeEHVar
);
273 auto Loc
= S
.getResumeExpr()->getExprLoc();
274 auto *Catch
= new (CGF
.getContext())
275 CXXCatchStmt(Loc
, /*exDecl=*/nullptr, Coro
.ExceptionHandler
);
276 auto *TryBody
= CompoundStmt::Create(CGF
.getContext(), S
.getResumeExpr(),
277 FPOptionsOverride(), Loc
, Loc
);
278 TryStmt
= CXXTryStmt::Create(CGF
.getContext(), Loc
, TryBody
, Catch
);
279 CGF
.EnterCXXTryStmt(*TryStmt
);
284 Res
.LV
= CGF
.EmitLValue(S
.getResumeExpr());
286 Res
.RV
= CGF
.EmitAnyExpr(S
.getResumeExpr(), aggSlot
, ignoreResult
);
289 Builder
.CreateFlagStore(false, Coro
.ResumeEHVar
);
290 CGF
.ExitCXXTryStmt(*TryStmt
);
296 RValue
CodeGenFunction::EmitCoawaitExpr(const CoawaitExpr
&E
,
297 AggValueSlot aggSlot
,
299 return emitSuspendExpression(*this, *CurCoro
.Data
, E
,
300 CurCoro
.Data
->CurrentAwaitKind
, aggSlot
,
301 ignoreResult
, /*forLValue*/false).RV
;
303 RValue
CodeGenFunction::EmitCoyieldExpr(const CoyieldExpr
&E
,
304 AggValueSlot aggSlot
,
306 return emitSuspendExpression(*this, *CurCoro
.Data
, E
, AwaitKind::Yield
,
307 aggSlot
, ignoreResult
, /*forLValue*/false).RV
;
310 void CodeGenFunction::EmitCoreturnStmt(CoreturnStmt
const &S
) {
311 ++CurCoro
.Data
->CoreturnCount
;
312 const Expr
*RV
= S
.getOperand();
313 if (RV
&& RV
->getType()->isVoidType() && !isa
<InitListExpr
>(RV
)) {
314 // Make sure to evaluate the non initlist expression of a co_return
315 // with a void expression for side effects.
316 RunCleanupsScope
cleanupScope(*this);
319 EmitStmt(S
.getPromiseCall());
320 EmitBranchThroughCleanup(CurCoro
.Data
->FinalJD
);
325 static QualType
getCoroutineSuspendExprReturnType(const ASTContext
&Ctx
,
326 const CoroutineSuspendExpr
*E
) {
327 const auto *RE
= E
->getResumeExpr();
328 // Is it possible for RE to be a CXXBindTemporaryExpr wrapping
330 assert(isa
<CallExpr
>(RE
) && "unexpected suspend expression type");
331 return cast
<CallExpr
>(RE
)->getCallReturnType(Ctx
);
336 CodeGenFunction::EmitCoawaitLValue(const CoawaitExpr
*E
) {
337 assert(getCoroutineSuspendExprReturnType(getContext(), E
)->isReferenceType() &&
338 "Can't have a scalar return unless the return type is a "
340 return emitSuspendExpression(*this, *CurCoro
.Data
, *E
,
341 CurCoro
.Data
->CurrentAwaitKind
, AggValueSlot::ignored(),
342 /*ignoreResult*/false, /*forLValue*/true).LV
;
346 CodeGenFunction::EmitCoyieldLValue(const CoyieldExpr
*E
) {
347 assert(getCoroutineSuspendExprReturnType(getContext(), E
)->isReferenceType() &&
348 "Can't have a scalar return unless the return type is a "
350 return emitSuspendExpression(*this, *CurCoro
.Data
, *E
,
351 AwaitKind::Yield
, AggValueSlot::ignored(),
352 /*ignoreResult*/false, /*forLValue*/true).LV
;
355 // Hunts for the parameter reference in the parameter copy/move declaration.
357 struct GetParamRef
: public StmtVisitor
<GetParamRef
> {
359 DeclRefExpr
*Expr
= nullptr;
361 void VisitDeclRefExpr(DeclRefExpr
*E
) {
362 assert(Expr
== nullptr && "multilple declref in param move");
365 void VisitStmt(Stmt
*S
) {
366 for (auto *C
: S
->children()) {
374 // This class replaces references to parameters to their copies by changing
375 // the addresses in CGF.LocalDeclMap and restoring back the original values in
379 struct ParamReferenceReplacerRAII
{
380 CodeGenFunction::DeclMapTy SavedLocals
;
381 CodeGenFunction::DeclMapTy
& LocalDeclMap
;
383 ParamReferenceReplacerRAII(CodeGenFunction::DeclMapTy
&LocalDeclMap
)
384 : LocalDeclMap(LocalDeclMap
) {}
386 void addCopy(DeclStmt
const *PM
) {
387 // Figure out what param it refers to.
389 assert(PM
->isSingleDecl());
390 VarDecl
const*VD
= static_cast<VarDecl
const*>(PM
->getSingleDecl());
391 Expr
const *InitExpr
= VD
->getInit();
393 Visitor
.Visit(const_cast<Expr
*>(InitExpr
));
394 assert(Visitor
.Expr
);
395 DeclRefExpr
*DREOrig
= Visitor
.Expr
;
396 auto *PD
= DREOrig
->getDecl();
398 auto it
= LocalDeclMap
.find(PD
);
399 assert(it
!= LocalDeclMap
.end() && "parameter is not found");
400 SavedLocals
.insert({ PD
, it
->second
});
402 auto copyIt
= LocalDeclMap
.find(VD
);
403 assert(copyIt
!= LocalDeclMap
.end() && "parameter copy is not found");
404 it
->second
= copyIt
->getSecond();
407 ~ParamReferenceReplacerRAII() {
408 for (auto&& SavedLocal
: SavedLocals
) {
409 LocalDeclMap
.insert({SavedLocal
.first
, SavedLocal
.second
});
415 // For WinEH exception representation backend needs to know what funclet coro.end
416 // belongs to. That information is passed in a funclet bundle.
417 static SmallVector
<llvm::OperandBundleDef
, 1>
418 getBundlesForCoroEnd(CodeGenFunction
&CGF
) {
419 SmallVector
<llvm::OperandBundleDef
, 1> BundleList
;
421 if (llvm::Instruction
*EHPad
= CGF
.CurrentFuncletPad
)
422 BundleList
.emplace_back("funclet", EHPad
);
428 // We will insert coro.end to cut any of the destructors for objects that
429 // do not need to be destroyed once the coroutine is resumed.
430 // See llvm/docs/Coroutines.rst for more details about coro.end.
431 struct CallCoroEnd final
: public EHScopeStack::Cleanup
{
432 void Emit(CodeGenFunction
&CGF
, Flags flags
) override
{
434 auto *NullPtr
= llvm::ConstantPointerNull::get(CGF
.Int8PtrTy
);
435 llvm::Function
*CoroEndFn
= CGM
.getIntrinsic(llvm::Intrinsic::coro_end
);
436 // See if we have a funclet bundle to associate coro.end with. (WinEH)
437 auto Bundles
= getBundlesForCoroEnd(CGF
);
438 auto *CoroEnd
= CGF
.Builder
.CreateCall(
439 CoroEndFn
, {NullPtr
, CGF
.Builder
.getTrue()}, Bundles
);
440 if (Bundles
.empty()) {
441 // Otherwise, (landingpad model), create a conditional branch that leads
442 // either to a cleanup block or a block with EH resume instruction.
443 auto *ResumeBB
= CGF
.getEHResumeBlock(/*isCleanup=*/true);
444 auto *CleanupContBB
= CGF
.createBasicBlock("cleanup.cont");
445 CGF
.Builder
.CreateCondBr(CoroEnd
, ResumeBB
, CleanupContBB
);
446 CGF
.EmitBlock(CleanupContBB
);
453 // Make sure to call coro.delete on scope exit.
454 struct CallCoroDelete final
: public EHScopeStack::Cleanup
{
457 // Emit "if (coro.free(CoroId, CoroBegin)) Deallocate;"
459 // Note: That deallocation will be emitted twice: once for a normal exit and
460 // once for exceptional exit. This usage is safe because Deallocate does not
461 // contain any declarations. The SubStmtBuilder::makeNewAndDeleteExpr()
462 // builds a single call to a deallocation function which is safe to emit
464 void Emit(CodeGenFunction
&CGF
, Flags
) override
{
465 // Remember the current point, as we are going to emit deallocation code
466 // first to get to coro.free instruction that is an argument to a delete
468 BasicBlock
*SaveInsertBlock
= CGF
.Builder
.GetInsertBlock();
470 auto *FreeBB
= CGF
.createBasicBlock("coro.free");
471 CGF
.EmitBlock(FreeBB
);
472 CGF
.EmitStmt(Deallocate
);
474 auto *AfterFreeBB
= CGF
.createBasicBlock("after.coro.free");
475 CGF
.EmitBlock(AfterFreeBB
);
477 // We should have captured coro.free from the emission of deallocate.
478 auto *CoroFree
= CGF
.CurCoro
.Data
->LastCoroFree
;
480 CGF
.CGM
.Error(Deallocate
->getBeginLoc(),
481 "Deallocation expressoin does not refer to coro.free");
485 // Get back to the block we were originally and move coro.free there.
486 auto *InsertPt
= SaveInsertBlock
->getTerminator();
487 CoroFree
->moveBefore(InsertPt
);
488 CGF
.Builder
.SetInsertPoint(InsertPt
);
490 // Add if (auto *mem = coro.free) Deallocate;
491 auto *NullPtr
= llvm::ConstantPointerNull::get(CGF
.Int8PtrTy
);
492 auto *Cond
= CGF
.Builder
.CreateICmpNE(CoroFree
, NullPtr
);
493 CGF
.Builder
.CreateCondBr(Cond
, FreeBB
, AfterFreeBB
);
495 // No longer need old terminator.
496 InsertPt
->eraseFromParent();
497 CGF
.Builder
.SetInsertPoint(AfterFreeBB
);
499 explicit CallCoroDelete(Stmt
*DeallocStmt
) : Deallocate(DeallocStmt
) {}
504 struct GetReturnObjectManager
{
505 CodeGenFunction
&CGF
;
506 CGBuilderTy
&Builder
;
507 const CoroutineBodyStmt
&S
;
508 // When true, performs RVO for the return object.
509 bool DirectEmit
= false;
511 Address GroActiveFlag
;
512 CodeGenFunction::AutoVarEmission GroEmission
;
514 GetReturnObjectManager(CodeGenFunction
&CGF
, const CoroutineBodyStmt
&S
)
515 : CGF(CGF
), Builder(CGF
.Builder
), S(S
), GroActiveFlag(Address::invalid()),
516 GroEmission(CodeGenFunction::AutoVarEmission::invalid()) {
517 // The call to get_Âreturn_Âobject is sequenced before the call to
518 // initial_Âsuspend and is invoked at most once, but there are caveats
519 // regarding on whether the prvalue result object may be initialized
520 // directly/eager or delayed, depending on the types involved.
522 // More info at https://github.com/cplusplus/papers/issues/1414
524 // The general cases:
525 // 1. Same type of get_return_object and coroutine return type (direct
527 // - Constructed in the return slot.
528 // 2. Different types (delayed emission):
529 // - Constructed temporary object prior to initial suspend initialized with
530 // a call to get_return_object()
531 // - When coroutine needs to to return to the caller and needs to construct
532 // return value for the coroutine it is initialized with expiring value of
533 // the temporary obtained above.
535 // Direct emission for void returning coroutines or GROs.
537 auto *RVI
= S
.getReturnValueInit();
538 assert(RVI
&& "expected RVI");
539 auto GroType
= RVI
->getType();
540 return CGF
.getContext().hasSameType(GroType
, CGF
.FnRetTy
);
544 // The gro variable has to outlive coroutine frame and coroutine promise, but,
545 // it can only be initialized after coroutine promise was created, thus, we
546 // split its emission in two parts. EmitGroAlloca emits an alloca and sets up
547 // cleanups. Later when coroutine promise is available we initialize the gro
548 // and sets the flag that the cleanup is now active.
549 void EmitGroAlloca() {
553 auto *GroDeclStmt
= dyn_cast_or_null
<DeclStmt
>(S
.getResultDecl());
555 // If get_return_object returns void, no need to do an alloca.
559 auto *GroVarDecl
= cast
<VarDecl
>(GroDeclStmt
->getSingleDecl());
561 // Set GRO flag that it is not initialized yet
562 GroActiveFlag
= CGF
.CreateTempAlloca(Builder
.getInt1Ty(), CharUnits::One(),
564 Builder
.CreateStore(Builder
.getFalse(), GroActiveFlag
);
566 GroEmission
= CGF
.EmitAutoVarAlloca(*GroVarDecl
);
568 // Remember the top of EHStack before emitting the cleanup.
569 auto old_top
= CGF
.EHStack
.stable_begin();
570 CGF
.EmitAutoVarCleanups(GroEmission
);
571 auto top
= CGF
.EHStack
.stable_begin();
573 // Make the cleanup conditional on gro.active
574 for (auto b
= CGF
.EHStack
.find(top
), e
= CGF
.EHStack
.find(old_top
); b
!= e
;
576 if (auto *Cleanup
= dyn_cast
<EHCleanupScope
>(&*b
)) {
577 assert(!Cleanup
->hasActiveFlag() && "cleanup already has active flag?");
578 Cleanup
->setActiveFlag(GroActiveFlag
);
579 Cleanup
->setTestFlagInEHCleanup();
580 Cleanup
->setTestFlagInNormalCleanup();
587 // ReturnValue should be valid as long as the coroutine's return type
588 // is not void. The assertion could help us to reduce the check later.
589 assert(CGF
.ReturnValue
.isValid() == (bool)S
.getReturnStmt());
590 // Now we have the promise, initialize the GRO.
591 // We need to emit `get_return_object` first. According to:
592 // [dcl.fct.def.coroutine]p7
593 // The call to get_return_Âobject is sequenced before the call to
594 // initial_suspend and is invoked at most once.
596 // So we couldn't emit return value when we emit return statment,
597 // otherwise the call to get_return_object wouldn't be in front
598 // of initial_suspend.
599 if (CGF
.ReturnValue
.isValid()) {
600 CGF
.EmitAnyExprToMem(S
.getReturnValue(), CGF
.ReturnValue
,
601 S
.getReturnValue()->getType().getQualifiers(),
607 if (!GroActiveFlag
.isValid()) {
608 // No Gro variable was allocated. Simply emit the call to
609 // get_return_object.
610 CGF
.EmitStmt(S
.getResultDecl());
614 CGF
.EmitAutoVarInit(GroEmission
);
615 Builder
.CreateStore(Builder
.getTrue(), GroActiveFlag
);
620 static void emitBodyAndFallthrough(CodeGenFunction
&CGF
,
621 const CoroutineBodyStmt
&S
, Stmt
*Body
) {
623 const bool CanFallthrough
= CGF
.Builder
.GetInsertBlock();
625 if (Stmt
*OnFallthrough
= S
.getFallthroughHandler())
626 CGF
.EmitStmt(OnFallthrough
);
629 void CodeGenFunction::EmitCoroutineBody(const CoroutineBodyStmt
&S
) {
630 auto *NullPtr
= llvm::ConstantPointerNull::get(Builder
.getPtrTy());
631 auto &TI
= CGM
.getContext().getTargetInfo();
632 unsigned NewAlign
= TI
.getNewAlign() / TI
.getCharWidth();
634 auto *EntryBB
= Builder
.GetInsertBlock();
635 auto *AllocBB
= createBasicBlock("coro.alloc");
636 auto *InitBB
= createBasicBlock("coro.init");
637 auto *FinalBB
= createBasicBlock("coro.final");
638 auto *RetBB
= createBasicBlock("coro.ret");
640 auto *CoroId
= Builder
.CreateCall(
641 CGM
.getIntrinsic(llvm::Intrinsic::coro_id
),
642 {Builder
.getInt32(NewAlign
), NullPtr
, NullPtr
, NullPtr
});
643 createCoroData(*this, CurCoro
, CoroId
);
644 CurCoro
.Data
->SuspendBB
= RetBB
;
645 assert(ShouldEmitLifetimeMarkers
&&
646 "Must emit lifetime intrinsics for coroutines");
648 // Backend is allowed to elide memory allocations, to help it, emit
649 // auto mem = coro.alloc() ? 0 : ... allocation code ...;
650 auto *CoroAlloc
= Builder
.CreateCall(
651 CGM
.getIntrinsic(llvm::Intrinsic::coro_alloc
), {CoroId
});
653 Builder
.CreateCondBr(CoroAlloc
, AllocBB
, InitBB
);
656 auto *AllocateCall
= EmitScalarExpr(S
.getAllocate());
657 auto *AllocOrInvokeContBB
= Builder
.GetInsertBlock();
659 // Handle allocation failure if 'ReturnStmtOnAllocFailure' was provided.
660 if (auto *RetOnAllocFailure
= S
.getReturnStmtOnAllocFailure()) {
661 auto *RetOnFailureBB
= createBasicBlock("coro.ret.on.failure");
663 // See if allocation was successful.
664 auto *NullPtr
= llvm::ConstantPointerNull::get(Int8PtrTy
);
665 auto *Cond
= Builder
.CreateICmpNE(AllocateCall
, NullPtr
);
666 // Expect the allocation to be successful.
667 emitCondLikelihoodViaExpectIntrinsic(Cond
, Stmt::LH_Likely
);
668 Builder
.CreateCondBr(Cond
, InitBB
, RetOnFailureBB
);
670 // If not, return OnAllocFailure object.
671 EmitBlock(RetOnFailureBB
);
672 EmitStmt(RetOnAllocFailure
);
675 Builder
.CreateBr(InitBB
);
680 // Pass the result of the allocation to coro.begin.
681 auto *Phi
= Builder
.CreatePHI(VoidPtrTy
, 2);
682 Phi
->addIncoming(NullPtr
, EntryBB
);
683 Phi
->addIncoming(AllocateCall
, AllocOrInvokeContBB
);
684 auto *CoroBegin
= Builder
.CreateCall(
685 CGM
.getIntrinsic(llvm::Intrinsic::coro_begin
), {CoroId
, Phi
});
686 CurCoro
.Data
->CoroBegin
= CoroBegin
;
688 GetReturnObjectManager
GroManager(*this, S
);
689 GroManager
.EmitGroAlloca();
691 CurCoro
.Data
->CleanupJD
= getJumpDestInCurrentScope(RetBB
);
693 CGDebugInfo
*DI
= getDebugInfo();
694 ParamReferenceReplacerRAII
ParamReplacer(LocalDeclMap
);
695 CodeGenFunction::RunCleanupsScope
ResumeScope(*this);
696 EHStack
.pushCleanup
<CallCoroDelete
>(NormalAndEHCleanup
, S
.getDeallocate());
698 // Create mapping between parameters and copy-params for coroutine function.
699 llvm::ArrayRef
<const Stmt
*> ParamMoves
= S
.getParamMoves();
701 (ParamMoves
.size() == 0 || (ParamMoves
.size() == FnArgs
.size())) &&
702 "ParamMoves and FnArgs should be the same size for coroutine function");
703 if (ParamMoves
.size() == FnArgs
.size() && DI
)
704 for (const auto Pair
: llvm::zip(FnArgs
, ParamMoves
))
705 DI
->getCoroutineParameterMappings().insert(
706 {std::get
<0>(Pair
), std::get
<1>(Pair
)});
708 // Create parameter copies. We do it before creating a promise, since an
709 // evolution of coroutine TS may allow promise constructor to observe
711 for (auto *PM
: S
.getParamMoves()) {
713 ParamReplacer
.addCopy(cast
<DeclStmt
>(PM
));
714 // TODO: if(CoroParam(...)) need to surround ctor and dtor
715 // for the copy, so that llvm can elide it if the copy is
719 EmitStmt(S
.getPromiseDeclStmt());
721 Address PromiseAddr
= GetAddrOfLocalVar(S
.getPromiseDecl());
722 auto *PromiseAddrVoidPtr
=
723 new llvm::BitCastInst(PromiseAddr
.getPointer(), VoidPtrTy
, "", CoroId
);
724 // Update CoroId to refer to the promise. We could not do it earlier because
725 // promise local variable was not emitted yet.
726 CoroId
->setArgOperand(1, PromiseAddrVoidPtr
);
728 // Now we have the promise, initialize the GRO
729 GroManager
.EmitGroInit();
731 EHStack
.pushCleanup
<CallCoroEnd
>(EHCleanup
);
733 CurCoro
.Data
->CurrentAwaitKind
= AwaitKind::Init
;
734 CurCoro
.Data
->ExceptionHandler
= S
.getExceptionHandler();
735 EmitStmt(S
.getInitSuspendStmt());
736 CurCoro
.Data
->FinalJD
= getJumpDestInCurrentScope(FinalBB
);
738 CurCoro
.Data
->CurrentAwaitKind
= AwaitKind::Normal
;
740 if (CurCoro
.Data
->ExceptionHandler
) {
741 // If we generated IR to record whether an exception was thrown from
742 // 'await_resume', then use that IR to determine whether the coroutine
743 // body should be skipped.
744 // If we didn't generate the IR (perhaps because 'await_resume' was marked
745 // as 'noexcept'), then we skip this check.
746 BasicBlock
*ContBB
= nullptr;
747 if (CurCoro
.Data
->ResumeEHVar
) {
748 BasicBlock
*BodyBB
= createBasicBlock("coro.resumed.body");
749 ContBB
= createBasicBlock("coro.resumed.cont");
750 Value
*SkipBody
= Builder
.CreateFlagLoad(CurCoro
.Data
->ResumeEHVar
,
752 Builder
.CreateCondBr(SkipBody
, ContBB
, BodyBB
);
756 auto Loc
= S
.getBeginLoc();
757 CXXCatchStmt
Catch(Loc
, /*exDecl=*/nullptr,
758 CurCoro
.Data
->ExceptionHandler
);
760 CXXTryStmt::Create(getContext(), Loc
, S
.getBody(), &Catch
);
762 EnterCXXTryStmt(*TryStmt
);
763 emitBodyAndFallthrough(*this, S
, TryStmt
->getTryBlock());
764 ExitCXXTryStmt(*TryStmt
);
770 emitBodyAndFallthrough(*this, S
, S
.getBody());
773 // See if we need to generate final suspend.
774 const bool CanFallthrough
= Builder
.GetInsertBlock();
775 const bool HasCoreturns
= CurCoro
.Data
->CoreturnCount
> 0;
776 if (CanFallthrough
|| HasCoreturns
) {
778 CurCoro
.Data
->CurrentAwaitKind
= AwaitKind::Final
;
779 EmitStmt(S
.getFinalSuspendStmt());
781 // We don't need FinalBB. Emit it to make sure the block is deleted.
782 EmitBlock(FinalBB
, /*IsFinished=*/true);
787 // Emit coro.end before getReturnStmt (and parameter destructors), since
788 // resume and destroy parts of the coroutine should not include them.
789 llvm::Function
*CoroEnd
= CGM
.getIntrinsic(llvm::Intrinsic::coro_end
);
790 Builder
.CreateCall(CoroEnd
, {NullPtr
, Builder
.getFalse()});
792 if (Stmt
*Ret
= S
.getReturnStmt()) {
793 // Since we already emitted the return value above, so we shouldn't
794 // emit it again here.
795 if (GroManager
.DirectEmit
)
796 cast
<ReturnStmt
>(Ret
)->setRetValue(nullptr);
800 // LLVM require the frontend to mark the coroutine.
801 CurFn
->setPresplitCoroutine();
804 // Emit coroutine intrinsic and patch up arguments of the token type.
805 RValue
CodeGenFunction::EmitCoroutineIntrinsic(const CallExpr
*E
,
807 SmallVector
<llvm::Value
*, 8> Args
;
811 // The coro.frame builtin is replaced with an SSA value of the coro.begin
813 case llvm::Intrinsic::coro_frame
: {
814 if (CurCoro
.Data
&& CurCoro
.Data
->CoroBegin
) {
815 return RValue::get(CurCoro
.Data
->CoroBegin
);
817 CGM
.Error(E
->getBeginLoc(), "this builtin expect that __builtin_coro_begin "
818 "has been used earlier in this function");
819 auto *NullPtr
= llvm::ConstantPointerNull::get(Builder
.getPtrTy());
820 return RValue::get(NullPtr
);
822 case llvm::Intrinsic::coro_size
: {
823 auto &Context
= getContext();
824 CanQualType SizeTy
= Context
.getSizeType();
825 llvm::IntegerType
*T
= Builder
.getIntNTy(Context
.getTypeSize(SizeTy
));
826 llvm::Function
*F
= CGM
.getIntrinsic(llvm::Intrinsic::coro_size
, T
);
827 return RValue::get(Builder
.CreateCall(F
));
829 case llvm::Intrinsic::coro_align
: {
830 auto &Context
= getContext();
831 CanQualType SizeTy
= Context
.getSizeType();
832 llvm::IntegerType
*T
= Builder
.getIntNTy(Context
.getTypeSize(SizeTy
));
833 llvm::Function
*F
= CGM
.getIntrinsic(llvm::Intrinsic::coro_align
, T
);
834 return RValue::get(Builder
.CreateCall(F
));
836 // The following three intrinsics take a token parameter referring to a token
837 // returned by earlier call to @llvm.coro.id. Since we cannot represent it in
838 // builtins, we patch it up here.
839 case llvm::Intrinsic::coro_alloc
:
840 case llvm::Intrinsic::coro_begin
:
841 case llvm::Intrinsic::coro_free
: {
842 if (CurCoro
.Data
&& CurCoro
.Data
->CoroId
) {
843 Args
.push_back(CurCoro
.Data
->CoroId
);
846 CGM
.Error(E
->getBeginLoc(), "this builtin expect that __builtin_coro_id has"
847 " been used earlier in this function");
848 // Fallthrough to the next case to add TokenNone as the first argument.
851 // @llvm.coro.suspend takes a token parameter. Add token 'none' as the first
853 case llvm::Intrinsic::coro_suspend
:
854 Args
.push_back(llvm::ConstantTokenNone::get(getLLVMContext()));
857 for (const Expr
*Arg
: E
->arguments())
858 Args
.push_back(EmitScalarExpr(Arg
));
860 llvm::Function
*F
= CGM
.getIntrinsic(IID
);
861 llvm::CallInst
*Call
= Builder
.CreateCall(F
, Args
);
863 // Note: The following code is to enable to emit coro.id and coro.begin by
864 // hand to experiment with coroutines in C.
865 // If we see @llvm.coro.id remember it in the CoroData. We will update
866 // coro.alloc, coro.begin and coro.free intrinsics to refer to it.
867 if (IID
== llvm::Intrinsic::coro_id
) {
868 createCoroData(*this, CurCoro
, Call
, E
);
870 else if (IID
== llvm::Intrinsic::coro_begin
) {
872 CurCoro
.Data
->CoroBegin
= Call
;
874 else if (IID
== llvm::Intrinsic::coro_free
) {
875 // Remember the last coro_free as we need it to build the conditional
876 // deletion of the coroutine frame.
878 CurCoro
.Data
->LastCoroFree
= Call
;
880 return RValue::get(Call
);