//===- PreISelIntrinsicLowering.cpp - Pre-ISel intrinsic lowering pass ---===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This pass implements IR lowering for the llvm.memcpy, llvm.memmove,
// llvm.memset, llvm.load.relative and llvm.objc.* intrinsics.
//
//===----------------------------------------------------------------------===//
14 #include "llvm/CodeGen/PreISelIntrinsicLowering.h"
15 #include "llvm/Analysis/ObjCARCInstKind.h"
16 #include "llvm/Analysis/ObjCARCUtil.h"
17 #include "llvm/Analysis/TargetTransformInfo.h"
18 #include "llvm/CodeGen/Passes.h"
19 #include "llvm/CodeGen/TargetLowering.h"
20 #include "llvm/CodeGen/TargetPassConfig.h"
21 #include "llvm/IR/Function.h"
22 #include "llvm/IR/IRBuilder.h"
23 #include "llvm/IR/Instructions.h"
24 #include "llvm/IR/IntrinsicInst.h"
25 #include "llvm/IR/Module.h"
26 #include "llvm/IR/Type.h"
27 #include "llvm/InitializePasses.h"
28 #include "llvm/Pass.h"
29 #include "llvm/Support/Casting.h"
30 #include "llvm/Target/TargetMachine.h"
31 #include "llvm/Transforms/Utils/LowerMemIntrinsics.h"

/// Threshold for expanding statically sized memory intrinsic calls. Calls of
/// known size larger than this will be expanded by the pass. Calls of unknown
/// or smaller size will be left for expansion in codegen.
static cl::opt<int64_t> MemIntrinsicExpandSizeThresholdOpt(
    "mem-intrinsic-expand-size",
    cl::desc("Set minimum mem intrinsic size to expand in IR"), cl::init(-1),
    cl::Hidden);

namespace {

struct PreISelIntrinsicLowering {
  const TargetMachine &TM;
  const function_ref<TargetTransformInfo &(Function &)> LookupTTI;

  /// If this is true, assume it's preferable to leave memory intrinsic calls
  /// for replacement with a library call later. Otherwise this depends on
  /// TargetLoweringInfo availability of the corresponding function.
  const bool UseMemIntrinsicLibFunc;

  explicit PreISelIntrinsicLowering(
      const TargetMachine &TM_,
      function_ref<TargetTransformInfo &(Function &)> LookupTTI_,
      bool UseMemIntrinsicLibFunc_ = true)
      : TM(TM_), LookupTTI(LookupTTI_),
        UseMemIntrinsicLibFunc(UseMemIntrinsicLibFunc_) {}

  static bool shouldExpandMemIntrinsicWithSize(Value *Size,
                                               const TargetTransformInfo &TTI);
  bool expandMemIntrinsicUses(Function &F) const;
  bool lowerIntrinsics(Module &M) const;
};

} // namespace
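
// llvm.load.relative(ptr %base, iN %offset) loads a 32-bit value stored at
// %base + %offset and adds it back onto %base, i.e. it expands to roughly:
//
//   %addr = getelementptr i8, ptr %base, iN %offset
//   %off  = load i32, ptr %addr, align 4
//   %res  = getelementptr i8, ptr %base, i32 %off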
static bool lowerLoadRelative(Function &F) {
  if (F.use_empty())
    return false;

  bool Changed = false;
  Type *Int32Ty = Type::getInt32Ty(F.getContext());

  for (Use &U : llvm::make_early_inc_range(F.uses())) {
    auto CI = dyn_cast<CallInst>(U.getUser());
    if (!CI || CI->getCalledOperand() != &F)
      continue;

    IRBuilder<> B(CI);
    Value *OffsetPtr =
        B.CreatePtrAdd(CI->getArgOperand(0), CI->getArgOperand(1));
    Value *OffsetI32 = B.CreateAlignedLoad(Int32Ty, OffsetPtr, Align(4));

    Value *ResultPtr = B.CreatePtrAdd(CI->getArgOperand(0), OffsetI32);

    CI->replaceAllUsesWith(ResultPtr);
    CI->eraseFromParent();
    Changed = true;
  }

  return Changed;
}

// ObjCARC has knowledge about whether an obj-c runtime function needs to be
// always tail-called or never tail-called.
static CallInst::TailCallKind getOverridingTailCallKind(const Function &F) {
  objcarc::ARCInstKind Kind = objcarc::GetFunctionClass(&F);
  if (objcarc::IsAlwaysTail(Kind))
    return CallInst::TCK_Tail;
  if (objcarc::IsNeverTail(Kind))
    return CallInst::TCK_NoTail;
  return CallInst::TCK_None;
}

static bool lowerObjCCall(Function &F, const char *NewFn,
                          bool setNonLazyBind = false) {
  assert(IntrinsicInst::mayLowerToFunctionCall(F.getIntrinsicID()) &&
         "Pre-ISel intrinsics do lower into regular function calls");
  if (F.use_empty())
    return false;

  // If we haven't already looked up this function, check to see if the
  // program already contains a function with this name.
  Module *M = F.getParent();
  FunctionCallee FCache = M->getOrInsertFunction(NewFn, F.getFunctionType());

  if (Function *Fn = dyn_cast<Function>(FCache.getCallee())) {
    Fn->setLinkage(F.getLinkage());
    if (setNonLazyBind && !Fn->isWeakForLinker()) {
      // If we have Native ARC, set nonlazybind attribute for these APIs for
      // performance.
      Fn->addFnAttr(Attribute::NonLazyBind);
    }
  }

  CallInst::TailCallKind OverridingTCK = getOverridingTailCallKind(F);

  for (Use &U : llvm::make_early_inc_range(F.uses())) {
    auto *CB = cast<CallBase>(U.getUser());
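
    // A use that is not a direct call must be the function argument of a
    // "clang.arc.attachedcall" operand bundle; in that case just point the
    // bundle operand at the runtime function instead of the intrinsic.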
    if (CB->getCalledFunction() != &F) {
      objcarc::ARCInstKind Kind = objcarc::getAttachedARCFunctionKind(CB);
      (void)Kind;
      assert((Kind == objcarc::ARCInstKind::RetainRV ||
              Kind == objcarc::ARCInstKind::UnsafeClaimRV) &&
             "use expected to be the argument of operand bundle "
             "\"clang.arc.attachedcall\"");
      U.set(FCache.getCallee());
      continue;
    }

    auto *CI = cast<CallInst>(CB);
    assert(CI->getCalledFunction() && "Cannot lower an indirect call!");

    IRBuilder<> Builder(CI->getParent(), CI->getIterator());
    SmallVector<Value *, 8> Args(CI->args());
    SmallVector<llvm::OperandBundleDef, 1> BundleList;
    CI->getOperandBundlesAsDefs(BundleList);
    CallInst *NewCI = Builder.CreateCall(FCache, Args, BundleList);
    NewCI->setName(CI->getName());

    // Try to set the most appropriate TailCallKind based on both the current
    // attributes and the ones that we could get from ObjCARC's special
    // knowledge of the runtime functions.
    //
    // std::max respects both requirements of notail and tail here:
    // * notail on either the call or from ObjCARC becomes notail
    // * tail on either side is stronger than none, but not notail
    CallInst::TailCallKind TCK = CI->getTailCallKind();
    NewCI->setTailCallKind(std::max(TCK, OverridingTCK));

    // Transfer the 'returned' attribute from the intrinsic to the call site.
    // By applying this only to intrinsic call sites, we avoid applying it to
    // non-ARC explicit calls to things like objc_retain which have not been
    // auto-upgraded to use the intrinsics.
    unsigned Index;
    if (F.getAttributes().hasAttrSomewhere(Attribute::Returned, &Index) &&
        Index)
      NewCI->addParamAttr(Index - AttributeList::FirstArgIndex,
                          Attribute::Returned);

    if (!CI->use_empty())
      CI->replaceAllUsesWith(NewCI);
    CI->eraseFromParent();
  }

  return true;
}

// TODO: Should refine based on estimated number of accesses (e.g. does it
// require splitting based on alignment)
bool PreISelIntrinsicLowering::shouldExpandMemIntrinsicWithSize(
    Value *Size, const TargetTransformInfo &TTI) {
  ConstantInt *CI = dyn_cast<ConstantInt>(Size);
  if (!CI)
    return true;

  uint64_t Threshold = MemIntrinsicExpandSizeThresholdOpt.getNumOccurrences()
                           ? MemIntrinsicExpandSizeThresholdOpt
                           : TTI.getMaxMemIntrinsicInlineSizeThreshold();
  uint64_t SizeVal = CI->getZExtValue();

  // Treat a threshold of 0 as a special case to force expansion of all
  // intrinsics, including size 0.
  return SizeVal > Threshold || Threshold == 0;
}

static bool canEmitLibcall(const TargetMachine &TM, Function *F,
                           RTLIB::Libcall LC) {
  // TODO: Should this consider the address space of the memcpy?
  const TargetLowering *TLI = TM.getSubtargetImpl(*F)->getTargetLowering();
  return TLI->getLibcallName(LC) != nullptr;
}
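
// Expansion policy: a mem intrinsic whose statically known size exceeds the
// threshold is turned into an inline loop here, unless the target provides
// the corresponding libcall, in which case the call is left alone so the
// backend can emit a library call instead.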
// TODO: Handle atomic memcpy and memcpy.inline
// TODO: Pass ScalarEvolution
bool PreISelIntrinsicLowering::expandMemIntrinsicUses(Function &F) const {
  Intrinsic::ID ID = F.getIntrinsicID();
  bool Changed = false;

  for (User *U : llvm::make_early_inc_range(F.users())) {
    Instruction *Inst = cast<Instruction>(U);

    switch (ID) {
    case Intrinsic::memcpy: {
      auto *Memcpy = cast<MemCpyInst>(Inst);
      Function *ParentFunc = Memcpy->getFunction();
      const TargetTransformInfo &TTI = LookupTTI(*ParentFunc);
      if (shouldExpandMemIntrinsicWithSize(Memcpy->getLength(), TTI)) {
        if (UseMemIntrinsicLibFunc &&
            canEmitLibcall(TM, ParentFunc, RTLIB::MEMCPY))
          break;

        // TODO: For optsize, emit the loop into a separate function
        expandMemCpyAsLoop(Memcpy, TTI);
        Changed = true;
        Memcpy->eraseFromParent();
      }

      break;
    }
    case Intrinsic::memmove: {
      auto *Memmove = cast<MemMoveInst>(Inst);
      Function *ParentFunc = Memmove->getFunction();
      const TargetTransformInfo &TTI = LookupTTI(*ParentFunc);
      if (shouldExpandMemIntrinsicWithSize(Memmove->getLength(), TTI)) {
        if (UseMemIntrinsicLibFunc &&
            canEmitLibcall(TM, ParentFunc, RTLIB::MEMMOVE))
          break;

        if (expandMemMoveAsLoop(Memmove, TTI)) {
          Changed = true;
          Memmove->eraseFromParent();
        }
      }

      break;
    }
    case Intrinsic::memset: {
      auto *Memset = cast<MemSetInst>(Inst);
      Function *ParentFunc = Memset->getFunction();
      const TargetTransformInfo &TTI = LookupTTI(*ParentFunc);
      if (shouldExpandMemIntrinsicWithSize(Memset->getLength(), TTI)) {
        if (UseMemIntrinsicLibFunc &&
            canEmitLibcall(TM, ParentFunc, RTLIB::MEMSET))
          break;

        expandMemSetAsLoop(Memset);
        Changed = true;
        Memset->eraseFromParent();
      }

      break;
    }
    default:
      llvm_unreachable("unhandled intrinsic");
    }
  }

  return Changed;
}

bool PreISelIntrinsicLowering::lowerIntrinsics(Module &M) const {
  bool Changed = false;
  for (Function &F : M) {
    switch (F.getIntrinsicID()) {
    default:
      break;
    case Intrinsic::memcpy:
    case Intrinsic::memmove:
    case Intrinsic::memset:
      Changed |= expandMemIntrinsicUses(F);
      break;
    case Intrinsic::load_relative:
      Changed |= lowerLoadRelative(F);
      break;
    case Intrinsic::objc_autorelease:
      Changed |= lowerObjCCall(F, "objc_autorelease");
      break;
    case Intrinsic::objc_autoreleasePoolPop:
      Changed |= lowerObjCCall(F, "objc_autoreleasePoolPop");
      break;
    case Intrinsic::objc_autoreleasePoolPush:
      Changed |= lowerObjCCall(F, "objc_autoreleasePoolPush");
      break;
    case Intrinsic::objc_autoreleaseReturnValue:
      Changed |= lowerObjCCall(F, "objc_autoreleaseReturnValue");
      break;
    case Intrinsic::objc_copyWeak:
      Changed |= lowerObjCCall(F, "objc_copyWeak");
      break;
    case Intrinsic::objc_destroyWeak:
      Changed |= lowerObjCCall(F, "objc_destroyWeak");
      break;
    case Intrinsic::objc_initWeak:
      Changed |= lowerObjCCall(F, "objc_initWeak");
      break;
    case Intrinsic::objc_loadWeak:
      Changed |= lowerObjCCall(F, "objc_loadWeak");
      break;
    case Intrinsic::objc_loadWeakRetained:
      Changed |= lowerObjCCall(F, "objc_loadWeakRetained");
      break;
    case Intrinsic::objc_moveWeak:
      Changed |= lowerObjCCall(F, "objc_moveWeak");
      break;
    case Intrinsic::objc_release:
      Changed |= lowerObjCCall(F, "objc_release", true);
      break;
    case Intrinsic::objc_retain:
      Changed |= lowerObjCCall(F, "objc_retain", true);
      break;
    case Intrinsic::objc_retainAutorelease:
      Changed |= lowerObjCCall(F, "objc_retainAutorelease");
      break;
    case Intrinsic::objc_retainAutoreleaseReturnValue:
      Changed |= lowerObjCCall(F, "objc_retainAutoreleaseReturnValue");
      break;
    case Intrinsic::objc_retainAutoreleasedReturnValue:
      Changed |= lowerObjCCall(F, "objc_retainAutoreleasedReturnValue");
      break;
    case Intrinsic::objc_retainBlock:
      Changed |= lowerObjCCall(F, "objc_retainBlock");
      break;
    case Intrinsic::objc_storeStrong:
      Changed |= lowerObjCCall(F, "objc_storeStrong");
      break;
    case Intrinsic::objc_storeWeak:
      Changed |= lowerObjCCall(F, "objc_storeWeak");
      break;
    case Intrinsic::objc_unsafeClaimAutoreleasedReturnValue:
      Changed |= lowerObjCCall(F, "objc_unsafeClaimAutoreleasedReturnValue");
      break;
    case Intrinsic::objc_retainedObject:
      Changed |= lowerObjCCall(F, "objc_retainedObject");
      break;
    case Intrinsic::objc_unretainedObject:
      Changed |= lowerObjCCall(F, "objc_unretainedObject");
      break;
    case Intrinsic::objc_unretainedPointer:
      Changed |= lowerObjCCall(F, "objc_unretainedPointer");
      break;
    case Intrinsic::objc_retain_autorelease:
      Changed |= lowerObjCCall(F, "objc_retain_autorelease");
      break;
    case Intrinsic::objc_sync_enter:
      Changed |= lowerObjCCall(F, "objc_sync_enter");
      break;
    case Intrinsic::objc_sync_exit:
      Changed |= lowerObjCCall(F, "objc_sync_exit");
      break;
    }
  }
  return Changed;
}

namespace {
class PreISelIntrinsicLoweringLegacyPass : public ModulePass {
public:
  static char ID;

  PreISelIntrinsicLoweringLegacyPass() : ModulePass(ID) {}

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<TargetTransformInfoWrapperPass>();
    AU.addRequired<TargetPassConfig>();
  }

  bool runOnModule(Module &M) override {
    auto LookupTTI = [this](Function &F) -> TargetTransformInfo & {
      return this->getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F);
    };

    const auto &TM = getAnalysis<TargetPassConfig>().getTM<TargetMachine>();
    PreISelIntrinsicLowering Lowering(TM, LookupTTI);
    return Lowering.lowerIntrinsics(M);
  }
};

} // end anonymous namespace

char PreISelIntrinsicLoweringLegacyPass::ID;

INITIALIZE_PASS_BEGIN(PreISelIntrinsicLoweringLegacyPass,
                      "pre-isel-intrinsic-lowering",
                      "Pre-ISel Intrinsic Lowering", false, false)
INITIALIZE_PASS_DEPENDENCY(TargetPassConfig)
INITIALIZE_PASS_DEPENDENCY(TargetTransformInfoWrapperPass)
INITIALIZE_PASS_END(PreISelIntrinsicLoweringLegacyPass,
                    "pre-isel-intrinsic-lowering",
                    "Pre-ISel Intrinsic Lowering", false, false)

ModulePass *llvm::createPreISelIntrinsicLoweringPass() {
  return new PreISelIntrinsicLoweringLegacyPass();
}
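
// In the new pass manager the TargetMachine is supplied to the pass up front
// (the TM member comes from the pass's constructor; see
// PreISelIntrinsicLowering.h), so only TTI needs to be looked up here.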
PreservedAnalyses PreISelIntrinsicLoweringPass::run(Module &M,
                                                    ModuleAnalysisManager &AM) {
  auto &FAM = AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();

  auto LookupTTI = [&FAM](Function &F) -> TargetTransformInfo & {
    return FAM.getResult<TargetIRAnalysis>(F);
  };

  PreISelIntrinsicLowering Lowering(TM, LookupTTI);
  if (!Lowering.lowerIntrinsics(M))
    return PreservedAnalyses::all();

  return PreservedAnalyses::none();
}