//===-- InstrProfiling.cpp - Frontend instrumentation based profiling ----===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This pass lowers instrprof_* intrinsics emitted by a frontend for profiling.
// It also builds the data structures and initialization code needed for
// updating execution counts and emitting the profile at runtime.
//
//===----------------------------------------------------------------------===//
15 #include "llvm/Transforms/Instrumentation/InstrProfiling.h"
16 #include "llvm/ADT/ArrayRef.h"
17 #include "llvm/ADT/SmallVector.h"
18 #include "llvm/ADT/StringRef.h"
19 #include "llvm/ADT/Triple.h"
20 #include "llvm/ADT/Twine.h"
21 #include "llvm/Analysis/BlockFrequencyInfo.h"
22 #include "llvm/Analysis/BranchProbabilityInfo.h"
23 #include "llvm/Analysis/LoopInfo.h"
24 #include "llvm/Analysis/TargetLibraryInfo.h"
25 #include "llvm/IR/Attributes.h"
26 #include "llvm/IR/BasicBlock.h"
27 #include "llvm/IR/Constant.h"
28 #include "llvm/IR/Constants.h"
29 #include "llvm/IR/DerivedTypes.h"
30 #include "llvm/IR/Dominators.h"
31 #include "llvm/IR/Function.h"
32 #include "llvm/IR/GlobalValue.h"
33 #include "llvm/IR/GlobalVariable.h"
34 #include "llvm/IR/IRBuilder.h"
35 #include "llvm/IR/Instruction.h"
36 #include "llvm/IR/Instructions.h"
37 #include "llvm/IR/IntrinsicInst.h"
38 #include "llvm/IR/Module.h"
39 #include "llvm/IR/Type.h"
40 #include "llvm/InitializePasses.h"
41 #include "llvm/Pass.h"
42 #include "llvm/ProfileData/InstrProf.h"
43 #include "llvm/Support/Casting.h"
44 #include "llvm/Support/CommandLine.h"
45 #include "llvm/Support/Error.h"
46 #include "llvm/Support/ErrorHandling.h"
47 #include "llvm/Transforms/Utils/BasicBlockUtils.h"
48 #include "llvm/Transforms/Utils/ModuleUtils.h"
49 #include "llvm/Transforms/Utils/SSAUpdater.h"
50 #include <algorithm>
51 #include <cassert>
52 #include <cstddef>
53 #include <cstdint>
54 #include <string>
56 using namespace llvm;
58 #define DEBUG_TYPE "instrprof"
60 namespace {
cl::opt<bool> DoHashBasedCounterSplit(
    "hash-based-counter-split",
    cl::desc("Rename counter variable of a comdat function based on cfg hash"),
    cl::init(true));

cl::opt<bool> RuntimeCounterRelocation(
    "runtime-counter-relocation",
    cl::desc("Enable relocating counters at runtime."),
    cl::init(false));

cl::opt<bool> ValueProfileStaticAlloc(
    "vp-static-alloc",
    cl::desc("Do static counter allocation for value profiler"),
    cl::init(true));

cl::opt<double> NumCountersPerValueSite(
    "vp-counters-per-site",
    cl::desc("The average number of profile counters allocated "
             "per value profiling site."),
    // This is set to a very small value because in real programs, only
    // a very small percentage of value sites have non-zero targets, e.g., 1/30.
    // For those sites with non-zero profile, the average number of targets
    // is usually smaller than 2.
    cl::init(1.0));

cl::opt<bool> AtomicCounterUpdateAll(
    "instrprof-atomic-counter-update-all", cl::ZeroOrMore,
    cl::desc("Make all profile counter updates atomic (for testing only)"),
    cl::init(false));

cl::opt<bool> AtomicCounterUpdatePromoted(
    "atomic-counter-update-promoted", cl::ZeroOrMore,
    cl::desc("Do counter update using atomic fetch add "
             "for promoted counters only"),
    cl::init(false));

cl::opt<bool> AtomicFirstCounter(
    "atomic-first-counter", cl::ZeroOrMore,
    cl::desc("Use atomic fetch add for first counter in a function (usually "
             "the entry counter)"),
    cl::init(false));

// If the option is not specified, the default behavior about whether
// counter promotion is done depends on how the instrumentation lowering
// pipeline is set up, i.e., the default value of true of this option
// does not mean the promotion will be done by default. Explicitly
// setting this option can override the default behavior.
cl::opt<bool> DoCounterPromotion("do-counter-promotion", cl::ZeroOrMore,
                                 cl::desc("Do counter register promotion"),
                                 cl::init(false));

cl::opt<unsigned> MaxNumOfPromotionsPerLoop(
    cl::ZeroOrMore, "max-counter-promotions-per-loop", cl::init(20),
    cl::desc("Max number of counter promotions per loop to avoid"
             " increasing register pressure too much"));

// A debug option
cl::opt<int>
    MaxNumOfPromotions(cl::ZeroOrMore, "max-counter-promotions", cl::init(-1),
                       cl::desc("Max number of allowed counter promotions"));

cl::opt<unsigned> SpeculativeCounterPromotionMaxExiting(
    cl::ZeroOrMore, "speculative-counter-promotion-max-exiting", cl::init(3),
    cl::desc("The max number of exiting blocks of a loop to allow "
             "speculative counter promotion"));

cl::opt<bool> SpeculativeCounterPromotionToLoop(
    cl::ZeroOrMore, "speculative-counter-promotion-to-loop", cl::init(false),
    cl::desc("When the option is false, if the target block is in a loop, "
             "the promotion will be disallowed unless the promoted counter "
             "update can be further/iteratively promoted into an acyclic "
             "region."));

cl::opt<bool> IterativeCounterPromotion(
    cl::ZeroOrMore, "iterative-counter-promotion", cl::init(true),
    cl::desc("Allow counter promotion across the whole loop nest."));

cl::opt<bool> SkipRetExitBlock(
    cl::ZeroOrMore, "skip-ret-exit-block", cl::init(true),
    cl::desc("Suppress counter promotion if exit blocks contain ret."));
class InstrProfilingLegacyPass : public ModulePass {
  InstrProfiling InstrProf;

public:
  static char ID;

  InstrProfilingLegacyPass() : ModulePass(ID) {}
  InstrProfilingLegacyPass(const InstrProfOptions &Options, bool IsCS = false)
      : ModulePass(ID), InstrProf(Options, IsCS) {
    initializeInstrProfilingLegacyPassPass(*PassRegistry::getPassRegistry());
  }

  StringRef getPassName() const override {
    return "Frontend instrumentation-based coverage lowering";
  }

  bool runOnModule(Module &M) override {
    auto GetTLI = [this](Function &F) -> TargetLibraryInfo & {
      return this->getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(F);
    };
    return InstrProf.run(M, GetTLI);
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    AU.addRequired<TargetLibraryInfoWrapperPass>();
  }
};
/// A helper class to promote one counter RMW operation in the loop
/// into a register update.
///
/// The RMW update for the counter will be sunk out of the loop after
/// the transformation.
class PGOCounterPromoterHelper : public LoadAndStorePromoter {
public:
  PGOCounterPromoterHelper(
      Instruction *L, Instruction *S, SSAUpdater &SSA, Value *Init,
      BasicBlock *PH, ArrayRef<BasicBlock *> ExitBlocks,
      ArrayRef<Instruction *> InsertPts,
      DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCands,
      LoopInfo &LI)
      : LoadAndStorePromoter({L, S}, SSA), Store(S), ExitBlocks(ExitBlocks),
        InsertPts(InsertPts), LoopToCandidates(LoopToCands), LI(LI) {
    assert(isa<LoadInst>(L));
    assert(isa<StoreInst>(S));
    SSA.AddAvailableValue(PH, Init);
  }

  void doExtraRewritesBeforeFinalDeletion() override {
    for (unsigned i = 0, e = ExitBlocks.size(); i != e; ++i) {
      BasicBlock *ExitBlock = ExitBlocks[i];
      Instruction *InsertPos = InsertPts[i];
      // Get the LiveIn value into the ExitBlock. If there are multiple
      // predecessors, the value is defined by a PHI node in this
      // block.
      Value *LiveInValue = SSA.GetValueInMiddleOfBlock(ExitBlock);
      Value *Addr = cast<StoreInst>(Store)->getPointerOperand();
      Type *Ty = LiveInValue->getType();
      IRBuilder<> Builder(InsertPos);
      if (AtomicCounterUpdatePromoted)
        // Atomic updates currently can only be promoted across the current
        // loop, not the whole loop nest.
        Builder.CreateAtomicRMW(AtomicRMWInst::Add, Addr, LiveInValue,
                                MaybeAlign(),
                                AtomicOrdering::SequentiallyConsistent);
      else {
        LoadInst *OldVal = Builder.CreateLoad(Ty, Addr, "pgocount.promoted");
        auto *NewVal = Builder.CreateAdd(OldVal, LiveInValue);
        auto *NewStore = Builder.CreateStore(NewVal, Addr);

        // Now update the parent loop's candidate list:
        if (IterativeCounterPromotion) {
          auto *TargetLoop = LI.getLoopFor(ExitBlock);
          if (TargetLoop)
            LoopToCandidates[TargetLoop].emplace_back(OldVal, NewStore);
        }
      }
    }
  }

private:
  Instruction *Store;
  ArrayRef<BasicBlock *> ExitBlocks;
  ArrayRef<Instruction *> InsertPts;
  DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCandidates;
  LoopInfo &LI;
};
/// A helper class to do register promotion for all profile counter
/// updates in a loop.
class PGOCounterPromoter {
public:
  PGOCounterPromoter(
      DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCands,
      Loop &CurLoop, LoopInfo &LI, BlockFrequencyInfo *BFI)
      : LoopToCandidates(LoopToCands), ExitBlocks(), InsertPts(), L(CurLoop),
        LI(LI), BFI(BFI) {

    // Skip collection of ExitBlocks and InsertPts for loops that will not be
    // able to have counters promoted.
    SmallVector<BasicBlock *, 8> LoopExitBlocks;
    SmallPtrSet<BasicBlock *, 8> BlockSet;

    L.getExitBlocks(LoopExitBlocks);
    if (!isPromotionPossible(&L, LoopExitBlocks))
      return;

    for (BasicBlock *ExitBlock : LoopExitBlocks) {
      if (BlockSet.insert(ExitBlock).second) {
        ExitBlocks.push_back(ExitBlock);
        InsertPts.push_back(&*ExitBlock->getFirstInsertionPt());
      }
    }
  }

  bool run(int64_t *NumPromoted) {
    // Skip 'infinite' loops:
    if (ExitBlocks.size() == 0)
      return false;

    // Skip if any of the ExitBlocks contains a ret instruction.
    // This is to prevent dumping of an incomplete profile -- if the
    // loop is a long-running loop and dump is called in the middle
    // of the loop, the resulting profile is incomplete.
    // FIXME: add other heuristics to detect long running loops.
    if (SkipRetExitBlock) {
      for (auto BB : ExitBlocks)
        if (isa<ReturnInst>(BB->getTerminator()))
          return false;
    }

    unsigned MaxProm = getMaxNumOfPromotionsInLoop(&L);
    if (MaxProm == 0)
      return false;

    unsigned Promoted = 0;
    for (auto &Cand : LoopToCandidates[&L]) {

      SmallVector<PHINode *, 4> NewPHIs;
      SSAUpdater SSA(&NewPHIs);
      Value *InitVal = ConstantInt::get(Cand.first->getType(), 0);

      // If BFI is set, we will use it to guide the promotions.
      if (BFI) {
        auto *BB = Cand.first->getParent();
        auto InstrCount = BFI->getBlockProfileCount(BB);
        if (!InstrCount)
          continue;
        auto PreheaderCount = BFI->getBlockProfileCount(L.getLoopPreheader());
        // If the average loop trip count is not greater than 1.5, we skip
        // promotion.
        if (PreheaderCount &&
            (PreheaderCount.getValue() * 3) >= (InstrCount.getValue() * 2))
          continue;
      }

      PGOCounterPromoterHelper Promoter(Cand.first, Cand.second, SSA, InitVal,
                                        L.getLoopPreheader(), ExitBlocks,
                                        InsertPts, LoopToCandidates, LI);
      Promoter.run(SmallVector<Instruction *, 2>({Cand.first, Cand.second}));
      Promoted++;
      if (Promoted >= MaxProm)
        break;

      (*NumPromoted)++;
      if (MaxNumOfPromotions != -1 && *NumPromoted >= MaxNumOfPromotions)
        break;
    }

    LLVM_DEBUG(dbgs() << Promoted << " counters promoted for loop (depth="
                      << L.getLoopDepth() << ")\n");
    return Promoted != 0;
  }
private:
  bool allowSpeculativeCounterPromotion(Loop *LP) {
    SmallVector<BasicBlock *, 8> ExitingBlocks;
    L.getExitingBlocks(ExitingBlocks);
    // Not considered speculative.
    if (ExitingBlocks.size() == 1)
      return true;
    if (ExitingBlocks.size() > SpeculativeCounterPromotionMaxExiting)
      return false;
    return true;
  }

  // Check whether the loop satisfies the basic conditions needed to perform
  // Counter Promotions.
  bool isPromotionPossible(Loop *LP,
                           const SmallVectorImpl<BasicBlock *> &LoopExitBlocks) {
    // We can't insert into a catchswitch.
    if (llvm::any_of(LoopExitBlocks, [](BasicBlock *Exit) {
          return isa<CatchSwitchInst>(Exit->getTerminator());
        }))
      return false;

    if (!LP->hasDedicatedExits())
      return false;

    BasicBlock *PH = LP->getLoopPreheader();
    if (!PH)
      return false;

    return true;
  }

  // Returns the max number of Counter Promotions for LP.
  unsigned getMaxNumOfPromotionsInLoop(Loop *LP) {
    SmallVector<BasicBlock *, 8> LoopExitBlocks;
    LP->getExitBlocks(LoopExitBlocks);
    if (!isPromotionPossible(LP, LoopExitBlocks))
      return 0;

    SmallVector<BasicBlock *, 8> ExitingBlocks;
    LP->getExitingBlocks(ExitingBlocks);

    // If BFI is set, we do more aggressive promotions based on BFI.
    if (BFI)
      return (unsigned)-1;

    // Not considered speculative.
    if (ExitingBlocks.size() == 1)
      return MaxNumOfPromotionsPerLoop;

    if (ExitingBlocks.size() > SpeculativeCounterPromotionMaxExiting)
      return 0;

    // Whether the target block is in a loop does not matter:
    if (SpeculativeCounterPromotionToLoop)
      return MaxNumOfPromotionsPerLoop;

    // Now check the target block:
    unsigned MaxProm = MaxNumOfPromotionsPerLoop;
    for (auto *TargetBlock : LoopExitBlocks) {
      auto *TargetLoop = LI.getLoopFor(TargetBlock);
      if (!TargetLoop)
        continue;
      unsigned MaxPromForTarget = getMaxNumOfPromotionsInLoop(TargetLoop);
      unsigned PendingCandsInTarget = LoopToCandidates[TargetLoop].size();
      MaxProm =
          std::min(MaxProm, std::max(MaxPromForTarget, PendingCandsInTarget) -
                                PendingCandsInTarget);
    }
    return MaxProm;
  }

  DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCandidates;
  SmallVector<BasicBlock *, 8> ExitBlocks;
  SmallVector<Instruction *, 8> InsertPts;
  Loop &L;
  LoopInfo &LI;
  BlockFrequencyInfo *BFI;
};
enum class ValueProfilingCallType {
  // Individual values are tracked. Currently used for indirect call target
  // profiling.
  Default,

  // MemOp: the memop size value profiling.
  MemOp
};

} // end anonymous namespace
PreservedAnalyses InstrProfiling::run(Module &M, ModuleAnalysisManager &AM) {
  FunctionAnalysisManager &FAM =
      AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
  auto GetTLI = [&FAM](Function &F) -> TargetLibraryInfo & {
    return FAM.getResult<TargetLibraryAnalysis>(F);
  };
  if (!run(M, GetTLI))
    return PreservedAnalyses::all();

  return PreservedAnalyses::none();
}

char InstrProfilingLegacyPass::ID = 0;
INITIALIZE_PASS_BEGIN(
    InstrProfilingLegacyPass, "instrprof",
    "Frontend instrumentation-based coverage lowering.", false, false)
INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
INITIALIZE_PASS_END(
    InstrProfilingLegacyPass, "instrprof",
    "Frontend instrumentation-based coverage lowering.", false, false)

ModulePass *
llvm::createInstrProfilingLegacyPass(const InstrProfOptions &Options,
                                     bool IsCS) {
  return new InstrProfilingLegacyPass(Options, IsCS);
}
static InstrProfIncrementInst *castToIncrementInst(Instruction *Instr) {
  InstrProfIncrementInst *Inc = dyn_cast<InstrProfIncrementInstStep>(Instr);
  if (Inc)
    return Inc;
  return dyn_cast<InstrProfIncrementInst>(Instr);
}
bool InstrProfiling::lowerIntrinsics(Function *F) {
  bool MadeChange = false;
  PromotionCandidates.clear();
  for (BasicBlock &BB : *F) {
    for (auto I = BB.begin(), E = BB.end(); I != E;) {
      auto Instr = I++;
      InstrProfIncrementInst *Inc = castToIncrementInst(&*Instr);
      if (Inc) {
        lowerIncrement(Inc);
        MadeChange = true;
      } else if (auto *Ind = dyn_cast<InstrProfValueProfileInst>(Instr)) {
        lowerValueProfileInst(Ind);
        MadeChange = true;
      }
    }
  }

  if (!MadeChange)
    return false;

  promoteCounterLoadStores(F);
  return true;
}
bool InstrProfiling::isRuntimeCounterRelocationEnabled() const {
  // Mach-O does not support weak external references.
  if (TT.isOSBinFormatMachO())
    return false;

  if (RuntimeCounterRelocation.getNumOccurrences() > 0)
    return RuntimeCounterRelocation;

  // Fuchsia uses runtime counter relocation by default.
  return TT.isOSFuchsia();
}

bool InstrProfiling::isCounterPromotionEnabled() const {
  if (DoCounterPromotion.getNumOccurrences() > 0)
    return DoCounterPromotion;

  return Options.DoCounterPromotion;
}
void InstrProfiling::promoteCounterLoadStores(Function *F) {
  if (!isCounterPromotionEnabled())
    return;

  DominatorTree DT(*F);
  LoopInfo LI(DT);
  DenseMap<Loop *, SmallVector<LoadStorePair, 8>> LoopPromotionCandidates;

  std::unique_ptr<BlockFrequencyInfo> BFI;
  if (Options.UseBFIInPromotion) {
    std::unique_ptr<BranchProbabilityInfo> BPI;
    BPI.reset(new BranchProbabilityInfo(*F, LI, &GetTLI(*F)));
    BFI.reset(new BlockFrequencyInfo(*F, *BPI, LI));
  }

  for (const auto &LoadStore : PromotionCandidates) {
    auto *CounterLoad = LoadStore.first;
    auto *CounterStore = LoadStore.second;
    BasicBlock *BB = CounterLoad->getParent();
    Loop *ParentLoop = LI.getLoopFor(BB);
    if (!ParentLoop)
      continue;
    LoopPromotionCandidates[ParentLoop].emplace_back(CounterLoad, CounterStore);
  }

  SmallVector<Loop *, 4> Loops = LI.getLoopsInPreorder();

  // Do a post-order traversal of the loops so that counter updates can be
  // iteratively hoisted outside the loop nest.
  for (auto *Loop : llvm::reverse(Loops)) {
    PGOCounterPromoter Promoter(LoopPromotionCandidates, *Loop, LI, BFI.get());
    Promoter.run(&TotalCountersPromoted);
  }
}
static bool needsRuntimeHookUnconditionally(const Triple &TT) {
  // On Fuchsia, we only need the runtime hook if any counters are present.
  if (TT.isOSFuchsia())
    return false;

  return true;
}

/// Check if the module contains uses of any profiling intrinsics.
static bool containsProfilingIntrinsics(Module &M) {
  if (auto *F = M.getFunction(
          Intrinsic::getName(llvm::Intrinsic::instrprof_increment)))
    if (!F->use_empty())
      return true;
  if (auto *F = M.getFunction(
          Intrinsic::getName(llvm::Intrinsic::instrprof_increment_step)))
    if (!F->use_empty())
      return true;
  if (auto *F = M.getFunction(
          Intrinsic::getName(llvm::Intrinsic::instrprof_value_profile)))
    if (!F->use_empty())
      return true;
  return false;
}
bool InstrProfiling::run(
    Module &M, std::function<const TargetLibraryInfo &(Function &F)> GetTLI) {
  this->M = &M;
  this->GetTLI = std::move(GetTLI);
  NamesVar = nullptr;
  NamesSize = 0;
  ProfileDataMap.clear();
  CompilerUsedVars.clear();
  UsedVars.clear();
  TT = Triple(M.getTargetTriple());

  bool MadeChange = false;

  // Emit the runtime hook even if no counters are present.
  if (needsRuntimeHookUnconditionally(TT))
    MadeChange = emitRuntimeHook();

  // Improve compile time by avoiding linear scans when there is no work.
  GlobalVariable *CoverageNamesVar =
      M.getNamedGlobal(getCoverageUnusedNamesVarName());
  if (!containsProfilingIntrinsics(M) && !CoverageNamesVar)
    return MadeChange;

  // The number of value sites in an instrumented function is not known up
  // front. Count the instrumented target value sites here so that the count
  // can be recorded as a field in the per-function profile data variable.
  for (Function &F : M) {
    InstrProfIncrementInst *FirstProfIncInst = nullptr;
    for (BasicBlock &BB : F)
      for (auto I = BB.begin(), E = BB.end(); I != E; I++)
        if (auto *Ind = dyn_cast<InstrProfValueProfileInst>(I))
          computeNumValueSiteCounts(Ind);
        else if (FirstProfIncInst == nullptr)
          FirstProfIncInst = dyn_cast<InstrProfIncrementInst>(I);

    // Value profiling intrinsic lowering requires the per-function profile
    // data variable to be created first.
    if (FirstProfIncInst != nullptr)
      static_cast<void>(getOrCreateRegionCounters(FirstProfIncInst));
  }

  for (Function &F : M)
    MadeChange |= lowerIntrinsics(&F);

  if (CoverageNamesVar) {
    lowerCoverageData(CoverageNamesVar);
    MadeChange = true;
  }

  if (!MadeChange)
    return false;

  emitVNodes();
  emitNameData();
  emitRuntimeHook();
  emitRegistration();
  emitUses();
  emitInitialization();
  return true;
}
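// Lazily declare the value-profiling runtime callback that the lowered
// @llvm.instrprof.value.profile calls will target. The parameter list is
// generated from InstrProfData.inc, and the i32 counter-index parameter may
// need a sign/zero-extension attribute on some targets.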
static FunctionCallee getOrInsertValueProfilingCall(
    Module &M, const TargetLibraryInfo &TLI,
    ValueProfilingCallType CallType = ValueProfilingCallType::Default) {
  LLVMContext &Ctx = M.getContext();
  auto *ReturnTy = Type::getVoidTy(M.getContext());

  AttributeList AL;
  if (auto AK = TLI.getExtAttrForI32Param(false))
    AL = AL.addParamAttribute(M.getContext(), 2, AK);

  assert((CallType == ValueProfilingCallType::Default ||
          CallType == ValueProfilingCallType::MemOp) &&
         "Must be Default or MemOp");
  Type *ParamTypes[] = {
#define VALUE_PROF_FUNC_PARAM(ParamType, ParamName, ParamLLVMType) ParamLLVMType
#include "llvm/ProfileData/InstrProfData.inc"
  };
  auto *ValueProfilingCallTy =
      FunctionType::get(ReturnTy, makeArrayRef(ParamTypes), false);
  StringRef FuncName = CallType == ValueProfilingCallType::Default
                           ? getInstrProfValueProfFuncName()
                           : getInstrProfValueProfMemOpFuncName();
  return M.getOrInsertFunction(FuncName, ValueProfilingCallTy, AL);
}
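// Track, per value kind, how many value-profiling sites the named function
// has by remembering the largest site index seen so far (plus one).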
void InstrProfiling::computeNumValueSiteCounts(InstrProfValueProfileInst *Ind) {
  GlobalVariable *Name = Ind->getName();
  uint64_t ValueKind = Ind->getValueKind()->getZExtValue();
  uint64_t Index = Ind->getIndex()->getZExtValue();
  auto It = ProfileDataMap.find(Name);
  if (It == ProfileDataMap.end()) {
    PerFunctionProfileData PD;
    PD.NumValueSites[ValueKind] = Index + 1;
    ProfileDataMap[Name] = PD;
  } else if (It->second.NumValueSites[ValueKind] <= Index)
    It->second.NumValueSites[ValueKind] = Index + 1;
}
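// Replace an @llvm.instrprof.value.profile intrinsic with a call into the
// profiling runtime, passing the target value, the per-function data
// variable, and the flattened value-site index.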
void InstrProfiling::lowerValueProfileInst(InstrProfValueProfileInst *Ind) {
  GlobalVariable *Name = Ind->getName();
  auto It = ProfileDataMap.find(Name);
  assert(It != ProfileDataMap.end() && It->second.DataVar &&
         "value profiling detected in function with no counter increment");

  GlobalVariable *DataVar = It->second.DataVar;
  uint64_t ValueKind = Ind->getValueKind()->getZExtValue();
  uint64_t Index = Ind->getIndex()->getZExtValue();
  for (uint32_t Kind = IPVK_First; Kind < ValueKind; ++Kind)
    Index += It->second.NumValueSites[Kind];

  IRBuilder<> Builder(Ind);
  bool IsMemOpSize = (Ind->getValueKind()->getZExtValue() ==
                      llvm::InstrProfValueKind::IPVK_MemOPSize);
  CallInst *Call = nullptr;
  auto *TLI = &GetTLI(*Ind->getFunction());

  // To support value profiling calls within Windows exception handlers,
  // funclet information contained within operand bundles needs to be copied
  // over to the library call. This is required for the IR to be processed by
  // the WinEHPrepare pass.
  SmallVector<OperandBundleDef, 1> OpBundles;
  Ind->getOperandBundlesAsDefs(OpBundles);
  if (!IsMemOpSize) {
    Value *Args[3] = {Ind->getTargetValue(),
                      Builder.CreateBitCast(DataVar, Builder.getInt8PtrTy()),
                      Builder.getInt32(Index)};
    Call = Builder.CreateCall(getOrInsertValueProfilingCall(*M, *TLI), Args,
                              OpBundles);
  } else {
    Value *Args[3] = {Ind->getTargetValue(),
                      Builder.CreateBitCast(DataVar, Builder.getInt8PtrTy()),
                      Builder.getInt32(Index)};
    Call = Builder.CreateCall(
        getOrInsertValueProfilingCall(*M, *TLI, ValueProfilingCallType::MemOp),
        Args, OpBundles);
  }
  if (auto AK = TLI->getExtAttrForI32Param(false))
    Call->addParamAttr(2, AK);
  Ind->replaceAllUsesWith(Call);
  Ind->eraseFromParent();
}
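// Lower an @llvm.instrprof.increment[.step] intrinsic into a load/add/store
// (or an atomic add) on the corresponding slot of the function's counter
// array, applying the runtime relocation bias when that mode is enabled.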
void InstrProfiling::lowerIncrement(InstrProfIncrementInst *Inc) {
  GlobalVariable *Counters = getOrCreateRegionCounters(Inc);

  IRBuilder<> Builder(Inc);
  uint64_t Index = Inc->getIndex()->getZExtValue();
  Value *Addr = Builder.CreateConstInBoundsGEP2_64(Counters->getValueType(),
                                                   Counters, 0, Index);

  if (isRuntimeCounterRelocationEnabled()) {
    Type *Int64Ty = Type::getInt64Ty(M->getContext());
    Type *Int64PtrTy = Type::getInt64PtrTy(M->getContext());
    Function *Fn = Inc->getParent()->getParent();
    Instruction &I = Fn->getEntryBlock().front();
    LoadInst *LI = dyn_cast<LoadInst>(&I);
    if (!LI) {
      IRBuilder<> Builder(&I);
      GlobalVariable *Bias =
          M->getGlobalVariable(getInstrProfCounterBiasVarName());
      if (!Bias) {
        // The compiler must define this variable when runtime counter
        // relocation is being used. The runtime has a weak external reference
        // that is used to check whether that's the case or not.
        Bias = new GlobalVariable(
            *M, Int64Ty, false, GlobalValue::LinkOnceODRLinkage,
            Constant::getNullValue(Int64Ty), getInstrProfCounterBiasVarName());
        Bias->setVisibility(GlobalVariable::HiddenVisibility);
        // A definition that's weak (linkonce_odr) without being in a COMDAT
        // section wouldn't lead to link errors, but it would lead to a dead
        // data word from every TU but one. Putting it in COMDAT ensures there
        // will be exactly one data slot in the link.
        if (TT.supportsCOMDAT())
          Bias->setComdat(M->getOrInsertComdat(Bias->getName()));
      }
      LI = Builder.CreateLoad(Int64Ty, Bias);
    }
    auto *Add = Builder.CreateAdd(Builder.CreatePtrToInt(Addr, Int64Ty), LI);
    Addr = Builder.CreateIntToPtr(Add, Int64PtrTy);
  }

  if (Options.Atomic || AtomicCounterUpdateAll ||
      (Index == 0 && AtomicFirstCounter)) {
    Builder.CreateAtomicRMW(AtomicRMWInst::Add, Addr, Inc->getStep(),
                            MaybeAlign(), AtomicOrdering::Monotonic);
  } else {
    Value *IncStep = Inc->getStep();
    Value *Load = Builder.CreateLoad(IncStep->getType(), Addr, "pgocount");
    auto *Count = Builder.CreateAdd(Load, Inc->getStep());
    auto *Store = Builder.CreateStore(Count, Addr);
    if (isCounterPromotionEnabled())
      PromotionCandidates.emplace_back(cast<Instruction>(Load), Store);
  }
  Inc->eraseFromParent();
}
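// Lower the coverage names variable: mark every referenced name variable as
// private, remember it for emitNameData, and drop the now-unneeded array.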
void InstrProfiling::lowerCoverageData(GlobalVariable *CoverageNamesVar) {
  ConstantArray *Names =
      cast<ConstantArray>(CoverageNamesVar->getInitializer());
  for (unsigned I = 0, E = Names->getNumOperands(); I < E; ++I) {
    Constant *NC = Names->getOperand(I);
    Value *V = NC->stripPointerCasts();
    assert(isa<GlobalVariable>(V) && "Missing reference to function name");
    GlobalVariable *Name = cast<GlobalVariable>(V);

    Name->setLinkage(GlobalValue::PrivateLinkage);
    ReferencedNames.push_back(Name);
    NC->dropAllReferences();
  }
  CoverageNamesVar->eraseFromParent();
}
/// Get the name of a profiling variable for a particular function.
static std::string getVarName(InstrProfIncrementInst *Inc, StringRef Prefix) {
  StringRef NamePrefix = getInstrProfNameVarPrefix();
  StringRef Name = Inc->getName()->getName().substr(NamePrefix.size());
  Function *F = Inc->getParent()->getParent();
  Module *M = F->getParent();
  if (!DoHashBasedCounterSplit || !isIRPGOFlagSet(M) ||
      !canRenameComdatFunc(*F))
    return (Prefix + Name).str();
  uint64_t FuncHash = Inc->getHash()->getZExtValue();
  SmallVector<char, 24> HashPostfix;
  if (Name.endswith((Twine(".") + Twine(FuncHash)).toStringRef(HashPostfix)))
    return (Prefix + Name).str();
  return (Prefix + Name + "." + Twine(FuncHash)).str();
}
static uint64_t getIntModuleFlagOrZero(const Module &M, StringRef Flag) {
  auto *MD = dyn_cast_or_null<ConstantAsMetadata>(M.getModuleFlag(Flag));
  if (!MD)
    return 0;

  // If the flag is a ConstantAsMetadata, it should be an integer representable
  // in 64-bits.
  return cast<ConstantInt>(MD->getValue())->getZExtValue();
}

static bool enablesValueProfiling(const Module &M) {
  return isIRPGOFlagSet(&M) ||
         getIntModuleFlagOrZero(M, "EnableValueProfiling") != 0;
}

// Conservatively returns true if data variables may be referenced by code.
static bool profDataReferencedByCode(const Module &M) {
  return enablesValueProfiling(M);
}
static inline bool shouldRecordFunctionAddr(Function *F) {
  // Only record function addresses if IR PGO is enabled or if clang value
  // profiling is enabled. Recording function addresses greatly increases
  // object file size, because it prevents the inliner from deleting functions
  // that have been inlined everywhere.
  if (!profDataReferencedByCode(*F->getParent()))
    return false;

  // Check the linkage.
  bool HasAvailableExternallyLinkage = F->hasAvailableExternallyLinkage();
  if (!F->hasLinkOnceLinkage() && !F->hasLocalLinkage() &&
      !HasAvailableExternallyLinkage)
    return true;

  // A function marked 'alwaysinline' with available_externally linkage can't
  // have its address taken. Doing so would create an undefined external ref to
  // the function, which would fail to link.
  if (HasAvailableExternallyLinkage &&
      F->hasFnAttribute(Attribute::AlwaysInline))
    return false;

  // Prohibit function address recording if the function is both internal and
  // in a COMDAT. This avoids the profile data variable referencing internal
  // symbols in COMDAT.
  if (F->hasLocalLinkage() && F->hasComdat())
    return false;

  // Check uses of this function for other than direct calls or invokes to it.
  // Inline virtual functions have linkonce_odr linkage. When a key method
  // exists, the vtable will only be emitted in the TU where the key method
  // is defined. In a TU where the vtable is not available, the function won't
  // be 'addresstaken'. If its address is not recorded here, the profile data
  // with the missing address may be picked by the linker, leading to missing
  // indirect call target info.
  return F->hasAddressTaken() || F->hasLinkOnceLinkage();
}
static bool needsRuntimeRegistrationOfSectionRange(const Triple &TT) {
  // Don't do this for Darwin. compiler-rt uses linker magic.
  if (TT.isOSDarwin())
    return false;
  // Use linker script magic to get data/cnts/name start/end.
  if (TT.isOSLinux() || TT.isOSFreeBSD() || TT.isOSNetBSD() ||
      TT.isOSSolaris() || TT.isOSFuchsia() || TT.isPS4CPU() ||
      TT.isOSWindows())
    return false;

  return true;
}
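// Create (or return the cached) counter array for the function referenced by
// Inc, along with its per-function data variable and, when statically
// allocated, the value-profile pointer array.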
GlobalVariable *
InstrProfiling::getOrCreateRegionCounters(InstrProfIncrementInst *Inc) {
  GlobalVariable *NamePtr = Inc->getName();
  auto It = ProfileDataMap.find(NamePtr);
  PerFunctionProfileData PD;
  if (It != ProfileDataMap.end()) {
    if (It->second.RegionCounters)
      return It->second.RegionCounters;
    PD = It->second;
  }

  // Match the linkage and visibility of the name global.
  Function *Fn = Inc->getParent()->getParent();
  GlobalValue::LinkageTypes Linkage = NamePtr->getLinkage();
  GlobalValue::VisibilityTypes Visibility = NamePtr->getVisibility();

  // Move the name variable to the right section. Place them in a COMDAT group
  // if the associated function is a COMDAT. This will make sure that only one
  // copy of counters of the COMDAT function will be emitted after linking. Keep
  // in mind that this pass may run before the inliner, so we need to create a
  // new comdat group for the counters and profiling data. If we use the comdat
  // of the parent function, that will result in relocations against discarded
  // sections.
  //
  // If the data variable is referenced by code, counters and data have to be
  // in different comdats for COFF because the Visual C++ linker will report
  // duplicate symbol errors if there are multiple external symbols with the
  // same name marked IMAGE_COMDAT_SELECT_ASSOCIATIVE.
  //
  // For ELF, when not using COMDAT, put counters, data and values into a
  // nodeduplicate COMDAT which is lowered to a zero-flag section group. This
  // allows -z start-stop-gc to discard the entire group when the function is
  // discarded.
  bool DataReferencedByCode = profDataReferencedByCode(*M);
  bool NeedComdat = needsComdatForCounter(*Fn, *M);
  std::string CntsVarName = getVarName(Inc, getInstrProfCountersVarPrefix());
  std::string DataVarName = getVarName(Inc, getInstrProfDataVarPrefix());
  auto MaybeSetComdat = [&](GlobalVariable *GV) {
    bool UseComdat = (NeedComdat || TT.isOSBinFormatELF());
    if (UseComdat) {
      StringRef GroupName = TT.isOSBinFormatCOFF() && DataReferencedByCode
                                ? GV->getName()
                                : CntsVarName;
      Comdat *C = M->getOrInsertComdat(GroupName);
      if (!NeedComdat)
        C->setSelectionKind(Comdat::NoDeduplicate);
      GV->setComdat(C);
    }
  };
  uint64_t NumCounters = Inc->getNumCounters()->getZExtValue();
  LLVMContext &Ctx = M->getContext();
  ArrayType *CounterTy = ArrayType::get(Type::getInt64Ty(Ctx), NumCounters);

  // Create the counters variable.
  auto *CounterPtr =
      new GlobalVariable(*M, CounterTy, false, Linkage,
                         Constant::getNullValue(CounterTy), CntsVarName);
  CounterPtr->setVisibility(Visibility);
  CounterPtr->setSection(
      getInstrProfSectionName(IPSK_cnts, TT.getObjectFormat()));
  CounterPtr->setAlignment(Align(8));
  MaybeSetComdat(CounterPtr);
  CounterPtr->setLinkage(Linkage);

  auto *Int8PtrTy = Type::getInt8PtrTy(Ctx);
  // Allocate statically the array of pointers to value profile nodes for
  // the current function.
  Constant *ValuesPtrExpr = ConstantPointerNull::get(Int8PtrTy);
  uint64_t NS = 0;
  for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
    NS += PD.NumValueSites[Kind];
  if (NS > 0 && ValueProfileStaticAlloc &&
      !needsRuntimeRegistrationOfSectionRange(TT)) {
    ArrayType *ValuesTy = ArrayType::get(Type::getInt64Ty(Ctx), NS);
    auto *ValuesVar = new GlobalVariable(
        *M, ValuesTy, false, Linkage, Constant::getNullValue(ValuesTy),
        getVarName(Inc, getInstrProfValuesVarPrefix()));
    ValuesVar->setVisibility(Visibility);
    ValuesVar->setSection(
        getInstrProfSectionName(IPSK_vals, TT.getObjectFormat()));
    ValuesVar->setAlignment(Align(8));
    MaybeSetComdat(ValuesVar);
    ValuesPtrExpr =
        ConstantExpr::getBitCast(ValuesVar, Type::getInt8PtrTy(Ctx));
  }
  // Create the data variable.
  auto *IntPtrTy = M->getDataLayout().getIntPtrType(M->getContext());
  auto *Int16Ty = Type::getInt16Ty(Ctx);
  auto *Int16ArrayTy = ArrayType::get(Int16Ty, IPVK_Last + 1);
  Type *DataTypes[] = {
#define INSTR_PROF_DATA(Type, LLVMType, Name, Init) LLVMType,
#include "llvm/ProfileData/InstrProfData.inc"
  };
  auto *DataTy = StructType::get(Ctx, makeArrayRef(DataTypes));

  Constant *FunctionAddr = shouldRecordFunctionAddr(Fn)
                               ? ConstantExpr::getBitCast(Fn, Int8PtrTy)
                               : ConstantPointerNull::get(Int8PtrTy);

  Constant *Int16ArrayVals[IPVK_Last + 1];
  for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
    Int16ArrayVals[Kind] = ConstantInt::get(Int16Ty, PD.NumValueSites[Kind]);

  // If the data variable is not referenced by code (if we don't emit
  // @llvm.instrprof.value.profile, NS will be 0), and the counter keeps the
  // data variable live under linker GC, the data variable can be private. This
  // optimization applies to ELF.
  //
  // On COFF, a comdat leader cannot be local so we require DataReferencedByCode
  // to be false.
  if (NS == 0 && (TT.isOSBinFormatELF() ||
                  (!DataReferencedByCode && TT.isOSBinFormatCOFF()))) {
    Linkage = GlobalValue::PrivateLinkage;
    Visibility = GlobalValue::DefaultVisibility;
  }
  auto *Data =
      new GlobalVariable(*M, DataTy, false, Linkage, nullptr, DataVarName);
  // Reference the counter variable with a label difference (link-time
  // constant).
  auto *RelativeCounterPtr =
      ConstantExpr::getSub(ConstantExpr::getPtrToInt(CounterPtr, IntPtrTy),
                           ConstantExpr::getPtrToInt(Data, IntPtrTy));

  Constant *DataVals[] = {
#define INSTR_PROF_DATA(Type, LLVMType, Name, Init) Init,
#include "llvm/ProfileData/InstrProfData.inc"
  };
  Data->setInitializer(ConstantStruct::get(DataTy, DataVals));

  Data->setVisibility(Visibility);
  Data->setSection(getInstrProfSectionName(IPSK_data, TT.getObjectFormat()));
  Data->setAlignment(Align(INSTR_PROF_DATA_ALIGNMENT));
  MaybeSetComdat(Data);
  Data->setLinkage(Linkage);

  PD.RegionCounters = CounterPtr;
  PD.DataVar = Data;
  ProfileDataMap[NamePtr] = PD;

  // Mark the data variable as used so that it isn't stripped out.
  CompilerUsedVars.push_back(Data);
  // Now that the linkage set by the FE has been passed to the data and counter
  // variables, reset the Name variable's linkage and visibility to private so
  // that it can be removed later by the compiler.
  NamePtr->setLinkage(GlobalValue::PrivateLinkage);
  // Collect the referenced names to be used by emitNameData.
  ReferencedNames.push_back(NamePtr);

  return CounterPtr;
}
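// Statically allocate the array of value-profile nodes for the runtime to
// draw from, sized from the total number of value sites in the module.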
void InstrProfiling::emitVNodes() {
  if (!ValueProfileStaticAlloc)
    return;

  // For now only support this on platforms that do
  // not require runtime registration to discover
  // named section start/end.
  if (needsRuntimeRegistrationOfSectionRange(TT))
    return;

  size_t TotalNS = 0;
  for (auto &PD : ProfileDataMap) {
    for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
      TotalNS += PD.second.NumValueSites[Kind];
  }

  if (!TotalNS)
    return;

  uint64_t NumCounters = TotalNS * NumCountersPerValueSite;
  // Heuristic for small programs with very few total value sites.
  // The default value of vp-counters-per-site is chosen based on
  // the observation that large apps usually have a low percentage
  // of value sites that actually have any profile data, and thus
  // the average number of counters per site is low. For small
  // apps with very few sites, this may not be true. Bump up the
  // number of counters in this case.
#define INSTR_PROF_MIN_VAL_COUNTS 10
  if (NumCounters < INSTR_PROF_MIN_VAL_COUNTS)
    NumCounters = std::max(INSTR_PROF_MIN_VAL_COUNTS, (int)NumCounters * 2);

  auto &Ctx = M->getContext();
  Type *VNodeTypes[] = {
#define INSTR_PROF_VALUE_NODE(Type, LLVMType, Name, Init) LLVMType,
#include "llvm/ProfileData/InstrProfData.inc"
  };
  auto *VNodeTy = StructType::get(Ctx, makeArrayRef(VNodeTypes));

  ArrayType *VNodesTy = ArrayType::get(VNodeTy, NumCounters);
  auto *VNodesVar = new GlobalVariable(
      *M, VNodesTy, false, GlobalValue::PrivateLinkage,
      Constant::getNullValue(VNodesTy), getInstrProfVNodesVarName());
  VNodesVar->setSection(
      getInstrProfSectionName(IPSK_vnodes, TT.getObjectFormat()));
  // VNodesVar is used by runtime but not referenced via relocation by other
  // sections. Conservatively make it linker retained.
  UsedVars.push_back(VNodesVar);
}
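// Concatenate (and optionally compress) all referenced function-name strings
// into a single private global in the names section.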
void InstrProfiling::emitNameData() {
  std::string UncompressedData;

  if (ReferencedNames.empty())
    return;

  std::string CompressedNameStr;
  if (Error E = collectPGOFuncNameStrings(ReferencedNames, CompressedNameStr,
                                          DoInstrProfNameCompression)) {
    report_fatal_error(toString(std::move(E)), false);
  }

  auto &Ctx = M->getContext();
  auto *NamesVal =
      ConstantDataArray::getString(Ctx, StringRef(CompressedNameStr), false);
  NamesVar = new GlobalVariable(*M, NamesVal->getType(), true,
                                GlobalValue::PrivateLinkage, NamesVal,
                                getInstrProfNamesVarName());
  NamesSize = CompressedNameStr.size();
  NamesVar->setSection(
      getInstrProfSectionName(IPSK_name, TT.getObjectFormat()));
  // On COFF, it's important to reduce the alignment down to 1 to prevent the
  // linker from inserting padding before the start of the names section or
  // between names entries.
  NamesVar->setAlignment(Align(1));
  // NamesVar is used by runtime but not referenced via relocation by other
  // sections. Conservatively make it linker retained.
  UsedVars.push_back(NamesVar);

  for (auto *NamePtr : ReferencedNames)
    NamePtr->eraseFromParent();
}
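// On targets without linker or linker-script section bounds, emit a
// registration function that hands each profiling variable to the runtime
// explicitly.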
void InstrProfiling::emitRegistration() {
  if (!needsRuntimeRegistrationOfSectionRange(TT))
    return;

  // Construct the function.
  auto *VoidTy = Type::getVoidTy(M->getContext());
  auto *VoidPtrTy = Type::getInt8PtrTy(M->getContext());
  auto *Int64Ty = Type::getInt64Ty(M->getContext());
  auto *RegisterFTy = FunctionType::get(VoidTy, false);
  auto *RegisterF = Function::Create(RegisterFTy, GlobalValue::InternalLinkage,
                                     getInstrProfRegFuncsName(), M);
  RegisterF->setUnnamedAddr(GlobalValue::UnnamedAddr::Global);
  if (Options.NoRedZone)
    RegisterF->addFnAttr(Attribute::NoRedZone);

  auto *RuntimeRegisterTy = FunctionType::get(VoidTy, VoidPtrTy, false);
  auto *RuntimeRegisterF =
      Function::Create(RuntimeRegisterTy, GlobalVariable::ExternalLinkage,
                       getInstrProfRegFuncName(), M);

  IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", RegisterF));
  for (Value *Data : CompilerUsedVars)
    if (!isa<Function>(Data))
      IRB.CreateCall(RuntimeRegisterF, IRB.CreateBitCast(Data, VoidPtrTy));
  for (Value *Data : UsedVars)
    if (Data != NamesVar && !isa<Function>(Data))
      IRB.CreateCall(RuntimeRegisterF, IRB.CreateBitCast(Data, VoidPtrTy));

  if (NamesVar) {
    Type *ParamTypes[] = {VoidPtrTy, Int64Ty};
    auto *NamesRegisterTy =
        FunctionType::get(VoidTy, makeArrayRef(ParamTypes), false);
    auto *NamesRegisterF =
        Function::Create(NamesRegisterTy, GlobalVariable::ExternalLinkage,
                         getInstrProfNamesRegFuncName(), M);
    IRB.CreateCall(NamesRegisterF, {IRB.CreateBitCast(NamesVar, VoidPtrTy),
                                    IRB.getInt64(NamesSize)});
  }

  IRB.CreateRetVoid();
}
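// Reference a runtime hook variable so that linking any instrumented object
// also pulls in the profiling runtime's initialization.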
bool InstrProfiling::emitRuntimeHook() {
  // We expect the linker to be invoked with the -u<hook_var> flag for Linux,
  // in which case there is no need to emit the external variable.
  if (TT.isOSLinux())
    return false;

  // If the module has provided its own runtime, we don't need to do anything.
  if (M->getGlobalVariable(getInstrProfRuntimeHookVarName()))
    return false;

  // Declare an external variable that will pull in the runtime initialization.
  auto *Int32Ty = Type::getInt32Ty(M->getContext());
  auto *Var =
      new GlobalVariable(*M, Int32Ty, false, GlobalValue::ExternalLinkage,
                         nullptr, getInstrProfRuntimeHookVarName());

  if (TT.isOSBinFormatELF()) {
    // Mark the user variable as used so that it isn't stripped out.
    CompilerUsedVars.push_back(Var);
  } else {
    // Make a function that uses it.
    auto *User = Function::Create(FunctionType::get(Int32Ty, false),
                                  GlobalValue::LinkOnceODRLinkage,
                                  getInstrProfRuntimeHookVarUseFuncName(), M);
    User->addFnAttr(Attribute::NoInline);
    if (Options.NoRedZone)
      User->addFnAttr(Attribute::NoRedZone);
    User->setVisibility(GlobalValue::HiddenVisibility);
    if (TT.supportsCOMDAT())
      User->setComdat(M->getOrInsertComdat(User->getName()));

    IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", User));
    auto *Load = IRB.CreateLoad(Int32Ty, Var);
    IRB.CreateRet(Load);

    // Mark the function as used so that it isn't stripped out.
    CompilerUsedVars.push_back(User);
  }
  return true;
}
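// Add the emitted profiling variables to llvm.used / llvm.compiler.used so
// that neither the optimizer nor the linker drops parts of the parallel
// metadata sections.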
void InstrProfiling::emitUses() {
  // The metadata sections are parallel arrays. Optimizers (e.g.
  // GlobalOpt/ConstantMerge) may not discard associated sections as a unit, so
  // we conservatively retain all unconditionally in the compiler.
  //
  // On ELF, the linker can guarantee the associated sections will be retained
  // or discarded as a unit, so llvm.compiler.used is sufficient. Similarly on
  // COFF, if prof data is not referenced by code we use one comdat and ensure
  // this GC property as well. Otherwise, we have to conservatively make all of
  // the sections retained by the linker.
  if (TT.isOSBinFormatELF() ||
      (TT.isOSBinFormatCOFF() && !profDataReferencedByCode(*M)))
    appendToCompilerUsed(*M, CompilerUsedVars);
  else
    appendToUsed(*M, CompilerUsedVars);

  // We do not add proper references from used metadata sections to NamesVar
  // and VNodesVar, so we have to be conservative and place them in llvm.used
  // regardless of the target.
  appendToUsed(*M, UsedVars);
}
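// Emit per-module initialization: the profile file-name variable (for non-CS
// instrumentation) and, when registration is needed, a global constructor
// that calls the registration function.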
void InstrProfiling::emitInitialization() {
  // Create the ProfileFileName variable. Don't do this for the
  // context-sensitive instrumentation lowering: that lowering runs after
  // LTO/ThinLTO linking, and the PGOInstrumentationGenCreateVar pass should
  // have already created the variable before LTO/ThinLTO linking.
  if (!IsCS)
    createProfileFileNameVar(*M, Options.InstrProfileOutput);
  Function *RegisterF = M->getFunction(getInstrProfRegFuncsName());
  if (!RegisterF)
    return;

  // Create the initialization function.
  auto *VoidTy = Type::getVoidTy(M->getContext());
  auto *F = Function::Create(FunctionType::get(VoidTy, false),
                             GlobalValue::InternalLinkage,
                             getInstrProfInitFuncName(), M);
  F->setUnnamedAddr(GlobalValue::UnnamedAddr::Global);
  F->addFnAttr(Attribute::NoInline);
  if (Options.NoRedZone)
    F->addFnAttr(Attribute::NoRedZone);

  // Add the basic block and the necessary calls.
  IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", F));
  IRB.CreateCall(RegisterF, {});
  IRB.CreateRetVoid();

  appendToGlobalCtors(*M, F, 0);
}