1 //===-- ProfiledBinary.h - Binary decoder -----------------------*- C++ -*-===//
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
7 //===----------------------------------------------------------------------===//
9 #ifndef LLVM_TOOLS_LLVM_PROFGEN_PROFILEDBINARY_H
10 #define LLVM_TOOLS_LLVM_PROFGEN_PROFILEDBINARY_H
12 #include "CallContext.h"
13 #include "ErrorHandling.h"
14 #include "llvm/ADT/DenseMap.h"
15 #include "llvm/ADT/StringRef.h"
16 #include "llvm/ADT/StringSet.h"
17 #include "llvm/DebugInfo/DWARF/DWARFContext.h"
18 #include "llvm/DebugInfo/Symbolize/Symbolize.h"
19 #include "llvm/MC/MCAsmInfo.h"
20 #include "llvm/MC/MCContext.h"
21 #include "llvm/MC/MCDisassembler/MCDisassembler.h"
22 #include "llvm/MC/MCInst.h"
23 #include "llvm/MC/MCInstPrinter.h"
24 #include "llvm/MC/MCInstrAnalysis.h"
25 #include "llvm/MC/MCInstrInfo.h"
26 #include "llvm/MC/MCObjectFileInfo.h"
27 #include "llvm/MC/MCPseudoProbe.h"
28 #include "llvm/MC/MCRegisterInfo.h"
29 #include "llvm/MC/MCSubtargetInfo.h"
30 #include "llvm/MC/MCTargetOptions.h"
31 #include "llvm/Object/ELFObjectFile.h"
32 #include "llvm/ProfileData/SampleProf.h"
33 #include "llvm/Support/CommandLine.h"
34 #include "llvm/Support/Path.h"
35 #include "llvm/Transforms/IPO/SampleContextTracker.h"
40 #include <unordered_map>
41 #include <unordered_set>
45 extern cl::opt
<bool> EnableCSPreInliner
;
46 extern cl::opt
<bool> UseContextCostForPreInliner
;
50 using namespace sampleprof
;
51 using namespace llvm::object
;
54 namespace sampleprof
{
// Forward declaration; ProfiledBinary holds one via unique_ptr
// (MissingContextInferrer member).
class MissingFrameInferrer;
// A virtual instruction pointer over a binary's disassembled code addresses.
// NOTE(review): this chunk appears truncated — the Address/Index members the
// comments below describe, some member functions, and the closing brace are
// not visible here.
struct InstructionPointer {
  // The binary whose code addresses this pointer walks. Non-owning.
  const ProfiledBinary *Binary;
  // Address of the executable segment of the binary.
  // Index to the sorted code address array of the binary.
  // NOTE(review): presumably RoundToNext rounds a non-instruction address up
  // to the next valid code address — confirm against the implementation; only
  // the declaration is visible here.
  InstructionPointer(const ProfiledBinary *Binary, uint64_t Address,
                     bool RoundToNext = false);
  // Repoint this instruction pointer at Addr.
  void update(uint64_t Addr);
// The special frame addresses.
// NOTE(review): the enumerator list is not visible in this chunk; the comments
// below describe enumerators (the dummy root, the external address) that were
// apparently dropped by the extraction.
enum SpecialFrameAddr {
  // Dummy root of frame trie.
  // Represent all the addresses outside of current binary.
  // This is also used to indicate the call stack should be truncated since
  // this isn't a real call context the compiler will see.

// A list of [start, end) address ranges (end is exclusive).
using RangesTy = std::vector<std::pair<uint64_t, uint64_t>>;
// A disassembled function together with its (possibly split) address ranges.
// NOTE(review): several tokens are missing from this chunk — the FuncName and
// Ranges members, the Sum local, return statements, closing braces, and the
// `struct FuncRange {` header that the members from StartAddress on belong to.
struct BinaryFunction {
  // End of range is an exclusive bound.
  // Total byte size of the function summed over all its ranges.
  uint64_t getFuncSize() {
    for (auto &R : Ranges) {
      Sum += R.second - R.first;

  // Info about function range. A function can be split into multiple
  // non-continuous ranges, each range corresponds to one FuncRange.
  uint64_t StartAddress;
  // EndAddress is an exclusive bound.
  // Function the range belongs to.
  BinaryFunction *Func;
  // Whether the start address is the real entry of the function.
  bool IsFuncEntry = false;
  StringRef getFuncName() { return Func->FuncName; }
// PrologEpilog address tracker, used to filter out broken stack samples.
// Currently we use a heuristic size (two) to infer prolog and epilog
// based on the start address and return address. In the future,
// we will switch to Dwarf CFI based tracker.
// NOTE(review): both infer* bodies below are visibly truncated — the
// instruction-pointer advance/backward steps and the closing braces are
// missing, and inferPrologAddresses is missing its return type in this chunk.
struct PrologEpilogTracker {
  // A set of prolog and epilog addresses. Used by virtual unwinding.
  std::unordered_set<uint64_t> PrologEpilogSet;
  // The binary being tracked. Non-owning.
  ProfiledBinary *Binary;
  PrologEpilogTracker(ProfiledBinary *Bin) : Binary(Bin){};

  // Take the two addresses from the start of function as prolog.
  // NOTE(review): iterating the map by value copies each pair; `const auto &I`
  // would avoid the copy.
  inferPrologAddresses(std::map<uint64_t, FuncRange> &FuncStartAddressMap) {
    for (auto I : FuncStartAddressMap) {
      PrologEpilogSet.insert(I.first);
      InstructionPointer IP(Binary, I.first);
      PrologEpilogSet.insert(IP.Address);

  // Take the last two addresses before the return address as epilog.
  void inferEpilogAddresses(std::unordered_set<uint64_t> &RetAddrs) {
    for (auto Addr : RetAddrs) {
      PrologEpilogSet.insert(Addr);
      InstructionPointer IP(Binary, Addr);
      PrologEpilogSet.insert(IP.Address);
// Track function byte size under different context (outlined version as well
// as various inlined versions). It also provides query support to get function
// size with the best matching context, which is used to help pre-inliner use
// accurate post-optimization size to make decisions.
// TODO: If an inlinee is completely optimized away, ideally we should have
// zero for its context size, currently we would miss such context since it
// doesn't have instructions. To fix this, we need to mark all inlinee with
// entry probe but without instructions as having zero size.
// NOTE(review): access specifiers and some tokens are missing from this chunk
// — e.g. the second parameter of addInstructionForContext and the return type
// of the three-argument trackInlineesOptimizedAway overload.
class BinarySizeContextTracker {
  // Add instruction with given size to a context.
  void addInstructionForContext(const SampleContextFrameVector &Context,

  // Get function size with a specific context. When there's no exact match
  // for the given context, try to retrieve the size of that function from
  // closest matching context.
  uint32_t getFuncSizeForContext(const ContextTrieNode *Context);

  // For inlinees that are fully optimized away, we can establish zero size
  // using their remaining probes.
  void trackInlineesOptimizedAway(MCPseudoProbeDecoder &ProbeDecoder);

  using ProbeFrameStack = SmallVector<std::pair<StringRef, uint32_t>>;
  trackInlineesOptimizedAway(MCPseudoProbeDecoder &ProbeDecoder,
                             const MCDecodedPseudoProbeInlineTree &ProbeNode,
                             ProbeFrameStack &Context);

  void dump() { RootContext.dumpTree(); }

  // Root node for context trie tree, note that this is a reverse context trie
  // with callee as parent and caller as child. This way we can traverse from
  // root to find the best/longest matching context if an exact match does not
  // exist. It gives us the best possible estimate for function's post-inline,
  // post-optimization byte size.
  ContextTrieNode RootContext;

// A [start, end) virtual address pair.
using AddressRange = std::pair<uint64_t, uint64_t>;
// Representation of a profiled binary: loads the on-disk binary, disassembles
// its text sections, decodes pseudo-probe and DWARF info, and answers address
// queries during profile generation.
// NOTE(review): this chunk is visibly truncated in many places — access
// specifiers, several members, some return types/statements and most closing
// braces are missing. Comments below describe only what is visible.
class ProfiledBinary {
  // Absolute path of the executable binary.
  // Path of the debug info binary.
  std::string DebugBinaryPath;
  // The target triple.
  // Path of symbolizer path which should be pointed to binary with debug info.
  StringRef SymbolizerPath;
  // Options used to configure the symbolizer.
  symbolize::LLVMSymbolizer::Options SymbolizerOpts;
  // The runtime base address that the first executable segment is loaded at.
  uint64_t BaseAddress = 0;
  // The runtime base address that the first loadable segment is loaded at.
  uint64_t FirstLoadableAddress = 0;
  // The preferred load address of each executable segment.
  std::vector<uint64_t> PreferredTextSegmentAddresses;
  // The file offset of each executable segment.
  std::vector<uint64_t> TextSegmentOffsets;

  // Multiple MC component info.
  std::unique_ptr<const MCRegisterInfo> MRI;
  std::unique_ptr<const MCAsmInfo> AsmInfo;
  std::unique_ptr<const MCSubtargetInfo> STI;
  std::unique_ptr<const MCInstrInfo> MII;
  std::unique_ptr<MCDisassembler> DisAsm;
  std::unique_ptr<const MCInstrAnalysis> MIA;
  std::unique_ptr<MCInstPrinter> IPrinter;
  // A list of text sections sorted by start RVA and size. Used to check
  // if a given RVA is a valid code address.
  std::set<std::pair<uint64_t, uint64_t>> TextSections;

  // A map of mapping function name to BinaryFunction info.
  std::unordered_map<std::string, BinaryFunction> BinaryFunctions;

  // Lookup BinaryFunctions using the function name's MD5 hash. Needed if the
  // profile is using MD5.
  std::unordered_map<uint64_t, BinaryFunction *> HashBinaryFunctions;

  // A list of binary functions that have samples.
  std::unordered_set<const BinaryFunction *> ProfiledFunctions;

  // GUID to Elf symbol start address map.
  DenseMap<uint64_t, uint64_t> SymbolStartAddrs;

  // These maps are for temporary use of warning diagnosis.
  DenseSet<int64_t> AddrsWithMultipleSymbols;
  DenseSet<std::pair<uint64_t, uint64_t>> AddrsWithInvalidInstruction;

  // Start address to Elf symbol GUID map.
  std::unordered_multimap<uint64_t, uint64_t> StartAddrToSymMap;

  // An ordered map of mapping function's start address to function range
  // relevant info. Currently to determine if the offset of ELF is the start of
  // a real function, we leverage the function range info from DWARF.
  std::map<uint64_t, FuncRange> StartAddrToFuncRangeMap;

  // Address to context location map. Used to expand the context.
  std::unordered_map<uint64_t, SampleContextFrameVector> AddressToLocStackMap;

  // Address to instruction size map. Also used for quick Address lookup.
  std::unordered_map<uint64_t, uint64_t> AddressToInstSizeMap;

  // An array of Addresses of all instructions sorted in increasing order. The
  // sorting is needed to fast advance to the next forward/backward instruction.
  std::vector<uint64_t> CodeAddressVec;
  // A set of call instruction addresses. Used by virtual unwinding.
  std::unordered_set<uint64_t> CallAddressSet;
  // A set of return instruction addresses. Used by virtual unwinding.
  std::unordered_set<uint64_t> RetAddressSet;
  // An ordered set of unconditional branch instruction addresses.
  std::set<uint64_t> UncondBranchAddrSet;
  // A set of branch instruction addresses.
  std::unordered_set<uint64_t> BranchAddressSet;

  // Estimate and track function prolog and epilog ranges.
  PrologEpilogTracker ProEpilogTracker;

  // Infer missing frames due to compiler optimizations such as tail call
  // elimination — TODO confirm; the tail of this comment is cut off here.
  std::unique_ptr<MissingFrameInferrer> MissingContextInferrer;

  // Track function sizes under different context.
  BinarySizeContextTracker FuncSizeTracker;

  // The symbolizer used to get inline context for an instruction.
  std::unique_ptr<symbolize::LLVMSymbolizer> Symbolizer;

  // String table owning function name strings created from the symbolizer.
  std::unordered_set<std::string> NameStrings;

  // A collection of functions to print disassembly for.
  StringSet<> DisassembleFunctionSet;

  // Pseudo probe decoder.
  MCPseudoProbeDecoder ProbeDecoder;

  // Function name to probe frame map for top-level outlined functions.
  StringMap<MCDecodedPseudoProbeInlineTree *> TopLevelProbeFrameMap;

  bool UsePseudoProbes = false;

  bool UseFSDiscriminator = false;

  // Whether we need to symbolize all instructions to get function context size.
  bool TrackFuncContextSize = false;

  // Whether this is a kernel image.
  bool IsKernel = false;

  // Indicate if the base loading address is parsed from the mmap event or uses
  // the preferred address.
  bool IsLoadedByMMap = false;
  // Use to avoid redundant warning.
  bool MissingMMapWarned = false;

  void setPreferredTextSegmentAddresses(const ObjectFile *O);

  // NOTE(review): the remaining parameter(s) of the next two overloads are
  // missing from this chunk — the declarations end in a dangling comma.
  template <class ELFT>
  void setPreferredTextSegmentAddresses(const ELFFile<ELFT> &Obj,

  void setPreferredTextSegmentAddresses(const COFFObjectFile *Obj,

  void checkPseudoProbe(const ELFObjectFileBase *Obj);

  void decodePseudoProbe(const ELFObjectFileBase *Obj);

  // NOTE(review): return type missing from this chunk.
  checkUseFSDiscriminator(const ObjectFile *Obj,
                          std::map<SectionRef, SectionSymbolsTy> &AllSymbols);

  // Set up disassembler and related components.
  void setUpDisassembler(const ObjectFile *Obj);
  symbolize::LLVMSymbolizer::Options getSymbolizerOpts() const;

  // Load debug info of subprograms from DWARF section.
  void loadSymbolsFromDWARF(ObjectFile &Obj);

  // Load debug info from DWARF unit.
  void loadSymbolsFromDWARFUnit(DWARFUnit &CompilationUnit);

  // Create elf symbol to its start address mapping.
  void populateElfSymbolAddressList(const ELFObjectFileBase *O);

  // A function may be split into multiple non-continuous address ranges. We
  // use this to set whether the start of a function range is the real entry of
  // the function and also set false to the non-function label.
  void setIsFuncEntry(FuncRange *FRange, StringRef RangeSymName);

  // Warn if no entry range exists in the function.
  void warnNoFuncEntry();

  /// Disassemble the text section and build various address maps.
  void disassemble(const ObjectFile *O);

  /// Helper function to disassemble the symbol and extract info for unwinding.
  bool dissassembleSymbol(std::size_t SI, ArrayRef<uint8_t> Bytes,
                          SectionSymbolsTy &Symbols, const SectionRef &Section);
  /// Symbolize a given instruction pointer and return a full call context.
  SampleContextFrameVector symbolize(const InstructionPointer &IP,
                                     bool UseCanonicalFnName = false,
                                     bool UseProbeDiscriminator = false);
  /// Decode the interesting parts of the binary and build internal data
  /// structures. On high level, the parts of interest are:
  ///   1. Text sections, including the main code section and the PLT
  ///      entries that will be used to handle cross-module call transitions.
  ///   2. The .debug_line section, used by Dwarf-based profile generation.
  ///   3. Pseudo probe related sections, used by probe-based profile
  ///      generation.

  ProfiledBinary(const StringRef ExeBinPath, const StringRef DebugBinPath);

  void decodePseudoProbe();

  StringRef getPath() const { return Path; }
  StringRef getName() const { return llvm::sys::path::filename(Path); }
  uint64_t getBaseAddress() const { return BaseAddress; }
  void setBaseAddress(uint64_t Address) { BaseAddress = Address; }

  bool isCOFF() const { return IsCOFF; }

  // Canonicalize to use preferred load address as base address.
  uint64_t canonicalizeVirtualAddress(uint64_t Address) {
    return Address - BaseAddress + getPreferredBaseAddress();
  // Return the preferred load address for the first executable segment.
  uint64_t getPreferredBaseAddress() const {
    return PreferredTextSegmentAddresses[0];
  // Return the preferred load address for the first loadable segment.
  uint64_t getFirstLoadableAddress() const { return FirstLoadableAddress; }
  // Return the file offset for the first executable segment.
  uint64_t getTextSegmentOffset() const { return TextSegmentOffsets[0]; }
  const std::vector<uint64_t> &getPreferredTextSegmentAddresses() const {
    return PreferredTextSegmentAddresses;
  const std::vector<uint64_t> &getTextSegmentOffsets() const {
    return TextSegmentOffsets;

  // Size in bytes of the instruction at Address; the return statements
  // (including the not-found value) are cut from this chunk.
  uint64_t getInstSize(uint64_t Address) const {
    auto I = AddressToInstSizeMap.find(Address);
    if (I == AddressToInstSizeMap.end())

  bool addressIsCode(uint64_t Address) const {
    return AddressToInstSizeMap.find(Address) != AddressToInstSizeMap.end();
  bool addressIsCall(uint64_t Address) const {
    return CallAddressSet.count(Address);
  bool addressIsReturn(uint64_t Address) const {
    return RetAddressSet.count(Address);
  bool addressInPrologEpilog(uint64_t Address) const {
    return ProEpilogTracker.PrologEpilogSet.count(Address);

  // True when Address is any control-transfer instruction (branch, return or
  // call) recorded during disassembly.
  bool addressIsTransfer(uint64_t Address) {
    return BranchAddressSet.count(Address) || RetAddressSet.count(Address) ||
           CallAddressSet.count(Address);

  // True when an unconditional branch lies inside [Start, End).
  // NOTE(review): the early-exit lines of this function are cut from this
  // chunk.
  bool rangeCrossUncondBranch(uint64_t Start, uint64_t End) {
    auto R = UncondBranchAddrSet.lower_bound(Start);
    return R != UncondBranchAddrSet.end() && *R < End;

  uint64_t getAddressforIndex(uint64_t Index) const {
    return CodeAddressVec[Index];

  size_t getCodeAddrVecSize() const { return CodeAddressVec.size(); }

  bool usePseudoProbes() const { return UsePseudoProbes; }
  bool useFSDiscriminator() const { return UseFSDiscriminator; }
  bool isKernel() const { return IsKernel; }

  // Recognize the perf-reported image names that denote the Linux kernel.
  static bool isKernelImageName(StringRef BinaryName) {
    return BinaryName == "[kernel.kallsyms]" ||
           BinaryName == "[kernel.kallsyms]_stext" ||
           BinaryName == "[kernel.kallsyms]_text";

  // Get the index in CodeAddressVec for the address.
  // As we might get an address which is not the code,
  // here it would round to the next valid code address by
  // using lower bound operation.
  uint32_t getIndexForAddr(uint64_t Address) const {
    auto Low = llvm::lower_bound(CodeAddressVec, Address);
    return Low - CodeAddressVec.begin();

  // Map a frame address back to the call instruction preceding it.
  // NOTE(review): the return statements of this function are cut from this
  // chunk.
  uint64_t getCallAddrFromFrameAddr(uint64_t FrameAddr) const {
    if (FrameAddr == ExternalAddr)
    auto I = getIndexForAddr(FrameAddr);
    FrameAddr = I ? getAddressforIndex(I - 1) : 0;
    if (FrameAddr && addressIsCall(FrameAddr))

  // Exact-match lookup of a function range by its start address.
  FuncRange *findFuncRangeForStartAddr(uint64_t Address) {
    auto I = StartAddrToFuncRangeMap.find(Address);
    if (I == StartAddrToFuncRangeMap.end())

  // Binary search the function range which includes the input address.
  FuncRange *findFuncRange(uint64_t Address) {
    auto I = StartAddrToFuncRangeMap.upper_bound(Address);
    if (I == StartAddrToFuncRangeMap.begin())
    if (Address >= I->second.EndAddress)

  // Get all ranges of one function.
  RangesTy getRanges(uint64_t Address) {
    auto *FRange = findFuncRange(Address);
    // Ignore the range which falls into plt section or system lib.
    return FRange->Func->Ranges;

  const std::unordered_map<std::string, BinaryFunction> &
  getAllBinaryFunctions() {
    return BinaryFunctions;

  std::unordered_set<const BinaryFunction *> &getProfiledFunctions() {
    return ProfiledFunctions;

  void setProfiledFunctions(std::unordered_set<const BinaryFunction *> &Funcs) {
    ProfiledFunctions = Funcs;

  // Look up a function either by name (string profiles) or by the name's MD5
  // hash (MD5 profiles).
  BinaryFunction *getBinaryFunction(FunctionId FName) {
    if (FName.isStringRef()) {
      auto I = BinaryFunctions.find(FName.str());
      if (I == BinaryFunctions.end())
    auto I = HashBinaryFunctions.find(FName.getHashCode());
    if (I == HashBinaryFunctions.end())

  uint32_t getFuncSizeForContext(const ContextTrieNode *ContextNode) {
    return FuncSizeTracker.getFuncSizeForContext(ContextNode);

  void inferMissingFrames(const SmallVectorImpl<uint64_t> &Context,
                          SmallVectorImpl<uint64_t> &NewContext);

  // Load the symbols from debug table and populate into symbol list.
  void populateSymbolListFromDWARF(ProfileSymbolList &SymbolList);

  // Symbolize Address into a full inline-context frame stack.
  SampleContextFrameVector
  getFrameLocationStack(uint64_t Address, bool UseProbeDiscriminator = false) {
    InstructionPointer IP(this, Address);
    return symbolize(IP, SymbolizerOpts.UseSymbolTable, UseProbeDiscriminator);

  // Memoized variant of getFrameLocationStack keyed on Address.
  // NOTE(review): the guard that skips recomputation when the entry already
  // existed (checking the emplace result's bool) is not visible in this chunk
  // — confirm against upstream before assuming every call re-symbolizes.
  const SampleContextFrameVector &
  getCachedFrameLocationStack(uint64_t Address,
                              bool UseProbeDiscriminator = false) {
    auto I = AddressToLocStackMap.emplace(Address, SampleContextFrameVector());
    I.first->second = getFrameLocationStack(Address, UseProbeDiscriminator);
    return I.first->second;

  // Leaf frame of the inline stack at Address; the rest of this body is cut
  // from this chunk.
  std::optional<SampleContextFrame> getInlineLeafFrameLoc(uint64_t Address) {
    const auto &Stack = getCachedFrameLocationStack(Address);

  // Release the symbolizer (and the memory it holds) once symbolization is
  // done.
  void flushSymbolizer() { Symbolizer.reset(); }

  MissingFrameInferrer *getMissingContextInferrer() {
    return MissingContextInferrer.get();

  // Compare two addresses' inline context.
  bool inlineContextEqual(uint64_t Add1, uint64_t Add2);

  // Get the full context of the current stack with inline context filled in.
  // It will search the disassembling info stored in AddressToLocStackMap. This
  // is used as the key of function sample map.
  SampleContextFrameVector
  getExpandedContext(const SmallVectorImpl<uint64_t> &Stack,
                     bool &WasLeafInlined);
  // Go through instructions among the given range and record its size for the
  // inlined context — TODO confirm; the tail of this comment is cut off here.
  void computeInlinedContextSizeForRange(uint64_t StartAddress,
                                         uint64_t EndAddress);

  void computeInlinedContextSizeForFunc(const BinaryFunction *Func);

  const MCDecodedPseudoProbe *getCallProbeForAddr(uint64_t Address) const {
    return ProbeDecoder.getCallProbeForAddr(Address);

  // Expand a decoded probe into its inline context frames.
  // NOTE(review): the trailing argument(s) of the decoder call and parts of
  // the loop body/braces are cut from this chunk.
  void getInlineContextForProbe(const MCDecodedPseudoProbe *Probe,
                                SampleContextFrameVector &InlineContextStack,
                                bool IncludeLeaf = false) const {
    SmallVector<MCPseudoProbeFrameLocation, 16> ProbeInlineContext;
    ProbeDecoder.getInlineContextForProbe(Probe, ProbeInlineContext,
    for (uint32_t I = 0; I < ProbeInlineContext.size(); I++) {
      auto &Callsite = ProbeInlineContext[I];
      // Clear the current context for an unknown probe.
      if (Callsite.second == 0 && I != ProbeInlineContext.size() - 1) {
        InlineContextStack.clear();
      InlineContextStack.emplace_back(FunctionId(Callsite.first),
                                      LineLocation(Callsite.second, 0));

  const AddressProbesMap &getAddress2ProbesMap() const {
    return ProbeDecoder.getAddress2ProbesMap();
  const MCPseudoProbeFuncDesc *getFuncDescForGUID(uint64_t GUID) {
    return ProbeDecoder.getFuncDescForGUID(GUID);

  const MCPseudoProbeFuncDesc *
  getInlinerDescForProbe(const MCDecodedPseudoProbe *Probe) {
    return ProbeDecoder.getInlinerDescForProbe(Probe);

  bool getTrackFuncContextSize() { return TrackFuncContextSize; }

  bool getIsLoadedByMMap() { return IsLoadedByMMap; }

  void setIsLoadedByMMap(bool Value) { IsLoadedByMMap = Value; }

  bool getMissingMMapWarned() { return MissingMMapWarned; }

  void setMissingMMapWarned(bool Value) { MissingMMapWarned = Value; }
617 } // end namespace sampleprof
618 } // end namespace llvm