//===- StackMaps.cpp ------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
9 #include "llvm/CodeGen/StackMaps.h"
10 #include "llvm/ADT/DenseMapInfo.h"
11 #include "llvm/ADT/STLExtras.h"
12 #include "llvm/ADT/Twine.h"
13 #include "llvm/CodeGen/AsmPrinter.h"
14 #include "llvm/CodeGen/MachineFrameInfo.h"
15 #include "llvm/CodeGen/MachineFunction.h"
16 #include "llvm/CodeGen/MachineInstr.h"
17 #include "llvm/CodeGen/MachineOperand.h"
18 #include "llvm/CodeGen/TargetOpcodes.h"
19 #include "llvm/CodeGen/TargetRegisterInfo.h"
20 #include "llvm/CodeGen/TargetSubtargetInfo.h"
21 #include "llvm/IR/DataLayout.h"
22 #include "llvm/MC/MCContext.h"
23 #include "llvm/MC/MCExpr.h"
24 #include "llvm/MC/MCObjectFileInfo.h"
25 #include "llvm/MC/MCRegisterInfo.h"
26 #include "llvm/MC/MCStreamer.h"
27 #include "llvm/Support/CommandLine.h"
28 #include "llvm/Support/Debug.h"
29 #include "llvm/Support/ErrorHandling.h"
30 #include "llvm/Support/MathExtras.h"
31 #include "llvm/Support/raw_ostream.h"
40 #define DEBUG_TYPE "stackmaps"
static cl::opt<int> StackMapVersion(
    "stackmap-version", cl::init(3), cl::Hidden,
    cl::desc("Specify the stackmap encoding version (default = 3)"));

const char *StackMaps::WSMP = "Stack Maps: ";
static uint64_t getConstMetaVal(const MachineInstr &MI, unsigned Idx) {
  assert(MI.getOperand(Idx).isImm() &&
         MI.getOperand(Idx).getImm() == StackMaps::ConstantOp);
  const auto &MO = MI.getOperand(Idx + 1);
  assert(MO.isImm());
  return MO.getImm();
}
StackMapOpers::StackMapOpers(const MachineInstr *MI) : MI(MI) {
  assert(getVarIdx() <= MI->getNumOperands() &&
         "invalid stackmap definition");
}
PatchPointOpers::PatchPointOpers(const MachineInstr *MI)
    : MI(MI), HasDef(MI->getOperand(0).isReg() && MI->getOperand(0).isDef() &&
                     !MI->getOperand(0).isImplicit()) {
#ifndef NDEBUG
  unsigned CheckStartIdx = 0, e = MI->getNumOperands();
  while (CheckStartIdx < e && MI->getOperand(CheckStartIdx).isReg() &&
         MI->getOperand(CheckStartIdx).isDef() &&
         !MI->getOperand(CheckStartIdx).isImplicit())
    ++CheckStartIdx;

  assert(getMetaIdx() == CheckStartIdx &&
         "Unexpected additional definition in Patchpoint intrinsic.");
#endif
}
unsigned PatchPointOpers::getNextScratchIdx(unsigned StartIdx) const {
  if (!StartIdx)
    StartIdx = getVarIdx();

  // Find the next scratch register (implicit def and early clobber)
  unsigned ScratchIdx = StartIdx, e = MI->getNumOperands();
  while (ScratchIdx < e &&
         !(MI->getOperand(ScratchIdx).isReg() &&
           MI->getOperand(ScratchIdx).isDef() &&
           MI->getOperand(ScratchIdx).isImplicit() &&
           MI->getOperand(ScratchIdx).isEarlyClobber()))
    ++ScratchIdx;

  assert(ScratchIdx != e && "No scratch register available");
  return ScratchIdx;
}
unsigned StatepointOpers::getNumGcMapEntriesIdx() {
  // Take index of num of allocas and skip all allocas records.
  unsigned CurIdx = getNumAllocaIdx();
  unsigned NumAllocas = getConstMetaVal(*MI, CurIdx - 1);
  CurIdx++;
  while (NumAllocas--)
    CurIdx = StackMaps::getNextMetaArgIdx(MI, CurIdx);
  return CurIdx + 1; // skip <StackMaps::ConstantOp>
}
unsigned StatepointOpers::getNumAllocaIdx() {
  // Take index of num of gc ptrs and skip all gc ptr records.
  unsigned CurIdx = getNumGCPtrIdx();
  unsigned NumGCPtrs = getConstMetaVal(*MI, CurIdx - 1);
  CurIdx++;
  while (NumGCPtrs--)
    CurIdx = StackMaps::getNextMetaArgIdx(MI, CurIdx);
  return CurIdx + 1; // skip <StackMaps::ConstantOp>
}
unsigned StatepointOpers::getNumGCPtrIdx() {
  // Take index of num of deopt args and skip all deopt records.
  unsigned CurIdx = getNumDeoptArgsIdx();
  unsigned NumDeoptArgs = getConstMetaVal(*MI, CurIdx - 1);
  CurIdx++;
  while (NumDeoptArgs--) {
    CurIdx = StackMaps::getNextMetaArgIdx(MI, CurIdx);
  }
  return CurIdx + 1; // skip <StackMaps::ConstantOp>
}
int StatepointOpers::getFirstGCPtrIdx() {
  unsigned NumGCPtrsIdx = getNumGCPtrIdx();
  unsigned NumGCPtrs = getConstMetaVal(*MI, NumGCPtrsIdx - 1);
  if (NumGCPtrs == 0)
    return -1;
  ++NumGCPtrsIdx; // skip <num gc ptrs>
  assert(NumGCPtrsIdx < MI->getNumOperands());
  return (int)NumGCPtrsIdx;
}
unsigned StatepointOpers::getGCPointerMap(
    SmallVectorImpl<std::pair<unsigned, unsigned>> &GCMap) {
  unsigned CurIdx = getNumGcMapEntriesIdx();
  unsigned GCMapSize = getConstMetaVal(*MI, CurIdx - 1);
  CurIdx++;
  for (unsigned N = 0; N < GCMapSize; ++N) {
    unsigned B = MI->getOperand(CurIdx++).getImm();
    unsigned D = MI->getOperand(CurIdx++).getImm();
    GCMap.push_back(std::make_pair(B, D));
  }

  return GCMapSize;
}
bool StatepointOpers::isFoldableReg(Register Reg) const {
  unsigned FoldableAreaStart = getVarIdx();
  for (const MachineOperand &MO : MI->uses()) {
    if (MO.getOperandNo() >= FoldableAreaStart)
      break;
    if (MO.isReg() && MO.getReg() == Reg)
      return false;
  }
  return true;
}
bool StatepointOpers::isFoldableReg(const MachineInstr *MI, Register Reg) {
  if (MI->getOpcode() != TargetOpcode::STATEPOINT)
    return false;
  return StatepointOpers(MI).isFoldableReg(Reg);
}
StackMaps::StackMaps(AsmPrinter &AP) : AP(AP) {
  if (StackMapVersion != 3)
    llvm_unreachable("Unsupported stackmap version!");
}
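
// A meta operand is either a plain register/immediate or a marker immediate
// followed by its payload, as consumed by parseOperand() below:
//   <DirectMemRefOp, Register, Offset>
//   <IndirectMemRefOp, Size, Register, Offset>
//   <ConstantOp, Imm>
// getNextMetaArgIdx() advances past exactly one such logical operand.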
unsigned StackMaps::getNextMetaArgIdx(const MachineInstr *MI, unsigned CurIdx) {
  assert(CurIdx < MI->getNumOperands() && "Bad meta arg index");
  const auto &MO = MI->getOperand(CurIdx);
  if (MO.isImm()) {
    switch (MO.getImm()) {
    default:
      llvm_unreachable("Unrecognized operand type.");
    case StackMaps::DirectMemRefOp:
      CurIdx += 2;
      break;
    case StackMaps::IndirectMemRefOp:
      CurIdx += 3;
      break;
    case StackMaps::ConstantOp:
      ++CurIdx;
      break;
    }
  }
  ++CurIdx;
  assert(CurIdx < MI->getNumOperands() && "points past operand list");
  return CurIdx;
}
/// Go up the super-register chain until we hit a valid dwarf register number.
static unsigned getDwarfRegNum(unsigned Reg, const TargetRegisterInfo *TRI) {
  int RegNum;
  for (MCPhysReg SR : TRI->superregs_inclusive(Reg)) {
    RegNum = TRI->getDwarfRegNum(SR, false);
    if (RegNum >= 0)
      break;
  }

  assert(RegNum >= 0 && "Invalid Dwarf register number.");
  return (unsigned)RegNum;
}
MachineInstr::const_mop_iterator
StackMaps::parseOperand(MachineInstr::const_mop_iterator MOI,
                        MachineInstr::const_mop_iterator MOE, LocationVec &Locs,
                        LiveOutVec &LiveOuts) const {
  const TargetRegisterInfo *TRI = AP.MF->getSubtarget().getRegisterInfo();
  if (MOI->isImm()) {
    switch (MOI->getImm()) {
    default:
      llvm_unreachable("Unrecognized operand type.");
    case StackMaps::DirectMemRefOp: {
      auto &DL = AP.MF->getDataLayout();

      unsigned Size = DL.getPointerSizeInBits();
      assert((Size % 8) == 0 && "Need pointer size in bytes.");
      Size /= 8;
      Register Reg = (++MOI)->getReg();
      int64_t Imm = (++MOI)->getImm();
      Locs.emplace_back(StackMaps::Location::Direct, Size,
                        getDwarfRegNum(Reg, TRI), Imm);
      break;
    }
    case StackMaps::IndirectMemRefOp: {
      int64_t Size = (++MOI)->getImm();
      assert(Size > 0 && "Need a valid size for indirect memory locations.");
      Register Reg = (++MOI)->getReg();
      int64_t Imm = (++MOI)->getImm();
      Locs.emplace_back(StackMaps::Location::Indirect, Size,
                        getDwarfRegNum(Reg, TRI), Imm);
      break;
    }
    case StackMaps::ConstantOp: {
      ++MOI;
      assert(MOI->isImm() && "Expected constant operand.");
      int64_t Imm = MOI->getImm();
      Locs.emplace_back(Location::Constant, sizeof(int64_t), 0, Imm);
      break;
    }
    }
    return ++MOI;
  }

  // The physical register number will ultimately be encoded as a DWARF regno.
  // The stack map also records the size of a spill slot that can hold the
  // register content. (The runtime can track the actual size of the data type
  // if it needs to.)
  if (MOI->isReg()) {
    // Skip implicit registers (this includes our scratch registers)
    if (MOI->isImplicit())
      return ++MOI;

    if (MOI->isUndef()) {
      // Record `undef` register as constant. Use same value as ISel uses.
      Locs.emplace_back(Location::Constant, sizeof(int64_t), 0, 0xFEFEFEFE);
      return ++MOI;
    }

    assert(MOI->getReg().isPhysical() &&
           "Virtreg operands should have been rewritten before now.");
    const TargetRegisterClass *RC = TRI->getMinimalPhysRegClass(MOI->getReg());
    assert(!MOI->getSubReg() && "Physical subreg still around.");

    unsigned Offset = 0;
    unsigned DwarfRegNum = getDwarfRegNum(MOI->getReg(), TRI);
    unsigned LLVMRegNum = *TRI->getLLVMRegNum(DwarfRegNum, false);
    unsigned SubRegIdx = TRI->getSubRegIndex(LLVMRegNum, MOI->getReg());
    if (SubRegIdx)
      Offset = TRI->getSubRegIdxOffset(SubRegIdx);

    Locs.emplace_back(Location::Register, TRI->getSpillSize(*RC),
                      DwarfRegNum, Offset);
    return ++MOI;
  }

  if (MOI->isRegLiveOut())
    LiveOuts = parseRegisterLiveOutMask(MOI->getRegLiveOut());

  return ++MOI;
}
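
/// Print a human-readable dump of the recorded callsite locations and
/// live-outs to OS; the "[encoding: ...]" annotations mirror the binary
/// fields emitted by emitCallsiteEntries() below.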
void StackMaps::print(raw_ostream &OS) {
  const TargetRegisterInfo *TRI =
      AP.MF ? AP.MF->getSubtarget().getRegisterInfo() : nullptr;
  OS << WSMP << "callsites:\n";
  for (const auto &CSI : CSInfos) {
    const LocationVec &CSLocs = CSI.Locations;
    const LiveOutVec &LiveOuts = CSI.LiveOuts;

    OS << WSMP << "callsite " << CSI.ID << "\n";
    OS << WSMP << "  has " << CSLocs.size() << " locations\n";

    unsigned Idx = 0;
    for (const auto &Loc : CSLocs) {
      OS << WSMP << "\t\tLoc " << Idx << ": ";
      switch (Loc.Type) {
      case Location::Unprocessed:
        OS << "<Unprocessed operand>";
        break;
      case Location::Register:
        OS << "Register ";
        if (TRI)
          OS << printReg(Loc.Reg, TRI);
        else
          OS << Loc.Reg;
        break;
      case Location::Direct:
        OS << "Direct ";
        if (TRI)
          OS << printReg(Loc.Reg, TRI);
        else
          OS << Loc.Reg;
        if (Loc.Offset)
          OS << " + " << Loc.Offset;
        break;
      case Location::Indirect:
        OS << "Indirect ";
        if (TRI)
          OS << printReg(Loc.Reg, TRI);
        else
          OS << Loc.Reg;
        OS << "+" << Loc.Offset;
        break;
      case Location::Constant:
        OS << "Constant " << Loc.Offset;
        break;
      case Location::ConstantIndex:
        OS << "Constant Index " << Loc.Offset;
        break;
      }
      OS << "\t[encoding: .byte " << Loc.Type << ", .byte 0"
         << ", .short " << Loc.Size << ", .short " << Loc.Reg << ", .short 0"
         << ", .int " << Loc.Offset << "]\n";
      Idx++;
    }

    OS << WSMP << "\thas " << LiveOuts.size() << " live-out registers\n";

    Idx = 0;
    for (const auto &LO : LiveOuts) {
      OS << WSMP << "\t\tLO " << Idx << ": ";
      if (TRI)
        OS << printReg(LO.Reg, TRI);
      else
        OS << LO.Reg;
      OS << "\t[encoding: .short " << LO.DwarfRegNum << ", .byte 0, .byte "
         << LO.Size << "]\n";
      Idx++;
    }
  }
}
/// Create a live-out register record for the given register Reg.
StackMaps::LiveOutReg
StackMaps::createLiveOutReg(unsigned Reg, const TargetRegisterInfo *TRI) const {
  unsigned DwarfRegNum = getDwarfRegNum(Reg, TRI);
  unsigned Size = TRI->getSpillSize(*TRI->getMinimalPhysRegClass(Reg));
  return LiveOutReg(Reg, DwarfRegNum, Size);
}
/// Parse the register live-out mask and return a vector of live-out registers
/// that need to be recorded in the stackmap.
StackMaps::LiveOutVec
StackMaps::parseRegisterLiveOutMask(const uint32_t *Mask) const {
  assert(Mask && "No register mask specified");
  const TargetRegisterInfo *TRI = AP.MF->getSubtarget().getRegisterInfo();
  LiveOutVec LiveOuts;

  // Create a LiveOutReg for each bit that is set in the register mask.
  for (unsigned Reg = 0, NumRegs = TRI->getNumRegs(); Reg != NumRegs; ++Reg)
    if ((Mask[Reg / 32] >> (Reg % 32)) & 1)
      LiveOuts.push_back(createLiveOutReg(Reg, TRI));

  // We don't need to keep track of a register if its super-register is already
  // in the list. Merge entries that refer to the same dwarf register and use
  // the maximum size that needs to be spilled.

  llvm::sort(LiveOuts, [](const LiveOutReg &LHS, const LiveOutReg &RHS) {
    // Only sort by the dwarf register number.
    return LHS.DwarfRegNum < RHS.DwarfRegNum;
  });

  for (auto I = LiveOuts.begin(), E = LiveOuts.end(); I != E; ++I) {
    for (auto *II = std::next(I); II != E; ++II) {
      if (I->DwarfRegNum != II->DwarfRegNum) {
        // Skip all the now invalid entries.
        I = --II;
        break;
      }
      I->Size = std::max(I->Size, II->Size);
      if (I->Reg && TRI->isSuperRegister(I->Reg, II->Reg))
        I->Reg = II->Reg;
      II->Reg = 0; // mark for deletion.
    }
  }

  llvm::erase_if(LiveOuts, [](const LiveOutReg &LO) { return LO.Reg == 0; });

  return LiveOuts;
}
// See statepoint MI format description in StatepointOpers' class comment
// in include/llvm/CodeGen/StackMaps.h
void StackMaps::parseStatepointOpers(const MachineInstr &MI,
                                     MachineInstr::const_mop_iterator MOI,
                                     MachineInstr::const_mop_iterator MOE,
                                     LocationVec &Locations,
                                     LiveOutVec &LiveOuts) {
  LLVM_DEBUG(dbgs() << "record statepoint : " << MI << "\n");
  StatepointOpers SO(&MI);
  MOI = parseOperand(MOI, MOE, Locations, LiveOuts); // CC
  MOI = parseOperand(MOI, MOE, Locations, LiveOuts); // Flags
  MOI = parseOperand(MOI, MOE, Locations, LiveOuts); // Num Deopts

  // Record Deopt Args.
  unsigned NumDeoptArgs = Locations.back().Offset;
  assert(Locations.back().Type == Location::Constant);
  assert(NumDeoptArgs == SO.getNumDeoptArgs());

  while (NumDeoptArgs--)
    MOI = parseOperand(MOI, MOE, Locations, LiveOuts);

  // Record gc base/derived pairs
  assert(MOI->isImm() && MOI->getImm() == StackMaps::ConstantOp);
  ++MOI;
  assert(MOI->isImm());
  unsigned NumGCPointers = MOI->getImm();
  ++MOI;
  if (NumGCPointers) {
    // Map logical index of GC ptr to MI operand index.
    SmallVector<unsigned, 8> GCPtrIndices;
    unsigned GCPtrIdx = (unsigned)SO.getFirstGCPtrIdx();
    assert((int)GCPtrIdx != -1);
    assert(MOI - MI.operands_begin() == GCPtrIdx + 0LL);
    while (NumGCPointers--) {
      GCPtrIndices.push_back(GCPtrIdx);
      GCPtrIdx = StackMaps::getNextMetaArgIdx(&MI, GCPtrIdx);
    }

    SmallVector<std::pair<unsigned, unsigned>, 8> GCPairs;
    unsigned NumGCPairs = SO.getGCPointerMap(GCPairs);
    (void)NumGCPairs;
    LLVM_DEBUG(dbgs() << "NumGCPairs = " << NumGCPairs << "\n");

    auto MOB = MI.operands_begin();
    for (auto &P : GCPairs) {
      assert(P.first < GCPtrIndices.size() && "base pointer index not found");
      assert(P.second < GCPtrIndices.size() &&
             "derived pointer index not found");
      unsigned BaseIdx = GCPtrIndices[P.first];
      unsigned DerivedIdx = GCPtrIndices[P.second];
      LLVM_DEBUG(dbgs() << "Base : " << BaseIdx << " Derived : " << DerivedIdx
                        << "\n");
      (void)parseOperand(MOB + BaseIdx, MOE, Locations, LiveOuts);
      (void)parseOperand(MOB + DerivedIdx, MOE, Locations, LiveOuts);
    }

    MOI = MOB + GCPtrIdx;
  }

  // Record gc allocas
  assert(MOI < MOE);
  assert(MOI->isImm() && MOI->getImm() == StackMaps::ConstantOp);
  ++MOI;
  unsigned NumAllocas = MOI->getImm();
  ++MOI;
  while (NumAllocas--) {
    MOI = parseOperand(MOI, MOE, Locations, LiveOuts);
    assert(MOI < MOE);
  }
}
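
/// Common implementation behind recordStackMap, recordPatchPoint and
/// recordStatepoint: parse the operands in [MOI, MOE), collect the location
/// and live-out vectors, pool large constants, and append a callsite record
/// keyed on MILabel. If recordResult is set, the instruction's def operand is
/// recorded as the first location.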
void StackMaps::recordStackMapOpers(const MCSymbol &MILabel,
                                    const MachineInstr &MI, uint64_t ID,
                                    MachineInstr::const_mop_iterator MOI,
                                    MachineInstr::const_mop_iterator MOE,
                                    bool recordResult) {
  MCContext &OutContext = AP.OutStreamer->getContext();

  LocationVec Locations;
  LiveOutVec LiveOuts;

  if (recordResult) {
    assert(PatchPointOpers(&MI).hasDef() && "Stackmap has no return value.");
    parseOperand(MI.operands_begin(), std::next(MI.operands_begin()), Locations,
                 LiveOuts);
  }

  // Parse operands.
  if (MI.getOpcode() == TargetOpcode::STATEPOINT)
    parseStatepointOpers(MI, MOI, MOE, Locations, LiveOuts);
  else
    while (MOI != MOE)
      MOI = parseOperand(MOI, MOE, Locations, LiveOuts);

  // Move large constants into the constant pool.
  for (auto &Loc : Locations) {
    // Constants are encoded as sign-extended integers.
    // -1 is directly encoded as .long 0xFFFFFFFF with no constant pool.
    if (Loc.Type == Location::Constant && !isInt<32>(Loc.Offset)) {
      Loc.Type = Location::ConstantIndex;
      // ConstPool is intentionally a MapVector of 'uint64_t's (as
      // opposed to 'int64_t's). We should never be in a situation
      // where we have to insert either the tombstone or the empty
      // keys into a map, and for a DenseMap<uint64_t, T> these are
      // (uint64_t)0 and (uint64_t)-1. They can be and are
      // represented using 32 bit integers.
      assert((uint64_t)Loc.Offset != DenseMapInfo<uint64_t>::getEmptyKey() &&
             (uint64_t)Loc.Offset !=
                 DenseMapInfo<uint64_t>::getTombstoneKey() &&
             "empty and tombstone keys should fit in 32 bits!");
      auto Result = ConstPool.insert(std::make_pair(Loc.Offset, Loc.Offset));
      Loc.Offset = Result.first - ConstPool.begin();
    }
  }

  // Create an expression to calculate the offset of the callsite from function
  // entry.
  const MCExpr *CSOffsetExpr = MCBinaryExpr::createSub(
      MCSymbolRefExpr::create(&MILabel, OutContext),
      MCSymbolRefExpr::create(AP.CurrentFnSymForSize, OutContext), OutContext);

  CSInfos.emplace_back(CSOffsetExpr, ID, std::move(Locations),
                       std::move(LiveOuts));

  // Record the stack size of the current function and update callsite count.
  const MachineFrameInfo &MFI = AP.MF->getFrameInfo();
  const TargetRegisterInfo *RegInfo = AP.MF->getSubtarget().getRegisterInfo();
  bool HasDynamicFrameSize =
      MFI.hasVarSizedObjects() || RegInfo->hasStackRealignment(*(AP.MF));
  uint64_t FrameSize = HasDynamicFrameSize ? UINT64_MAX : MFI.getStackSize();

  auto CurrentIt = FnInfos.find(AP.CurrentFnSym);
  if (CurrentIt != FnInfos.end())
    CurrentIt->second.RecordCount++;
  else
    FnInfos.insert(std::make_pair(AP.CurrentFnSym, FunctionInfo(FrameSize)));
}
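
// The STACKMAP ID is read via PatchPointOpers::IDPos below (the ID occupies
// the same operand slot in both forms); the live values to record start at
// the stackmap's getVarIdx().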
void StackMaps::recordStackMap(const MCSymbol &L, const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::STACKMAP && "expected stackmap");

  StackMapOpers opers(&MI);
  const int64_t ID = MI.getOperand(PatchPointOpers::IDPos).getImm();
  recordStackMapOpers(L, MI, ID, std::next(MI.operands_begin(),
                                           opers.getVarIdx()),
                      MI.operands_end());
}
void StackMaps::recordPatchPoint(const MCSymbol &L, const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::PATCHPOINT && "expected patchpoint");

  PatchPointOpers opers(&MI);
  const int64_t ID = opers.getID();
  auto MOI = std::next(MI.operands_begin(), opers.getStackMapStartIdx());
  recordStackMapOpers(L, MI, ID, MOI, MI.operands_end(),
                      opers.isAnyReg() && opers.hasDef());

#ifndef NDEBUG
  // verify anyregcc
  auto &Locations = CSInfos.back().Locations;
  if (opers.isAnyReg()) {
    unsigned NArgs = opers.getNumCallArgs();
    for (unsigned i = 0, e = (opers.hasDef() ? NArgs + 1 : NArgs); i != e; ++i)
      assert(Locations[i].Type == Location::Register &&
             "anyreg arg must be in reg.");
  }
#endif
}
void StackMaps::recordStatepoint(const MCSymbol &L, const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::STATEPOINT && "expected statepoint");

  StatepointOpers opers(&MI);
  const unsigned StartIdx = opers.getVarIdx();
  recordStackMapOpers(L, MI, opers.getID(), MI.operands_begin() + StartIdx,
                      MI.operands_end(), false);
}
/// Emit the stackmap header.
///
/// Header {
///   uint8 : Stack Map Version (currently 3)
///   uint8 : Reserved (expected to be 0)
///   uint16 : Reserved (expected to be 0)
/// }
/// uint32 : NumFunctions
/// uint32 : NumConstants
/// uint32 : NumRecords
void StackMaps::emitStackmapHeader(MCStreamer &OS) {
  // Header.
  OS.emitIntValue(StackMapVersion, 1); // Version.
  OS.emitIntValue(0, 1);               // Reserved.
  OS.emitInt16(0);                     // Reserved.

  // Num functions.
  LLVM_DEBUG(dbgs() << WSMP << "#functions = " << FnInfos.size() << '\n');
  OS.emitInt32(FnInfos.size());
  // Num constants.
  LLVM_DEBUG(dbgs() << WSMP << "#constants = " << ConstPool.size() << '\n');
  OS.emitInt32(ConstPool.size());
  // Num callsites.
  LLVM_DEBUG(dbgs() << WSMP << "#callsites = " << CSInfos.size() << '\n');
  OS.emitInt32(CSInfos.size());
}
/// Emit the function frame record for each function.
///
/// StkSizeRecord[NumFunctions] {
///   uint64 : Function Address
///   uint64 : Stack Size
///   uint64 : Record Count
/// }
void StackMaps::emitFunctionFrameRecords(MCStreamer &OS) {
  // Function Frame records.
  LLVM_DEBUG(dbgs() << WSMP << "functions:\n");
  for (auto const &FR : FnInfos) {
    LLVM_DEBUG(dbgs() << WSMP << "function addr: " << FR.first
                      << " frame size: " << FR.second.StackSize
                      << " callsite count: " << FR.second.RecordCount << '\n');
    OS.emitSymbolValue(FR.first, 8);
    OS.emitIntValue(FR.second.StackSize, 8);
    OS.emitIntValue(FR.second.RecordCount, 8);
  }
}
/// Emit the constant pool.
///
/// int64 : Constants[NumConstants]
void StackMaps::emitConstantPoolEntries(MCStreamer &OS) {
  // Constant pool entries.
  LLVM_DEBUG(dbgs() << WSMP << "constants:\n");
  for (const auto &ConstEntry : ConstPool) {
    LLVM_DEBUG(dbgs() << WSMP << ConstEntry.second << '\n');
    OS.emitIntValue(ConstEntry.second, 8);
  }
}
/// Emit the callsite info for each callsite.
///
/// StkMapRecord[NumRecords] {
///   uint64 : PatchPoint ID
///   uint32 : Instruction Offset
///   uint16 : Reserved (record flags)
///   uint16 : NumLocations
///   Location[NumLocations] {
///     uint8  : Register | Direct | Indirect | Constant | ConstantIndex
///     uint8  : Size in Bytes
///     uint16 : Dwarf RegNum
///     int32  : Offset
///   }
///   uint16 : Padding
///   uint16 : NumLiveOuts
///   LiveOuts[NumLiveOuts] {
///     uint16 : Dwarf RegNum
///     uint8  : Reserved
///     uint8  : Size in Bytes
///   }
///   uint32 : Padding (only if required to align to 8 byte)
/// }
///
/// Location Encoding, Type, Value:
///   0x1, Register, Reg                 (value in register)
///   0x2, Direct, Reg + Offset          (frame index)
///   0x3, Indirect, [Reg + Offset]      (spilled value)
///   0x4, Constant, Offset              (small constant)
///   0x5, ConstIndex, Constants[Offset] (large constant)
void StackMaps::emitCallsiteEntries(MCStreamer &OS) {
  LLVM_DEBUG(print(dbgs()));
  // Callsite entries.
  for (const auto &CSI : CSInfos) {
    const LocationVec &CSLocs = CSI.Locations;
    const LiveOutVec &LiveOuts = CSI.LiveOuts;

    // Verify stack map entry. It's better to communicate a problem to the
    // runtime than crash in case of in-process compilation. Currently, we do
    // simple overflow checks, but we may eventually communicate other
    // compilation errors this way.
    if (CSLocs.size() > UINT16_MAX || LiveOuts.size() > UINT16_MAX) {
      OS.emitIntValue(UINT64_MAX, 8); // Invalid ID.
      OS.emitValue(CSI.CSOffsetExpr, 4);
      OS.emitInt16(0); // Reserved.
      OS.emitInt16(0); // 0 locations.
      OS.emitInt16(0); // padding.
      OS.emitInt16(0); // 0 live-out registers.
      OS.emitInt32(0); // padding.
      continue;
    }

    OS.emitIntValue(CSI.ID, 8);
    OS.emitValue(CSI.CSOffsetExpr, 4);

    // Reserved for flags.
    OS.emitInt16(0);
    OS.emitInt16(CSLocs.size());

    for (const auto &Loc : CSLocs) {
      OS.emitIntValue(Loc.Type, 1);
      OS.emitIntValue(0, 1); // Reserved
      OS.emitInt16(Loc.Size);
      OS.emitInt16(Loc.Reg);
      OS.emitInt16(0); // Reserved
      OS.emitInt32(Loc.Offset);
    }

    // Emit alignment to 8 byte.
    OS.emitValueToAlignment(Align(8));

    // Num live-out registers and padding to align to 4 byte.
    OS.emitInt16(0);
    OS.emitInt16(LiveOuts.size());

    for (const auto &LO : LiveOuts) {
      OS.emitInt16(LO.DwarfRegNum);
      OS.emitIntValue(0, 1);
      OS.emitIntValue(LO.Size, 1);
    }
    // Emit alignment to 8 byte.
    OS.emitValueToAlignment(Align(8));
  }
}
/// Serialize the stackmap data.
void StackMaps::serializeToStackMapSection() {
  (void)WSMP;
  // Bail out if there's no stack map data.
  assert((!CSInfos.empty() || ConstPool.empty()) &&
         "Expected empty constant pool too!");
  assert((!CSInfos.empty() || FnInfos.empty()) &&
         "Expected empty function record too!");
  if (CSInfos.empty())
    return;

  MCContext &OutContext = AP.OutStreamer->getContext();
  MCStreamer &OS = *AP.OutStreamer;

  // Create the section.
  MCSection *StackMapSection =
      OutContext.getObjectFileInfo()->getStackMapSection();
  OS.switchSection(StackMapSection);

  // Emit a dummy symbol to force section inclusion.
  OS.emitLabel(OutContext.getOrCreateSymbol(Twine("__LLVM_StackMaps")));

  // Serialize data.
  LLVM_DEBUG(dbgs() << "********** Stack Map Output **********\n");
  emitStackmapHeader(OS);
  emitFunctionFrameRecords(OS);
  emitConstantPoolEntries(OS);
  emitCallsiteEntries(OS);

  // Clean up.
  CSInfos.clear();
  ConstPool.clear();
}