[AArch64] Prevent argument promotion of vector with size > 128 bits (#70034)
llvm/lib/Target/AArch64/AArch64MachineFunctionInfo.h
//=- AArch64MachineFunctionInfo.h - AArch64 machine function info -*- C++ -*-=//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file declares AArch64-specific per-machine-function information.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_LIB_TARGET_AARCH64_AARCH64MACHINEFUNCTIONINFO_H
#define LLVM_LIB_TARGET_AARCH64_AARCH64MACHINEFUNCTIONINFO_H

#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/CodeGen/CallingConvLower.h"
#include "llvm/CodeGen/MIRYamlMapping.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/IR/Function.h"
#include "llvm/MC/MCLinkerOptimizationHint.h"
#include <cassert>
#include <optional>

namespace llvm {

namespace yaml {
struct AArch64FunctionInfo;
} // end namespace yaml

class AArch64Subtarget;
class MachineInstr;

/// AArch64FunctionInfo - This class is derived from MachineFunctionInfo and
/// contains private AArch64-specific information for each MachineFunction.
class AArch64FunctionInfo final : public MachineFunctionInfo {
  /// Number of bytes of arguments this function has on the stack. If the
  /// callee is expected to restore the argument stack this should be a
  /// multiple of 16, all usable during a tail call.
  ///
  /// The alternative would forbid tail call optimisation in some cases: if we
  /// want to transfer control from a function with 8-bytes of stack-argument
  /// space to a function with 16-bytes then misalignment of this value would
  /// make a stack adjustment necessary, which could not be undone by the
  /// callee.
  unsigned BytesInStackArgArea = 0;

  /// The number of bytes to restore to deallocate space for incoming
  /// arguments. Canonically 0 in the C calling convention, but non-zero when
  /// the callee is expected to pop the args.
  unsigned ArgumentStackToRestore = 0;

  /// Space just below incoming stack pointer reserved for arguments being
  /// passed on the stack during a tail call. This will be the difference
  /// between the largest tail call argument space needed in this function and
  /// what's already available by reusing space of incoming arguments.
  unsigned TailCallReservedStack = 0;
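
  // Illustrative worked example (added for exposition, not an upstream
  // comment): suppose the function receives 16 bytes of stack arguments
  // (BytesInStackArgArea = 16) and its largest tail call needs 24 bytes of
  // outgoing stack arguments. The incoming 16 bytes can be reused, so only
  // the difference is reserved: TailCallReservedStack = 24 - 16 = 8. Under
  // the C calling convention ArgumentStackToRestore stays 0, since the
  // caller, not the callee, deallocates the argument area.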

  /// HasStackFrame - True if this function has a stack frame. Set by
  /// determineCalleeSaves().
  bool HasStackFrame = false;

  /// Amount of stack frame size, not including callee-saved registers.
  uint64_t LocalStackSize = 0;

  /// The start and end frame indices for the SVE callee saves.
  int MinSVECSFrameIndex = 0;
  int MaxSVECSFrameIndex = 0;

  /// Amount of stack frame size used for saving callee-saved registers.
  unsigned CalleeSavedStackSize = 0;
  unsigned SVECalleeSavedStackSize = 0;
  bool HasCalleeSavedStackSize = false;

  /// Number of TLS accesses using the special (combinable)
  /// _TLS_MODULE_BASE_ symbol.
  unsigned NumLocalDynamicTLSAccesses = 0;

  /// FrameIndex for start of varargs area for arguments passed on the
  /// stack.
  int VarArgsStackIndex = 0;

  /// Offset of start of varargs area for arguments passed on the stack.
  unsigned VarArgsStackOffset = 0;

  /// FrameIndex for start of varargs area for arguments passed in
  /// general purpose registers.
  int VarArgsGPRIndex = 0;

  /// Size of the varargs area for arguments passed in general purpose
  /// registers.
  unsigned VarArgsGPRSize = 0;

  /// FrameIndex for start of varargs area for arguments passed in
  /// floating-point registers.
  int VarArgsFPRIndex = 0;

  /// Size of the varargs area for arguments passed in floating-point
  /// registers.
  unsigned VarArgsFPRSize = 0;

  /// True if this function has a subset of CSRs that is handled explicitly via
  /// copies.
  bool IsSplitCSR = false;

  /// True when the stack gets realigned dynamically because the size of the
  /// stack frame is unknown at compile time, e.g. in case of VLAs.
  bool StackRealigned = false;

  /// True when the callee-save stack area has unused gaps that may be used for
  /// other stack allocations.
  bool CalleeSaveStackHasFreeSpace = false;

  /// SRetReturnReg - sret lowering includes returning the value of the
  /// returned struct in a register. This field holds the virtual register into
  /// which the sret argument is passed.
  Register SRetReturnReg;

  /// SVE stack size (for predicates and data vectors) is maintained here
  /// rather than in FrameInfo, as the placement and Stack IDs are target
  /// specific.
  uint64_t StackSizeSVE = 0;

  /// HasCalculatedStackSizeSVE indicates whether StackSizeSVE is valid.
  bool HasCalculatedStackSizeSVE = false;

  /// Has a value when it is known whether or not the function uses a
  /// redzone, and no value otherwise.
  /// Initialized during frame lowering, unless the function has the noredzone
  /// attribute, in which case it is set to false at construction.
  std::optional<bool> HasRedZone;

  /// ForwardedMustTailRegParms - A list of virtual and physical registers
  /// that must be forwarded to every musttail call.
  SmallVector<ForwardedRegister, 1> ForwardedMustTailRegParms;

  /// FrameIndex for the tagged base pointer.
  std::optional<int> TaggedBasePointerIndex;

  /// Offset from SP-at-entry to the tagged base pointer.
  /// The tagged base pointer is set up to point to the first (lowest address)
  /// tagged stack slot.
  unsigned TaggedBasePointerOffset;

  /// OutliningStyle denotes, if a function was outlined, how it was outlined,
  /// e.g. Tail Call, Thunk, or Function if none apply.
  std::optional<std::string> OutliningStyle;

  // Offset from SP-after-callee-saved-spills (i.e. SP-at-entry minus
  // CalleeSavedStackSize) to the address of the frame record.
  int CalleeSaveBaseToFrameRecordOffset = 0;

  /// SignReturnAddress is true if PAC-RET is enabled for the function, with
  /// the defaults being sign non-leaf functions only, with the A key.
  bool SignReturnAddress = false;

  /// SignReturnAddressAll modifies the default PAC-RET mode to signing leaf
  /// functions as well.
  bool SignReturnAddressAll = false;

  /// SignWithBKey modifies the default PAC-RET mode to signing with the B key.
  bool SignWithBKey = false;

  /// BranchTargetEnforcement enables placing BTI instructions at potential
  /// indirect branch destinations.
  bool BranchTargetEnforcement = false;

  /// Whether this function has an extended frame record [Ctx, FP, LR]. If so,
  /// bit 60 of the in-memory FP will be 1 to enable other tools to detect the
  /// extended record.
  bool HasSwiftAsyncContext = false;
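
  // Illustrative sketch (added for exposition, not part of the upstream
  // comment): a tool walking saved frame records could detect the extended
  // [Ctx, FP, LR] layout by testing bit 60 of the saved FP value, e.g.
  //   bool HasExtendedRecord = (SavedFP >> 60) & 1; // SavedFP: hypothetical
  //                                                 // uint64_t read from the
  //                                                 // in-memory frame record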

  /// The stack slot where the Swift asynchronous context is stored.
  int SwiftAsyncContextFrameIdx = std::numeric_limits<int>::max();

  bool IsMTETagged = false;

  /// The function has a Scalable Vector or Scalable Predicate register
  /// argument or return type.
  bool IsSVECC = false;

  /// The frame-index for the TPIDR2 object used for lazy saves.
  Register LazySaveTPIDR2Obj = 0;

  /// Whether this function changes streaming mode within the function.
  bool HasStreamingModeChanges = false;

  /// True if the function needs unwind information.
  mutable std::optional<bool> NeedsDwarfUnwindInfo;

  /// True if the function needs asynchronous unwind information.
  mutable std::optional<bool> NeedsAsyncDwarfUnwindInfo;

public:
  AArch64FunctionInfo(const Function &F, const AArch64Subtarget *STI);

  MachineFunctionInfo *
  clone(BumpPtrAllocator &Allocator, MachineFunction &DestMF,
        const DenseMap<MachineBasicBlock *, MachineBasicBlock *> &Src2DstMBB)
      const override;

  bool isSVECC() const { return IsSVECC; }
  void setIsSVECC(bool s) { IsSVECC = s; }

  unsigned getLazySaveTPIDR2Obj() const { return LazySaveTPIDR2Obj; }
  void setLazySaveTPIDR2Obj(unsigned Reg) { LazySaveTPIDR2Obj = Reg; }

  void initializeBaseYamlFields(const yaml::AArch64FunctionInfo &YamlMFI);

  unsigned getBytesInStackArgArea() const { return BytesInStackArgArea; }
  void setBytesInStackArgArea(unsigned bytes) { BytesInStackArgArea = bytes; }

  unsigned getArgumentStackToRestore() const { return ArgumentStackToRestore; }
  void setArgumentStackToRestore(unsigned bytes) {
    ArgumentStackToRestore = bytes;
  }

  unsigned getTailCallReservedStack() const { return TailCallReservedStack; }
  void setTailCallReservedStack(unsigned bytes) {
    TailCallReservedStack = bytes;
  }

  bool hasCalculatedStackSizeSVE() const { return HasCalculatedStackSizeSVE; }

  void setStackSizeSVE(uint64_t S) {
    HasCalculatedStackSizeSVE = true;
    StackSizeSVE = S;
  }

  uint64_t getStackSizeSVE() const { return StackSizeSVE; }

  bool hasStackFrame() const { return HasStackFrame; }
  void setHasStackFrame(bool s) { HasStackFrame = s; }

  bool isStackRealigned() const { return StackRealigned; }
  void setStackRealigned(bool s) { StackRealigned = s; }

  bool hasCalleeSaveStackFreeSpace() const {
    return CalleeSaveStackHasFreeSpace;
  }
  void setCalleeSaveStackHasFreeSpace(bool s) {
    CalleeSaveStackHasFreeSpace = s;
  }
  bool isSplitCSR() const { return IsSplitCSR; }
  void setIsSplitCSR(bool s) { IsSplitCSR = s; }

  void setLocalStackSize(uint64_t Size) { LocalStackSize = Size; }
  uint64_t getLocalStackSize() const { return LocalStackSize; }

  void setOutliningStyle(std::string Style) { OutliningStyle = Style; }
  std::optional<std::string> getOutliningStyle() const {
    return OutliningStyle;
  }

  void setCalleeSavedStackSize(unsigned Size) {
    CalleeSavedStackSize = Size;
    HasCalleeSavedStackSize = true;
  }

  // When CalleeSavedStackSize has not been set (for example when
  // some MachineIR pass is run in isolation), then recalculate
  // the CalleeSavedStackSize directly from the CalleeSavedInfo.
  // Note: This information can only be recalculated after PEI
  // has assigned offsets to the callee save objects.
  unsigned getCalleeSavedStackSize(const MachineFrameInfo &MFI) const {
    bool ValidateCalleeSavedStackSize = false;

#ifndef NDEBUG
    // Make sure the calculated size derived from the CalleeSavedInfo
    // equals the cached size that was calculated elsewhere (e.g. in
    // determineCalleeSaves).
    ValidateCalleeSavedStackSize = HasCalleeSavedStackSize;
#endif

    if (!HasCalleeSavedStackSize || ValidateCalleeSavedStackSize) {
      assert(MFI.isCalleeSavedInfoValid() && "CalleeSavedInfo not calculated");
      if (MFI.getCalleeSavedInfo().empty())
        return 0;

      int64_t MinOffset = std::numeric_limits<int64_t>::max();
      int64_t MaxOffset = std::numeric_limits<int64_t>::min();
      for (const auto &Info : MFI.getCalleeSavedInfo()) {
        int FrameIdx = Info.getFrameIdx();
        if (MFI.getStackID(FrameIdx) != TargetStackID::Default)
          continue;
        int64_t Offset = MFI.getObjectOffset(FrameIdx);
        int64_t ObjSize = MFI.getObjectSize(FrameIdx);
        MinOffset = std::min<int64_t>(Offset, MinOffset);
        MaxOffset = std::max<int64_t>(Offset + ObjSize, MaxOffset);
      }

      if (SwiftAsyncContextFrameIdx != std::numeric_limits<int>::max()) {
        int64_t Offset = MFI.getObjectOffset(getSwiftAsyncContextFrameIdx());
        int64_t ObjSize = MFI.getObjectSize(getSwiftAsyncContextFrameIdx());
        MinOffset = std::min<int64_t>(Offset, MinOffset);
        MaxOffset = std::max<int64_t>(Offset + ObjSize, MaxOffset);
      }

      unsigned Size = alignTo(MaxOffset - MinOffset, 16);
      assert((!HasCalleeSavedStackSize || getCalleeSavedStackSize() == Size) &&
             "Invalid size calculated for callee saves");
      return Size;
    }

    return getCalleeSavedStackSize();
  }
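
  // Illustrative worked example (added for exposition): with two 8-byte
  // callee-save slots on the default stack at offsets -16 and -8, the loop
  // above yields MinOffset = -16 and MaxOffset = -8 + 8 = 0, so
  //   Size = alignTo(0 - (-16), 16) = 16 bytes.
  // The offsets are hypothetical; PEI assigns the real ones.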

  unsigned getCalleeSavedStackSize() const {
    assert(HasCalleeSavedStackSize &&
           "CalleeSavedStackSize has not been calculated");
    return CalleeSavedStackSize;
  }

  // Saves the CalleeSavedStackSize for SVE vectors in 'scalable bytes'.
  void setSVECalleeSavedStackSize(unsigned Size) {
    SVECalleeSavedStackSize = Size;
  }
  unsigned getSVECalleeSavedStackSize() const {
    return SVECalleeSavedStackSize;
  }

  void setMinMaxSVECSFrameIndex(int Min, int Max) {
    MinSVECSFrameIndex = Min;
    MaxSVECSFrameIndex = Max;
  }

  int getMinSVECSFrameIndex() const { return MinSVECSFrameIndex; }
  int getMaxSVECSFrameIndex() const { return MaxSVECSFrameIndex; }

  void incNumLocalDynamicTLSAccesses() { ++NumLocalDynamicTLSAccesses; }
  unsigned getNumLocalDynamicTLSAccesses() const {
    return NumLocalDynamicTLSAccesses;
  }

  std::optional<bool> hasRedZone() const { return HasRedZone; }
  void setHasRedZone(bool s) { HasRedZone = s; }

  int getVarArgsStackIndex() const { return VarArgsStackIndex; }
  void setVarArgsStackIndex(int Index) { VarArgsStackIndex = Index; }

  unsigned getVarArgsStackOffset() const { return VarArgsStackOffset; }
  void setVarArgsStackOffset(unsigned Offset) { VarArgsStackOffset = Offset; }

  int getVarArgsGPRIndex() const { return VarArgsGPRIndex; }
  void setVarArgsGPRIndex(int Index) { VarArgsGPRIndex = Index; }

  unsigned getVarArgsGPRSize() const { return VarArgsGPRSize; }
  void setVarArgsGPRSize(unsigned Size) { VarArgsGPRSize = Size; }

  int getVarArgsFPRIndex() const { return VarArgsFPRIndex; }
  void setVarArgsFPRIndex(int Index) { VarArgsFPRIndex = Index; }

  unsigned getVarArgsFPRSize() const { return VarArgsFPRSize; }
  void setVarArgsFPRSize(unsigned Size) { VarArgsFPRSize = Size; }

  unsigned getSRetReturnReg() const { return SRetReturnReg; }
  void setSRetReturnReg(unsigned Reg) { SRetReturnReg = Reg; }

  unsigned getJumpTableEntrySize(int Idx) const {
    return JumpTableEntryInfo[Idx].first;
  }
  MCSymbol *getJumpTableEntryPCRelSymbol(int Idx) const {
    return JumpTableEntryInfo[Idx].second;
  }
  void setJumpTableEntryInfo(int Idx, unsigned Size, MCSymbol *PCRelSym) {
    if ((unsigned)Idx >= JumpTableEntryInfo.size())
      JumpTableEntryInfo.resize(Idx + 1);
    JumpTableEntryInfo[Idx] = std::make_pair(Size, PCRelSym);
  }

  using SetOfInstructions = SmallPtrSet<const MachineInstr *, 16>;

  const SetOfInstructions &getLOHRelated() const { return LOHRelated; }

  // Shortcuts for LOH related types.
  class MILOHDirective {
    MCLOHType Kind;

    /// Arguments of this directive. Order matters.
    SmallVector<const MachineInstr *, 3> Args;

  public:
    using LOHArgs = ArrayRef<const MachineInstr *>;

    MILOHDirective(MCLOHType Kind, LOHArgs Args)
        : Kind(Kind), Args(Args.begin(), Args.end()) {
      assert(isValidMCLOHType(Kind) && "Invalid LOH directive type!");
    }

    MCLOHType getKind() const { return Kind; }
    LOHArgs getArgs() const { return Args; }
  };

  using MILOHArgs = MILOHDirective::LOHArgs;
  using MILOHContainer = SmallVector<MILOHDirective, 32>;

  const MILOHContainer &getLOHContainer() const { return LOHContainerSet; }

  /// Add a LOH directive of this @p Kind and this @p Args.
  void addLOHDirective(MCLOHType Kind, MILOHArgs Args) {
    LOHContainerSet.push_back(MILOHDirective(Kind, Args));
    LOHRelated.insert(Args.begin(), Args.end());
  }
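
  // Illustrative sketch (added for exposition, not upstream code): a pass that
  // finds an ADRP/ADD pair computing one address could record it roughly as
  //   FuncInfo->addLOHDirective(MCLOH_AdrpAdd, {&AdrpMI, &AddMI});
  // where MCLOH_AdrpAdd comes from MCLinkerOptimizationHint.h and
  // AdrpMI/AddMI are hypothetical MachineInstr references.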

  SmallVectorImpl<ForwardedRegister> &getForwardedMustTailRegParms() {
    return ForwardedMustTailRegParms;
  }

  std::optional<int> getTaggedBasePointerIndex() const {
    return TaggedBasePointerIndex;
  }
  void setTaggedBasePointerIndex(int Index) { TaggedBasePointerIndex = Index; }

  unsigned getTaggedBasePointerOffset() const {
    return TaggedBasePointerOffset;
  }
  void setTaggedBasePointerOffset(unsigned Offset) {
    TaggedBasePointerOffset = Offset;
  }

  int getCalleeSaveBaseToFrameRecordOffset() const {
    return CalleeSaveBaseToFrameRecordOffset;
  }
  void setCalleeSaveBaseToFrameRecordOffset(int Offset) {
    CalleeSaveBaseToFrameRecordOffset = Offset;
  }

  bool shouldSignReturnAddress(const MachineFunction &MF) const;
  bool shouldSignReturnAddress(bool SpillsLR) const;

  bool needsShadowCallStackPrologueEpilogue(MachineFunction &MF) const;

  bool shouldSignWithBKey() const { return SignWithBKey; }
  bool isMTETagged() const { return IsMTETagged; }

  bool branchTargetEnforcement() const { return BranchTargetEnforcement; }

  void setHasSwiftAsyncContext(bool HasContext) {
    HasSwiftAsyncContext = HasContext;
  }
  bool hasSwiftAsyncContext() const { return HasSwiftAsyncContext; }

  void setSwiftAsyncContextFrameIdx(int FI) {
    SwiftAsyncContextFrameIdx = FI;
  }
  int getSwiftAsyncContextFrameIdx() const { return SwiftAsyncContextFrameIdx; }

  bool needsDwarfUnwindInfo(const MachineFunction &MF) const;
  bool needsAsyncDwarfUnwindInfo(const MachineFunction &MF) const;

  bool hasStreamingModeChanges() const { return HasStreamingModeChanges; }
  void setHasStreamingModeChanges(bool HasChanges) {
    HasStreamingModeChanges = HasChanges;
  }

private:
  // Hold the lists of LOHs.
  MILOHContainer LOHContainerSet;
  SetOfInstructions LOHRelated;

  SmallVector<std::pair<unsigned, MCSymbol *>, 2> JumpTableEntryInfo;
};

namespace yaml {
struct AArch64FunctionInfo final : public yaml::MachineFunctionInfo {
  std::optional<bool> HasRedZone;

  AArch64FunctionInfo() = default;
  AArch64FunctionInfo(const llvm::AArch64FunctionInfo &MFI);

  void mappingImpl(yaml::IO &YamlIO) override;
  ~AArch64FunctionInfo() = default;
};

template <> struct MappingTraits<AArch64FunctionInfo> {
  static void mapping(IO &YamlIO, AArch64FunctionInfo &MFI) {
    YamlIO.mapOptional("hasRedZone", MFI.HasRedZone);
  }
};
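
// Illustrative sketch (added for exposition): in MIR output this mapping
// surfaces the field under the per-function 'machineFunctionInfo' block,
// roughly:
//   machineFunctionInfo:
//     hasRedZone: true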

} // end namespace yaml

} // end namespace llvm

#endif // LLVM_LIB_TARGET_AARCH64_AARCH64MACHINEFUNCTIONINFO_H