//===- llvm/CodeGen/GlobalISel/CallLowering.h - Call lowering ---*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
///
/// \file
/// This file describes how to lower LLVM calls to machine code calls.
///
//===----------------------------------------------------------------------===//

#ifndef LLVM_CODEGEN_GLOBALISEL_CALLLOWERING_H
#define LLVM_CODEGEN_GLOBALISEL_CALLLOWERING_H

#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/CodeGen/CallingConvLower.h"
#include "llvm/CodeGen/TargetCallingConv.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/CallingConv.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MachineValueType.h"
#include <cstdint>
#include <functional>

namespace llvm {

class CCState;
class DataLayout;
class Function;
class MachineIRBuilder;
class MachineOperand;
struct MachinePointerInfo;
class MachineRegisterInfo;
class TargetLowering;
class Type;
class Value;

class CallLowering {
  const TargetLowering *TLI;

  virtual void anchor();
public:
  struct ArgInfo {
    SmallVector<Register, 4> Regs;
    // If the argument had to be split into multiple parts according to the
    // target calling convention, then this contains the original vregs
    // if the argument was an incoming arg.
    SmallVector<Register, 2> OrigRegs;
    Type *Ty;
    SmallVector<ISD::ArgFlagsTy, 4> Flags;
    bool IsFixed;

    ArgInfo(ArrayRef<Register> Regs, Type *Ty,
            ArrayRef<ISD::ArgFlagsTy> Flags = ArrayRef<ISD::ArgFlagsTy>(),
            bool IsFixed = true)
        : Regs(Regs.begin(), Regs.end()), Ty(Ty),
          Flags(Flags.begin(), Flags.end()), IsFixed(IsFixed) {
      if (!Regs.empty() && Flags.empty())
        this->Flags.push_back(ISD::ArgFlagsTy());
      // FIXME: We should have just one way of saying "no register".
      assert((Ty->isVoidTy() == (Regs.empty() || Regs[0] == 0)) &&
             "only void types should have no register");
    }

    ArgInfo() : Ty(nullptr), IsFixed(false) {}
  };
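
  // Example of building an ArgInfo for an outgoing return value (a sketch
  // only; RetVRegs, RetTy, DL and F are placeholders a target's lowering
  // code would already have in scope, not names defined in this file):
  //
  //   ArgInfo OrigRet{RetVRegs, RetTy};
  //   if (!OrigRet.Ty->isVoidTy())
  //     setArgFlags(OrigRet, AttributeList::ReturnIndex, DL, F);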

  struct CallLoweringInfo {
    /// Calling convention to be used for the call.
    CallingConv::ID CallConv = CallingConv::C;

    /// Destination of the call. It should be either a register, globaladdress,
    /// or externalsymbol.
    MachineOperand Callee = MachineOperand::CreateImm(0);

    /// Descriptor for the return type of the function.
    ArgInfo OrigRet;

    /// List of descriptors of the arguments passed to the function.
    SmallVector<ArgInfo, 8> OrigArgs;

    /// Valid if the call has a swifterror inout parameter, and contains the
    /// vreg that the swifterror should be copied into after the call.
    Register SwiftErrorVReg = 0;

    MDNode *KnownCallees = nullptr;

    /// True if the call must be tail call optimized.
    bool IsMustTailCall = false;

    /// True if the call passes all target-independent checks for tail call
    /// optimization.
    bool IsTailCall = false;

    /// True if the call is to a vararg function.
    bool IsVarArg = false;
  };

  /// Argument handling is mostly uniform between the four places that
  /// make these decisions: function formal arguments, call
  /// instruction args, call instruction returns and function
  /// returns. However, once a decision has been made on where an
  /// argument should go, exactly what happens can vary slightly. This
  /// class abstracts the differences.
  struct ValueHandler {
    ValueHandler(MachineIRBuilder &MIRBuilder, MachineRegisterInfo &MRI,
                 CCAssignFn *AssignFn)
        : MIRBuilder(MIRBuilder), MRI(MRI), AssignFn(AssignFn) {}

    virtual ~ValueHandler() = default;

    /// Returns true if the handler is dealing with incoming arguments,
    /// i.e. those that move values from some physical location to vregs.
    virtual bool isIncomingArgumentHandler() const { return false; }

    /// Materialize a VReg containing the address of the specified
    /// stack-based object. This is either based on a FrameIndex or
    /// direct SP manipulation, depending on the context. \p MPO
    /// should be initialized to an appropriate description of the
    /// address created.
    virtual Register getStackAddress(uint64_t Size, int64_t Offset,
                                     MachinePointerInfo &MPO) = 0;

    /// The specified value has been assigned to a physical register,
    /// handle the appropriate COPY (either to or from) and mark any
    /// relevant uses/defines as needed.
    virtual void assignValueToReg(Register ValVReg, Register PhysReg,
                                  CCValAssign &VA) = 0;

    /// The specified value has been assigned to a stack
    /// location. Load or store it there, with appropriate extension
    /// if necessary.
    virtual void assignValueToAddress(Register ValVReg, Register Addr,
                                      uint64_t Size, MachinePointerInfo &MPO,
                                      CCValAssign &VA) = 0;

    /// Handle custom values, which may be passed into one or more of \p VAs.
    /// \return The number of \p VAs that have been assigned after the first
    ///         one, and which should therefore be skipped from further
    ///         processing.
    virtual unsigned assignCustomValue(const ArgInfo &Arg,
                                       ArrayRef<CCValAssign> VAs) {
      // This is not a pure virtual method because not all targets need to
      // worry about custom values.
      llvm_unreachable("Custom values not supported");
    }

    Register extendRegister(Register ValReg, CCValAssign &VA);

    virtual bool assignArg(unsigned ValNo, MVT ValVT, MVT LocVT,
                           CCValAssign::LocInfo LocInfo, const ArgInfo &Info,
                           ISD::ArgFlagsTy Flags, CCState &State) {
      return AssignFn(ValNo, ValVT, LocVT, LocInfo, Flags, State);
    }

    MachineIRBuilder &MIRBuilder;
    MachineRegisterInfo &MRI;
    CCAssignFn *AssignFn;

  private:
    virtual void anchor();
  };
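
  // The rough shape of a target's incoming-argument handler, sketched with
  // illustrative names (MyIncomingHandler and the exact builder calls are
  // examples, not part of this interface):
  //
  //   struct MyIncomingHandler : ValueHandler {
  //     using ValueHandler::ValueHandler;
  //
  //     bool isIncomingArgumentHandler() const override { return true; }
  //
  //     void assignValueToReg(Register ValVReg, Register PhysReg,
  //                           CCValAssign &VA) override {
  //       MIRBuilder.getMBB().addLiveIn(PhysReg);
  //       MIRBuilder.buildCopy(ValVReg, PhysReg);
  //     }
  //
  //     ... // getStackAddress and assignValueToAddress must be implemented
  //         // as well, typically via frame indices for formal arguments.
  //   };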

protected:
  /// Getter for generic TargetLowering class.
  const TargetLowering *getTLI() const {
    return TLI;
  }

  /// Getter for target specific TargetLowering class.
  template <class XXXTargetLowering>
  const XXXTargetLowering *getTLI() const {
    return static_cast<const XXXTargetLowering *>(TLI);
  }
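
  // For example, a target's CallLowering subclass would typically fetch its
  // own lowering object like this (AArch64TargetLowering used purely as an
  // illustration):
  //
  //   const auto &TLI = *getTLI<AArch64TargetLowering>();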

  template <typename FuncInfoTy>
  void setArgFlags(ArgInfo &Arg, unsigned OpIdx, const DataLayout &DL,
                   const FuncInfoTy &FuncInfo) const;

  /// Generate instructions for packing \p SrcRegs into one big register
  /// corresponding to the aggregate type \p PackedTy.
  ///
  /// \param SrcRegs should contain one virtual register for each base type in
  ///        \p PackedTy, as returned by computeValueLLTs.
  ///
  /// \return The packed register.
  Register packRegs(ArrayRef<Register> SrcRegs, Type *PackedTy,
                    MachineIRBuilder &MIRBuilder) const;

  /// Generate instructions for unpacking \p SrcReg into the \p DstRegs
  /// corresponding to the aggregate type \p PackedTy.
  ///
  /// \param DstRegs should contain one virtual register for each base type in
  ///        \p PackedTy, as returned by computeValueLLTs.
  void unpackRegs(ArrayRef<Register> DstRegs, Register SrcReg, Type *PackedTy,
                  MachineIRBuilder &MIRBuilder) const;

  /// Invoke Handler::assignArg on each of the given \p Args and then use
  /// \p Handler to move them to the assigned locations.
  ///
  /// \return True if everything has succeeded, false otherwise.
  bool handleAssignments(MachineIRBuilder &MIRBuilder,
                         SmallVectorImpl<ArgInfo> &Args,
                         ValueHandler &Handler) const;
  bool handleAssignments(CCState &CCState,
                         SmallVectorImpl<CCValAssign> &ArgLocs,
                         MachineIRBuilder &MIRBuilder,
                         SmallVectorImpl<ArgInfo> &Args,
                         ValueHandler &Handler) const;
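
  // Typical use from a target's lowerFormalArguments or lowerCall, sketched
  // with illustrative names (SplitArgs, AssignFnForCC and MyIncomingHandler
  // are not defined in this file):
  //
  //   SmallVector<ArgInfo, 8> SplitArgs;
  //   ... // Build one ArgInfo per split IR argument and call setArgFlags.
  //   MyIncomingHandler Handler(MIRBuilder, MRI, AssignFnForCC);
  //   if (!handleAssignments(MIRBuilder, SplitArgs, Handler))
  //     return false;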

public:
  CallLowering(const TargetLowering *TLI) : TLI(TLI) {}
  virtual ~CallLowering() = default;

  /// \return true if the target is capable of handling swifterror values that
  /// have been promoted to a specified register. The extended versions of
  /// lowerReturn and lowerCall should be implemented.
  virtual bool supportSwiftError() const {
    return false;
  }

  /// This hook must be implemented to lower outgoing return values, described
  /// by \p Val, into the specified virtual registers \p VRegs.
  /// This hook is used by GlobalISel.
  ///
  /// \p SwiftErrorVReg is non-zero if the function has a swifterror parameter
  /// that needs to be implicitly returned.
  ///
  /// \return True if the lowering succeeds, false otherwise.
  virtual bool lowerReturn(MachineIRBuilder &MIRBuilder, const Value *Val,
                           ArrayRef<Register> VRegs,
                           Register SwiftErrorVReg) const {
    if (!supportSwiftError()) {
      assert(SwiftErrorVReg == 0 && "attempt to use unsupported swifterror");
      return lowerReturn(MIRBuilder, Val, VRegs);
    }
    return false;
  }

  /// This hook behaves as the extended lowerReturn function, but for targets
  /// that do not support swifterror value promotion.
  virtual bool lowerReturn(MachineIRBuilder &MIRBuilder, const Value *Val,
                           ArrayRef<Register> VRegs) const {
    return false;
  }

  /// This hook must be implemented to lower the incoming (formal)
  /// arguments, described by \p VRegs, for GlobalISel. Each argument
  /// must end up in the related virtual registers described by \p VRegs.
  /// In other words, the first argument should end up in \c VRegs[0],
  /// the second in \c VRegs[1], and so on. For each argument, there will be
  /// one register for each non-aggregate type, as returned by
  /// \c computeValueLLTs.
  /// \p MIRBuilder is set to the proper insertion point for the argument
  /// lowering.
  ///
  /// \return True if the lowering succeeded, false otherwise.
  virtual bool lowerFormalArguments(MachineIRBuilder &MIRBuilder,
                                    const Function &F,
                                    ArrayRef<ArrayRef<Register>> VRegs) const {
    return false;
  }
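
  // A rough sketch of a target override (MyTargetCallLowering is
  // illustrative; the splitting of aggregate arguments is elided):
  //
  //   bool MyTargetCallLowering::lowerFormalArguments(
  //       MachineIRBuilder &MIRBuilder, const Function &F,
  //       ArrayRef<ArrayRef<Register>> VRegs) const {
  //     const DataLayout &DL = F.getParent()->getDataLayout();
  //     SmallVector<ArgInfo, 8> SplitArgs;
  //     unsigned Idx = 0;
  //     for (const auto &Arg : F.args()) {
  //       ArgInfo OrigArg{VRegs[Idx], Arg.getType()};
  //       setArgFlags(OrigArg, Idx + AttributeList::FirstArgIndex, DL, F);
  //       SplitArgs.push_back(OrigArg);
  //       ++Idx;
  //     }
  //     ... // Create an incoming ValueHandler and run handleAssignments.
  //     return true;
  //   }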

  /// This hook must be implemented to lower the given call instruction,
  /// including argument and return value marshalling.
  ///
  /// \return true if the lowering succeeded, false otherwise.
  virtual bool lowerCall(MachineIRBuilder &MIRBuilder,
                         CallLoweringInfo &Info) const {
    return false;
  }

  /// Lower the given call instruction, including argument and return value
  /// marshalling.
  ///
  /// \p CS is the call/invoke instruction.
  ///
  /// \p ResRegs are the registers where the call's return value should be
  /// stored (or 0 if there is no return value). There will be one register for
  /// each non-aggregate type, as returned by \c computeValueLLTs.
  ///
  /// \p ArgRegs is a list of lists of virtual registers containing each
  /// argument that needs to be passed (argument \c i should be placed in \c
  /// ArgRegs[i]). For each argument, there will be one register for each
  /// non-aggregate type, as returned by \c computeValueLLTs.
  ///
  /// \p SwiftErrorVReg is non-zero if the call has a swifterror inout
  /// parameter, and contains the vreg that the swifterror should be copied
  /// into after the call.
  ///
  /// \p GetCalleeReg is a callback to materialize a register for the callee if
  /// the target determines it cannot jump to the destination based purely on
  /// \p CS. This might be because \p CS is indirect, or because of the limited
  /// range of an immediate jump.
  ///
  /// \return true if the lowering succeeded, false otherwise.
  bool lowerCall(MachineIRBuilder &MIRBuilder, ImmutableCallSite CS,
                 ArrayRef<Register> ResRegs,
                 ArrayRef<ArrayRef<Register>> ArgRegs, Register SwiftErrorVReg,
                 std::function<unsigned()> GetCalleeReg) const;
};

} // end namespace llvm

#endif // LLVM_CODEGEN_GLOBALISEL_CALLLOWERING_H