//===- llvm/CodeGen/GlobalISel/CallLowering.h - Call lowering ---*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
///
/// \file
/// This file describes how to lower LLVM calls to machine code calls.
///
//===----------------------------------------------------------------------===//

#ifndef LLVM_CODEGEN_GLOBALISEL_CALLLOWERING_H
#define LLVM_CODEGEN_GLOBALISEL_CALLLOWERING_H

#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/CodeGen/CallingConvLower.h"
#include "llvm/CodeGen/TargetCallingConv.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/CallingConv.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MachineValueType.h"
#include <cstdint>
#include <functional>
namespace llvm {

class CCState;
class DataLayout;
class Function;
class MachineIRBuilder;
class MachineOperand;
struct MachinePointerInfo;
class MachineRegisterInfo;
class TargetLowering;
class Type;
class Value;

class CallLowering {
  const TargetLowering *TLI;

  virtual void anchor();

public:
  struct ArgInfo {
    SmallVector<Register, 4> Regs;
    // If the argument had to be split into multiple parts according to the
    // target calling convention, then this contains the original vregs
    // if the argument was an incoming arg.
    SmallVector<Register, 2> OrigRegs;
    Type *Ty;
    SmallVector<ISD::ArgFlagsTy, 4> Flags;
    bool IsFixed;

    ArgInfo(ArrayRef<Register> Regs, Type *Ty,
            ArrayRef<ISD::ArgFlagsTy> Flags = ArrayRef<ISD::ArgFlagsTy>(),
            bool IsFixed = true)
        : Regs(Regs.begin(), Regs.end()), Ty(Ty),
          Flags(Flags.begin(), Flags.end()), IsFixed(IsFixed) {
      if (!Regs.empty() && Flags.empty())
        this->Flags.push_back(ISD::ArgFlagsTy());
      // FIXME: We should have just one way of saying "no register".
      assert((Ty->isVoidTy() == (Regs.empty() || Regs[0] == 0)) &&
             "only void types should have no register");
    }

    ArgInfo() : Ty(nullptr), IsFixed(false) {}
  };
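
  // Illustrative sketch (not part of the original header): a target's call
  // lowering might describe a single value that already lives in a virtual
  // register like this; `ValReg` and `Int32Ty` are assumed to come from the
  // surrounding lowering code:
  //
  //   ArgInfo Info(ValReg, Int32Ty);  // one part, default flags
  //   Info.Flags[0].setSExt();        // e.g. mark it as sign-extended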
  struct CallLoweringInfo {
    /// Calling convention to be used for the call.
    CallingConv::ID CallConv = CallingConv::C;

    /// Destination of the call. It should be either a register, globaladdress,
    /// or externalsymbol.
    MachineOperand Callee = MachineOperand::CreateImm(0);

    /// Descriptor for the return type of the function.
    ArgInfo OrigRet;

    /// List of descriptors of the arguments passed to the function.
    SmallVector<ArgInfo, 8> OrigArgs;

    /// Valid if the call has a swifterror inout parameter, and contains the
    /// vreg that the swifterror should be copied into after the call.
    Register SwiftErrorVReg = 0;

    MDNode *KnownCallees = nullptr;

    /// True if the call must be tail call optimized.
    bool IsMustTailCall = false;

    /// True if the call passes all target-independent checks for tail call
    /// optimization.
    bool IsTailCall = false;

    /// True if the call was lowered as a tail call. This is consumed by the
    /// legalizer. This allows the legalizer to lower libcalls as tail calls.
    bool LoweredTailCall = false;

    /// True if the call is to a vararg function.
    bool IsVarArg = false;
  };
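
  // Illustrative sketch (not part of the original header): caller code could
  // populate a CallLoweringInfo before handing it to lowerCall(); `Fn`,
  // `RetRegs` and `RetTy` are assumed names from the surrounding code:
  //
  //   CallLoweringInfo Info;
  //   Info.CallConv = Fn->getCallingConv();
  //   Info.Callee = MachineOperand::CreateGA(Fn, 0);
  //   Info.OrigRet = ArgInfo(RetRegs, RetTy);
  //   Info.IsVarArg = Fn->isVarArg();
  //   // Info.OrigArgs is filled with one ArgInfo per IR argument.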
  /// Argument handling is mostly uniform between the four places that make
  /// these decisions: function formal arguments, call instruction args, call
  /// instruction returns and function returns. However, once a decision has
  /// been made on where an argument should go, exactly what happens can vary
  /// slightly. This class abstracts the differences.
  struct ValueHandler {
    ValueHandler(MachineIRBuilder &MIRBuilder, MachineRegisterInfo &MRI,
                 CCAssignFn *AssignFn)
        : MIRBuilder(MIRBuilder), MRI(MRI), AssignFn(AssignFn) {}

    virtual ~ValueHandler() = default;

    /// Returns true if the handler is dealing with incoming arguments,
    /// i.e. those that move values from some physical location to vregs.
    virtual bool isIncomingArgumentHandler() const { return false; }

    /// Materialize a VReg containing the address of the specified
    /// stack-based object. This is either based on a FrameIndex or
    /// direct SP manipulation, depending on the context. \p MPO
    /// should be initialized to an appropriate description of the
    /// address created.
    virtual Register getStackAddress(uint64_t Size, int64_t Offset,
                                     MachinePointerInfo &MPO) = 0;

    /// The specified value has been assigned to a physical register,
    /// handle the appropriate COPY (either to or from) and mark any
    /// relevant uses/defines as needed.
    virtual void assignValueToReg(Register ValVReg, Register PhysReg,
                                  CCValAssign &VA) = 0;

    /// The specified value has been assigned to a stack
    /// location. Load or store it there, with appropriate extension
    /// if necessary.
    virtual void assignValueToAddress(Register ValVReg, Register Addr,
                                      uint64_t Size, MachinePointerInfo &MPO,
                                      CCValAssign &VA) = 0;

    /// Handle custom values, which may be passed into one or more of \p VAs.
    /// \return The number of \p VAs that have been assigned after the first
    ///         one, and which should therefore be skipped from further
    ///         processing.
    virtual unsigned assignCustomValue(const ArgInfo &Arg,
                                       ArrayRef<CCValAssign> VAs) {
      // This is not a pure virtual method because not all targets need to
      // worry about custom values.
      llvm_unreachable("Custom values not supported");
    }

    Register extendRegister(Register ValReg, CCValAssign &VA);

    virtual bool assignArg(unsigned ValNo, MVT ValVT, MVT LocVT,
                           CCValAssign::LocInfo LocInfo, const ArgInfo &Info,
                           ISD::ArgFlagsTy Flags, CCState &State) {
      return AssignFn(ValNo, ValVT, LocVT, LocInfo, Flags, State);
    }

    MachineIRBuilder &MIRBuilder;
    MachineRegisterInfo &MRI;
    CCAssignFn *AssignFn;
  private:
    virtual void anchor();
  };
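
  // Illustrative sketch (not part of the original header): a target usually
  // derives an incoming/outgoing handler from ValueHandler and overrides the
  // pure virtual hooks; the names below are hypothetical:
  //
  //   struct XXXIncomingValueHandler : public CallLowering::ValueHandler {
  //     XXXIncomingValueHandler(MachineIRBuilder &B, MachineRegisterInfo &MRI,
  //                             CCAssignFn *Fn)
  //         : ValueHandler(B, MRI, Fn) {}
  //     bool isIncomingArgumentHandler() const override { return true; }
  //     Register getStackAddress(uint64_t Size, int64_t Offset,
  //                              MachinePointerInfo &MPO) override;
  //     void assignValueToReg(Register ValVReg, Register PhysReg,
  //                           CCValAssign &VA) override;
  //     void assignValueToAddress(Register ValVReg, Register Addr,
  //                               uint64_t Size, MachinePointerInfo &MPO,
  //                               CCValAssign &VA) override;
  //   };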
protected:
  /// Getter for generic TargetLowering class.
  const TargetLowering *getTLI() const {
    return TLI;
  }
  /// Getter for target specific TargetLowering class.
  template <class XXXTargetLowering>
  const XXXTargetLowering *getTLI() const {
    return static_cast<const XXXTargetLowering *>(TLI);
  }
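
  // For example (illustrative; the target name is only an assumption), a
  // subclass can retrieve its own lowering object with:
  //
  //   const AArch64TargetLowering &TLI = *getTLI<AArch64TargetLowering>();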
  template <typename FuncInfoTy>
  void setArgFlags(ArgInfo &Arg, unsigned OpIdx, const DataLayout &DL,
                   const FuncInfoTy &FuncInfo) const;
  /// Generate instructions for packing \p SrcRegs into one big register
  /// corresponding to the aggregate type \p PackedTy.
  ///
  /// \param SrcRegs should contain one virtual register for each base type in
  ///        \p PackedTy, as returned by computeValueLLTs.
  ///
  /// \return The packed register.
  Register packRegs(ArrayRef<Register> SrcRegs, Type *PackedTy,
                    MachineIRBuilder &MIRBuilder) const;

  /// Generate instructions for unpacking \p SrcReg into the \p DstRegs
  /// corresponding to the aggregate type \p PackedTy.
  ///
  /// \param DstRegs should contain one virtual register for each base type in
  ///        \p PackedTy, as returned by computeValueLLTs.
  void unpackRegs(ArrayRef<Register> DstRegs, Register SrcReg, Type *PackedTy,
                  MachineIRBuilder &MIRBuilder) const;
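
  // Illustrative sketch (not part of the original header): when an aggregate
  // IR value has been split into per-element vregs (as computeValueLLTs
  // describes), a caller can rebuild and later re-split the packed vreg:
  //
  //   Register Packed = packRegs(ElemRegs, StructTy, MIRBuilder);
  //   // ... and in the inverse direction:
  //   unpackRegs(ElemRegs, Packed, StructTy, MIRBuilder);
  //
  // `ElemRegs` and `StructTy` are assumed names from the surrounding code.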
  /// Invoke Handler::assignArg on each of the given \p Args and then use
  /// \p Handler to move them to the assigned locations.
  ///
  /// \return True if everything has succeeded, false otherwise.
  bool handleAssignments(MachineIRBuilder &MIRBuilder,
                         SmallVectorImpl<ArgInfo> &Args,
                         ValueHandler &Handler) const;
  bool handleAssignments(CCState &CCState,
                         SmallVectorImpl<CCValAssign> &ArgLocs,
                         MachineIRBuilder &MIRBuilder,
                         SmallVectorImpl<ArgInfo> &Args,
                         ValueHandler &Handler) const;
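
  // Illustrative sketch (not part of the original header): a target's
  // lowerFormalArguments typically builds its incoming-value handler and lets
  // handleAssignments drive the per-argument CCAssignFn decisions; the handler
  // type and assign function below are hypothetical:
  //
  //   XXXIncomingValueHandler Handler(MIRBuilder, MF.getRegInfo(), CC_XXX);
  //   if (!handleAssignments(MIRBuilder, SplitArgs, Handler))
  //     return false;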
  /// Analyze passed or returned values from a call, supplied in \p Args,
  /// incorporating info about the passed values into \p CCState.
  ///
  /// Used to check if arguments are suitable for tail call lowering.
  bool analyzeArgInfo(CCState &CCState, SmallVectorImpl<ArgInfo> &Args,
                      CCAssignFn &AssignFnFixed,
                      CCAssignFn &AssignFnVarArg) const;
  /// \returns True if the calling convention for a callee and its caller pass
  /// results in the same way. Typically used for tail call eligibility checks.
  ///
  /// \p Info is the CallLoweringInfo for the call.
  /// \p MF is the MachineFunction for the caller.
  /// \p InArgs contains the results of the call.
  /// \p CalleeAssignFnFixed is the CCAssignFn to be used for the callee for
  /// fixed arguments.
  /// \p CalleeAssignFnVarArg is similar, but for varargs.
  /// \p CallerAssignFnFixed is the CCAssignFn to be used for the caller for
  /// fixed arguments.
  /// \p CallerAssignFnVarArg is similar, but for varargs.
  bool resultsCompatible(CallLoweringInfo &Info, MachineFunction &MF,
                         SmallVectorImpl<ArgInfo> &InArgs,
                         CCAssignFn &CalleeAssignFnFixed,
                         CCAssignFn &CalleeAssignFnVarArg,
                         CCAssignFn &CallerAssignFnFixed,
                         CCAssignFn &CallerAssignFnVarArg) const;
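
  // Illustrative sketch (not part of the original header): a tail-call
  // eligibility check can use this helper roughly as follows, with all
  // CCAssignFn names being hypothetical target functions:
  //
  //   SmallVector<ArgInfo, 8> InArgs; // descriptors for the call's results
  //   if (!resultsCompatible(Info, MF, InArgs, CC_XXX, CC_XXX_VarArg,
  //                          CC_XXX, CC_XXX_VarArg))
  //     return false; // caller and callee return values in different places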
public:
  CallLowering(const TargetLowering *TLI) : TLI(TLI) {}
  virtual ~CallLowering() = default;

  /// \return true if the target is capable of handling swifterror values that
  /// have been promoted to a specified register. The extended versions of
  /// lowerReturn and lowerCall should be implemented.
  virtual bool supportSwiftError() const {
    return false;
  }
  /// This hook must be implemented to lower outgoing return values, described
  /// by \p Val, into the specified virtual registers \p VRegs.
  /// This hook is used by GlobalISel.
  ///
  /// \p SwiftErrorVReg is non-zero if the function has a swifterror parameter
  /// that needs to be implicitly returned.
  ///
  /// \return True if the lowering succeeds, false otherwise.
  virtual bool lowerReturn(MachineIRBuilder &MIRBuilder, const Value *Val,
                           ArrayRef<Register> VRegs,
                           Register SwiftErrorVReg) const {
    if (!supportSwiftError()) {
      assert(SwiftErrorVReg == 0 && "attempt to use unsupported swifterror");
      return lowerReturn(MIRBuilder, Val, VRegs);
    }
    return false;
  }
  /// This hook behaves as the extended lowerReturn function, but for targets
  /// that do not support swifterror value promotion.
  virtual bool lowerReturn(MachineIRBuilder &MIRBuilder, const Value *Val,
                           ArrayRef<Register> VRegs) const {
    return false;
  }
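
  // Illustrative sketch (not part of the original header): a target override
  // of the simple form usually builds the return instruction and copies the
  // return values into physical registers; `XXXCallLowering` and `XXX::RET`
  // are hypothetical:
  //
  //   bool XXXCallLowering::lowerReturn(MachineIRBuilder &MIRBuilder,
  //                                     const Value *Val,
  //                                     ArrayRef<Register> VRegs) const {
  //     // Build the return instruction without inserting it, assign VRegs to
  //     // physical return registers via a ValueHandler, then insert it.
  //     auto MIB = MIRBuilder.buildInstrNoInsert(XXX::RET);
  //     MIRBuilder.insertInstr(MIB);
  //     return true;
  //   }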
  /// This hook must be implemented to lower the incoming (formal)
  /// arguments, described by \p VRegs, for GlobalISel. Each argument
  /// must end up in the related virtual registers described by \p VRegs.
  /// In other words, the first argument should end up in \c VRegs[0],
  /// the second in \c VRegs[1], and so on. For each argument, there will be
  /// one register for each non-aggregate type, as returned by
  /// \c computeValueLLTs.
  /// \p MIRBuilder is set to the proper insertion point for the argument
  /// lowering.
  ///
  /// \return True if the lowering succeeded, false otherwise.
  virtual bool lowerFormalArguments(MachineIRBuilder &MIRBuilder,
                                    const Function &F,
                                    ArrayRef<ArrayRef<Register>> VRegs) const {
    return false;
  }
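
  // Illustrative worked example (not part of the original header): for
  //   define i32 @f(i64 %a, { i32, i32 } %b)
  // VRegs[0] holds the single vreg for %a, while VRegs[1] holds one vreg per
  // non-aggregate element of %b (two s32 vregs), matching computeValueLLTs.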
  /// This hook must be implemented to lower the given call instruction,
  /// including argument and return value marshalling.
  ///
  /// \return true if the lowering succeeded, false otherwise.
  virtual bool lowerCall(MachineIRBuilder &MIRBuilder,
                         CallLoweringInfo &Info) const {
    return false;
  }
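
  // Illustrative sketch (not part of the original header): targets override
  // this hook; generic code typically reaches it through the CallSite-based
  // overload below, roughly as:
  //
  //   CallLoweringInfo Info;
  //   // ... populate Callee, OrigRet, OrigArgs, CallConv, IsVarArg ...
  //   return lowerCall(MIRBuilder, Info);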
  /// Lower the given call instruction, including argument and return value
  /// marshalling.
  ///
  /// \p CS is the call/invoke instruction.
  ///
  /// \p ResRegs are the registers where the call's return value should be
  /// stored (or 0 if there is no return value). There will be one register for
  /// each non-aggregate type, as returned by \c computeValueLLTs.
  ///
  /// \p ArgRegs is a list of lists of virtual registers containing each
  /// argument that needs to be passed (argument \c i should be placed in \c
  /// ArgRegs[i]). For each argument, there will be one register for each
  /// non-aggregate type, as returned by \c computeValueLLTs.
  ///
  /// \p SwiftErrorVReg is non-zero if the call has a swifterror inout
  /// parameter, and contains the vreg that the swifterror should be copied
  /// into after the call.
  ///
  /// \p GetCalleeReg is a callback to materialize a register for the callee if
  /// the target determines it cannot jump to the destination based purely on
  /// \p CS. This might be because \p CS is indirect, or because of the limited
  /// range of an immediate jump.
  ///
  /// \return true if the lowering succeeded, false otherwise.
  bool lowerCall(MachineIRBuilder &MIRBuilder, ImmutableCallSite CS,
                 ArrayRef<Register> ResRegs,
                 ArrayRef<ArrayRef<Register>> ArgRegs, Register SwiftErrorVReg,
                 std::function<unsigned()> GetCalleeReg) const;
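
  // Illustrative sketch (not part of the original header): the GetCalleeReg
  // callback lets the caller materialize an indirect callee lazily. For
  // example, it might pass a lambda such as
  //
  //   [&]() { return getOrCreateVReg(*CS.getCalledValue()); }
  //
  // so that no vreg is created when the callee can be encoded directly;
  // `getOrCreateVReg` is only an assumed helper of the calling code.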
};

} // end namespace llvm

#endif // LLVM_CODEGEN_GLOBALISEL_CALLLOWERING_H