[llvm-exegesis] Fix missing std::move.
[llvm-complete.git] / lib / CodeGen / GlobalISel / MachineIRBuilder.cpp
blobc7b98c6b85961c85cf7883d45385c2be658bf859
1 //===-- llvm/CodeGen/GlobalISel/MachineIRBuilder.cpp - MIBuilder--*- C++ -*-==//
2 //
3 // The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 /// \file
10 /// This file implements the MachineIRBuidler class.
11 //===----------------------------------------------------------------------===//
12 #include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"
14 #include "llvm/CodeGen/MachineFunction.h"
15 #include "llvm/CodeGen/MachineInstr.h"
16 #include "llvm/CodeGen/MachineInstrBuilder.h"
17 #include "llvm/CodeGen/MachineRegisterInfo.h"
18 #include "llvm/CodeGen/TargetInstrInfo.h"
19 #include "llvm/CodeGen/TargetOpcodes.h"
20 #include "llvm/CodeGen/TargetSubtargetInfo.h"
21 #include "llvm/IR/DebugInfo.h"
23 using namespace llvm;
// Bind the builder to \p MF and reset all per-function state: the cached
// MRI/TII pointers are re-derived from MF, and the insertion point, debug
// location and insertion callback are cleared.
void MachineIRBuilderBase::setMF(MachineFunction &MF) {
  State.MF = &MF;
  State.MBB = nullptr;
  State.MRI = &MF.getRegInfo();
  State.TII = MF.getSubtarget().getInstrInfo();
  State.DL = DebugLoc();
  State.II = MachineBasicBlock::iterator();
  State.InsertedInstr = nullptr;
}
// Move the insertion point to the end of \p MBB, which must belong to the
// function the builder is currently bound to.
void MachineIRBuilderBase::setMBB(MachineBasicBlock &MBB) {
  State.MBB = &MBB;
  State.II = MBB.end();
  assert(&getMF() == MBB.getParent() &&
         "Basic block is in a different function");
}
// Set the insertion point to just before \p MI (new instructions are
// inserted before the iterator position).
void MachineIRBuilderBase::setInstr(MachineInstr &MI) {
  assert(MI.getParent() && "Instruction is not part of a basic block");
  setMBB(*MI.getParent());
  State.II = MI.getIterator();
}
// Set an explicit insertion point: block \p MBB at iterator \p II.
void MachineIRBuilderBase::setInsertPt(MachineBasicBlock &MBB,
                                       MachineBasicBlock::iterator II) {
  assert(MBB.getParent() == &getMF() &&
         "Basic block is in a different function");
  State.MBB = &MBB;
  State.II = II;
}
// Notify the registered observer (if any) that \p InsertedInstr was just
// inserted. No-op when no callback has been installed.
void MachineIRBuilderBase::recordInsertion(MachineInstr *InsertedInstr) const {
  if (State.InsertedInstr)
    State.InsertedInstr(InsertedInstr);
}
// Install \p Inserted as the callback invoked for every instruction this
// builder inserts from now on. Moved to avoid copying the std::function.
void MachineIRBuilderBase::recordInsertions(
    std::function<void(MachineInstr *)> Inserted) {
  State.InsertedInstr = std::move(Inserted);
}
// Remove any insertion callback previously installed via recordInsertions().
void MachineIRBuilderBase::stopRecordingInsertions() {
  State.InsertedInstr = nullptr;
}
70 //------------------------------------------------------------------------------
71 // Build instruction variants.
72 //------------------------------------------------------------------------------
// Create an instruction with \p Opcode and insert it at the current
// insertion point.
MachineInstrBuilder MachineIRBuilderBase::buildInstr(unsigned Opcode) {
  return insertInstr(buildInstrNoInsert(Opcode));
}
// Create an instruction with \p Opcode (carrying the current debug location)
// without inserting it into any basic block.
MachineInstrBuilder MachineIRBuilderBase::buildInstrNoInsert(unsigned Opcode) {
  MachineInstrBuilder MIB = BuildMI(getMF(), getDL(), getTII().get(Opcode));
  return MIB;
}
// Insert the already-built \p MIB at the current insertion point and report
// it to the insertion observer.
MachineInstrBuilder MachineIRBuilderBase::insertInstr(MachineInstrBuilder MIB) {
  getMBB().insert(getInsertPt(), MIB);
  recordInsertion(MIB);
  return MIB;
}
// Build a DBG_VALUE describing \p Variable as living directly in \p Reg.
MachineInstrBuilder
MachineIRBuilderBase::buildDirectDbgValue(unsigned Reg, const MDNode *Variable,
                                          const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return insertInstr(BuildMI(getMF(), getDL(),
                             getTII().get(TargetOpcode::DBG_VALUE),
                             /*IsIndirect*/ false, Reg, Variable, Expr));
}
// Build a DBG_VALUE describing \p Variable as living at the address held in
// \p Reg (IsIndirect = true).
MachineInstrBuilder MachineIRBuilderBase::buildIndirectDbgValue(
    unsigned Reg, const MDNode *Variable, const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return insertInstr(BuildMI(getMF(), getDL(),
                             getTII().get(TargetOpcode::DBG_VALUE),
                             /*IsIndirect*/ true, Reg, Variable, Expr));
}
// Build a DBG_VALUE whose location is frame index \p FI.
MachineInstrBuilder
MachineIRBuilderBase::buildFIDbgValue(int FI, const MDNode *Variable,
                                      const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return buildInstr(TargetOpcode::DBG_VALUE)
      .addFrameIndex(FI)
      .addImm(0)
      .addMetadata(Variable)
      .addMetadata(Expr);
}
// Build a DBG_VALUE whose location is the constant \p C. Wide integers
// (> 64 bits) are attached as CImm; smaller ones as a plain immediate;
// floats as FPImm; anything else degrades to %noreg (value dropped).
MachineInstrBuilder MachineIRBuilderBase::buildConstDbgValue(
    const Constant &C, const MDNode *Variable, const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  auto MIB = buildInstr(TargetOpcode::DBG_VALUE);
  if (auto *CI = dyn_cast<ConstantInt>(&C)) {
    if (CI->getBitWidth() > 64)
      MIB.addCImm(CI);
    else
      MIB.addImm(CI->getZExtValue());
  } else if (auto *CFP = dyn_cast<ConstantFP>(&C)) {
    MIB.addFPImm(CFP);
  } else {
    // Insert %noreg if we didn't find a usable constant and had to drop it.
    MIB.addReg(0U);
  }
  return MIB.addImm(0).addMetadata(Variable).addMetadata(Expr);
}
// Build a DBG_LABEL referencing the DILabel metadata \p Label.
MachineInstrBuilder MachineIRBuilderBase::buildDbgLabel(const MDNode *Label) {
  assert(isa<DILabel>(Label) && "not a label");
  assert(cast<DILabel>(Label)->isValidLocationForIntrinsic(State.DL) &&
         "Expected inlined-at fields to agree");
  auto MIB = buildInstr(TargetOpcode::DBG_LABEL);
  return MIB.addMetadata(Label);
}
// Build G_FRAME_INDEX: Res (pointer-typed) = address of frame slot \p Idx.
MachineInstrBuilder MachineIRBuilderBase::buildFrameIndex(unsigned Res,
                                                          int Idx) {
  assert(getMRI()->getType(Res).isPointer() && "invalid operand type");
  return buildInstr(TargetOpcode::G_FRAME_INDEX)
      .addDef(Res)
      .addFrameIndex(Idx);
}
// Build G_GLOBAL_VALUE: Res = address of \p GV. Res must be a pointer in
// the same address space as GV.
MachineInstrBuilder
MachineIRBuilderBase::buildGlobalValue(unsigned Res, const GlobalValue *GV) {
  assert(getMRI()->getType(Res).isPointer() && "invalid operand type");
  assert(getMRI()->getType(Res).getAddressSpace() ==
             GV->getType()->getAddressSpace() &&
         "address space mismatch");

  return buildInstr(TargetOpcode::G_GLOBAL_VALUE)
      .addDef(Res)
      .addGlobalAddress(GV);
}
// Assert-only sanity checks shared by binary-op builders: result must be
// scalar or vector, and all three registers must have the same type.
void MachineIRBuilderBase::validateBinaryOp(unsigned Res, unsigned Op0,
                                            unsigned Op1) {
  assert((getMRI()->getType(Res).isScalar() ||
          getMRI()->getType(Res).isVector()) &&
         "invalid operand type");
  assert(getMRI()->getType(Res) == getMRI()->getType(Op0) &&
         getMRI()->getType(Res) == getMRI()->getType(Op1) && "type mismatch");
}
// Build G_GEP: Res = Op0 (pointer) offset by scalar Op1.
MachineInstrBuilder MachineIRBuilderBase::buildGEP(unsigned Res, unsigned Op0,
                                                   unsigned Op1) {
  assert(getMRI()->getType(Res).isPointer() &&
         getMRI()->getType(Res) == getMRI()->getType(Op0) && "type mismatch");
  assert(getMRI()->getType(Op1).isScalar() && "invalid offset type");

  return buildInstr(TargetOpcode::G_GEP)
      .addDef(Res)
      .addUse(Op0)
      .addUse(Op1);
}
// Materialize a G_GEP of Op0 by constant \p Value. When Value is zero no
// instruction is emitted: Res aliases Op0 and None is returned. Otherwise
// Res is a fresh vreg and the builder for the G_GEP is returned; \p Res is
// an out-parameter and must be passed in as 0.
Optional<MachineInstrBuilder>
MachineIRBuilderBase::materializeGEP(unsigned &Res, unsigned Op0,
                                     const LLT &ValueTy, uint64_t Value) {
  assert(Res == 0 && "Res is a result argument");
  assert(ValueTy.isScalar() && "invalid offset type");

  if (Value == 0) {
    Res = Op0;
    return None;
  }

  Res = getMRI()->createGenericVirtualRegister(getMRI()->getType(Op0));
  unsigned TmpReg = getMRI()->createGenericVirtualRegister(ValueTy);

  buildConstant(TmpReg, Value);
  return buildGEP(Res, Op0, TmpReg);
}
// Build G_PTR_MASK: Res = Op0 with the low \p NumBits bits cleared.
MachineInstrBuilder MachineIRBuilderBase::buildPtrMask(unsigned Res,
                                                       unsigned Op0,
                                                       uint32_t NumBits) {
  assert(getMRI()->getType(Res).isPointer() &&
         getMRI()->getType(Res) == getMRI()->getType(Op0) && "type mismatch");

  return buildInstr(TargetOpcode::G_PTR_MASK)
      .addDef(Res)
      .addUse(Op0)
      .addImm(NumBits);
}
// Build an unconditional G_BR to \p Dest.
MachineInstrBuilder MachineIRBuilderBase::buildBr(MachineBasicBlock &Dest) {
  return buildInstr(TargetOpcode::G_BR).addMBB(&Dest);
}
// Build G_BRINDIRECT to the address held in pointer register \p Tgt.
MachineInstrBuilder MachineIRBuilderBase::buildBrIndirect(unsigned Tgt) {
  assert(getMRI()->getType(Tgt).isPointer() && "invalid branch destination");
  return buildInstr(TargetOpcode::G_BRINDIRECT).addUse(Tgt);
}
// Build COPY Res = Op. Either side may still be untyped (LLT()); when both
// are typed they must match.
MachineInstrBuilder MachineIRBuilderBase::buildCopy(unsigned Res, unsigned Op) {
  assert(getMRI()->getType(Res) == LLT() || getMRI()->getType(Op) == LLT() ||
         getMRI()->getType(Res) == getMRI()->getType(Op));
  return buildInstr(TargetOpcode::COPY).addDef(Res).addUse(Op);
}
// Build G_CONSTANT Res = Val. If Val's bit width differs from Res's type,
// the value is sign-extended or truncated to fit first.
MachineInstrBuilder
MachineIRBuilderBase::buildConstant(unsigned Res, const ConstantInt &Val) {
  LLT Ty = getMRI()->getType(Res);

  assert((Ty.isScalar() || Ty.isPointer()) && "invalid operand type");

  const ConstantInt *NewVal = &Val;
  if (Ty.getSizeInBits() != Val.getBitWidth())
    NewVal = ConstantInt::get(getMF().getFunction().getContext(),
                              Val.getValue().sextOrTrunc(Ty.getSizeInBits()));

  return buildInstr(TargetOpcode::G_CONSTANT).addDef(Res).addCImm(NewVal);
}
// Convenience overload: wrap \p Val in a ConstantInt of Res's width (signed)
// and delegate to the ConstantInt overload.
MachineInstrBuilder MachineIRBuilderBase::buildConstant(unsigned Res,
                                                        int64_t Val) {
  auto IntN = IntegerType::get(getMF().getFunction().getContext(),
                               getMRI()->getType(Res).getSizeInBits());
  ConstantInt *CI = ConstantInt::get(IntN, Val, true);
  return buildConstant(Res, *CI);
}
// Build G_FCONSTANT Res = Val (scalar result only).
MachineInstrBuilder
MachineIRBuilderBase::buildFConstant(unsigned Res, const ConstantFP &Val) {
  assert(getMRI()->getType(Res).isScalar() && "invalid operand type");

  return buildInstr(TargetOpcode::G_FCONSTANT).addDef(Res).addFPImm(&Val);
}
// Convenience overload: convert \p Val to an APFloat of Res's bit width and
// delegate to the ConstantFP overload.
MachineInstrBuilder MachineIRBuilderBase::buildFConstant(unsigned Res,
                                                         double Val) {
  LLT DstTy = getMRI()->getType(Res);
  auto &Ctx = getMF().getFunction().getContext();
  auto *CFP =
      ConstantFP::get(Ctx, getAPFloatFromSize(Val, DstTy.getSizeInBits()));
  return buildFConstant(Res, *CFP);
}
// Build G_BRCOND: branch to \p Dest when scalar condition \p Tst is true.
MachineInstrBuilder MachineIRBuilderBase::buildBrCond(unsigned Tst,
                                                      MachineBasicBlock &Dest) {
  assert(getMRI()->getType(Tst).isScalar() && "invalid operand type");

  return buildInstr(TargetOpcode::G_BRCOND).addUse(Tst).addMBB(&Dest);
}
// Build a plain G_LOAD; see buildLoadInstr for the shared implementation.
MachineInstrBuilder MachineIRBuilderBase::buildLoad(unsigned Res, unsigned Addr,
                                                    MachineMemOperand &MMO) {
  return buildLoadInstr(TargetOpcode::G_LOAD, Res, Addr, MMO);
}
// Build a load-family instruction (\p Opcode, e.g. G_LOAD / G_SEXTLOAD /
// G_ZEXTLOAD): Res = load from pointer Addr with memory operand MMO.
MachineInstrBuilder
MachineIRBuilderBase::buildLoadInstr(unsigned Opcode, unsigned Res,
                                     unsigned Addr, MachineMemOperand &MMO) {
  assert(getMRI()->getType(Res).isValid() && "invalid operand type");
  assert(getMRI()->getType(Addr).isPointer() && "invalid operand type");

  return buildInstr(Opcode)
      .addDef(Res)
      .addUse(Addr)
      .addMemOperand(&MMO);
}
// Build G_STORE: store Val to pointer Addr with memory operand MMO.
MachineInstrBuilder MachineIRBuilderBase::buildStore(unsigned Val,
                                                     unsigned Addr,
                                                     MachineMemOperand &MMO) {
  assert(getMRI()->getType(Val).isValid() && "invalid operand type");
  assert(getMRI()->getType(Addr).isPointer() && "invalid operand type");

  return buildInstr(TargetOpcode::G_STORE)
      .addUse(Val)
      .addUse(Addr)
      .addMemOperand(&MMO);
}
// Build G_UADDE: {Res, CarryOut} = Op0 + Op1 + CarryIn (unsigned add with
// carry-in/carry-out).
MachineInstrBuilder MachineIRBuilderBase::buildUAdde(unsigned Res,
                                                     unsigned CarryOut,
                                                     unsigned Op0, unsigned Op1,
                                                     unsigned CarryIn) {
  assert(getMRI()->getType(Res).isScalar() && "invalid operand type");
  assert(getMRI()->getType(Res) == getMRI()->getType(Op0) &&
         getMRI()->getType(Res) == getMRI()->getType(Op1) && "type mismatch");
  assert(getMRI()->getType(CarryOut).isScalar() && "invalid operand type");
  assert(getMRI()->getType(CarryOut) == getMRI()->getType(CarryIn) &&
         "type mismatch");

  return buildInstr(TargetOpcode::G_UADDE)
      .addDef(Res)
      .addDef(CarryOut)
      .addUse(Op0)
      .addUse(Op1)
      .addUse(CarryIn);
}
// Build G_ANYEXT Res = Op (Res strictly wider than Op; checked in debug).
MachineInstrBuilder MachineIRBuilderBase::buildAnyExt(unsigned Res,
                                                      unsigned Op) {
  validateTruncExt(Res, Op, true);
  return buildInstr(TargetOpcode::G_ANYEXT).addDef(Res).addUse(Op);
}
// Build G_SEXT Res = Op (Res strictly wider than Op; checked in debug).
MachineInstrBuilder MachineIRBuilderBase::buildSExt(unsigned Res, unsigned Op) {
  validateTruncExt(Res, Op, true);
  return buildInstr(TargetOpcode::G_SEXT).addDef(Res).addUse(Op);
}
// Build G_ZEXT Res = Op (Res strictly wider than Op; checked in debug).
MachineInstrBuilder MachineIRBuilderBase::buildZExt(unsigned Res, unsigned Op) {
  validateTruncExt(Res, Op, true);
  return buildInstr(TargetOpcode::G_ZEXT).addDef(Res).addUse(Op);
}
// Build the right size-adjusting instruction from Op to Res: \p ExtOpc when
// Res is wider, G_TRUNC when narrower, COPY when sizes (and types) match.
MachineInstrBuilder MachineIRBuilderBase::buildExtOrTrunc(unsigned ExtOpc,
                                                          unsigned Res,
                                                          unsigned Op) {
  assert((TargetOpcode::G_ANYEXT == ExtOpc || TargetOpcode::G_ZEXT == ExtOpc ||
          TargetOpcode::G_SEXT == ExtOpc) &&
         "Expecting Extending Opc");
  assert(getMRI()->getType(Res).isScalar() ||
         getMRI()->getType(Res).isVector());
  assert(getMRI()->getType(Res).isScalar() == getMRI()->getType(Op).isScalar());

  unsigned Opcode = TargetOpcode::COPY;
  if (getMRI()->getType(Res).getSizeInBits() >
      getMRI()->getType(Op).getSizeInBits())
    Opcode = ExtOpc;
  else if (getMRI()->getType(Res).getSizeInBits() <
           getMRI()->getType(Op).getSizeInBits())
    Opcode = TargetOpcode::G_TRUNC;
  else
    assert(getMRI()->getType(Res) == getMRI()->getType(Op));

  return buildInstr(Opcode).addDef(Res).addUse(Op);
}
// Sign-extend, truncate, or copy Op into Res depending on relative sizes.
MachineInstrBuilder MachineIRBuilderBase::buildSExtOrTrunc(unsigned Res,
                                                           unsigned Op) {
  return buildExtOrTrunc(TargetOpcode::G_SEXT, Res, Op);
}
// Zero-extend, truncate, or copy Op into Res depending on relative sizes.
MachineInstrBuilder MachineIRBuilderBase::buildZExtOrTrunc(unsigned Res,
                                                           unsigned Op) {
  return buildExtOrTrunc(TargetOpcode::G_ZEXT, Res, Op);
}
// Any-extend, truncate, or copy Op into Res depending on relative sizes.
MachineInstrBuilder MachineIRBuilderBase::buildAnyExtOrTrunc(unsigned Res,
                                                             unsigned Op) {
  return buildExtOrTrunc(TargetOpcode::G_ANYEXT, Res, Op);
}
394 MachineInstrBuilder MachineIRBuilderBase::buildCast(unsigned Dst,
395 unsigned Src) {
396 LLT SrcTy = getMRI()->getType(Src);
397 LLT DstTy = getMRI()->getType(Dst);
398 if (SrcTy == DstTy)
399 return buildCopy(Dst, Src);
401 unsigned Opcode;
402 if (SrcTy.isPointer() && DstTy.isScalar())
403 Opcode = TargetOpcode::G_PTRTOINT;
404 else if (DstTy.isPointer() && SrcTy.isScalar())
405 Opcode = TargetOpcode::G_INTTOPTR;
406 else {
407 assert(!SrcTy.isPointer() && !DstTy.isPointer() && "n G_ADDRCAST yet");
408 Opcode = TargetOpcode::G_BITCAST;
411 return buildInstr(Opcode).addDef(Dst).addUse(Src);
414 MachineInstrBuilder
415 MachineIRBuilderBase::buildExtract(unsigned Res, unsigned Src, uint64_t Index) {
416 #ifndef NDEBUG
417 assert(getMRI()->getType(Src).isValid() && "invalid operand type");
418 assert(getMRI()->getType(Res).isValid() && "invalid operand type");
419 assert(Index + getMRI()->getType(Res).getSizeInBits() <=
420 getMRI()->getType(Src).getSizeInBits() &&
421 "extracting off end of register");
422 #endif
424 if (getMRI()->getType(Res).getSizeInBits() ==
425 getMRI()->getType(Src).getSizeInBits()) {
426 assert(Index == 0 && "insertion past the end of a register");
427 return buildCast(Res, Src);
430 return buildInstr(TargetOpcode::G_EXTRACT)
431 .addDef(Res)
432 .addUse(Src)
433 .addImm(Index);
// Assemble Res from the registers in Ops placed at bit offsets Indices.
// Fast path: when all operands share one type, are densely packed, and
// exactly cover Res, emit a single G_MERGE_VALUES. Otherwise start from a
// G_IMPLICIT_DEF and chain one G_INSERT per operand.
void MachineIRBuilderBase::buildSequence(unsigned Res, ArrayRef<unsigned> Ops,
                                         ArrayRef<uint64_t> Indices) {
#ifndef NDEBUG
  assert(Ops.size() == Indices.size() && "incompatible args");
  assert(!Ops.empty() && "invalid trivial sequence");
  assert(std::is_sorted(Indices.begin(), Indices.end()) &&
         "sequence offsets must be in ascending order");

  assert(getMRI()->getType(Res).isValid() && "invalid operand type");
  for (auto Op : Ops)
    assert(getMRI()->getType(Op).isValid() && "invalid operand type");
#endif

  LLT ResTy = getMRI()->getType(Res);
  LLT OpTy = getMRI()->getType(Ops[0]);
  unsigned OpSize = OpTy.getSizeInBits();
  bool MaybeMerge = true;
  // A merge is only possible when every operand has the first operand's type
  // and sits exactly at its natural packed offset.
  for (unsigned i = 0; i < Ops.size(); ++i) {
    if (getMRI()->getType(Ops[i]) != OpTy || Indices[i] != i * OpSize) {
      MaybeMerge = false;
      break;
    }
  }

  if (MaybeMerge && Ops.size() * OpSize == ResTy.getSizeInBits()) {
    buildMerge(Res, Ops);
    return;
  }

  // General path: thread a chain of G_INSERTs; the last insert defines Res.
  unsigned ResIn = getMRI()->createGenericVirtualRegister(ResTy);
  buildUndef(ResIn);

  for (unsigned i = 0; i < Ops.size(); ++i) {
    unsigned ResOut = i + 1 == Ops.size()
                          ? Res
                          : getMRI()->createGenericVirtualRegister(ResTy);
    buildInsert(ResOut, ResIn, Ops[i], Indices[i]);
    ResIn = ResOut;
  }
}
// Build G_IMPLICIT_DEF: Res = undefined value.
MachineInstrBuilder MachineIRBuilderBase::buildUndef(unsigned Res) {
  return buildInstr(TargetOpcode::G_IMPLICIT_DEF).addDef(Res);
}
// Build G_MERGE_VALUES: concatenate the equally-typed Ops into Res, which
// they must cover exactly. A single operand degenerates to a cast.
MachineInstrBuilder MachineIRBuilderBase::buildMerge(unsigned Res,
                                                     ArrayRef<unsigned> Ops) {

#ifndef NDEBUG
  assert(!Ops.empty() && "invalid trivial sequence");
  LLT Ty = getMRI()->getType(Ops[0]);
  for (auto Reg : Ops)
    assert(getMRI()->getType(Reg) == Ty && "type mismatch in input list");
  assert(Ops.size() * getMRI()->getType(Ops[0]).getSizeInBits() ==
             getMRI()->getType(Res).getSizeInBits() &&
         "input operands do not cover output register");
#endif

  if (Ops.size() == 1)
    return buildCast(Res, Ops[0]);

  MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_MERGE_VALUES);
  MIB.addDef(Res);
  for (unsigned i = 0; i < Ops.size(); ++i)
    MIB.addUse(Ops[i]);
  return MIB;
}
// Build G_UNMERGE_VALUES: split Op into the equally-typed registers in Res,
// which must cover Op exactly.
MachineInstrBuilder MachineIRBuilderBase::buildUnmerge(ArrayRef<unsigned> Res,
                                                       unsigned Op) {

#ifndef NDEBUG
  assert(!Res.empty() && "invalid trivial sequence");
  LLT Ty = getMRI()->getType(Res[0]);
  for (auto Reg : Res)
    assert(getMRI()->getType(Reg) == Ty && "type mismatch in input list");
  assert(Res.size() * getMRI()->getType(Res[0]).getSizeInBits() ==
             getMRI()->getType(Op).getSizeInBits() &&
         "input operands do not cover output register");
#endif

  MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_UNMERGE_VALUES);
  for (unsigned i = 0; i < Res.size(); ++i)
    MIB.addDef(Res[i]);
  MIB.addUse(Op);
  return MIB;
}
// Build G_INSERT: Res = Src with bits [Index, Index + size(Op)) replaced by
// Op. When Op is as wide as Res the insert is a full overwrite, so Src is
// ignored and a plain cast of Op is emitted instead.
MachineInstrBuilder MachineIRBuilderBase::buildInsert(unsigned Res,
                                                      unsigned Src, unsigned Op,
                                                      unsigned Index) {
  assert(Index + getMRI()->getType(Op).getSizeInBits() <=
             getMRI()->getType(Res).getSizeInBits() &&
         "insertion past the end of a register");

  if (getMRI()->getType(Res).getSizeInBits() ==
      getMRI()->getType(Op).getSizeInBits()) {
    return buildCast(Res, Op);
  }

  return buildInstr(TargetOpcode::G_INSERT)
      .addDef(Res)
      .addUse(Src)
      .addUse(Op)
      .addImm(Index);
}
// Build G_INTRINSIC (or G_INTRINSIC_W_SIDE_EFFECTS when \p HasSideEffects)
// for intrinsic \p ID. Res == 0 means the intrinsic produces no value.
MachineInstrBuilder MachineIRBuilderBase::buildIntrinsic(Intrinsic::ID ID,
                                                         unsigned Res,
                                                         bool HasSideEffects) {
  auto MIB =
      buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS
                                : TargetOpcode::G_INTRINSIC);
  if (Res)
    MIB.addDef(Res);
  MIB.addIntrinsicID(ID);
  return MIB;
}
// Build G_TRUNC Res = Op (Res strictly narrower than Op; checked in debug).
MachineInstrBuilder MachineIRBuilderBase::buildTrunc(unsigned Res,
                                                     unsigned Op) {
  validateTruncExt(Res, Op, false);
  return buildInstr(TargetOpcode::G_TRUNC).addDef(Res).addUse(Op);
}
// Build G_FPTRUNC Res = Op (Res strictly narrower than Op; checked in debug).
MachineInstrBuilder MachineIRBuilderBase::buildFPTrunc(unsigned Res,
                                                       unsigned Op) {
  validateTruncExt(Res, Op, false);
  return buildInstr(TargetOpcode::G_FPTRUNC).addDef(Res).addUse(Op);
}
567 MachineInstrBuilder MachineIRBuilderBase::buildICmp(CmpInst::Predicate Pred,
568 unsigned Res, unsigned Op0,
569 unsigned Op1) {
570 #ifndef NDEBUG
571 assert(getMRI()->getType(Op0) == getMRI()->getType(Op0) && "type mismatch");
572 assert(CmpInst::isIntPredicate(Pred) && "invalid predicate");
573 if (getMRI()->getType(Op0).isScalar() || getMRI()->getType(Op0).isPointer())
574 assert(getMRI()->getType(Res).isScalar() && "type mismatch");
575 else
576 assert(getMRI()->getType(Res).isVector() &&
577 getMRI()->getType(Res).getNumElements() ==
578 getMRI()->getType(Op0).getNumElements() &&
579 "type mismatch");
580 #endif
582 return buildInstr(TargetOpcode::G_ICMP)
583 .addDef(Res)
584 .addPredicate(Pred)
585 .addUse(Op0)
586 .addUse(Op1);
// Build G_FCMP: Res = fcmp Pred Op0, Op1. Res is scalar for scalar operands,
// or a vector with the same element count for vector operands.
MachineInstrBuilder MachineIRBuilderBase::buildFCmp(CmpInst::Predicate Pred,
                                                    unsigned Res, unsigned Op0,
                                                    unsigned Op1) {
#ifndef NDEBUG
  assert((getMRI()->getType(Op0).isScalar() ||
          getMRI()->getType(Op0).isVector()) &&
         "invalid operand type");
  assert(getMRI()->getType(Op0) == getMRI()->getType(Op1) && "type mismatch");
  assert(CmpInst::isFPPredicate(Pred) && "invalid predicate");
  if (getMRI()->getType(Op0).isScalar())
    assert(getMRI()->getType(Res).isScalar() && "type mismatch");
  else
    assert(getMRI()->getType(Res).isVector() &&
           getMRI()->getType(Res).getNumElements() ==
               getMRI()->getType(Op0).getNumElements() &&
           "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_FCMP)
      .addDef(Res)
      .addPredicate(Pred)
      .addUse(Op0)
      .addUse(Op1);
}
// Build G_SELECT: Res = Tst ? Op0 : Op1. For vector results the test may be
// a scalar (whole-vector select) or an element-wise vector condition.
MachineInstrBuilder MachineIRBuilderBase::buildSelect(unsigned Res,
                                                      unsigned Tst,
                                                      unsigned Op0,
                                                      unsigned Op1) {
#ifndef NDEBUG
  LLT ResTy = getMRI()->getType(Res);
  assert((ResTy.isScalar() || ResTy.isVector() || ResTy.isPointer()) &&
         "invalid operand type");
  assert(ResTy == getMRI()->getType(Op0) && ResTy == getMRI()->getType(Op1) &&
         "type mismatch");
  if (ResTy.isScalar() || ResTy.isPointer())
    assert(getMRI()->getType(Tst).isScalar() && "type mismatch");
  else
    assert((getMRI()->getType(Tst).isScalar() ||
            (getMRI()->getType(Tst).isVector() &&
             getMRI()->getType(Tst).getNumElements() ==
                 getMRI()->getType(Op0).getNumElements())) &&
           "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_SELECT)
      .addDef(Res)
      .addUse(Tst)
      .addUse(Op0)
      .addUse(Op1);
}
// Build G_INSERT_VECTOR_ELT: Res = Val with element Idx replaced by Elt.
MachineInstrBuilder
MachineIRBuilderBase::buildInsertVectorElement(unsigned Res, unsigned Val,
                                               unsigned Elt, unsigned Idx) {
#ifndef NDEBUG
  LLT ResTy = getMRI()->getType(Res);
  LLT ValTy = getMRI()->getType(Val);
  LLT EltTy = getMRI()->getType(Elt);
  LLT IdxTy = getMRI()->getType(Idx);
  assert(ResTy.isVector() && ValTy.isVector() && "invalid operand type");
  assert(IdxTy.isScalar() && "invalid operand type");
  assert(ResTy.getNumElements() == ValTy.getNumElements() && "type mismatch");
  assert(ResTy.getElementType() == EltTy && "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_INSERT_VECTOR_ELT)
      .addDef(Res)
      .addUse(Val)
      .addUse(Elt)
      .addUse(Idx);
}
// Build G_EXTRACT_VECTOR_ELT: Res = element Idx of vector Val.
MachineInstrBuilder
MachineIRBuilderBase::buildExtractVectorElement(unsigned Res, unsigned Val,
                                                unsigned Idx) {
#ifndef NDEBUG
  LLT ResTy = getMRI()->getType(Res);
  LLT ValTy = getMRI()->getType(Val);
  LLT IdxTy = getMRI()->getType(Idx);
  assert(ValTy.isVector() && "invalid operand type");
  assert((ResTy.isScalar() || ResTy.isPointer()) && "invalid operand type");
  assert(IdxTy.isScalar() && "invalid operand type");
  assert(ValTy.getElementType() == ResTy && "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_EXTRACT_VECTOR_ELT)
      .addDef(Res)
      .addUse(Val)
      .addUse(Idx);
}
// Build G_ATOMIC_CMPXCHG_WITH_SUCCESS: atomically compare *Addr with CmpVal,
// store NewVal on match; OldValRes receives the prior value and SuccessRes
// whether the exchange happened.
MachineInstrBuilder MachineIRBuilderBase::buildAtomicCmpXchgWithSuccess(
    unsigned OldValRes, unsigned SuccessRes, unsigned Addr, unsigned CmpVal,
    unsigned NewVal, MachineMemOperand &MMO) {
#ifndef NDEBUG
  LLT OldValResTy = getMRI()->getType(OldValRes);
  LLT SuccessResTy = getMRI()->getType(SuccessRes);
  LLT AddrTy = getMRI()->getType(Addr);
  LLT CmpValTy = getMRI()->getType(CmpVal);
  LLT NewValTy = getMRI()->getType(NewVal);
  assert(OldValResTy.isScalar() && "invalid operand type");
  assert(SuccessResTy.isScalar() && "invalid operand type");
  assert(AddrTy.isPointer() && "invalid operand type");
  assert(CmpValTy.isValid() && "invalid operand type");
  assert(NewValTy.isValid() && "invalid operand type");
  assert(OldValResTy == CmpValTy && "type mismatch");
  assert(OldValResTy == NewValTy && "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG_WITH_SUCCESS)
      .addDef(OldValRes)
      .addDef(SuccessRes)
      .addUse(Addr)
      .addUse(CmpVal)
      .addUse(NewVal)
      .addMemOperand(&MMO);
}
// Build G_ATOMIC_CMPXCHG: like buildAtomicCmpXchgWithSuccess but without a
// separate success flag result.
MachineInstrBuilder
MachineIRBuilderBase::buildAtomicCmpXchg(unsigned OldValRes, unsigned Addr,
                                         unsigned CmpVal, unsigned NewVal,
                                         MachineMemOperand &MMO) {
#ifndef NDEBUG
  LLT OldValResTy = getMRI()->getType(OldValRes);
  LLT AddrTy = getMRI()->getType(Addr);
  LLT CmpValTy = getMRI()->getType(CmpVal);
  LLT NewValTy = getMRI()->getType(NewVal);
  assert(OldValResTy.isScalar() && "invalid operand type");
  assert(AddrTy.isPointer() && "invalid operand type");
  assert(CmpValTy.isValid() && "invalid operand type");
  assert(NewValTy.isValid() && "invalid operand type");
  assert(OldValResTy == CmpValTy && "type mismatch");
  assert(OldValResTy == NewValTy && "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG)
      .addDef(OldValRes)
      .addUse(Addr)
      .addUse(CmpVal)
      .addUse(NewVal)
      .addMemOperand(&MMO);
}
// Build a generic atomic read-modify-write with opcode \p Opcode:
// OldValRes = *Addr; *Addr = op(*Addr, Val), atomically.
MachineInstrBuilder
MachineIRBuilderBase::buildAtomicRMW(unsigned Opcode, unsigned OldValRes,
                                     unsigned Addr, unsigned Val,
                                     MachineMemOperand &MMO) {
#ifndef NDEBUG
  LLT OldValResTy = getMRI()->getType(OldValRes);
  LLT AddrTy = getMRI()->getType(Addr);
  LLT ValTy = getMRI()->getType(Val);
  assert(OldValResTy.isScalar() && "invalid operand type");
  assert(AddrTy.isPointer() && "invalid operand type");
  assert(ValTy.isValid() && "invalid operand type");
  assert(OldValResTy == ValTy && "type mismatch");
#endif

  return buildInstr(Opcode)
      .addDef(OldValRes)
      .addUse(Addr)
      .addUse(Val)
      .addMemOperand(&MMO);
}
// Build G_ATOMICRMW_XCHG via the generic buildAtomicRMW helper.
MachineInstrBuilder
MachineIRBuilderBase::buildAtomicRMWXchg(unsigned OldValRes, unsigned Addr,
                                         unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XCHG, OldValRes, Addr, Val,
                        MMO);
}
// Build G_ATOMICRMW_ADD via the generic buildAtomicRMW helper.
MachineInstrBuilder
MachineIRBuilderBase::buildAtomicRMWAdd(unsigned OldValRes, unsigned Addr,
                                        unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_ADD, OldValRes, Addr, Val,
                        MMO);
}
// Build G_ATOMICRMW_SUB via the generic buildAtomicRMW helper.
MachineInstrBuilder
MachineIRBuilderBase::buildAtomicRMWSub(unsigned OldValRes, unsigned Addr,
                                        unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_SUB, OldValRes, Addr, Val,
                        MMO);
}
// Build G_ATOMICRMW_AND via the generic buildAtomicRMW helper.
MachineInstrBuilder
MachineIRBuilderBase::buildAtomicRMWAnd(unsigned OldValRes, unsigned Addr,
                                        unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_AND, OldValRes, Addr, Val,
                        MMO);
}
// Build G_ATOMICRMW_NAND via the generic buildAtomicRMW helper.
MachineInstrBuilder
MachineIRBuilderBase::buildAtomicRMWNand(unsigned OldValRes, unsigned Addr,
                                         unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_NAND, OldValRes, Addr, Val,
                        MMO);
}
// Build G_ATOMICRMW_OR via the generic buildAtomicRMW helper.
MachineInstrBuilder
MachineIRBuilderBase::buildAtomicRMWOr(unsigned OldValRes, unsigned Addr,
                                       unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_OR, OldValRes, Addr, Val,
                        MMO);
}
// Build G_ATOMICRMW_XOR via the generic buildAtomicRMW helper.
MachineInstrBuilder
MachineIRBuilderBase::buildAtomicRMWXor(unsigned OldValRes, unsigned Addr,
                                        unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XOR, OldValRes, Addr, Val,
                        MMO);
}
// Build G_ATOMICRMW_MAX (signed max) via the generic buildAtomicRMW helper.
MachineInstrBuilder
MachineIRBuilderBase::buildAtomicRMWMax(unsigned OldValRes, unsigned Addr,
                                        unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MAX, OldValRes, Addr, Val,
                        MMO);
}
// Build G_ATOMICRMW_MIN (signed min) via the generic buildAtomicRMW helper.
MachineInstrBuilder
MachineIRBuilderBase::buildAtomicRMWMin(unsigned OldValRes, unsigned Addr,
                                        unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MIN, OldValRes, Addr, Val,
                        MMO);
}
// Build G_ATOMICRMW_UMAX (unsigned max) via the generic buildAtomicRMW helper.
MachineInstrBuilder
MachineIRBuilderBase::buildAtomicRMWUmax(unsigned OldValRes, unsigned Addr,
                                         unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMAX, OldValRes, Addr, Val,
                        MMO);
}
// Build G_ATOMICRMW_UMIN (unsigned min) via the generic buildAtomicRMW helper.
MachineInstrBuilder
MachineIRBuilderBase::buildAtomicRMWUmin(unsigned OldValRes, unsigned Addr,
                                         unsigned Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMIN, OldValRes, Addr, Val,
                        MMO);
}
// Build G_BLOCK_ADDR: Res (pointer-typed) = address of block \p BA.
MachineInstrBuilder
MachineIRBuilderBase::buildBlockAddress(unsigned Res, const BlockAddress *BA) {
#ifndef NDEBUG
  assert(getMRI()->getType(Res).isPointer() && "invalid res type");
#endif

  return buildInstr(TargetOpcode::G_BLOCK_ADDR).addDef(Res).addBlockAddress(BA);
}
// Assert-only checks for trunc/ext builders: scalar-ness must match between
// Dst and Src (vectors also need equal element counts), and the destination
// must be strictly wider for extends / strictly narrower for truncs.
void MachineIRBuilderBase::validateTruncExt(unsigned Dst, unsigned Src,
                                            bool IsExtend) {
#ifndef NDEBUG
  LLT SrcTy = getMRI()->getType(Src);
  LLT DstTy = getMRI()->getType(Dst);

  if (DstTy.isVector()) {
    assert(SrcTy.isVector() && "mismatched cast between vector and non-vector");
    assert(SrcTy.getNumElements() == DstTy.getNumElements() &&
           "different number of elements in a trunc/ext");
  } else
    assert(DstTy.isScalar() && SrcTy.isScalar() && "invalid extend/trunc");

  if (IsExtend)
    assert(DstTy.getSizeInBits() > SrcTy.getSizeInBits() &&
           "invalid narrowing extend");
  else
    assert(DstTy.getSizeInBits() < SrcTy.getSizeInBits() &&
           "invalid widening trunc");
#endif
}