//===- ARCInstrInfo.cpp - ARC Instruction Information -----------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains the ARC implementation of the TargetInstrInfo class.
//
//===----------------------------------------------------------------------===//
#include "ARCInstrInfo.h"
#include "ARC.h"
#include "ARCMachineFunctionInfo.h"
#include "ARCSubtarget.h"
#include "MCTargetDesc/ARCInfo.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineMemOperand.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/TargetRegistry.h"

using namespace llvm;

#define GET_INSTRINFO_CTOR_DTOR
#include "ARCGenInstrInfo.inc"

#define DEBUG_TYPE "arc-inst-info"

// Pin the vtable to this file.
void ARCInstrInfo::anchor() {}

ARCInstrInfo::ARCInstrInfo()
    : ARCGenInstrInfo(ARC::ADJCALLSTACKDOWN, ARC::ADJCALLSTACKUP), RI() {}
static bool isZeroImm(const MachineOperand &Op) {
  return Op.isImm() && Op.getImm() == 0;
}

static bool isLoad(int Opcode) {
  return Opcode == ARC::LD_rs9 || Opcode == ARC::LDH_rs9 ||
         Opcode == ARC::LDB_rs9;
}

static bool isStore(int Opcode) {
  return Opcode == ARC::ST_rs9 || Opcode == ARC::STH_rs9 ||
         Opcode == ARC::STB_rs9;
}
/// If the specified machine instruction is a direct
/// load from a stack slot, return the virtual or physical register number of
/// the destination along with the FrameIndex of the loaded stack slot. If
/// not, return 0. This predicate must return 0 if the instruction has
/// any side effects other than loading from the stack slot.
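/// For example (in MIR-style notation, roughly): `%r1 = LD_rs9 %stack.2, 0`
/// is reported as a load of frame index 2 into %r1, while a load whose
/// offset operand is nonzero is not treated as a stack-slot load.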
unsigned ARCInstrInfo::isLoadFromStackSlot(const MachineInstr &MI,
                                           int &FrameIndex) const {
  int Opcode = MI.getOpcode();
  if (isLoad(Opcode)) {
    if ((MI.getOperand(1).isFI()) &&      // is a stack slot
        (MI.getOperand(2).isImm()) &&     // the offset immediate
        (isZeroImm(MI.getOperand(2)))) {  // the offset is zero
      FrameIndex = MI.getOperand(1).getIndex();
      return MI.getOperand(0).getReg();
    }
  }
  return 0;
}

/// If the specified machine instruction is a direct
/// store to a stack slot, return the virtual or physical register number of
/// the source reg along with the FrameIndex of the stored stack slot. If
/// not, return 0. This predicate must return 0 if the instruction has
/// any side effects other than storing to the stack slot.
unsigned ARCInstrInfo::isStoreToStackSlot(const MachineInstr &MI,
                                          int &FrameIndex) const {
  int Opcode = MI.getOpcode();
  if (isStore(Opcode)) {
    if ((MI.getOperand(1).isFI()) &&      // is a stack slot
        (MI.getOperand(2).isImm()) &&     // the offset immediate
        (isZeroImm(MI.getOperand(2)))) {  // the offset is zero
      FrameIndex = MI.getOperand(1).getIndex();
      return MI.getOperand(0).getReg();
    }
  }
  return 0;
}

/// Return the inverse of the passed condition, i.e. turning COND_E to COND_NE.
static ARCCC::CondCode GetOppositeBranchCondition(ARCCC::CondCode CC) {
  switch (CC) {
  default:
    llvm_unreachable("Illegal condition code!");
  case ARCCC::EQ:
    return ARCCC::NE;
  case ARCCC::NE:
    return ARCCC::EQ;
  case ARCCC::LO:
    return ARCCC::HS;
  case ARCCC::HS:
    return ARCCC::LO;
  case ARCCC::GT:
    return ARCCC::LE;
  case ARCCC::GE:
    return ARCCC::LT;
  case ARCCC::VS:
    return ARCCC::VC;
  case ARCCC::VC:
    return ARCCC::VS;
  case ARCCC::LT:
    return ARCCC::GE;
  case ARCCC::LE:
    return ARCCC::GT;
  case ARCCC::HI:
    return ARCCC::LS;
  case ARCCC::LS:
    return ARCCC::HI;
  case ARCCC::NZ:
    return ARCCC::Z;
  case ARCCC::Z:
    return ARCCC::NZ;
  }
}

static bool isUncondBranchOpcode(int Opc) { return Opc == ARC::BR; }

static bool isCondBranchOpcode(int Opc) {
  return Opc == ARC::BRcc_rr_p || Opc == ARC::BRcc_ru6_p;
}

static bool isJumpOpcode(int Opc) { return Opc == ARC::J; }

/// Analyze the branching code at the end of MBB, returning
/// true if it cannot be understood (e.g. it's a switch dispatch or isn't
/// implemented for a target). Upon success, this returns false and returns
/// with the following information in various cases:
///
/// 1. If this block ends with no branches (it just falls through to its succ)
///    just return false, leaving TBB/FBB null.
/// 2. If this block ends with only an unconditional branch, it sets TBB to be
///    the destination block.
/// 3. If this block ends with a conditional branch and it falls through to a
///    successor block, it sets TBB to be the branch destination block and a
///    list of operands that evaluate the condition. These operands can be
///    passed to other TargetInstrInfo methods to create new branches.
/// 4. If this block ends with a conditional branch followed by an
///    unconditional branch, it returns the 'true' destination in TBB, the
///    'false' destination in FBB, and a list of operands that evaluate the
///    condition. These operands can be passed to other TargetInstrInfo
///    methods to create new branches.
///
/// Note that removeBranch and insertBranch must be implemented to support
/// cases where this method returns success.
///
/// If AllowModify is true, then this routine is allowed to modify the basic
/// block (e.g. delete instructions after the unconditional branch).
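///
/// For ARC, the condition operands pushed into Cond are the three trailing
/// BRcc operands: the two values being compared followed by the ARCCC
/// condition code. reverseBranchCondition and insertBranch below assume this
/// same layout.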
bool ARCInstrInfo::analyzeBranch(MachineBasicBlock &MBB,
                                 MachineBasicBlock *&TBB,
                                 MachineBasicBlock *&FBB,
                                 SmallVectorImpl<MachineOperand> &Cond,
                                 bool AllowModify) const {
  TBB = FBB = nullptr;
  MachineBasicBlock::iterator I = MBB.end();
  if (I == MBB.begin())
    return false;
  --I;

  while (isPredicated(*I) || I->isTerminator() || I->isDebugValue()) {
    // Flag to be raised on unanalyzeable instructions. This is useful in cases
    // where we want to clean up on the end of the basic block before we bail
    // out.
    bool CantAnalyze = false;

    // Skip over DEBUG values and predicated nonterminators.
    while (I->isDebugInstr() || !I->isTerminator()) {
      if (I == MBB.begin())
        return false;
      --I;
    }

    if (isJumpOpcode(I->getOpcode())) {
      // Indirect branches and jump tables can't be analyzed, but we still want
      // to clean up any instructions at the tail of the basic block.
      CantAnalyze = true;
    } else if (isUncondBranchOpcode(I->getOpcode())) {
      TBB = I->getOperand(0).getMBB();
    } else if (isCondBranchOpcode(I->getOpcode())) {
      // Bail out if we encounter multiple conditional branches.
      if (!Cond.empty())
        return true;

      assert(!FBB && "FBB should have been null.");
      FBB = TBB;
      TBB = I->getOperand(0).getMBB();
      Cond.push_back(I->getOperand(1));
      Cond.push_back(I->getOperand(2));
      Cond.push_back(I->getOperand(3));
    } else if (I->isReturn()) {
      // Returns can't be analyzed, but we should run cleanup.
      CantAnalyze = !isPredicated(*I);
    } else {
      // We encountered some other unrecognized terminator. Bail out
      // immediately.
      return true;
    }

    // Cleanup code - to be run for unpredicated unconditional branches and
    // returns.
    if (!isPredicated(*I) && (isUncondBranchOpcode(I->getOpcode()) ||
                              isJumpOpcode(I->getOpcode()) || I->isReturn())) {
      // Forget any previous conditional branch information - it no longer
      // applies.
      Cond.clear();
      FBB = nullptr;

      // If we can modify the function, delete everything below this
      // unconditional branch.
      if (AllowModify) {
        MachineBasicBlock::iterator DI = std::next(I);
        while (DI != MBB.end()) {
          MachineInstr &InstToDelete = *DI;
          ++DI;
          InstToDelete.eraseFromParent();
        }
      }
    }

    if (CantAnalyze)
      return true;

    if (I == MBB.begin())
      return false;

    --I;
  }

  // We made it past the terminators without bailing out - we must have
  // analyzed this branch successfully.
  return false;
}

unsigned ARCInstrInfo::removeBranch(MachineBasicBlock &MBB,
                                    int *BytesRemoved) const {
  assert(!BytesRemoved && "Code size not handled");
  MachineBasicBlock::iterator I = MBB.getLastNonDebugInstr();
  if (I == MBB.end())
    return 0;

  if (!isUncondBranchOpcode(I->getOpcode()) &&
      !isCondBranchOpcode(I->getOpcode()))
    return 0;

  // Remove the branch.
  I->eraseFromParent();

  I = MBB.end();

  if (I == MBB.begin())
    return 1;
  --I;
  if (!isCondBranchOpcode(I->getOpcode()))
    return 1;

  // Remove the branch.
  I->eraseFromParent();
  return 2;
}

void ARCInstrInfo::copyPhysReg(MachineBasicBlock &MBB,
                               MachineBasicBlock::iterator I,
                               const DebugLoc &dl, unsigned DestReg,
                               unsigned SrcReg, bool KillSrc) const {
  assert(ARC::GPR32RegClass.contains(SrcReg) &&
         "Only GPR32 src copy supported.");
  assert(ARC::GPR32RegClass.contains(DestReg) &&
         "Only GPR32 dest copy supported.");
  BuildMI(MBB, I, dl, get(ARC::MOV_rr), DestReg)
      .addReg(SrcReg, getKillRegState(KillSrc));
}

void ARCInstrInfo::storeRegToStackSlot(MachineBasicBlock &MBB,
                                       MachineBasicBlock::iterator I,
                                       unsigned SrcReg, bool isKill,
                                       int FrameIndex,
                                       const TargetRegisterClass *RC,
                                       const TargetRegisterInfo *TRI) const {
  DebugLoc dl = MBB.findDebugLoc(I);
  MachineFunction &MF = *MBB.getParent();
  MachineFrameInfo &MFI = MF.getFrameInfo();
  unsigned Align = MFI.getObjectAlignment(FrameIndex);

  MachineMemOperand *MMO = MF.getMachineMemOperand(
      MachinePointerInfo::getFixedStack(MF, FrameIndex),
      MachineMemOperand::MOStore, MFI.getObjectSize(FrameIndex), Align);

  assert(MMO && "Couldn't get MachineMemOperand for store to stack.");
  assert(TRI->getSpillSize(*RC) == 4 &&
         "Only support 4-byte stores to stack now.");
  assert(ARC::GPR32RegClass.hasSubClassEq(RC) &&
         "Only support GPR32 stores to stack now.");
  LLVM_DEBUG(dbgs() << "Created store reg=" << printReg(SrcReg, TRI)
                    << " to FrameIndex=" << FrameIndex << "\n");
  BuildMI(MBB, I, dl, get(ARC::ST_rs9))
      .addReg(SrcReg, getKillRegState(isKill))
      .addFrameIndex(FrameIndex)
      .addImm(0)
      .addMemOperand(MMO);
}

void ARCInstrInfo::loadRegFromStackSlot(MachineBasicBlock &MBB,
                                        MachineBasicBlock::iterator I,
                                        unsigned DestReg, int FrameIndex,
                                        const TargetRegisterClass *RC,
                                        const TargetRegisterInfo *TRI) const {
  DebugLoc dl = MBB.findDebugLoc(I);
  MachineFunction &MF = *MBB.getParent();
  MachineFrameInfo &MFI = MF.getFrameInfo();
  unsigned Align = MFI.getObjectAlignment(FrameIndex);
  MachineMemOperand *MMO = MF.getMachineMemOperand(
      MachinePointerInfo::getFixedStack(MF, FrameIndex),
      MachineMemOperand::MOLoad, MFI.getObjectSize(FrameIndex), Align);

  assert(MMO && "Couldn't get MachineMemOperand for load from stack.");
  assert(TRI->getSpillSize(*RC) == 4 &&
         "Only support 4-byte loads from stack now.");
  assert(ARC::GPR32RegClass.hasSubClassEq(RC) &&
         "Only support GPR32 loads from stack now.");
  LLVM_DEBUG(dbgs() << "Created load reg=" << printReg(DestReg, TRI)
                    << " from FrameIndex=" << FrameIndex << "\n");
  BuildMI(MBB, I, dl, get(ARC::LD_rs9))
      .addReg(DestReg, RegState::Define)
      .addFrameIndex(FrameIndex)
      .addImm(0)
      .addMemOperand(MMO);
}

/// Reverse the branch condition encoded in Cond, i.e. replace the condition
/// code with its opposite. Returns false to indicate success.
bool ARCInstrInfo::reverseBranchCondition(
    SmallVectorImpl<MachineOperand> &Cond) const {
  assert((Cond.size() == 3) && "Invalid ARC branch condition!");
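  // Cond[2] is the ARCCC condition code pushed by analyzeBranch; Cond[0] and
  // Cond[1] are the compare operands and are left untouched.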
  Cond[2].setImm(GetOppositeBranchCondition((ARCCC::CondCode)Cond[2].getImm()));
  return false;
}

MachineBasicBlock::iterator
ARCInstrInfo::loadImmediate(MachineBasicBlock &MBB,
                            MachineBasicBlock::iterator MI, unsigned Reg,
                            uint64_t Value) const {
  DebugLoc dl = MBB.findDebugLoc(MI);
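  // MOV_rs12 only encodes a signed 12-bit immediate; values outside that
  // range would need the ARC long-immediate (limm) form, which is not
  // implemented here yet.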
  if (isInt<12>(Value)) {
    return BuildMI(MBB, MI, dl, get(ARC::MOV_rs12), Reg)
        .addImm(Value)
        .getInstr();
  }
  llvm_unreachable("Need Arc long immediate instructions.");
}

unsigned ARCInstrInfo::insertBranch(MachineBasicBlock &MBB,
                                    MachineBasicBlock *TBB,
                                    MachineBasicBlock *FBB,
                                    ArrayRef<MachineOperand> Cond,
                                    const DebugLoc &dl, int *BytesAdded) const {
  assert(!BytesAdded && "Code size not handled.");

  // Shouldn't be a fall through.
  assert(TBB && "insertBranch must not be told to insert a fallthrough");
  assert((Cond.size() == 3 || Cond.size() == 0) &&
         "ARC branch conditions have three components!");

  if (Cond.empty()) {
    BuildMI(&MBB, dl, get(ARC::BR)).addMBB(TBB);
    return 1;
  }
  int BccOpc = Cond[1].isImm() ? ARC::BRcc_ru6_p : ARC::BRcc_rr_p;
  MachineInstrBuilder MIB = BuildMI(&MBB, dl, get(BccOpc));
  MIB.addMBB(TBB);
  for (unsigned i = 0; i < 3; i++) {
    MIB.add(Cond[i]);
  }

  // One-way conditional branch.
  if (!FBB) {
    return 1;
  }

  // Two-way conditional branch.
  BuildMI(&MBB, dl, get(ARC::BR)).addMBB(FBB);
  return 2;
}

unsigned ARCInstrInfo::getInstSizeInBytes(const MachineInstr &MI) const {
  if (MI.isInlineAsm()) {
    const MachineFunction *MF = MI.getParent()->getParent();
    const char *AsmStr = MI.getOperand(0).getSymbolName();
    return getInlineAsmLength(AsmStr, *MF->getTarget().getMCAsmInfo());
  }
  return MI.getDesc().getSize();
}