//===- ARCInstrInfo.cpp - ARC Instruction Information -----------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains the ARC implementation of the TargetInstrInfo class.
//
//===----------------------------------------------------------------------===//

#include "ARCInstrInfo.h"

#include "ARCMachineFunctionInfo.h"
#include "ARCSubtarget.h"
#include "MCTargetDesc/ARCInfo.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineMemOperand.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/TargetRegistry.h"

using namespace llvm;

#define GET_INSTRINFO_CTOR_DTOR
#include "ARCGenInstrInfo.inc"

#define DEBUG_TYPE "arc-inst-info"
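
// TSFlags bookkeeping for load/store addressing modes. The field selected by
// TSF_AddrModeOff/TSF_AddModeMask is assumed to mirror the address-increment
// encoding set up by the instruction format definitions in the ARC .td files;
// it is decoded below by isPostIncrement() and isPreIncrement().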
enum AddrIncType {
  NoAddInc = 0,
  PreInc = 1,
  PostInc = 2,
  Scaled = 3
};

enum TSFlagsConstants {
  TSF_AddrModeOff = 0,
  TSF_AddModeMask = 3
};

// Pin the vtable to this file.
void ARCInstrInfo::anchor() {}

ARCInstrInfo::ARCInstrInfo()
    : ARCGenInstrInfo(ARC::ADJCALLSTACKDOWN, ARC::ADJCALLSTACKUP), RI() {}

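// Small opcode/operand predicates used by the stack-slot analysis below.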
static bool isZeroImm(const MachineOperand &Op) {
  return Op.isImm() && Op.getImm() == 0;
}

static bool isLoad(int Opcode) {
  return Opcode == ARC::LD_rs9 || Opcode == ARC::LDH_rs9 ||
         Opcode == ARC::LDB_rs9;
}

static bool isStore(int Opcode) {
  return Opcode == ARC::ST_rs9 || Opcode == ARC::STH_rs9 ||
         Opcode == ARC::STB_rs9;
}

/// If the specified machine instruction is a direct
/// load from a stack slot, return the virtual or physical register number of
/// the destination along with the FrameIndex of the loaded stack slot. If
/// not, return 0. This predicate must return 0 if the instruction has
/// any side effects other than loading from the stack slot.
unsigned ARCInstrInfo::isLoadFromStackSlot(const MachineInstr &MI,
                                           int &FrameIndex) const {
  int Opcode = MI.getOpcode();
  if (isLoad(Opcode)) {
    if ((MI.getOperand(1).isFI()) &&  // is a stack slot
        (MI.getOperand(2).isImm()) && // the imm is zero
        (isZeroImm(MI.getOperand(2)))) {
      FrameIndex = MI.getOperand(1).getIndex();
      return MI.getOperand(0).getReg();
    }
  }
  return 0;
}

/// If the specified machine instruction is a direct
/// store to a stack slot, return the virtual or physical register number of
/// the source reg along with the FrameIndex of the stored stack slot. If
/// not, return 0. This predicate must return 0 if the instruction has
/// any side effects other than storing to the stack slot.
unsigned ARCInstrInfo::isStoreToStackSlot(const MachineInstr &MI,
                                          int &FrameIndex) const {
  int Opcode = MI.getOpcode();
  if (isStore(Opcode)) {
    if ((MI.getOperand(1).isFI()) &&  // is a stack slot
        (MI.getOperand(2).isImm()) && // the imm is zero
        (isZeroImm(MI.getOperand(2)))) {
      FrameIndex = MI.getOperand(1).getIndex();
      return MI.getOperand(0).getReg();
    }
  }
  return 0;
}

/// Return the inverse of passed condition, i.e. turning COND_E to COND_NE.
static ARCCC::CondCode GetOppositeBranchCondition(ARCCC::CondCode CC) {
  switch (CC) {
  default:
    llvm_unreachable("Illegal condition code!");
  case ARCCC::EQ:
    return ARCCC::NE;
  case ARCCC::NE:
    return ARCCC::EQ;
  }
}

static bool isUncondBranchOpcode(int Opc) { return Opc == ARC::BR; }

static bool isCondBranchOpcode(int Opc) {
  return Opc == ARC::BRcc_rr_p || Opc == ARC::BRcc_ru6_p;
}

static bool isJumpOpcode(int Opc) { return Opc == ARC::J; }

/// Analyze the branching code at the end of MBB, returning
/// true if it cannot be understood (e.g. it's a switch dispatch or isn't
/// implemented for a target). Upon success, this returns false and returns
/// with the following information in various cases:
///
/// 1. If this block ends with no branches (it just falls through to its succ)
///    just return false, leaving TBB/FBB null.
/// 2. If this block ends with only an unconditional branch, it sets TBB to be
///    the destination block.
/// 3. If this block ends with a conditional branch and it falls through to a
///    successor block, it sets TBB to be the branch destination block and a
///    list of operands that evaluate the condition. These operands can be
///    passed to other TargetInstrInfo methods to create new branches.
/// 4. If this block ends with a conditional branch followed by an
///    unconditional branch, it returns the 'true' destination in TBB, the
///    'false' destination in FBB, and a list of operands that evaluate the
///    condition. These operands can be passed to other TargetInstrInfo
///    methods to create new branches.
///
/// Note that RemoveBranch and InsertBranch must be implemented to support
/// cases where this method returns success.
///
/// If AllowModify is true, then this routine is allowed to modify the basic
/// block (e.g. delete instructions after the unconditional branch).
bool ARCInstrInfo::analyzeBranch(MachineBasicBlock &MBB,
                                 MachineBasicBlock *&TBB,
                                 MachineBasicBlock *&FBB,
                                 SmallVectorImpl<MachineOperand> &Cond,
                                 bool AllowModify) const {
  TBB = FBB = nullptr;
  MachineBasicBlock::iterator I = MBB.end();
  if (I == MBB.begin())
    return false;
  --I;

  while (isPredicated(*I) || I->isTerminator() || I->isDebugValue()) {
    // Flag to be raised on unanalyzeable instructions. This is useful in cases
    // where we want to clean up on the end of the basic block before we bail
    // out.
    bool CantAnalyze = false;

    // Skip over DEBUG values and predicated nonterminators.
    while (I->isDebugInstr() || !I->isTerminator()) {
      if (I == MBB.begin())
        return false;
      --I;
    }

    if (isJumpOpcode(I->getOpcode())) {
      // Indirect branches and jump tables can't be analyzed, but we still want
      // to clean up any instructions at the tail of the basic block.
      CantAnalyze = true;
    } else if (isUncondBranchOpcode(I->getOpcode())) {
      TBB = I->getOperand(0).getMBB();
    } else if (isCondBranchOpcode(I->getOpcode())) {
      // Bail out if we encounter multiple conditional branches.
      if (!Cond.empty())
        return true;

      assert(!FBB && "FBB should have been null.");
      FBB = TBB;
      TBB = I->getOperand(0).getMBB();
      Cond.push_back(I->getOperand(1));
      Cond.push_back(I->getOperand(2));
      Cond.push_back(I->getOperand(3));
    } else if (I->isReturn()) {
      // Returns can't be analyzed, but we should run cleanup.
      CantAnalyze = !isPredicated(*I);
    } else {
      // We encountered other unrecognized terminator. Bail out immediately.
      return true;
    }

    // Cleanup code - to be run for unpredicated unconditional branches and
    // returns.
    if (!isPredicated(*I) && (isUncondBranchOpcode(I->getOpcode()) ||
                              isJumpOpcode(I->getOpcode()) || I->isReturn())) {
      // Forget any previous condition branch information - it no longer
      // applies.
      Cond.clear();
      FBB = nullptr;

      // If we can modify the function, delete everything below this
      // unconditional branch.
      if (AllowModify) {
        MachineBasicBlock::iterator DI = std::next(I);
        while (DI != MBB.end()) {
          MachineInstr &InstToDelete = *DI;
          ++DI;
          InstToDelete.eraseFromParent();
        }
      }
    }

    if (CantAnalyze)
      return true;

    if (I == MBB.begin())
      return false;

    --I;
  }

  // We made it past the terminators without bailing out - we must have
  // analyzed this branch successfully.
  return false;
}

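/// Remove up to two terminating branches (an optional conditional branch
/// followed by an unconditional one) from the end of MBB and return how many
/// instructions were removed.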
unsigned ARCInstrInfo::removeBranch(MachineBasicBlock &MBB,
                                    int *BytesRemoved) const {
  assert(!BytesRemoved && "Code size not handled");
  MachineBasicBlock::iterator I = MBB.getLastNonDebugInstr();
  if (I == MBB.end())
    return 0;

  if (!isUncondBranchOpcode(I->getOpcode()) &&
      !isCondBranchOpcode(I->getOpcode()))
    return 0;

  // Remove the branch.
  I->eraseFromParent();

  I = MBB.end();

  if (I == MBB.begin())
    return 1;
  --I;
  if (!isCondBranchOpcode(I->getOpcode()))
    return 1;

  // Remove the branch.
  I->eraseFromParent();
  return 2;
}

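/// Emit a register-to-register copy. Only GPR32-to-GPR32 copies are
/// supported; they are lowered to a single MOV_rr.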
void ARCInstrInfo::copyPhysReg(MachineBasicBlock &MBB,
                               MachineBasicBlock::iterator I,
                               const DebugLoc &dl, unsigned DestReg,
                               unsigned SrcReg, bool KillSrc) const {
  assert(ARC::GPR32RegClass.contains(SrcReg) &&
         "Only GPR32 src copy supported.");
  assert(ARC::GPR32RegClass.contains(DestReg) &&
         "Only GPR32 dest copy supported.");
  BuildMI(MBB, I, dl, get(ARC::MOV_rr), DestReg)
      .addReg(SrcReg, getKillRegState(KillSrc));
}

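/// Spill SrcReg to the given stack slot. Only 4-byte GPR32 spills are
/// supported; the store is emitted as an ST_rs9 with a zero offset from the
/// frame index.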
void ARCInstrInfo::storeRegToStackSlot(MachineBasicBlock &MBB,
                                       MachineBasicBlock::iterator I,
                                       unsigned SrcReg, bool isKill,
                                       int FrameIndex,
                                       const TargetRegisterClass *RC,
                                       const TargetRegisterInfo *TRI) const {
  DebugLoc dl = MBB.findDebugLoc(I);
  MachineFunction &MF = *MBB.getParent();
  MachineFrameInfo &MFI = MF.getFrameInfo();
  unsigned Align = MFI.getObjectAlignment(FrameIndex);

  MachineMemOperand *MMO = MF.getMachineMemOperand(
      MachinePointerInfo::getFixedStack(MF, FrameIndex),
      MachineMemOperand::MOStore, MFI.getObjectSize(FrameIndex), Align);

  assert(MMO && "Couldn't get MachineMemOperand for store to stack.");
  assert(TRI->getSpillSize(*RC) == 4 &&
         "Only support 4-byte stores to stack now.");
  assert(ARC::GPR32RegClass.hasSubClassEq(RC) &&
         "Only support GPR32 stores to stack now.");
  LLVM_DEBUG(dbgs() << "Created store reg=" << printReg(SrcReg, TRI)
                    << " to FrameIndex=" << FrameIndex << "\n");
  BuildMI(MBB, I, dl, get(ARC::ST_rs9))
      .addReg(SrcReg, getKillRegState(isKill))
      .addFrameIndex(FrameIndex)
      .addImm(0)
      .addMemOperand(MMO);
}

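/// Reload DestReg from the given stack slot. Mirrors storeRegToStackSlot:
/// only 4-byte GPR32 reloads are supported, emitted as an LD_rs9 with a zero
/// offset from the frame index.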
void ARCInstrInfo::loadRegFromStackSlot(MachineBasicBlock &MBB,
                                        MachineBasicBlock::iterator I,
                                        unsigned DestReg, int FrameIndex,
                                        const TargetRegisterClass *RC,
                                        const TargetRegisterInfo *TRI) const {
  DebugLoc dl = MBB.findDebugLoc(I);
  MachineFunction &MF = *MBB.getParent();
  MachineFrameInfo &MFI = MF.getFrameInfo();
  unsigned Align = MFI.getObjectAlignment(FrameIndex);
  MachineMemOperand *MMO = MF.getMachineMemOperand(
      MachinePointerInfo::getFixedStack(MF, FrameIndex),
      MachineMemOperand::MOLoad, MFI.getObjectSize(FrameIndex), Align);

  assert(MMO && "Couldn't get MachineMemOperand for load from stack.");
  assert(TRI->getSpillSize(*RC) == 4 &&
         "Only support 4-byte loads from stack now.");
  assert(ARC::GPR32RegClass.hasSubClassEq(RC) &&
         "Only support GPR32 loads from stack now.");
  LLVM_DEBUG(dbgs() << "Created load reg=" << printReg(DestReg, TRI)
                    << " from FrameIndex=" << FrameIndex << "\n");
  BuildMI(MBB, I, dl, get(ARC::LD_rs9))
      .addReg(DestReg, RegState::Define)
      .addFrameIndex(FrameIndex)
      .addImm(0)
      .addMemOperand(MMO);
}

/// Invert the condition of the branch described by Cond, e.g. turning COND_E
/// into COND_NE. Returns false to indicate the reversal succeeded.
bool ARCInstrInfo::reverseBranchCondition(
    SmallVectorImpl<MachineOperand> &Cond) const {
  assert((Cond.size() == 3) && "Invalid ARC branch condition!");
  Cond[2].setImm(GetOppositeBranchCondition((ARCCC::CondCode)Cond[2].getImm()));
  return false;
}

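/// Materialize an immediate into Reg before MI. Only values that fit the
/// signed 12-bit MOV_rs12 form are handled here; larger immediates would
/// need the ARC long-immediate encodings, which are not implemented yet.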
MachineBasicBlock::iterator
ARCInstrInfo::loadImmediate(MachineBasicBlock &MBB,
                            MachineBasicBlock::iterator MI, unsigned Reg,
                            uint64_t Value) const {
  DebugLoc dl = MBB.findDebugLoc(MI);
  if (isInt<12>(Value)) {
    return BuildMI(MBB, MI, dl, get(ARC::MOV_rs12), Reg)
        .addImm(Value)
        .getInstr();
  }
  llvm_unreachable("Need Arc long immediate instructions.");
}

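/// Insert an unconditional (BR) or compare-and-branch (BRcc) sequence at the
/// end of MBB and return the number of instructions added. Cond, when
/// non-empty, holds the three BRcc operands collected by analyzeBranch.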
unsigned ARCInstrInfo::insertBranch(MachineBasicBlock &MBB,
                                    MachineBasicBlock *TBB,
                                    MachineBasicBlock *FBB,
                                    ArrayRef<MachineOperand> Cond,
                                    const DebugLoc &dl, int *BytesAdded) const {
  assert(!BytesAdded && "Code size not handled.");

  // Shouldn't be a fall through.
  assert(TBB && "InsertBranch must not be told to insert a fallthrough");
  assert((Cond.size() == 3 || Cond.size() == 0) &&
         "ARC branch conditions have three components!");

  if (Cond.empty()) {
    BuildMI(&MBB, dl, get(ARC::BR)).addMBB(TBB);
    return 1;
  }
  int BccOpc = Cond[1].isImm() ? ARC::BRcc_ru6_p : ARC::BRcc_rr_p;
  MachineInstrBuilder MIB = BuildMI(&MBB, dl, get(BccOpc));
  MIB.addMBB(TBB);
  for (unsigned i = 0; i < 3; i++) {
    MIB.add(Cond[i]);
  }

  // One-way conditional branch.
  if (!FBB) {
    return 1;
  }

  // Two-way conditional branch.
  BuildMI(&MBB, dl, get(ARC::BR)).addMBB(FBB);
  return 2;
}

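/// Return the size of MI in bytes, falling back to the inline-asm length
/// heuristic for INLINEASM instructions.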
unsigned ARCInstrInfo::getInstSizeInBytes(const MachineInstr &MI) const {
  if (MI.isInlineAsm()) {
    const MachineFunction *MF = MI.getParent()->getParent();
    const char *AsmStr = MI.getOperand(0).getSymbolName();
    return getInlineAsmLength(AsmStr, *MF->getTarget().getMCAsmInfo());
  }
  return MI.getDesc().getSize();
}

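// Addressing-mode predicates: decode the increment kind recorded in the
// instruction's TSFlags (see the TSFlags constants above).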
bool ARCInstrInfo::isPostIncrement(const MachineInstr &MI) const {
  const MCInstrDesc &MID = MI.getDesc();
  const uint64_t F = MID.TSFlags;
  return ((F >> TSF_AddrModeOff) & TSF_AddModeMask) == PostInc;
}

bool ARCInstrInfo::isPreIncrement(const MachineInstr &MI) const {
  const MCInstrDesc &MID = MI.getDesc();
  const uint64_t F = MID.TSFlags;
  return ((F >> TSF_AddrModeOff) & TSF_AddModeMask) == PreInc;
}

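/// For a load or store, report which operand indices hold the base register
/// and the immediate offset. Pre-/post-increment forms are assumed to carry
/// an extra leading operand, so both positions shift by one for them.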
bool ARCInstrInfo::getBaseAndOffsetPosition(const MachineInstr &MI,
                                            unsigned &BasePos,
                                            unsigned &OffsetPos) const {
  if (!MI.mayLoad() && !MI.mayStore())
    return false;

  BasePos = 1;
  OffsetPos = 2;

  if (isPostIncrement(MI) || isPreIncrement(MI)) {
    BasePos++;
    OffsetPos++;
  }

  if (!MI.getOperand(BasePos).isReg() || !MI.getOperand(OffsetPos).isImm())