//===- X86RegisterInfo.cpp - X86 Register Information -----------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file was developed by the LLVM research group and is distributed under
// the University of Illinois Open Source License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file contains the X86 implementation of the MRegisterInfo class.  This
// file is responsible for the frame pointer elimination optimization on X86.
//
//===----------------------------------------------------------------------===//
16 #include "X86RegisterInfo.h"
17 #include "X86InstrBuilder.h"
18 #include "llvm/Constants.h"
19 #include "llvm/Type.h"
20 #include "llvm/CodeGen/ValueTypes.h"
21 #include "llvm/CodeGen/MachineInstrBuilder.h"
22 #include "llvm/CodeGen/MachineFunction.h"
23 #include "llvm/CodeGen/MachineFrameInfo.h"
24 #include "llvm/CodeGen/MachineLocation.h"
25 #include "llvm/Target/TargetFrameInfo.h"
26 #include "llvm/Target/TargetMachine.h"
27 #include "llvm/Target/TargetOptions.h"
28 #include "llvm/Support/CommandLine.h"
29 #include "llvm/ADT/STLExtras.h"
36 NoFusing("disable-spill-fusing",
37 cl::desc("Disable fusing of spill code into instructions"));
39 PrintFailedFusing("print-failed-fuse-candidates",
40 cl::desc("Print instructions that the allocator wants to"
41 " fuse, but the X86 backend currently can't"),
X86RegisterInfo::X86RegisterInfo()
  : X86GenRegisterInfo(X86::ADJCALLSTACKDOWN, X86::ADJCALLSTACKUP) {}
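
// ADJCALLSTACKDOWN and ADJCALLSTACKUP are the pseudo instructions that bracket
// the argument area of each call; eliminateCallFramePseudoInstr below rewrites
// them into explicit ESP adjustments (or simply removes them).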
void X86RegisterInfo::storeRegToStackSlot(MachineBasicBlock &MBB,
                                          MachineBasicBlock::iterator MI,
                                          unsigned SrcReg, int FrameIdx,
                                          const TargetRegisterClass *RC) const {
  unsigned Opc;
  if (RC == &X86::R32RegClass) {
    Opc = X86::MOV32mr;
  } else if (RC == &X86::R8RegClass) {
    Opc = X86::MOV8mr;
  } else if (RC == &X86::R16RegClass) {
    Opc = X86::MOV16mr;
  } else if (RC == &X86::RFPRegClass || RC == &X86::RSTRegClass) {
    Opc = X86::FpST64m;   // x87 FP store pseudo (name assumed; elided in the excerpt)
  } else if (RC == &X86::FR32RegClass) {
    Opc = X86::MOVSSmr;
  } else if (RC == &X86::FR64RegClass) {
    Opc = X86::MOVSDmr;
  } else if (RC == &X86::VR128RegClass) {
    Opc = X86::MOVAPSmr;
  } else {
    assert(0 && "Unknown regclass");
    abort();
  }
  // 4 address operands (base, scale, index, disp) plus the stored register.
  addFrameReference(BuildMI(MBB, MI, Opc, 5), FrameIdx).addReg(SrcReg);
}
void X86RegisterInfo::loadRegFromStackSlot(MachineBasicBlock &MBB,
                                           MachineBasicBlock::iterator MI,
                                           unsigned DestReg, int FrameIdx,
                                           const TargetRegisterClass *RC) const {
  unsigned Opc;
  if (RC == &X86::R32RegClass) {
    Opc = X86::MOV32rm;
  } else if (RC == &X86::R8RegClass) {
    Opc = X86::MOV8rm;
  } else if (RC == &X86::R16RegClass) {
    Opc = X86::MOV16rm;
  } else if (RC == &X86::RFPRegClass || RC == &X86::RSTRegClass) {
    Opc = X86::FpLD64m;   // x87 FP load pseudo (name assumed; elided in the excerpt)
  } else if (RC == &X86::FR32RegClass) {
    Opc = X86::MOVSSrm;
  } else if (RC == &X86::FR64RegClass) {
    Opc = X86::MOVSDrm;
  } else if (RC == &X86::VR128RegClass) {
    Opc = X86::MOVAPSrm;
  } else {
    assert(0 && "Unknown regclass");
    abort();
  }
  // The destination register plus the 4 address operands.
  addFrameReference(BuildMI(MBB, MI, Opc, 4, DestReg), FrameIdx);
}
void X86RegisterInfo::copyRegToReg(MachineBasicBlock &MBB,
                                   MachineBasicBlock::iterator MI,
                                   unsigned DestReg, unsigned SrcReg,
                                   const TargetRegisterClass *RC) const {
  unsigned Opc;
  if (RC == &X86::R32RegClass) {
    Opc = X86::MOV32rr;
  } else if (RC == &X86::R8RegClass) {
    Opc = X86::MOV8rr;
  } else if (RC == &X86::R16RegClass) {
    Opc = X86::MOV16rr;
  } else if (RC == &X86::RFPRegClass || RC == &X86::RSTRegClass) {
    Opc = X86::FpMOV;     // x87 FP register-move pseudo (name assumed; elided in the excerpt)
  } else if (RC == &X86::FR32RegClass) {
    Opc = X86::FsMOVAPSrr;
  } else if (RC == &X86::FR64RegClass) {
    Opc = X86::FsMOVAPDrr;
  } else if (RC == &X86::VR128RegClass) {
    Opc = X86::MOVAPSrr;
  } else {
    assert(0 && "Unknown regclass");
    abort();
  }
  BuildMI(MBB, MI, Opc, 1, DestReg).addReg(SrcReg);
}
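
// A note on the operand counts used below: addFrameReference expands a frame
// index into the four X86 address operands (base, scale, index, displacement).
// Hence the "4" for a plain memory form, "5" when one extra register or
// immediate operand follows the address, and "6" when two do.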
static MachineInstr *MakeMInst(unsigned Opcode, unsigned FrameIndex,
                               MachineInstr *MI) {
  return addFrameReference(BuildMI(Opcode, 4), FrameIndex);
}

static MachineInstr *MakeMRInst(unsigned Opcode, unsigned FrameIndex,
                                MachineInstr *MI) {
  return addFrameReference(BuildMI(Opcode, 5), FrameIndex)
                 .addReg(MI->getOperand(1).getReg());
}

static MachineInstr *MakeMRIInst(unsigned Opcode, unsigned FrameIndex,
                                 MachineInstr *MI) {
  return addFrameReference(BuildMI(Opcode, 6), FrameIndex)
        .addReg(MI->getOperand(1).getReg())
        .addZImm(MI->getOperand(2).getImmedValue());
}

static MachineInstr *MakeMIInst(unsigned Opcode, unsigned FrameIndex,
                                MachineInstr *MI) {
  if (MI->getOperand(1).isImmediate())
    return addFrameReference(BuildMI(Opcode, 5), FrameIndex)
      .addZImm(MI->getOperand(1).getImmedValue());
  else if (MI->getOperand(1).isGlobalAddress())
    return addFrameReference(BuildMI(Opcode, 5), FrameIndex)
      .addGlobalAddress(MI->getOperand(1).getGlobal(),
                        false, MI->getOperand(1).getOffset());
  assert(0 && "Unknown operand for MakeMI!");
  abort();
}

static MachineInstr *MakeM0Inst(unsigned Opcode, unsigned FrameIndex,
                                MachineInstr *MI) {
  return addFrameReference(BuildMI(Opcode, 5), FrameIndex).addZImm(0);
}

static MachineInstr *MakeRMInst(unsigned Opcode, unsigned FrameIndex,
                                MachineInstr *MI) {
  const MachineOperand& op = MI->getOperand(0);
  return addFrameReference(BuildMI(Opcode, 5, op.getReg(), op.getUseType()),
                           FrameIndex);
}

static MachineInstr *MakeRMIInst(unsigned Opcode, unsigned FrameIndex,
                                 MachineInstr *MI) {
  const MachineOperand& op = MI->getOperand(0);
  return addFrameReference(BuildMI(Opcode, 6, op.getReg(), op.getUseType()),
                           FrameIndex).addZImm(MI->getOperand(2).getImmedValue());
}
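
// foldMemoryOperand - If possible, fold the spill slot reference directly into
// the instruction being allocated.  The first table below handles folding into
// operand 0 (producing the memory-destination "mr"/"mi" forms); the second
// handles folding into operand 1 (producing the memory-source "rm"/"rmi"
// forms).  On failure the function returns NULL and the caller emits a
// separate load or store instead.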
MachineInstr* X86RegisterInfo::foldMemoryOperand(MachineInstr* MI,
                                                 unsigned i,   // operand to fold
                                                 int FrameIndex) const {
  if (NoFusing) return NULL;

  /// FIXME: This should obviously be autogenerated by tablegen when patterns
  /// are available!
  MachineBasicBlock& MBB = *MI->getParent();
  if (i == 0) {
    switch (MI->getOpcode()) {
    case X86::XCHG8rr:    return MakeMRInst(X86::XCHG8mr, FrameIndex, MI);
    case X86::XCHG16rr:   return MakeMRInst(X86::XCHG16mr, FrameIndex, MI);
    case X86::XCHG32rr:   return MakeMRInst(X86::XCHG32mr, FrameIndex, MI);
    case X86::MOV8rr:     return MakeMRInst(X86::MOV8mr, FrameIndex, MI);
    case X86::MOV16rr:    return MakeMRInst(X86::MOV16mr, FrameIndex, MI);
    case X86::MOV32rr:    return MakeMRInst(X86::MOV32mr, FrameIndex, MI);
    case X86::MOV8ri:     return MakeMIInst(X86::MOV8mi, FrameIndex, MI);
    case X86::MOV16ri:    return MakeMIInst(X86::MOV16mi, FrameIndex, MI);
    case X86::MOV32ri:    return MakeMIInst(X86::MOV32mi, FrameIndex, MI);
    case X86::MUL8r:      return MakeMInst(X86::MUL8m, FrameIndex, MI);
    case X86::MUL16r:     return MakeMInst(X86::MUL16m, FrameIndex, MI);
    case X86::MUL32r:     return MakeMInst(X86::MUL32m, FrameIndex, MI);
    case X86::IMUL8r:     return MakeMInst(X86::IMUL8m, FrameIndex, MI);
    case X86::IMUL16r:    return MakeMInst(X86::IMUL16m, FrameIndex, MI);
    case X86::IMUL32r:    return MakeMInst(X86::IMUL32m, FrameIndex, MI);
    case X86::DIV8r:      return MakeMInst(X86::DIV8m, FrameIndex, MI);
    case X86::DIV16r:     return MakeMInst(X86::DIV16m, FrameIndex, MI);
    case X86::DIV32r:     return MakeMInst(X86::DIV32m, FrameIndex, MI);
    case X86::IDIV8r:     return MakeMInst(X86::IDIV8m, FrameIndex, MI);
    case X86::IDIV16r:    return MakeMInst(X86::IDIV16m, FrameIndex, MI);
    case X86::IDIV32r:    return MakeMInst(X86::IDIV32m, FrameIndex, MI);
    case X86::NEG8r:      return MakeMInst(X86::NEG8m, FrameIndex, MI);
    case X86::NEG16r:     return MakeMInst(X86::NEG16m, FrameIndex, MI);
    case X86::NEG32r:     return MakeMInst(X86::NEG32m, FrameIndex, MI);
    case X86::NOT8r:      return MakeMInst(X86::NOT8m, FrameIndex, MI);
    case X86::NOT16r:     return MakeMInst(X86::NOT16m, FrameIndex, MI);
    case X86::NOT32r:     return MakeMInst(X86::NOT32m, FrameIndex, MI);
    case X86::INC8r:      return MakeMInst(X86::INC8m, FrameIndex, MI);
    case X86::INC16r:     return MakeMInst(X86::INC16m, FrameIndex, MI);
    case X86::INC32r:     return MakeMInst(X86::INC32m, FrameIndex, MI);
    case X86::DEC8r:      return MakeMInst(X86::DEC8m, FrameIndex, MI);
    case X86::DEC16r:     return MakeMInst(X86::DEC16m, FrameIndex, MI);
    case X86::DEC32r:     return MakeMInst(X86::DEC32m, FrameIndex, MI);
    case X86::ADD8rr:     return MakeMRInst(X86::ADD8mr, FrameIndex, MI);
    case X86::ADD16rr:    return MakeMRInst(X86::ADD16mr, FrameIndex, MI);
    case X86::ADD32rr:    return MakeMRInst(X86::ADD32mr, FrameIndex, MI);
    case X86::ADD8ri:     return MakeMIInst(X86::ADD8mi, FrameIndex, MI);
    case X86::ADD16ri:    return MakeMIInst(X86::ADD16mi, FrameIndex, MI);
    case X86::ADD32ri:    return MakeMIInst(X86::ADD32mi, FrameIndex, MI);
    case X86::ADD16ri8:   return MakeMIInst(X86::ADD16mi8, FrameIndex, MI);
    case X86::ADD32ri8:   return MakeMIInst(X86::ADD32mi8, FrameIndex, MI);
    case X86::ADC32rr:    return MakeMRInst(X86::ADC32mr, FrameIndex, MI);
    case X86::ADC32ri:    return MakeMIInst(X86::ADC32mi, FrameIndex, MI);
    case X86::ADC32ri8:   return MakeMIInst(X86::ADC32mi8, FrameIndex, MI);
    case X86::SUB8rr:     return MakeMRInst(X86::SUB8mr, FrameIndex, MI);
    case X86::SUB16rr:    return MakeMRInst(X86::SUB16mr, FrameIndex, MI);
    case X86::SUB32rr:    return MakeMRInst(X86::SUB32mr, FrameIndex, MI);
    case X86::SUB8ri:     return MakeMIInst(X86::SUB8mi, FrameIndex, MI);
    case X86::SUB16ri:    return MakeMIInst(X86::SUB16mi, FrameIndex, MI);
    case X86::SUB32ri:    return MakeMIInst(X86::SUB32mi, FrameIndex, MI);
    case X86::SUB16ri8:   return MakeMIInst(X86::SUB16mi8, FrameIndex, MI);
    case X86::SUB32ri8:   return MakeMIInst(X86::SUB32mi8, FrameIndex, MI);
    case X86::SBB32rr:    return MakeMRInst(X86::SBB32mr, FrameIndex, MI);
    case X86::SBB32ri:    return MakeMIInst(X86::SBB32mi, FrameIndex, MI);
    case X86::SBB32ri8:   return MakeMIInst(X86::SBB32mi8, FrameIndex, MI);
    case X86::AND8rr:     return MakeMRInst(X86::AND8mr, FrameIndex, MI);
    case X86::AND16rr:    return MakeMRInst(X86::AND16mr, FrameIndex, MI);
    case X86::AND32rr:    return MakeMRInst(X86::AND32mr, FrameIndex, MI);
    case X86::AND8ri:     return MakeMIInst(X86::AND8mi, FrameIndex, MI);
    case X86::AND16ri:    return MakeMIInst(X86::AND16mi, FrameIndex, MI);
    case X86::AND32ri:    return MakeMIInst(X86::AND32mi, FrameIndex, MI);
    case X86::AND16ri8:   return MakeMIInst(X86::AND16mi8, FrameIndex, MI);
    case X86::AND32ri8:   return MakeMIInst(X86::AND32mi8, FrameIndex, MI);
    case X86::OR8rr:      return MakeMRInst(X86::OR8mr, FrameIndex, MI);
    case X86::OR16rr:     return MakeMRInst(X86::OR16mr, FrameIndex, MI);
    case X86::OR32rr:     return MakeMRInst(X86::OR32mr, FrameIndex, MI);
    case X86::OR8ri:      return MakeMIInst(X86::OR8mi, FrameIndex, MI);
    case X86::OR16ri:     return MakeMIInst(X86::OR16mi, FrameIndex, MI);
    case X86::OR32ri:     return MakeMIInst(X86::OR32mi, FrameIndex, MI);
    case X86::OR16ri8:    return MakeMIInst(X86::OR16mi8, FrameIndex, MI);
    case X86::OR32ri8:    return MakeMIInst(X86::OR32mi8, FrameIndex, MI);
    case X86::XOR8rr:     return MakeMRInst(X86::XOR8mr, FrameIndex, MI);
    case X86::XOR16rr:    return MakeMRInst(X86::XOR16mr, FrameIndex, MI);
    case X86::XOR32rr:    return MakeMRInst(X86::XOR32mr, FrameIndex, MI);
    case X86::XOR8ri:     return MakeMIInst(X86::XOR8mi, FrameIndex, MI);
    case X86::XOR16ri:    return MakeMIInst(X86::XOR16mi, FrameIndex, MI);
    case X86::XOR32ri:    return MakeMIInst(X86::XOR32mi, FrameIndex, MI);
    case X86::XOR16ri8:   return MakeMIInst(X86::XOR16mi8, FrameIndex, MI);
    case X86::XOR32ri8:   return MakeMIInst(X86::XOR32mi8, FrameIndex, MI);
    case X86::SHL8rCL:    return MakeMInst(X86::SHL8mCL, FrameIndex, MI);
    case X86::SHL16rCL:   return MakeMInst(X86::SHL16mCL, FrameIndex, MI);
    case X86::SHL32rCL:   return MakeMInst(X86::SHL32mCL, FrameIndex, MI);
    case X86::SHL8ri:     return MakeMIInst(X86::SHL8mi, FrameIndex, MI);
    case X86::SHL16ri:    return MakeMIInst(X86::SHL16mi, FrameIndex, MI);
    case X86::SHL32ri:    return MakeMIInst(X86::SHL32mi, FrameIndex, MI);
    case X86::SHR8rCL:    return MakeMInst(X86::SHR8mCL, FrameIndex, MI);
    case X86::SHR16rCL:   return MakeMInst(X86::SHR16mCL, FrameIndex, MI);
    case X86::SHR32rCL:   return MakeMInst(X86::SHR32mCL, FrameIndex, MI);
    case X86::SHR8ri:     return MakeMIInst(X86::SHR8mi, FrameIndex, MI);
    case X86::SHR16ri:    return MakeMIInst(X86::SHR16mi, FrameIndex, MI);
    case X86::SHR32ri:    return MakeMIInst(X86::SHR32mi, FrameIndex, MI);
    case X86::SAR8rCL:    return MakeMInst(X86::SAR8mCL, FrameIndex, MI);
    case X86::SAR16rCL:   return MakeMInst(X86::SAR16mCL, FrameIndex, MI);
    case X86::SAR32rCL:   return MakeMInst(X86::SAR32mCL, FrameIndex, MI);
    case X86::SAR8ri:     return MakeMIInst(X86::SAR8mi, FrameIndex, MI);
    case X86::SAR16ri:    return MakeMIInst(X86::SAR16mi, FrameIndex, MI);
    case X86::SAR32ri:    return MakeMIInst(X86::SAR32mi, FrameIndex, MI);
    case X86::ROL8rCL:    return MakeMInst(X86::ROL8mCL, FrameIndex, MI);
    case X86::ROL16rCL:   return MakeMInst(X86::ROL16mCL, FrameIndex, MI);
    case X86::ROL32rCL:   return MakeMInst(X86::ROL32mCL, FrameIndex, MI);
    case X86::ROL8ri:     return MakeMIInst(X86::ROL8mi, FrameIndex, MI);
    case X86::ROL16ri:    return MakeMIInst(X86::ROL16mi, FrameIndex, MI);
    case X86::ROL32ri:    return MakeMIInst(X86::ROL32mi, FrameIndex, MI);
    case X86::ROR8rCL:    return MakeMInst(X86::ROR8mCL, FrameIndex, MI);
    case X86::ROR16rCL:   return MakeMInst(X86::ROR16mCL, FrameIndex, MI);
    case X86::ROR32rCL:   return MakeMInst(X86::ROR32mCL, FrameIndex, MI);
    case X86::ROR8ri:     return MakeMIInst(X86::ROR8mi, FrameIndex, MI);
    case X86::ROR16ri:    return MakeMIInst(X86::ROR16mi, FrameIndex, MI);
    case X86::ROR32ri:    return MakeMIInst(X86::ROR32mi, FrameIndex, MI);
    case X86::SHLD32rrCL: return MakeMRInst(X86::SHLD32mrCL, FrameIndex, MI);
    case X86::SHLD32rri8: return MakeMRIInst(X86::SHLD32mri8, FrameIndex, MI);
    case X86::SHRD32rrCL: return MakeMRInst(X86::SHRD32mrCL, FrameIndex, MI);
    case X86::SHRD32rri8: return MakeMRIInst(X86::SHRD32mri8, FrameIndex, MI);
    case X86::SHLD16rrCL: return MakeMRInst(X86::SHLD16mrCL, FrameIndex, MI);
    case X86::SHLD16rri8: return MakeMRIInst(X86::SHLD16mri8, FrameIndex, MI);
    case X86::SHRD16rrCL: return MakeMRInst(X86::SHRD16mrCL, FrameIndex, MI);
    case X86::SHRD16rri8: return MakeMRIInst(X86::SHRD16mri8, FrameIndex, MI);
    case X86::SETBr:      return MakeMInst(X86::SETBm, FrameIndex, MI);
    case X86::SETAEr:     return MakeMInst(X86::SETAEm, FrameIndex, MI);
    case X86::SETEr:      return MakeMInst(X86::SETEm, FrameIndex, MI);
    case X86::SETNEr:     return MakeMInst(X86::SETNEm, FrameIndex, MI);
    case X86::SETBEr:     return MakeMInst(X86::SETBEm, FrameIndex, MI);
    case X86::SETAr:      return MakeMInst(X86::SETAm, FrameIndex, MI);
    case X86::SETSr:      return MakeMInst(X86::SETSm, FrameIndex, MI);
    case X86::SETNSr:     return MakeMInst(X86::SETNSm, FrameIndex, MI);
    case X86::SETPr:      return MakeMInst(X86::SETPm, FrameIndex, MI);
    case X86::SETNPr:     return MakeMInst(X86::SETNPm, FrameIndex, MI);
    case X86::SETLr:      return MakeMInst(X86::SETLm, FrameIndex, MI);
    case X86::SETGEr:     return MakeMInst(X86::SETGEm, FrameIndex, MI);
    case X86::SETLEr:     return MakeMInst(X86::SETLEm, FrameIndex, MI);
    case X86::SETGr:      return MakeMInst(X86::SETGm, FrameIndex, MI);
    case X86::TEST8rr:    return MakeMRInst(X86::TEST8mr, FrameIndex, MI);
    case X86::TEST16rr:   return MakeMRInst(X86::TEST16mr, FrameIndex, MI);
    case X86::TEST32rr:   return MakeMRInst(X86::TEST32mr, FrameIndex, MI);
    case X86::TEST8ri:    return MakeMIInst(X86::TEST8mi, FrameIndex, MI);
    case X86::TEST16ri:   return MakeMIInst(X86::TEST16mi, FrameIndex, MI);
    case X86::TEST32ri:   return MakeMIInst(X86::TEST32mi, FrameIndex, MI);
    case X86::CMP8rr:     return MakeMRInst(X86::CMP8mr, FrameIndex, MI);
    case X86::CMP16rr:    return MakeMRInst(X86::CMP16mr, FrameIndex, MI);
    case X86::CMP32rr:    return MakeMRInst(X86::CMP32mr, FrameIndex, MI);
    case X86::CMP8ri:     return MakeMIInst(X86::CMP8mi, FrameIndex, MI);
    case X86::CMP16ri:    return MakeMIInst(X86::CMP16mi, FrameIndex, MI);
    case X86::CMP32ri:    return MakeMIInst(X86::CMP32mi, FrameIndex, MI);
    // Alias instructions
    case X86::MOV8r0:     return MakeM0Inst(X86::MOV8mi, FrameIndex, MI);
    case X86::MOV16r0:    return MakeM0Inst(X86::MOV16mi, FrameIndex, MI);
    case X86::MOV32r0:    return MakeM0Inst(X86::MOV32mi, FrameIndex, MI);
    // Alias scalar SSE instructions
    case X86::FsMOVAPSrr: return MakeMRInst(X86::MOVSSmr, FrameIndex, MI);
    case X86::FsMOVAPDrr: return MakeMRInst(X86::MOVSDmr, FrameIndex, MI);
    // Scalar SSE instructions
    case X86::MOVSSrr:    return MakeMRInst(X86::MOVSSmr, FrameIndex, MI);
    case X86::MOVSDrr:    return MakeMRInst(X86::MOVSDmr, FrameIndex, MI);
    // Packed SSE instructions
    // FIXME: Can't use these until we are spilling XMM registers to
    // 128-bit locations.
    case X86::MOVAPSrr:   return MakeMRInst(X86::MOVAPSmr, FrameIndex, MI);
    case X86::MOVAPDrr:   return MakeMRInst(X86::MOVAPDmr, FrameIndex, MI);
    }
  } else if (i == 1) {
    switch (MI->getOpcode()) {
    case X86::XCHG8rr:    return MakeRMInst(X86::XCHG8rm, FrameIndex, MI);
    case X86::XCHG16rr:   return MakeRMInst(X86::XCHG16rm, FrameIndex, MI);
    case X86::XCHG32rr:   return MakeRMInst(X86::XCHG32rm, FrameIndex, MI);
    case X86::MOV8rr:     return MakeRMInst(X86::MOV8rm, FrameIndex, MI);
    case X86::MOV16rr:    return MakeRMInst(X86::MOV16rm, FrameIndex, MI);
    case X86::MOV32rr:    return MakeRMInst(X86::MOV32rm, FrameIndex, MI);
    case X86::CMOVB16rr:  return MakeRMInst(X86::CMOVB16rm, FrameIndex, MI);
    case X86::CMOVB32rr:  return MakeRMInst(X86::CMOVB32rm, FrameIndex, MI);
    case X86::CMOVAE16rr: return MakeRMInst(X86::CMOVAE16rm, FrameIndex, MI);
    case X86::CMOVAE32rr: return MakeRMInst(X86::CMOVAE32rm, FrameIndex, MI);
    case X86::CMOVE16rr:  return MakeRMInst(X86::CMOVE16rm, FrameIndex, MI);
    case X86::CMOVE32rr:  return MakeRMInst(X86::CMOVE32rm, FrameIndex, MI);
    case X86::CMOVNE16rr: return MakeRMInst(X86::CMOVNE16rm, FrameIndex, MI);
    case X86::CMOVNE32rr: return MakeRMInst(X86::CMOVNE32rm, FrameIndex, MI);
    case X86::CMOVBE16rr: return MakeRMInst(X86::CMOVBE16rm, FrameIndex, MI);
    case X86::CMOVBE32rr: return MakeRMInst(X86::CMOVBE32rm, FrameIndex, MI);
    case X86::CMOVA16rr:  return MakeRMInst(X86::CMOVA16rm, FrameIndex, MI);
    case X86::CMOVA32rr:  return MakeRMInst(X86::CMOVA32rm, FrameIndex, MI);
    case X86::CMOVS16rr:  return MakeRMInst(X86::CMOVS16rm, FrameIndex, MI);
    case X86::CMOVS32rr:  return MakeRMInst(X86::CMOVS32rm, FrameIndex, MI);
    case X86::CMOVNS16rr: return MakeRMInst(X86::CMOVNS16rm, FrameIndex, MI);
    case X86::CMOVNS32rr: return MakeRMInst(X86::CMOVNS32rm, FrameIndex, MI);
    case X86::CMOVP16rr:  return MakeRMInst(X86::CMOVP16rm, FrameIndex, MI);
    case X86::CMOVP32rr:  return MakeRMInst(X86::CMOVP32rm, FrameIndex, MI);
    case X86::CMOVNP16rr: return MakeRMInst(X86::CMOVNP16rm, FrameIndex, MI);
    case X86::CMOVNP32rr: return MakeRMInst(X86::CMOVNP32rm, FrameIndex, MI);
    case X86::CMOVL16rr:  return MakeRMInst(X86::CMOVL16rm, FrameIndex, MI);
    case X86::CMOVL32rr:  return MakeRMInst(X86::CMOVL32rm, FrameIndex, MI);
    case X86::CMOVGE16rr: return MakeRMInst(X86::CMOVGE16rm, FrameIndex, MI);
    case X86::CMOVGE32rr: return MakeRMInst(X86::CMOVGE32rm, FrameIndex, MI);
    case X86::CMOVLE16rr: return MakeRMInst(X86::CMOVLE16rm, FrameIndex, MI);
    case X86::CMOVLE32rr: return MakeRMInst(X86::CMOVLE32rm, FrameIndex, MI);
    case X86::CMOVG16rr:  return MakeRMInst(X86::CMOVG16rm, FrameIndex, MI);
    case X86::CMOVG32rr:  return MakeRMInst(X86::CMOVG32rm, FrameIndex, MI);
    case X86::ADD8rr:     return MakeRMInst(X86::ADD8rm, FrameIndex, MI);
    case X86::ADD16rr:    return MakeRMInst(X86::ADD16rm, FrameIndex, MI);
    case X86::ADD32rr:    return MakeRMInst(X86::ADD32rm, FrameIndex, MI);
    case X86::ADC32rr:    return MakeRMInst(X86::ADC32rm, FrameIndex, MI);
    case X86::SUB8rr:     return MakeRMInst(X86::SUB8rm, FrameIndex, MI);
    case X86::SUB16rr:    return MakeRMInst(X86::SUB16rm, FrameIndex, MI);
    case X86::SUB32rr:    return MakeRMInst(X86::SUB32rm, FrameIndex, MI);
    case X86::SBB32rr:    return MakeRMInst(X86::SBB32rm, FrameIndex, MI);
    case X86::AND8rr:     return MakeRMInst(X86::AND8rm, FrameIndex, MI);
    case X86::AND16rr:    return MakeRMInst(X86::AND16rm, FrameIndex, MI);
    case X86::AND32rr:    return MakeRMInst(X86::AND32rm, FrameIndex, MI);
    case X86::OR8rr:      return MakeRMInst(X86::OR8rm, FrameIndex, MI);
    case X86::OR16rr:     return MakeRMInst(X86::OR16rm, FrameIndex, MI);
    case X86::OR32rr:     return MakeRMInst(X86::OR32rm, FrameIndex, MI);
    case X86::XOR8rr:     return MakeRMInst(X86::XOR8rm, FrameIndex, MI);
    case X86::XOR16rr:    return MakeRMInst(X86::XOR16rm, FrameIndex, MI);
    case X86::XOR32rr:    return MakeRMInst(X86::XOR32rm, FrameIndex, MI);
    case X86::TEST8rr:    return MakeRMInst(X86::TEST8rm, FrameIndex, MI);
    case X86::TEST16rr:   return MakeRMInst(X86::TEST16rm, FrameIndex, MI);
    case X86::TEST32rr:   return MakeRMInst(X86::TEST32rm, FrameIndex, MI);
    case X86::IMUL16rr:   return MakeRMInst(X86::IMUL16rm, FrameIndex, MI);
    case X86::IMUL32rr:   return MakeRMInst(X86::IMUL32rm, FrameIndex, MI);
    case X86::IMUL16rri:  return MakeRMIInst(X86::IMUL16rmi, FrameIndex, MI);
    case X86::IMUL32rri:  return MakeRMIInst(X86::IMUL32rmi, FrameIndex, MI);
    case X86::IMUL16rri8: return MakeRMIInst(X86::IMUL16rmi8, FrameIndex, MI);
    case X86::IMUL32rri8: return MakeRMIInst(X86::IMUL32rmi8, FrameIndex, MI);
    case X86::CMP8rr:     return MakeRMInst(X86::CMP8rm, FrameIndex, MI);
    case X86::CMP16rr:    return MakeRMInst(X86::CMP16rm, FrameIndex, MI);
    case X86::CMP32rr:    return MakeRMInst(X86::CMP32rm, FrameIndex, MI);
    case X86::MOVSX16rr8: return MakeRMInst(X86::MOVSX16rm8, FrameIndex, MI);
    case X86::MOVSX32rr8: return MakeRMInst(X86::MOVSX32rm8, FrameIndex, MI);
    case X86::MOVSX32rr16:return MakeRMInst(X86::MOVSX32rm16, FrameIndex, MI);
    case X86::MOVZX16rr8: return MakeRMInst(X86::MOVZX16rm8, FrameIndex, MI);
    case X86::MOVZX32rr8: return MakeRMInst(X86::MOVZX32rm8, FrameIndex, MI);
    case X86::MOVZX32rr16:return MakeRMInst(X86::MOVZX32rm16, FrameIndex, MI);
    // Alias scalar SSE instructions
    case X86::FsMOVAPSrr: return MakeRMInst(X86::MOVSSrm, FrameIndex, MI);
    case X86::FsMOVAPDrr: return MakeRMInst(X86::MOVSDrm, FrameIndex, MI);
    // Scalar SSE instructions
    case X86::MOVSSrr:    return MakeRMInst(X86::MOVSSrm, FrameIndex, MI);
    case X86::MOVSDrr:    return MakeRMInst(X86::MOVSDrm, FrameIndex, MI);
    case X86::CVTTSS2SIrr:return MakeRMInst(X86::CVTTSS2SIrm, FrameIndex, MI);
    case X86::CVTTSD2SIrr:return MakeRMInst(X86::CVTTSD2SIrm, FrameIndex, MI);
    case X86::CVTSS2SDrr: return MakeRMInst(X86::CVTSS2SDrm, FrameIndex, MI);
    case X86::CVTSD2SSrr: return MakeRMInst(X86::CVTSD2SSrm, FrameIndex, MI);
    case X86::CVTSI2SSrr: return MakeRMInst(X86::CVTSI2SSrm, FrameIndex, MI);
    case X86::CVTSI2SDrr: return MakeRMInst(X86::CVTSI2SDrm, FrameIndex, MI);
    case X86::SQRTSSr:    return MakeRMInst(X86::SQRTSSm, FrameIndex, MI);
    case X86::SQRTSDr:    return MakeRMInst(X86::SQRTSDm, FrameIndex, MI);
    case X86::UCOMISSrr:  return MakeRMInst(X86::UCOMISSrm, FrameIndex, MI);
    case X86::UCOMISDrr:  return MakeRMInst(X86::UCOMISDrm, FrameIndex, MI);
    case X86::ADDSSrr:    return MakeRMInst(X86::ADDSSrm, FrameIndex, MI);
    case X86::ADDSDrr:    return MakeRMInst(X86::ADDSDrm, FrameIndex, MI);
    case X86::MULSSrr:    return MakeRMInst(X86::MULSSrm, FrameIndex, MI);
    case X86::MULSDrr:    return MakeRMInst(X86::MULSDrm, FrameIndex, MI);
    case X86::DIVSSrr:    return MakeRMInst(X86::DIVSSrm, FrameIndex, MI);
    case X86::DIVSDrr:    return MakeRMInst(X86::DIVSDrm, FrameIndex, MI);
    case X86::SUBSSrr:    return MakeRMInst(X86::SUBSSrm, FrameIndex, MI);
    case X86::SUBSDrr:    return MakeRMInst(X86::SUBSDrm, FrameIndex, MI);
    case X86::CMPSSrr:    return MakeRMInst(X86::CMPSSrm, FrameIndex, MI);
    case X86::CMPSDrr:    return MakeRMInst(X86::CMPSDrm, FrameIndex, MI);
    // Packed SSE instructions
    // FIXME: Can't use these until we are spilling XMM registers to
    // 128-bit locations.
    case X86::ANDPSrr:    return MakeRMInst(X86::ANDPSrm, FrameIndex, MI);
    case X86::ANDPDrr:    return MakeRMInst(X86::ANDPDrm, FrameIndex, MI);
    case X86::ORPSrr:     return MakeRMInst(X86::ORPSrm, FrameIndex, MI);
    case X86::ORPDrr:     return MakeRMInst(X86::ORPDrm, FrameIndex, MI);
    case X86::XORPSrr:    return MakeRMInst(X86::XORPSrm, FrameIndex, MI);
    case X86::XORPDrr:    return MakeRMInst(X86::XORPDrm, FrameIndex, MI);
    case X86::ANDNPSrr:   return MakeRMInst(X86::ANDNPSrm, FrameIndex, MI);
    case X86::ANDNPDrr:   return MakeRMInst(X86::ANDNPDrm, FrameIndex, MI);
    case X86::MOVAPSrr:   return MakeRMInst(X86::MOVAPSrm, FrameIndex, MI);
    case X86::MOVAPDrr:   return MakeRMInst(X86::MOVAPDrm, FrameIndex, MI);
    }
  }

  if (PrintFailedFusing)
    std::cerr << "We failed to fuse: " << *MI;
  return NULL;
}
//===----------------------------------------------------------------------===//
// Stack Frame Processing methods
//===----------------------------------------------------------------------===//
// hasFP - Return true if the specified function should have a dedicated frame
// pointer register.  This is true if the function has variable sized allocas
// or if frame pointer elimination is disabled.
//
static bool hasFP(MachineFunction &MF) {
  return NoFramePointerElim || MF.getFrameInfo()->hasVarSizedObjects();
}
void X86RegisterInfo::
eliminateCallFramePseudoInstr(MachineFunction &MF, MachineBasicBlock &MBB,
                              MachineBasicBlock::iterator I) const {
  if (hasFP(MF)) {
    // If we have a frame pointer, turn the adjcallstackdown instruction into a
    // 'sub ESP, <amt>' and the adjcallstackup instruction into 'add ESP,
    // <amt>'.
    MachineInstr *Old = I;
    unsigned Amount = Old->getOperand(0).getImmedValue();
    if (Amount != 0) {
      // We need to keep the stack aligned properly.  To do this, we round the
      // amount of space needed for the outgoing arguments up to the next
      // alignment boundary.
      unsigned Align = MF.getTarget().getFrameInfo()->getStackAlignment();
      Amount = (Amount+Align-1)/Align*Align;  // e.g. 20 bytes with Align==8 becomes 24

      MachineInstr *New = 0;
      if (Old->getOpcode() == X86::ADJCALLSTACKDOWN) {
        New = BuildMI(X86::SUB32ri, 1, X86::ESP, MachineOperand::UseAndDef)
                .addZImm(Amount);
      } else {
        assert(Old->getOpcode() == X86::ADJCALLSTACKUP);
        // Factor out the amount the callee already popped.
        unsigned CalleeAmt = Old->getOperand(1).getImmedValue();
        Amount -= CalleeAmt;
        if (Amount) {
          unsigned Opc = Amount < 128 ? X86::ADD32ri8 : X86::ADD32ri;
          New = BuildMI(Opc, 1, X86::ESP,
                        MachineOperand::UseAndDef).addZImm(Amount);
        }
      }

      // Replace the pseudo instruction with a new instruction...
      if (New) MBB.insert(I, New);
    }
  } else if (I->getOpcode() == X86::ADJCALLSTACKUP) {
    // If we are performing frame pointer elimination and if the callee pops
    // something off the stack pointer, add it back.  We do this until we have
    // more advanced stack pointer tracking ability.
    if (unsigned CalleeAmt = I->getOperand(1).getImmedValue()) {
      unsigned Opc = CalleeAmt < 128 ? X86::SUB32ri8 : X86::SUB32ri;
      MachineInstr *New =
        BuildMI(Opc, 1, X86::ESP,
                MachineOperand::UseAndDef).addZImm(CalleeAmt);
      MBB.insert(I, New);
    }
  }

  MBB.erase(I);
}
void X86RegisterInfo::eliminateFrameIndex(MachineBasicBlock::iterator II) const {
  unsigned i = 0;
  MachineInstr &MI = *II;
  MachineFunction &MF = *MI.getParent()->getParent();
  while (!MI.getOperand(i).isFrameIndex()) {
    ++i;
    assert(i < MI.getNumOperands() && "Instr doesn't have FrameIndex operand!");
  }

  int FrameIndex = MI.getOperand(i).getFrameIndex();

  // This must be part of a four operand memory reference.  Replace the
  // FrameIndex with the base register (EBP or ESP), then fold the frame
  // object's offset into the displacement operand.
  MI.SetMachineOperandReg(i, hasFP(MF) ? X86::EBP : X86::ESP);

  // Now add the frame object offset to the offset from EBP.
  int Offset = MF.getFrameInfo()->getObjectOffset(FrameIndex) +
               MI.getOperand(i+3).getImmedValue()+4;

  if (!hasFP(MF))
    Offset += MF.getFrameInfo()->getStackSize();
  else
    Offset += 4;  // Skip the saved EBP

  MI.SetMachineOperandConst(i+3, MachineOperand::MO_SignExtendedImmed, Offset);
}
void
X86RegisterInfo::processFunctionBeforeFrameFinalized(MachineFunction &MF) const {
  if (hasFP(MF)) {
    // Create a frame entry for the EBP register that must be saved.
    int FrameIdx = MF.getFrameInfo()->CreateFixedObject(4, -8);
    assert(FrameIdx == MF.getFrameInfo()->getObjectIndexBegin() &&
           "Slot for EBP register must be last in order to be found!");
  }
}
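
// Frame layout established by emitPrologue when a frame pointer is in use
// (derived from the code below):
//   [EBP+4]  return address
//   [EBP]    saved EBP (the fixed object created above at offset -8)
//   [EBP-x]  spill slots and locals, rewritten by eliminateFrameIndex
// Note that EBP is set up with an explicit store plus LEA/MOV rather than the
// usual push/mov sequence.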
void X86RegisterInfo::emitPrologue(MachineFunction &MF) const {
  MachineBasicBlock &MBB = MF.front();   // Prolog goes in entry BB
  MachineBasicBlock::iterator MBBI = MBB.begin();
  MachineFrameInfo *MFI = MF.getFrameInfo();
  MachineInstr *MI;

  // Get the number of bytes to allocate from the FrameInfo
  unsigned NumBytes = MFI->getStackSize();
  if (hasFP(MF)) {
    // Get the offset of the stack slot for the EBP register... which is
    // guaranteed to be the last slot by processFunctionBeforeFrameFinalized.
    int EBPOffset = MFI->getObjectOffset(MFI->getObjectIndexBegin())+4;

    if (NumBytes) {   // adjust stack pointer: ESP -= numbytes
      unsigned Opc = NumBytes < 128 ? X86::SUB32ri8 : X86::SUB32ri;
      MI = BuildMI(Opc, 1, X86::ESP, MachineOperand::UseAndDef).addImm(NumBytes);
      MBB.insert(MBBI, MI);
    }

    // Save EBP into the appropriate stack slot...
    MI = addRegOffset(BuildMI(X86::MOV32mr, 5),    // mov [ESP-<offset>], EBP
                      X86::ESP, EBPOffset+NumBytes).addReg(X86::EBP);
    MBB.insert(MBBI, MI);

    // Update EBP with the new base value...
    if (NumBytes == 4)    // mov EBP, ESP
      MI = BuildMI(X86::MOV32rr, 2, X86::EBP).addReg(X86::ESP);
    else                  // lea EBP, [ESP+StackSize]
      MI = addRegOffset(BuildMI(X86::LEA32r, 5, X86::EBP), X86::ESP, NumBytes-4);

    MBB.insert(MBBI, MI);
  } else {
    if (MFI->hasCalls()) {
      // When we have no frame pointer, we reserve argument space for call sites
      // in the function immediately on entry to the current function.  This
      // eliminates the need for add/sub ESP brackets around call sites.
      //
      NumBytes += MFI->getMaxCallFrameSize();

      // Round the size to a multiple of the alignment (don't forget the 4 byte
      // offset though).
      unsigned Align = MF.getTarget().getFrameInfo()->getStackAlignment();
      NumBytes = ((NumBytes+4)+Align-1)/Align*Align - 4;
    }

    // Update frame info to pretend that this is part of the stack...
    MFI->setStackSize(NumBytes);

    if (NumBytes) {
      // adjust stack pointer: ESP -= numbytes
      unsigned Opc = NumBytes < 128 ? X86::SUB32ri8 : X86::SUB32ri;
      MI = BuildMI(Opc, 1, X86::ESP, MachineOperand::UseAndDef).addImm(NumBytes);
      MBB.insert(MBBI, MI);
    }
  }
}
void X86RegisterInfo::emitEpilogue(MachineFunction &MF,
                                   MachineBasicBlock &MBB) const {
  const MachineFrameInfo *MFI = MF.getFrameInfo();
  MachineBasicBlock::iterator MBBI = prior(MBB.end());

  switch (MBBI->getOpcode()) {
  case X86::RET:
  case X86::RETI:
  case X86::TAILJMPd:
  case X86::TAILJMPr:
  case X86::TAILJMPm: break;  // These are ok
  default:
    assert(0 && "Can only insert epilog into returning blocks");
  }

  if (hasFP(MF)) {
    // Get the offset of the stack slot for the EBP register... which is
    // guaranteed to be the last slot by processFunctionBeforeFrameFinalized.
    int EBPOffset = MFI->getObjectOffset(MFI->getObjectIndexEnd()-1)+4;

    // mov ESP, EBP
    BuildMI(MBB, MBBI, X86::MOV32rr, 1, X86::ESP).addReg(X86::EBP);

    // pop EBP
    BuildMI(MBB, MBBI, X86::POP32r, 0, X86::EBP);
  } else {
    // Get the number of bytes allocated from the FrameInfo...
    unsigned NumBytes = MFI->getStackSize();

    if (NumBytes) {   // adjust stack pointer back: ESP += numbytes
      // If there is an ADD32ri or SUB32ri of ESP immediately before this
      // instruction, merge the two instructions.
      if (MBBI != MBB.begin()) {
        MachineBasicBlock::iterator PI = prior(MBBI);
        if ((PI->getOpcode() == X86::ADD32ri ||
             PI->getOpcode() == X86::ADD32ri8) &&
            PI->getOperand(0).getReg() == X86::ESP) {
          NumBytes += PI->getOperand(1).getImmedValue();
          MBB.erase(PI);
        } else if ((PI->getOpcode() == X86::SUB32ri ||
                    PI->getOpcode() == X86::SUB32ri8) &&
                   PI->getOperand(0).getReg() == X86::ESP) {
          NumBytes -= PI->getOperand(1).getImmedValue();
          MBB.erase(PI);
        } else if (PI->getOpcode() == X86::ADJSTACKPTRri) {
          NumBytes += PI->getOperand(1).getImmedValue();
          MBB.erase(PI);
        }
      }

      if ((int)NumBytes > 0) {
        unsigned Opc = NumBytes < 128 ? X86::ADD32ri8 : X86::ADD32ri;
        BuildMI(MBB, MBBI, Opc, 2)
          .addReg(X86::ESP, MachineOperand::UseAndDef).addZImm(NumBytes);
      } else if ((int)NumBytes < 0) {
        unsigned Opc = -NumBytes < 128 ? X86::SUB32ri8 : X86::SUB32ri;
        BuildMI(MBB, MBBI, Opc, 2)
          .addReg(X86::ESP, MachineOperand::UseAndDef).addZImm(-NumBytes);
      }
    }
  }
}
unsigned X86RegisterInfo::getRARegister() const {
  return X86::ST0;  // use a non-register register
}

unsigned X86RegisterInfo::getFrameRegister(MachineFunction &MF) const {
  return hasFP(MF) ? X86::EBP : X86::ESP;
}

#include "X86GenRegisterInfo.inc"