blob: 08843fced0b879ffe74e178b7b538d354a8792c8 [file] [log] [blame]
Misha Brukmancf2b9ac2002-11-22 22:43:47 +00001//===- X86RegisterInfo.cpp - X86 Register Information -----------*- C++ -*-===//
Alkis Evlogimenos39354c92004-03-14 07:19:51 +00002//
John Criswellb576c942003-10-20 19:43:21 +00003// The LLVM Compiler Infrastructure
4//
5// This file was developed by the LLVM research group and is distributed under
6// the University of Illinois Open Source License. See LICENSE.TXT for details.
Alkis Evlogimenos39354c92004-03-14 07:19:51 +00007//
John Criswellb576c942003-10-20 19:43:21 +00008//===----------------------------------------------------------------------===//
Chris Lattner72614082002-10-25 22:55:53 +00009//
Chris Lattner3c1c03d2002-12-28 20:32:28 +000010// This file contains the X86 implementation of the MRegisterInfo class. This
11// file is responsible for the frame pointer elimination optimization on X86.
Chris Lattner72614082002-10-25 22:55:53 +000012//
13//===----------------------------------------------------------------------===//
14
Misha Brukmanb83b2862002-11-20 18:59:43 +000015#include "X86.h"
Chris Lattner72614082002-10-25 22:55:53 +000016#include "X86RegisterInfo.h"
Misha Brukmancf2b9ac2002-11-22 22:43:47 +000017#include "X86InstrBuilder.h"
Evan Chenge8bd0a32006-06-06 23:30:24 +000018#include "X86MachineFunctionInfo.h"
Evan Cheng25ab6902006-09-08 06:48:29 +000019#include "X86Subtarget.h"
Evan Chenge8bd0a32006-06-06 23:30:24 +000020#include "X86TargetMachine.h"
Misha Brukmanb83b2862002-11-20 18:59:43 +000021#include "llvm/Constants.h"
Evan Cheng3649b0e2006-06-02 22:38:37 +000022#include "llvm/Function.h"
Evan Cheng25ab6902006-09-08 06:48:29 +000023#include "llvm/Type.h"
Chris Lattnerc8c377d2003-07-29 05:14:16 +000024#include "llvm/CodeGen/ValueTypes.h"
Misha Brukmanb83b2862002-11-20 18:59:43 +000025#include "llvm/CodeGen/MachineInstrBuilder.h"
Chris Lattner198ab642002-12-15 20:06:35 +000026#include "llvm/CodeGen/MachineFunction.h"
Chris Lattneraa09b752002-12-28 21:08:28 +000027#include "llvm/CodeGen/MachineFrameInfo.h"
Jim Laskeyf1d78e82006-03-23 18:12:57 +000028#include "llvm/CodeGen/MachineLocation.h"
Anton Korobeynikovce3b4652007-05-02 19:53:33 +000029#include "llvm/Target/TargetAsmInfo.h"
Chris Lattnerf158da22003-01-16 02:20:12 +000030#include "llvm/Target/TargetFrameInfo.h"
Evan Cheng51cdcd12006-12-07 01:21:59 +000031#include "llvm/Target/TargetInstrInfo.h"
Misha Brukman83eaa0b2004-06-21 21:10:24 +000032#include "llvm/Target/TargetMachine.h"
Chris Lattner0cf0c372004-07-11 04:17:10 +000033#include "llvm/Target/TargetOptions.h"
Reid Spencer551ccae2004-09-01 22:55:40 +000034#include "llvm/Support/CommandLine.h"
Evan Chengb371f452007-02-19 21:49:54 +000035#include "llvm/ADT/BitVector.h"
Reid Spencer551ccae2004-09-01 22:55:40 +000036#include "llvm/ADT/STLExtras.h"
Chris Lattner300d0ed2004-02-14 06:00:36 +000037using namespace llvm;
Brian Gaeked0fde302003-11-11 22:41:34 +000038
namespace {
  // Command-line knobs for debugging the spill-code folding ("fusing")
  // optimization implemented in this file.
  cl::opt<bool>
  NoFusing("disable-spill-fusing",
           cl::desc("Disable fusing of spill code into instructions"));
  // When set, report each fold the register allocator requested that the
  // backend had to refuse (useful for finding missing table entries).
  cl::opt<bool>
  PrintFailedFusing("print-failed-fuse-candidates",
                    cl::desc("Print instructions that the allocator wants to"
                             " fuse, but the X86 backend currently can't"),
                    cl::Hidden);
}
Chris Lattner72614082002-10-25 22:55:53 +000049
Evan Cheng25ab6902006-09-08 06:48:29 +000050X86RegisterInfo::X86RegisterInfo(X86TargetMachine &tm,
51 const TargetInstrInfo &tii)
52 : X86GenRegisterInfo(X86::ADJCALLSTACKDOWN, X86::ADJCALLSTACKUP),
53 TM(tm), TII(tii) {
54 // Cache some information.
55 const X86Subtarget *Subtarget = &TM.getSubtarget<X86Subtarget>();
56 Is64Bit = Subtarget->is64Bit();
57 if (Is64Bit) {
58 SlotSize = 8;
59 StackPtr = X86::RSP;
60 FramePtr = X86::RBP;
61 } else {
62 SlotSize = 4;
63 StackPtr = X86::ESP;
64 FramePtr = X86::EBP;
65 }
66}
Chris Lattner7ad3e062003-08-03 15:48:14 +000067
// getX86RegNum - This function maps LLVM register identifiers to their X86
// specific numbering, which is used in various places encoding instructions.
//
unsigned X86RegisterInfo::getX86RegNum(unsigned RegNo) {
  switch(RegNo) {
  // Each 64/32/16/8-bit view of a GPR shares one hardware number (0-7).
  case X86::RAX: case X86::EAX: case X86::AX: case X86::AL: return N86::EAX;
  case X86::RCX: case X86::ECX: case X86::CX: case X86::CL: return N86::ECX;
  case X86::RDX: case X86::EDX: case X86::DX: case X86::DL: return N86::EDX;
  case X86::RBX: case X86::EBX: case X86::BX: case X86::BL: return N86::EBX;
  // The high-byte registers AH/CH/DH/BH reuse encodings 4-7, the same
  // numbers as SP/BP/SI/DI (they are distinguished by operand size).
  case X86::RSP: case X86::ESP: case X86::SP: case X86::SPL: case X86::AH:
    return N86::ESP;
  case X86::RBP: case X86::EBP: case X86::BP: case X86::BPL: case X86::CH:
    return N86::EBP;
  case X86::RSI: case X86::ESI: case X86::SI: case X86::SIL: case X86::DH:
    return N86::ESI;
  case X86::RDI: case X86::EDI: case X86::DI: case X86::DIL: case X86::BH:
    return N86::EDI;

  // R8-R15 reuse encodings 0-7; the REX prefix (emitted elsewhere) selects
  // the extended register bank.
  case X86::R8:  case X86::R8D:  case X86::R8W:  case X86::R8B:
    return N86::EAX;
  case X86::R9:  case X86::R9D:  case X86::R9W:  case X86::R9B:
    return N86::ECX;
  case X86::R10: case X86::R10D: case X86::R10W: case X86::R10B:
    return N86::EDX;
  case X86::R11: case X86::R11D: case X86::R11W: case X86::R11B:
    return N86::EBX;
  case X86::R12: case X86::R12D: case X86::R12W: case X86::R12B:
    return N86::ESP;
  case X86::R13: case X86::R13D: case X86::R13W: case X86::R13B:
    return N86::EBP;
  case X86::R14: case X86::R14D: case X86::R14W: case X86::R14B:
    return N86::ESI;
  case X86::R15: case X86::R15D: case X86::R15W: case X86::R15B:
    return N86::EDI;

  // x87 stack registers are numbered 0-7 in declaration order.
  case X86::ST0: case X86::ST1: case X86::ST2: case X86::ST3:
  case X86::ST4: case X86::ST5: case X86::ST6: case X86::ST7:
    return RegNo-X86::ST0;

  // XMM registers: derive the 0-7 encoding from the Dwarf numbering, which
  // is contiguous within each bank of eight.
  case X86::XMM0: case X86::XMM1: case X86::XMM2: case X86::XMM3:
  case X86::XMM4: case X86::XMM5: case X86::XMM6: case X86::XMM7:
    return getDwarfRegNum(RegNo) - getDwarfRegNum(X86::XMM0);
  case X86::XMM8: case X86::XMM9: case X86::XMM10: case X86::XMM11:
  case X86::XMM12: case X86::XMM13: case X86::XMM14: case X86::XMM15:
    return getDwarfRegNum(RegNo) - getDwarfRegNum(X86::XMM8);

  default:
    // Only physical registers have an X86 encoding; reaching here with a
    // virtual register means register allocation has not run correctly.
    assert(isVirtualRegister(RegNo) && "Unknown physical register!");
    assert(0 && "Register allocator hasn't allocated reg correctly yet!");
    return 0;
  }
}
120
Evan Cheng89d16592007-07-17 07:59:08 +0000121bool X86RegisterInfo::spillCalleeSavedRegisters(MachineBasicBlock &MBB,
122 MachineBasicBlock::iterator MI,
123 const std::vector<CalleeSavedInfo> &CSI) const {
124 if (CSI.empty())
125 return false;
126
127 MachineFunction &MF = *MBB.getParent();
128 X86MachineFunctionInfo *X86FI = MF.getInfo<X86MachineFunctionInfo>();
129 X86FI->setCalleeSavedFrameSize(CSI.size() * SlotSize);
130 unsigned Opc = Is64Bit ? X86::PUSH64r : X86::PUSH32r;
131 for (unsigned i = CSI.size(); i != 0; --i) {
132 unsigned Reg = CSI[i-1].getReg();
133 // Add the callee-saved register as live-in. It's killed at the spill.
134 MBB.addLiveIn(Reg);
135 BuildMI(MBB, MI, TII.get(Opc)).addReg(Reg);
136 }
137 return true;
138}
139
140bool X86RegisterInfo::restoreCalleeSavedRegisters(MachineBasicBlock &MBB,
141 MachineBasicBlock::iterator MI,
142 const std::vector<CalleeSavedInfo> &CSI) const {
143 if (CSI.empty())
144 return false;
145
146 unsigned Opc = Is64Bit ? X86::POP64r : X86::POP32r;
147 for (unsigned i = 0, e = CSI.size(); i != e; ++i) {
148 unsigned Reg = CSI[i].getReg();
149 BuildMI(MBB, MI, TII.get(Opc), Reg);
150 }
151 return true;
152}
153
Chris Lattner01d0efb2004-08-15 22:15:11 +0000154void X86RegisterInfo::storeRegToStackSlot(MachineBasicBlock &MBB,
155 MachineBasicBlock::iterator MI,
Chris Lattner97d5e642005-09-30 01:29:42 +0000156 unsigned SrcReg, int FrameIdx,
157 const TargetRegisterClass *RC) const {
Chris Lattner56bcae02005-09-30 17:12:38 +0000158 unsigned Opc;
Evan Cheng25ab6902006-09-08 06:48:29 +0000159 if (RC == &X86::GR64RegClass) {
160 Opc = X86::MOV64mr;
161 } else if (RC == &X86::GR32RegClass) {
Chris Lattner56bcae02005-09-30 17:12:38 +0000162 Opc = X86::MOV32mr;
Evan Cheng069287d2006-05-16 07:21:53 +0000163 } else if (RC == &X86::GR16RegClass) {
Chris Lattner56bcae02005-09-30 17:12:38 +0000164 Opc = X86::MOV16mr;
Evan Cheng069287d2006-05-16 07:21:53 +0000165 } else if (RC == &X86::GR8RegClass) {
Evan Cheng403be7e2006-05-08 08:01:26 +0000166 Opc = X86::MOV8mr;
Evan Cheng069287d2006-05-16 07:21:53 +0000167 } else if (RC == &X86::GR32_RegClass) {
Evan Cheng403be7e2006-05-08 08:01:26 +0000168 Opc = X86::MOV32_mr;
Evan Cheng069287d2006-05-16 07:21:53 +0000169 } else if (RC == &X86::GR16_RegClass) {
Evan Cheng403be7e2006-05-08 08:01:26 +0000170 Opc = X86::MOV16_mr;
Dale Johannesen9e3d3ab2007-09-14 22:26:36 +0000171 } else if (RC == &X86::RFP80RegClass) {
172 Opc = X86::ST_FpP80m; // pops
Dale Johannesenca8035e2007-09-17 20:15:38 +0000173 } else if (RC == &X86::RFP64RegClass) {
Dale Johannesene377d4d2007-07-04 21:07:47 +0000174 Opc = X86::ST_Fp64m;
Dale Johannesen849f2142007-07-03 00:53:03 +0000175 } else if (RC == &X86::RFP32RegClass) {
Dale Johannesene377d4d2007-07-04 21:07:47 +0000176 Opc = X86::ST_Fp32m;
Evan Cheng19ade3b2006-02-16 21:20:26 +0000177 } else if (RC == &X86::FR32RegClass) {
Nate Begeman14e2cf62005-10-14 22:06:00 +0000178 Opc = X86::MOVSSmr;
Evan Cheng19ade3b2006-02-16 21:20:26 +0000179 } else if (RC == &X86::FR64RegClass) {
Chris Lattner56bcae02005-09-30 17:12:38 +0000180 Opc = X86::MOVSDmr;
Evan Cheng2246f842006-03-18 01:23:20 +0000181 } else if (RC == &X86::VR128RegClass) {
Evan Chenged1492e2006-04-28 02:23:35 +0000182 Opc = X86::MOVAPSmr;
Bill Wendling2f88dcd2007-03-08 22:09:11 +0000183 } else if (RC == &X86::VR64RegClass) {
Bill Wendlingc9c9d2d2007-04-03 06:18:31 +0000184 Opc = X86::MMX_MOVQ64mr;
Chris Lattner56bcae02005-09-30 17:12:38 +0000185 } else {
186 assert(0 && "Unknown regclass");
187 abort();
188 }
Evan Cheng0fa1b6d2007-02-23 01:10:04 +0000189 addFrameReference(BuildMI(MBB, MI, TII.get(Opc)), FrameIdx)
190 .addReg(SrcReg, false, false, true);
Misha Brukmanb83b2862002-11-20 18:59:43 +0000191}
192
Chris Lattner01d0efb2004-08-15 22:15:11 +0000193void X86RegisterInfo::loadRegFromStackSlot(MachineBasicBlock &MBB,
194 MachineBasicBlock::iterator MI,
Chris Lattner97d5e642005-09-30 01:29:42 +0000195 unsigned DestReg, int FrameIdx,
196 const TargetRegisterClass *RC) const{
Chris Lattner56bcae02005-09-30 17:12:38 +0000197 unsigned Opc;
Evan Cheng25ab6902006-09-08 06:48:29 +0000198 if (RC == &X86::GR64RegClass) {
199 Opc = X86::MOV64rm;
200 } else if (RC == &X86::GR32RegClass) {
Chris Lattner56bcae02005-09-30 17:12:38 +0000201 Opc = X86::MOV32rm;
Evan Cheng069287d2006-05-16 07:21:53 +0000202 } else if (RC == &X86::GR16RegClass) {
Chris Lattner56bcae02005-09-30 17:12:38 +0000203 Opc = X86::MOV16rm;
Evan Cheng069287d2006-05-16 07:21:53 +0000204 } else if (RC == &X86::GR8RegClass) {
Evan Cheng403be7e2006-05-08 08:01:26 +0000205 Opc = X86::MOV8rm;
Evan Cheng069287d2006-05-16 07:21:53 +0000206 } else if (RC == &X86::GR32_RegClass) {
Evan Cheng403be7e2006-05-08 08:01:26 +0000207 Opc = X86::MOV32_rm;
Evan Cheng069287d2006-05-16 07:21:53 +0000208 } else if (RC == &X86::GR16_RegClass) {
Evan Cheng403be7e2006-05-08 08:01:26 +0000209 Opc = X86::MOV16_rm;
Dale Johannesen9e3d3ab2007-09-14 22:26:36 +0000210 } else if (RC == &X86::RFP80RegClass) {
211 Opc = X86::LD_Fp80m;
Dale Johannesenca8035e2007-09-17 20:15:38 +0000212 } else if (RC == &X86::RFP64RegClass) {
Dale Johannesene377d4d2007-07-04 21:07:47 +0000213 Opc = X86::LD_Fp64m;
Dale Johannesen849f2142007-07-03 00:53:03 +0000214 } else if (RC == &X86::RFP32RegClass) {
Dale Johannesene377d4d2007-07-04 21:07:47 +0000215 Opc = X86::LD_Fp32m;
Evan Cheng19ade3b2006-02-16 21:20:26 +0000216 } else if (RC == &X86::FR32RegClass) {
Nate Begeman14e2cf62005-10-14 22:06:00 +0000217 Opc = X86::MOVSSrm;
Evan Cheng19ade3b2006-02-16 21:20:26 +0000218 } else if (RC == &X86::FR64RegClass) {
Chris Lattner56bcae02005-09-30 17:12:38 +0000219 Opc = X86::MOVSDrm;
Evan Cheng2246f842006-03-18 01:23:20 +0000220 } else if (RC == &X86::VR128RegClass) {
Evan Chenged1492e2006-04-28 02:23:35 +0000221 Opc = X86::MOVAPSrm;
Bill Wendling2f88dcd2007-03-08 22:09:11 +0000222 } else if (RC == &X86::VR64RegClass) {
Bill Wendlingc9c9d2d2007-04-03 06:18:31 +0000223 Opc = X86::MMX_MOVQ64rm;
Chris Lattner56bcae02005-09-30 17:12:38 +0000224 } else {
225 assert(0 && "Unknown regclass");
226 abort();
227 }
Evan Chengc0f64ff2006-11-27 23:37:22 +0000228 addFrameReference(BuildMI(MBB, MI, TII.get(Opc), DestReg), FrameIdx);
Misha Brukmanb83b2862002-11-20 18:59:43 +0000229}
230
Chris Lattner01d0efb2004-08-15 22:15:11 +0000231void X86RegisterInfo::copyRegToReg(MachineBasicBlock &MBB,
232 MachineBasicBlock::iterator MI,
233 unsigned DestReg, unsigned SrcReg,
Evan Cheng9efce632007-09-26 06:25:56 +0000234 const TargetRegisterClass *DestRC,
235 const TargetRegisterClass *SrcRC) const {
236 if (DestRC != SrcRC) {
Evan Chengff110262007-09-26 21:31:07 +0000237 // Moving EFLAGS to / from another register requires a push and a pop.
238 if (SrcRC == &X86::CCRRegClass) {
239 assert(SrcReg == X86::EFLAGS);
240 if (DestRC == &X86::GR64RegClass) {
241 BuildMI(MBB, MI, TII.get(X86::PUSHFQ));
242 BuildMI(MBB, MI, TII.get(X86::POP64r), DestReg);
243 return;
244 } else if (DestRC == &X86::GR32RegClass) {
245 BuildMI(MBB, MI, TII.get(X86::PUSHFD));
246 BuildMI(MBB, MI, TII.get(X86::POP32r), DestReg);
247 return;
248 }
249 } else if (DestRC == &X86::CCRRegClass) {
250 assert(DestReg == X86::EFLAGS);
251 if (SrcRC == &X86::GR64RegClass) {
252 BuildMI(MBB, MI, TII.get(X86::PUSH64r)).addReg(SrcReg);
253 BuildMI(MBB, MI, TII.get(X86::POPFQ));
254 return;
255 } else if (SrcRC == &X86::GR32RegClass) {
256 BuildMI(MBB, MI, TII.get(X86::PUSH32r)).addReg(SrcReg);
257 BuildMI(MBB, MI, TII.get(X86::POPFD));
258 return;
259 }
260 }
Evan Cheng9efce632007-09-26 06:25:56 +0000261 cerr << "Not yet supported!";
262 abort();
263 }
264
Chris Lattner56bcae02005-09-30 17:12:38 +0000265 unsigned Opc;
Evan Cheng9efce632007-09-26 06:25:56 +0000266 if (DestRC == &X86::GR64RegClass) {
Evan Cheng25ab6902006-09-08 06:48:29 +0000267 Opc = X86::MOV64rr;
Evan Cheng9efce632007-09-26 06:25:56 +0000268 } else if (DestRC == &X86::GR32RegClass) {
Chris Lattner56bcae02005-09-30 17:12:38 +0000269 Opc = X86::MOV32rr;
Evan Cheng9efce632007-09-26 06:25:56 +0000270 } else if (DestRC == &X86::GR16RegClass) {
Chris Lattner56bcae02005-09-30 17:12:38 +0000271 Opc = X86::MOV16rr;
Evan Cheng9efce632007-09-26 06:25:56 +0000272 } else if (DestRC == &X86::GR8RegClass) {
Evan Cheng403be7e2006-05-08 08:01:26 +0000273 Opc = X86::MOV8rr;
Evan Cheng9efce632007-09-26 06:25:56 +0000274 } else if (DestRC == &X86::GR32_RegClass) {
Evan Cheng403be7e2006-05-08 08:01:26 +0000275 Opc = X86::MOV32_rr;
Evan Cheng9efce632007-09-26 06:25:56 +0000276 } else if (DestRC == &X86::GR16_RegClass) {
Evan Cheng403be7e2006-05-08 08:01:26 +0000277 Opc = X86::MOV16_rr;
Evan Cheng9efce632007-09-26 06:25:56 +0000278 } else if (DestRC == &X86::RFP32RegClass) {
Dale Johannesene377d4d2007-07-04 21:07:47 +0000279 Opc = X86::MOV_Fp3232;
Evan Cheng9efce632007-09-26 06:25:56 +0000280 } else if (DestRC == &X86::RFP64RegClass || DestRC == &X86::RSTRegClass) {
Dale Johannesene377d4d2007-07-04 21:07:47 +0000281 Opc = X86::MOV_Fp6464;
Evan Cheng9efce632007-09-26 06:25:56 +0000282 } else if (DestRC == &X86::RFP80RegClass) {
Dale Johannesen9e3d3ab2007-09-14 22:26:36 +0000283 Opc = X86::MOV_Fp8080;
Evan Cheng9efce632007-09-26 06:25:56 +0000284 } else if (DestRC == &X86::FR32RegClass) {
Evan Chengfe5cb192006-02-16 22:45:17 +0000285 Opc = X86::FsMOVAPSrr;
Evan Cheng9efce632007-09-26 06:25:56 +0000286 } else if (DestRC == &X86::FR64RegClass) {
Evan Chengfe5cb192006-02-16 22:45:17 +0000287 Opc = X86::FsMOVAPDrr;
Evan Cheng9efce632007-09-26 06:25:56 +0000288 } else if (DestRC == &X86::VR128RegClass) {
Evan Chenga964ccd2006-04-10 07:21:31 +0000289 Opc = X86::MOVAPSrr;
Evan Cheng9efce632007-09-26 06:25:56 +0000290 } else if (DestRC == &X86::VR64RegClass) {
Bill Wendlingc9c9d2d2007-04-03 06:18:31 +0000291 Opc = X86::MMX_MOVQ64rr;
Chris Lattner56bcae02005-09-30 17:12:38 +0000292 } else {
293 assert(0 && "Unknown regclass");
294 abort();
295 }
Evan Chengc0f64ff2006-11-27 23:37:22 +0000296 BuildMI(MBB, MI, TII.get(Opc), DestReg).addReg(SrcReg);
Misha Brukman2b46e8e2002-12-13 09:54:12 +0000297}
298
Evan Chengff110262007-09-26 21:31:07 +0000299const TargetRegisterClass *
300X86RegisterInfo::getCrossCopyRegClass(const TargetRegisterClass *RC) const {
301 if (RC == &X86::CCRRegClass)
Evan Cheng3f2d9ec2007-09-27 21:50:05 +0000302 if (Is64Bit)
303 return &X86::GR64RegClass;
304 else
305 return &X86::GR32RegClass;
Evan Chengff110262007-09-26 21:31:07 +0000306 return NULL;
307}
Evan Chengbf2c8b32007-03-20 08:09:38 +0000308
309void X86RegisterInfo::reMaterialize(MachineBasicBlock &MBB,
310 MachineBasicBlock::iterator I,
311 unsigned DestReg,
312 const MachineInstr *Orig) const {
Evan Chengb0869ed2007-09-10 20:48:53 +0000313 // MOV32r0 etc. are implemented with xor which clobbers condition code.
314 // Re-materialize them as movri instructions to avoid side effects.
315 switch (Orig->getOpcode()) {
316 case X86::MOV8r0:
317 BuildMI(MBB, I, TII.get(X86::MOV8ri), DestReg).addImm(0);
318 break;
319 case X86::MOV16r0:
320 BuildMI(MBB, I, TII.get(X86::MOV16ri), DestReg).addImm(0);
321 break;
322 case X86::MOV32r0:
323 BuildMI(MBB, I, TII.get(X86::MOV32ri), DestReg).addImm(0);
324 break;
325 case X86::MOV64r0:
326 BuildMI(MBB, I, TII.get(X86::MOV64ri32), DestReg).addImm(0);
327 break;
328 default: {
329 MachineInstr *MI = Orig->clone();
330 MI->getOperand(0).setReg(DestReg);
331 MBB.insert(I, MI);
332 break;
333 }
334 }
Evan Chengbf2c8b32007-03-20 08:09:38 +0000335}
336
// FuseInstrAddOperand - Append operand MO to the instruction being assembled
// by MIB, dispatching on the operand's kind.  Helper for the fold/"fuse"
// routines below; asserts on an operand kind they never produce.
static const MachineInstrBuilder &FuseInstrAddOperand(MachineInstrBuilder &MIB,
                                                      MachineOperand &MO) {
  if (MO.isRegister())
    MIB = MIB.addReg(MO.getReg(), MO.isDef(), MO.isImplicit());
  else if (MO.isImmediate())
    MIB = MIB.addImm(MO.getImm());
  else if (MO.isFrameIndex())
    MIB = MIB.addFrameIndex(MO.getFrameIndex());
  else if (MO.isGlobalAddress())
    MIB = MIB.addGlobalAddress(MO.getGlobal(), MO.getOffset());
  else if (MO.isConstantPoolIndex())
    MIB = MIB.addConstantPoolIndex(MO.getConstantPoolIndex(), MO.getOffset());
  else if (MO.isJumpTableIndex())
    MIB = MIB.addJumpTableIndex(MO.getJumpTableIndex());
  else if (MO.isExternalSymbol())
    MIB = MIB.addExternalSymbol(MO.getSymbolName());
  else
    assert(0 && "Unknown operand for FuseInst!");

  return MIB;
}
358
359static MachineInstr *FuseTwoAddrInst(unsigned Opcode,
360 SmallVector<MachineOperand,4> &MOs,
361 MachineInstr *MI, const TargetInstrInfo &TII) {
Evan Cheng171d09e2006-11-10 01:28:43 +0000362 unsigned NumOps = TII.getNumOperands(MI->getOpcode())-2;
Evan Chengf4c3a592007-08-30 05:54:07 +0000363
Chris Lattner29268692006-09-05 02:12:02 +0000364 // Create the base instruction with the memory operand as the first part.
Evan Chengf4c3a592007-08-30 05:54:07 +0000365 MachineInstrBuilder MIB = BuildMI(TII.get(Opcode));
366 unsigned NumAddrOps = MOs.size();
367 for (unsigned i = 0; i != NumAddrOps; ++i)
368 MIB = FuseInstrAddOperand(MIB, MOs[i]);
369 if (NumAddrOps < 4) // FrameIndex only
370 MIB.addImm(1).addReg(0).addImm(0);
Chris Lattner29268692006-09-05 02:12:02 +0000371
372 // Loop over the rest of the ri operands, converting them over.
373 for (unsigned i = 0; i != NumOps; ++i) {
Evan Cheng6f34b432006-09-08 21:08:13 +0000374 MachineOperand &MO = MI->getOperand(i+2);
Evan Chengf4c3a592007-08-30 05:54:07 +0000375 MIB = FuseInstrAddOperand(MIB, MO);
Chris Lattner29268692006-09-05 02:12:02 +0000376 }
377 return MIB;
Alkis Evlogimenos89b02142004-02-17 08:49:20 +0000378}
379
Chris Lattner29268692006-09-05 02:12:02 +0000380static MachineInstr *FuseInst(unsigned Opcode, unsigned OpNo,
Evan Chengf4c3a592007-08-30 05:54:07 +0000381 SmallVector<MachineOperand,4> &MOs,
382 MachineInstr *MI, const TargetInstrInfo &TII) {
Evan Chengc0f64ff2006-11-27 23:37:22 +0000383 MachineInstrBuilder MIB = BuildMI(TII.get(Opcode));
Chris Lattner29268692006-09-05 02:12:02 +0000384
385 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
386 MachineOperand &MO = MI->getOperand(i);
387 if (i == OpNo) {
Dan Gohman92dfe202007-09-14 20:33:02 +0000388 assert(MO.isRegister() && "Expected to fold into reg operand!");
Evan Chengf4c3a592007-08-30 05:54:07 +0000389 unsigned NumAddrOps = MOs.size();
390 for (unsigned i = 0; i != NumAddrOps; ++i)
391 MIB = FuseInstrAddOperand(MIB, MOs[i]);
392 if (NumAddrOps < 4) // FrameIndex only
393 MIB.addImm(1).addReg(0).addImm(0);
394 } else {
395 MIB = FuseInstrAddOperand(MIB, MO);
396 }
Chris Lattner29268692006-09-05 02:12:02 +0000397 }
398 return MIB;
Chris Lattner7c035b72004-02-17 05:35:13 +0000399}
400
Evan Chengf4c3a592007-08-30 05:54:07 +0000401static MachineInstr *MakeM0Inst(const TargetInstrInfo &TII, unsigned Opcode,
402 SmallVector<MachineOperand,4> &MOs,
Evan Cheng8586b952006-03-17 02:36:22 +0000403 MachineInstr *MI) {
Evan Chengf4c3a592007-08-30 05:54:07 +0000404 MachineInstrBuilder MIB = BuildMI(TII.get(Opcode));
405
406 unsigned NumAddrOps = MOs.size();
407 for (unsigned i = 0; i != NumAddrOps; ++i)
408 MIB = FuseInstrAddOperand(MIB, MOs[i]);
409 if (NumAddrOps < 4) // FrameIndex only
410 MIB.addImm(1).addReg(0).addImm(0);
411 return MIB.addImm(0);
Evan Cheng8586b952006-03-17 02:36:22 +0000412}
413
Chris Lattner0f9c4912004-02-17 05:46:06 +0000414
Jim Laskeyf19807c2006-07-19 17:53:32 +0000415//===----------------------------------------------------------------------===//
416// Efficient Lookup Table Support
417//===----------------------------------------------------------------------===//
418
namespace {
  /// TableEntry - Maps the 'from' opcode to a fused form of the 'to' opcode.
  ///
  struct TableEntry {
    unsigned from;                      // Original opcode.
    unsigned to;                        // New opcode.

    // less operators used by STL search.  The entry/unsigned overloads let
    // std::lower_bound compare table entries directly against an opcode.
    bool operator<(const TableEntry &TE) const { return from < TE.from; }
    friend bool operator<(const TableEntry &TE, unsigned V) {
      return TE.from < V;
    }
    friend bool operator<(unsigned V, const TableEntry &TE) {
      return V < TE.from;
    }
  };
}
436
Jim Laskey613f1f82006-07-19 19:32:06 +0000437/// TableIsSorted - Return true if the table is in 'from' opcode order.
438///
Jim Laskeyf19807c2006-07-19 17:53:32 +0000439static bool TableIsSorted(const TableEntry *Table, unsigned NumEntries) {
Jim Laskey613f1f82006-07-19 19:32:06 +0000440 for (unsigned i = 1; i != NumEntries; ++i)
441 if (!(Table[i-1] < Table[i])) {
Bill Wendlingf5da1332006-12-07 22:21:48 +0000442 cerr << "Entries out of order " << Table[i-1].from
443 << " " << Table[i].from << "\n";
Jim Laskey613f1f82006-07-19 19:32:06 +0000444 return false;
445 }
Jim Laskeyf19807c2006-07-19 17:53:32 +0000446 return true;
447}
448
Jim Laskey613f1f82006-07-19 19:32:06 +0000449/// TableLookup - Return the table entry matching the specified opcode.
450/// Otherwise return NULL.
451static const TableEntry *TableLookup(const TableEntry *Table, unsigned N,
452 unsigned Opcode) {
Jim Laskeyf19807c2006-07-19 17:53:32 +0000453 const TableEntry *I = std::lower_bound(Table, Table+N, Opcode);
Jim Laskey613f1f82006-07-19 19:32:06 +0000454 if (I != Table+N && I->from == Opcode)
455 return I;
456 return NULL;
Jim Laskeyf19807c2006-07-19 17:53:32 +0000457}
458
// ASSERT_SORTED(TABLE) - In debug builds, check once per table (guarded by a
// function-local static flag) that TABLE satisfies the sorted-by-'from'
// precondition of TableLookup's binary search.  Expands to nothing when
// NDEBUG is defined.
#ifdef NDEBUG
#define ASSERT_SORTED(TABLE)
#else
#define ASSERT_SORTED(TABLE)                                              \
  { static bool TABLE##Checked = false;                                   \
    if (!TABLE##Checked) {                                                \
       assert(TableIsSorted(TABLE, array_lengthof(TABLE)) &&              \
              "All lookup tables must be sorted for efficient access!");  \
       TABLE##Checked = true;                                             \
    }                                                                     \
  }
#endif
471
Evan Chengf4c3a592007-08-30 05:54:07 +0000472MachineInstr*
473X86RegisterInfo::foldMemoryOperand(MachineInstr *MI, unsigned i,
474 SmallVector<MachineOperand,4> &MOs) const {
Jim Laskeyf19807c2006-07-19 17:53:32 +0000475 // Table (and size) to search
476 const TableEntry *OpcodeTablePtr = NULL;
477 unsigned OpcodeTableSize = 0;
Chris Lattner29268692006-09-05 02:12:02 +0000478 bool isTwoAddrFold = false;
Evan Cheng171d09e2006-11-10 01:28:43 +0000479 unsigned NumOps = TII.getNumOperands(MI->getOpcode());
480 bool isTwoAddr = NumOps > 1 &&
Evan Cheng51cdcd12006-12-07 01:21:59 +0000481 MI->getInstrDescriptor()->getOperandConstraint(1, TOI::TIED_TO) != -1;
Jim Laskeyf19807c2006-07-19 17:53:32 +0000482
Evan Cheng6ce7dc22006-11-15 20:58:11 +0000483 MachineInstr *NewMI = NULL;
Chris Lattner29268692006-09-05 02:12:02 +0000484 // Folding a memory location into the two-address part of a two-address
485 // instruction is different than folding it other places. It requires
486 // replacing the *two* registers with the memory location.
Evan Cheng171d09e2006-11-10 01:28:43 +0000487 if (isTwoAddr && NumOps >= 2 && i < 2 &&
Dan Gohman92dfe202007-09-14 20:33:02 +0000488 MI->getOperand(0).isRegister() &&
489 MI->getOperand(1).isRegister() &&
Evan Chengf4c3a592007-08-30 05:54:07 +0000490 MI->getOperand(0).getReg() == MI->getOperand(1).getReg()) {
Jim Laskeyf19807c2006-07-19 17:53:32 +0000491 static const TableEntry OpcodeTable[] = {
Chris Lattner29268692006-09-05 02:12:02 +0000492 { X86::ADC32ri, X86::ADC32mi },
493 { X86::ADC32ri8, X86::ADC32mi8 },
494 { X86::ADC32rr, X86::ADC32mr },
Evan Cheng25ab6902006-09-08 06:48:29 +0000495 { X86::ADC64ri32, X86::ADC64mi32 },
496 { X86::ADC64ri8, X86::ADC64mi8 },
497 { X86::ADC64rr, X86::ADC64mr },
Chris Lattner29268692006-09-05 02:12:02 +0000498 { X86::ADD16ri, X86::ADD16mi },
499 { X86::ADD16ri8, X86::ADD16mi8 },
500 { X86::ADD16rr, X86::ADD16mr },
501 { X86::ADD32ri, X86::ADD32mi },
502 { X86::ADD32ri8, X86::ADD32mi8 },
503 { X86::ADD32rr, X86::ADD32mr },
Evan Cheng25ab6902006-09-08 06:48:29 +0000504 { X86::ADD64ri32, X86::ADD64mi32 },
505 { X86::ADD64ri8, X86::ADD64mi8 },
506 { X86::ADD64rr, X86::ADD64mr },
Chris Lattner29268692006-09-05 02:12:02 +0000507 { X86::ADD8ri, X86::ADD8mi },
508 { X86::ADD8rr, X86::ADD8mr },
509 { X86::AND16ri, X86::AND16mi },
510 { X86::AND16ri8, X86::AND16mi8 },
511 { X86::AND16rr, X86::AND16mr },
512 { X86::AND32ri, X86::AND32mi },
513 { X86::AND32ri8, X86::AND32mi8 },
514 { X86::AND32rr, X86::AND32mr },
Evan Cheng25ab6902006-09-08 06:48:29 +0000515 { X86::AND64ri32, X86::AND64mi32 },
516 { X86::AND64ri8, X86::AND64mi8 },
517 { X86::AND64rr, X86::AND64mr },
Chris Lattner29268692006-09-05 02:12:02 +0000518 { X86::AND8ri, X86::AND8mi },
519 { X86::AND8rr, X86::AND8mr },
520 { X86::DEC16r, X86::DEC16m },
521 { X86::DEC32r, X86::DEC32m },
Evan Cheng25ab6902006-09-08 06:48:29 +0000522 { X86::DEC64_16r, X86::DEC16m },
523 { X86::DEC64_32r, X86::DEC32m },
524 { X86::DEC64r, X86::DEC64m },
Chris Lattner29268692006-09-05 02:12:02 +0000525 { X86::DEC8r, X86::DEC8m },
526 { X86::INC16r, X86::INC16m },
527 { X86::INC32r, X86::INC32m },
Evan Cheng25ab6902006-09-08 06:48:29 +0000528 { X86::INC64_16r, X86::INC16m },
529 { X86::INC64_32r, X86::INC32m },
530 { X86::INC64r, X86::INC64m },
Chris Lattner29268692006-09-05 02:12:02 +0000531 { X86::INC8r, X86::INC8m },
532 { X86::NEG16r, X86::NEG16m },
533 { X86::NEG32r, X86::NEG32m },
Evan Cheng25ab6902006-09-08 06:48:29 +0000534 { X86::NEG64r, X86::NEG64m },
Chris Lattner29268692006-09-05 02:12:02 +0000535 { X86::NEG8r, X86::NEG8m },
536 { X86::NOT16r, X86::NOT16m },
537 { X86::NOT32r, X86::NOT32m },
Evan Cheng25ab6902006-09-08 06:48:29 +0000538 { X86::NOT64r, X86::NOT64m },
Chris Lattner29268692006-09-05 02:12:02 +0000539 { X86::NOT8r, X86::NOT8m },
540 { X86::OR16ri, X86::OR16mi },
541 { X86::OR16ri8, X86::OR16mi8 },
542 { X86::OR16rr, X86::OR16mr },
543 { X86::OR32ri, X86::OR32mi },
544 { X86::OR32ri8, X86::OR32mi8 },
545 { X86::OR32rr, X86::OR32mr },
Evan Cheng25ab6902006-09-08 06:48:29 +0000546 { X86::OR64ri32, X86::OR64mi32 },
547 { X86::OR64ri8, X86::OR64mi8 },
548 { X86::OR64rr, X86::OR64mr },
Chris Lattner29268692006-09-05 02:12:02 +0000549 { X86::OR8ri, X86::OR8mi },
550 { X86::OR8rr, X86::OR8mr },
551 { X86::ROL16r1, X86::ROL16m1 },
552 { X86::ROL16rCL, X86::ROL16mCL },
553 { X86::ROL16ri, X86::ROL16mi },
554 { X86::ROL32r1, X86::ROL32m1 },
555 { X86::ROL32rCL, X86::ROL32mCL },
556 { X86::ROL32ri, X86::ROL32mi },
Evan Cheng25ab6902006-09-08 06:48:29 +0000557 { X86::ROL64r1, X86::ROL64m1 },
558 { X86::ROL64rCL, X86::ROL64mCL },
559 { X86::ROL64ri, X86::ROL64mi },
Chris Lattner29268692006-09-05 02:12:02 +0000560 { X86::ROL8r1, X86::ROL8m1 },
561 { X86::ROL8rCL, X86::ROL8mCL },
562 { X86::ROL8ri, X86::ROL8mi },
563 { X86::ROR16r1, X86::ROR16m1 },
564 { X86::ROR16rCL, X86::ROR16mCL },
565 { X86::ROR16ri, X86::ROR16mi },
566 { X86::ROR32r1, X86::ROR32m1 },
567 { X86::ROR32rCL, X86::ROR32mCL },
568 { X86::ROR32ri, X86::ROR32mi },
Evan Cheng25ab6902006-09-08 06:48:29 +0000569 { X86::ROR64r1, X86::ROR64m1 },
570 { X86::ROR64rCL, X86::ROR64mCL },
571 { X86::ROR64ri, X86::ROR64mi },
Chris Lattner29268692006-09-05 02:12:02 +0000572 { X86::ROR8r1, X86::ROR8m1 },
573 { X86::ROR8rCL, X86::ROR8mCL },
574 { X86::ROR8ri, X86::ROR8mi },
575 { X86::SAR16r1, X86::SAR16m1 },
576 { X86::SAR16rCL, X86::SAR16mCL },
577 { X86::SAR16ri, X86::SAR16mi },
578 { X86::SAR32r1, X86::SAR32m1 },
579 { X86::SAR32rCL, X86::SAR32mCL },
580 { X86::SAR32ri, X86::SAR32mi },
Evan Cheng25ab6902006-09-08 06:48:29 +0000581 { X86::SAR64r1, X86::SAR64m1 },
582 { X86::SAR64rCL, X86::SAR64mCL },
583 { X86::SAR64ri, X86::SAR64mi },
Chris Lattner29268692006-09-05 02:12:02 +0000584 { X86::SAR8r1, X86::SAR8m1 },
585 { X86::SAR8rCL, X86::SAR8mCL },
586 { X86::SAR8ri, X86::SAR8mi },
587 { X86::SBB32ri, X86::SBB32mi },
588 { X86::SBB32ri8, X86::SBB32mi8 },
589 { X86::SBB32rr, X86::SBB32mr },
Evan Cheng25ab6902006-09-08 06:48:29 +0000590 { X86::SBB64ri32, X86::SBB64mi32 },
591 { X86::SBB64ri8, X86::SBB64mi8 },
592 { X86::SBB64rr, X86::SBB64mr },
Chris Lattner29268692006-09-05 02:12:02 +0000593 { X86::SHL16r1, X86::SHL16m1 },
594 { X86::SHL16rCL, X86::SHL16mCL },
595 { X86::SHL16ri, X86::SHL16mi },
596 { X86::SHL32r1, X86::SHL32m1 },
597 { X86::SHL32rCL, X86::SHL32mCL },
598 { X86::SHL32ri, X86::SHL32mi },
Evan Cheng25ab6902006-09-08 06:48:29 +0000599 { X86::SHL64r1, X86::SHL64m1 },
600 { X86::SHL64rCL, X86::SHL64mCL },
601 { X86::SHL64ri, X86::SHL64mi },
Chris Lattner29268692006-09-05 02:12:02 +0000602 { X86::SHL8r1, X86::SHL8m1 },
603 { X86::SHL8rCL, X86::SHL8mCL },
604 { X86::SHL8ri, X86::SHL8mi },
605 { X86::SHLD16rrCL, X86::SHLD16mrCL },
606 { X86::SHLD16rri8, X86::SHLD16mri8 },
607 { X86::SHLD32rrCL, X86::SHLD32mrCL },
608 { X86::SHLD32rri8, X86::SHLD32mri8 },
Evan Cheng25ab6902006-09-08 06:48:29 +0000609 { X86::SHLD64rrCL, X86::SHLD64mrCL },
610 { X86::SHLD64rri8, X86::SHLD64mri8 },
Chris Lattner29268692006-09-05 02:12:02 +0000611 { X86::SHR16r1, X86::SHR16m1 },
612 { X86::SHR16rCL, X86::SHR16mCL },
613 { X86::SHR16ri, X86::SHR16mi },
614 { X86::SHR32r1, X86::SHR32m1 },
615 { X86::SHR32rCL, X86::SHR32mCL },
616 { X86::SHR32ri, X86::SHR32mi },
Evan Cheng25ab6902006-09-08 06:48:29 +0000617 { X86::SHR64r1, X86::SHR64m1 },
618 { X86::SHR64rCL, X86::SHR64mCL },
619 { X86::SHR64ri, X86::SHR64mi },
Chris Lattner29268692006-09-05 02:12:02 +0000620 { X86::SHR8r1, X86::SHR8m1 },
621 { X86::SHR8rCL, X86::SHR8mCL },
622 { X86::SHR8ri, X86::SHR8mi },
623 { X86::SHRD16rrCL, X86::SHRD16mrCL },
624 { X86::SHRD16rri8, X86::SHRD16mri8 },
625 { X86::SHRD32rrCL, X86::SHRD32mrCL },
626 { X86::SHRD32rri8, X86::SHRD32mri8 },
Evan Cheng25ab6902006-09-08 06:48:29 +0000627 { X86::SHRD64rrCL, X86::SHRD64mrCL },
628 { X86::SHRD64rri8, X86::SHRD64mri8 },
Chris Lattner29268692006-09-05 02:12:02 +0000629 { X86::SUB16ri, X86::SUB16mi },
630 { X86::SUB16ri8, X86::SUB16mi8 },
631 { X86::SUB16rr, X86::SUB16mr },
632 { X86::SUB32ri, X86::SUB32mi },
633 { X86::SUB32ri8, X86::SUB32mi8 },
634 { X86::SUB32rr, X86::SUB32mr },
Evan Cheng25ab6902006-09-08 06:48:29 +0000635 { X86::SUB64ri32, X86::SUB64mi32 },
636 { X86::SUB64ri8, X86::SUB64mi8 },
637 { X86::SUB64rr, X86::SUB64mr },
Chris Lattner29268692006-09-05 02:12:02 +0000638 { X86::SUB8ri, X86::SUB8mi },
639 { X86::SUB8rr, X86::SUB8mr },
640 { X86::XOR16ri, X86::XOR16mi },
641 { X86::XOR16ri8, X86::XOR16mi8 },
642 { X86::XOR16rr, X86::XOR16mr },
643 { X86::XOR32ri, X86::XOR32mi },
644 { X86::XOR32ri8, X86::XOR32mi8 },
645 { X86::XOR32rr, X86::XOR32mr },
Evan Cheng25ab6902006-09-08 06:48:29 +0000646 { X86::XOR64ri32, X86::XOR64mi32 },
647 { X86::XOR64ri8, X86::XOR64mi8 },
648 { X86::XOR64rr, X86::XOR64mr },
Chris Lattner29268692006-09-05 02:12:02 +0000649 { X86::XOR8ri, X86::XOR8mi },
650 { X86::XOR8rr, X86::XOR8mr }
651 };
652 ASSERT_SORTED(OpcodeTable);
653 OpcodeTablePtr = OpcodeTable;
Owen Anderson718cb662007-09-07 04:06:50 +0000654 OpcodeTableSize = array_lengthof(OpcodeTable);
Chris Lattner29268692006-09-05 02:12:02 +0000655 isTwoAddrFold = true;
656 } else if (i == 0) { // If operand 0
657 if (MI->getOpcode() == X86::MOV16r0)
Evan Chengf4c3a592007-08-30 05:54:07 +0000658 NewMI = MakeM0Inst(TII, X86::MOV16mi, MOs, MI);
Chris Lattner29268692006-09-05 02:12:02 +0000659 else if (MI->getOpcode() == X86::MOV32r0)
Evan Chengf4c3a592007-08-30 05:54:07 +0000660 NewMI = MakeM0Inst(TII, X86::MOV32mi, MOs, MI);
Evan Cheng25ab6902006-09-08 06:48:29 +0000661 else if (MI->getOpcode() == X86::MOV64r0)
Evan Chengf4c3a592007-08-30 05:54:07 +0000662 NewMI = MakeM0Inst(TII, X86::MOV64mi32, MOs, MI);
Chris Lattner29268692006-09-05 02:12:02 +0000663 else if (MI->getOpcode() == X86::MOV8r0)
Evan Chengf4c3a592007-08-30 05:54:07 +0000664 NewMI = MakeM0Inst(TII, X86::MOV8mi, MOs, MI);
Evan Cheng6ce7dc22006-11-15 20:58:11 +0000665 if (NewMI) {
666 NewMI->copyKillDeadInfo(MI);
667 return NewMI;
668 }
Chris Lattner29268692006-09-05 02:12:02 +0000669
670 static const TableEntry OpcodeTable[] = {
Dan Gohman869b2b22007-09-18 14:59:14 +0000671 { X86::CALL32r, X86::CALL32m },
672 { X86::CALL64r, X86::CALL64m },
Chris Lattner29268692006-09-05 02:12:02 +0000673 { X86::CMP16ri, X86::CMP16mi },
674 { X86::CMP16ri8, X86::CMP16mi8 },
675 { X86::CMP32ri, X86::CMP32mi },
676 { X86::CMP32ri8, X86::CMP32mi8 },
Dan Gohman869b2b22007-09-18 14:59:14 +0000677 { X86::CMP64ri32, X86::CMP64mi32 },
678 { X86::CMP64ri8, X86::CMP64mi8 },
Chris Lattner29268692006-09-05 02:12:02 +0000679 { X86::CMP8ri, X86::CMP8mi },
680 { X86::DIV16r, X86::DIV16m },
681 { X86::DIV32r, X86::DIV32m },
Evan Cheng25ab6902006-09-08 06:48:29 +0000682 { X86::DIV64r, X86::DIV64m },
Chris Lattner29268692006-09-05 02:12:02 +0000683 { X86::DIV8r, X86::DIV8m },
684 { X86::FsMOVAPDrr, X86::MOVSDmr },
685 { X86::FsMOVAPSrr, X86::MOVSSmr },
686 { X86::IDIV16r, X86::IDIV16m },
687 { X86::IDIV32r, X86::IDIV32m },
Evan Cheng25ab6902006-09-08 06:48:29 +0000688 { X86::IDIV64r, X86::IDIV64m },
Chris Lattner29268692006-09-05 02:12:02 +0000689 { X86::IDIV8r, X86::IDIV8m },
690 { X86::IMUL16r, X86::IMUL16m },
691 { X86::IMUL32r, X86::IMUL32m },
Evan Cheng25ab6902006-09-08 06:48:29 +0000692 { X86::IMUL64r, X86::IMUL64m },
Chris Lattner29268692006-09-05 02:12:02 +0000693 { X86::IMUL8r, X86::IMUL8m },
Dan Gohman869b2b22007-09-18 14:59:14 +0000694 { X86::JMP32r, X86::JMP32m },
695 { X86::JMP64r, X86::JMP64m },
Chris Lattner29268692006-09-05 02:12:02 +0000696 { X86::MOV16ri, X86::MOV16mi },
697 { X86::MOV16rr, X86::MOV16mr },
698 { X86::MOV32ri, X86::MOV32mi },
699 { X86::MOV32rr, X86::MOV32mr },
Evan Cheng25ab6902006-09-08 06:48:29 +0000700 { X86::MOV64ri32, X86::MOV64mi32 },
701 { X86::MOV64rr, X86::MOV64mr },
Chris Lattner29268692006-09-05 02:12:02 +0000702 { X86::MOV8ri, X86::MOV8mi },
703 { X86::MOV8rr, X86::MOV8mr },
704 { X86::MOVAPDrr, X86::MOVAPDmr },
705 { X86::MOVAPSrr, X86::MOVAPSmr },
706 { X86::MOVPDI2DIrr, X86::MOVPDI2DImr },
Evan Chengebf01d62006-11-16 23:33:25 +0000707 { X86::MOVPQIto64rr,X86::MOVPQIto64mr },
Chris Lattner29268692006-09-05 02:12:02 +0000708 { X86::MOVPS2SSrr, X86::MOVPS2SSmr },
709 { X86::MOVSDrr, X86::MOVSDmr },
Evan Cheng21b76122006-12-14 21:55:39 +0000710 { X86::MOVSDto64rr, X86::MOVSDto64mr },
Evan Cheng0e8dbc62006-12-14 19:44:45 +0000711 { X86::MOVSS2DIrr, X86::MOVSS2DImr },
Chris Lattner29268692006-09-05 02:12:02 +0000712 { X86::MOVSSrr, X86::MOVSSmr },
713 { X86::MOVUPDrr, X86::MOVUPDmr },
714 { X86::MOVUPSrr, X86::MOVUPSmr },
715 { X86::MUL16r, X86::MUL16m },
716 { X86::MUL32r, X86::MUL32m },
Evan Cheng25ab6902006-09-08 06:48:29 +0000717 { X86::MUL64r, X86::MUL64m },
Chris Lattner29268692006-09-05 02:12:02 +0000718 { X86::MUL8r, X86::MUL8m },
Evan Cheng0488db92007-09-25 01:57:46 +0000719
720 // TEMPORARY
721 { X86::NEW_CMP16ri, X86::NEW_CMP16mi },
722 { X86::NEW_CMP16ri8,X86::NEW_CMP16mi8 },
723 { X86::NEW_CMP32ri, X86::NEW_CMP32mi },
724 { X86::NEW_CMP32ri8,X86::NEW_CMP32mi8 },
725 { X86::NEW_CMP64ri32,X86::NEW_CMP64mi32 },
726 { X86::NEW_CMP64ri8,X86::NEW_CMP64mi8 },
727 { X86::NEW_CMP8ri, X86::NEW_CMP8mi },
728 { X86::NEW_SETAEr, X86::NEW_SETAEm },
729 { X86::NEW_SETAr, X86::NEW_SETAm },
730 { X86::NEW_SETBEr, X86::NEW_SETBEm },
731 { X86::NEW_SETBr, X86::NEW_SETBm },
732 { X86::NEW_SETEr, X86::NEW_SETEm },
733 { X86::NEW_SETGEr, X86::NEW_SETGEm },
734 { X86::NEW_SETGr, X86::NEW_SETGm },
735 { X86::NEW_SETLEr, X86::NEW_SETLEm },
736 { X86::NEW_SETLr, X86::NEW_SETLm },
737 { X86::NEW_SETNEr, X86::NEW_SETNEm },
738 { X86::NEW_SETNPr, X86::NEW_SETNPm },
739 { X86::NEW_SETNSr, X86::NEW_SETNSm },
740 { X86::NEW_SETPr, X86::NEW_SETPm },
741 { X86::NEW_SETSr, X86::NEW_SETSm },
Evan Chengfdd08372007-09-25 22:10:43 +0000742 { X86::NEW_TEST16ri,X86::NEW_TEST16mi },
743 { X86::NEW_TEST32ri,X86::NEW_TEST32mi },
744 { X86::NEW_TEST64ri32, X86::NEW_TEST64mi32 },
745 { X86::NEW_TEST8ri, X86::NEW_TEST8mi },
Evan Cheng0488db92007-09-25 01:57:46 +0000746
Chris Lattner29268692006-09-05 02:12:02 +0000747 { X86::SETAEr, X86::SETAEm },
748 { X86::SETAr, X86::SETAm },
749 { X86::SETBEr, X86::SETBEm },
750 { X86::SETBr, X86::SETBm },
751 { X86::SETEr, X86::SETEm },
752 { X86::SETGEr, X86::SETGEm },
753 { X86::SETGr, X86::SETGm },
754 { X86::SETLEr, X86::SETLEm },
755 { X86::SETLr, X86::SETLm },
756 { X86::SETNEr, X86::SETNEm },
757 { X86::SETNPr, X86::SETNPm },
758 { X86::SETNSr, X86::SETNSm },
759 { X86::SETPr, X86::SETPm },
760 { X86::SETSr, X86::SETSm },
Dan Gohman869b2b22007-09-18 14:59:14 +0000761 { X86::TAILJMPr, X86::TAILJMPm },
Chris Lattner29268692006-09-05 02:12:02 +0000762 { X86::TEST16ri, X86::TEST16mi },
763 { X86::TEST32ri, X86::TEST32mi },
Evan Cheng25ab6902006-09-08 06:48:29 +0000764 { X86::TEST64ri32, X86::TEST64mi32 },
Chris Lattner29268692006-09-05 02:12:02 +0000765 { X86::TEST8ri, X86::TEST8mi },
766 { X86::XCHG16rr, X86::XCHG16mr },
767 { X86::XCHG32rr, X86::XCHG32mr },
Evan Cheng25ab6902006-09-08 06:48:29 +0000768 { X86::XCHG64rr, X86::XCHG64mr },
Chris Lattner29268692006-09-05 02:12:02 +0000769 { X86::XCHG8rr, X86::XCHG8mr }
Jim Laskeyf19807c2006-07-19 17:53:32 +0000770 };
Evan Chengf4c3a592007-08-30 05:54:07 +0000771
Jim Laskeyf19807c2006-07-19 17:53:32 +0000772 ASSERT_SORTED(OpcodeTable);
773 OpcodeTablePtr = OpcodeTable;
Owen Anderson718cb662007-09-07 04:06:50 +0000774 OpcodeTableSize = array_lengthof(OpcodeTable);
Chris Lattner7c035b72004-02-17 05:35:13 +0000775 } else if (i == 1) {
Jim Laskeyf19807c2006-07-19 17:53:32 +0000776 static const TableEntry OpcodeTable[] = {
Chris Lattner29268692006-09-05 02:12:02 +0000777 { X86::CMP16rr, X86::CMP16rm },
778 { X86::CMP32rr, X86::CMP32rm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000779 { X86::CMP64rr, X86::CMP64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000780 { X86::CMP8rr, X86::CMP8rm },
Chris Lattner29268692006-09-05 02:12:02 +0000781 { X86::CVTSD2SSrr, X86::CVTSD2SSrm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000782 { X86::CVTSI2SD64rr, X86::CVTSI2SD64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000783 { X86::CVTSI2SDrr, X86::CVTSI2SDrm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000784 { X86::CVTSI2SS64rr, X86::CVTSI2SS64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000785 { X86::CVTSI2SSrr, X86::CVTSI2SSrm },
786 { X86::CVTSS2SDrr, X86::CVTSS2SDrm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000787 { X86::CVTTSD2SI64rr, X86::CVTTSD2SI64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000788 { X86::CVTTSD2SIrr, X86::CVTTSD2SIrm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000789 { X86::CVTTSS2SI64rr, X86::CVTTSS2SI64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000790 { X86::CVTTSS2SIrr, X86::CVTTSS2SIrm },
791 { X86::FsMOVAPDrr, X86::MOVSDrm },
792 { X86::FsMOVAPSrr, X86::MOVSSrm },
793 { X86::IMUL16rri, X86::IMUL16rmi },
794 { X86::IMUL16rri8, X86::IMUL16rmi8 },
795 { X86::IMUL32rri, X86::IMUL32rmi },
796 { X86::IMUL32rri8, X86::IMUL32rmi8 },
Evan Cheng25ab6902006-09-08 06:48:29 +0000797 { X86::IMUL64rri32, X86::IMUL64rmi32 },
798 { X86::IMUL64rri8, X86::IMUL64rmi8 },
Chris Lattner29268692006-09-05 02:12:02 +0000799 { X86::Int_CMPSDrr, X86::Int_CMPSDrm },
800 { X86::Int_CMPSSrr, X86::Int_CMPSSrm },
801 { X86::Int_COMISDrr, X86::Int_COMISDrm },
802 { X86::Int_COMISSrr, X86::Int_COMISSrm },
803 { X86::Int_CVTDQ2PDrr, X86::Int_CVTDQ2PDrm },
804 { X86::Int_CVTDQ2PSrr, X86::Int_CVTDQ2PSrm },
805 { X86::Int_CVTPD2DQrr, X86::Int_CVTPD2DQrm },
806 { X86::Int_CVTPD2PSrr, X86::Int_CVTPD2PSrm },
807 { X86::Int_CVTPS2DQrr, X86::Int_CVTPS2DQrm },
808 { X86::Int_CVTPS2PDrr, X86::Int_CVTPS2PDrm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000809 { X86::Int_CVTSD2SI64rr,X86::Int_CVTSD2SI64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000810 { X86::Int_CVTSD2SIrr, X86::Int_CVTSD2SIrm },
811 { X86::Int_CVTSD2SSrr, X86::Int_CVTSD2SSrm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000812 { X86::Int_CVTSI2SD64rr,X86::Int_CVTSI2SD64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000813 { X86::Int_CVTSI2SDrr, X86::Int_CVTSI2SDrm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000814 { X86::Int_CVTSI2SS64rr,X86::Int_CVTSI2SS64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000815 { X86::Int_CVTSI2SSrr, X86::Int_CVTSI2SSrm },
816 { X86::Int_CVTSS2SDrr, X86::Int_CVTSS2SDrm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000817 { X86::Int_CVTSS2SI64rr,X86::Int_CVTSS2SI64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000818 { X86::Int_CVTSS2SIrr, X86::Int_CVTSS2SIrm },
819 { X86::Int_CVTTPD2DQrr, X86::Int_CVTTPD2DQrm },
820 { X86::Int_CVTTPS2DQrr, X86::Int_CVTTPS2DQrm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000821 { X86::Int_CVTTSD2SI64rr,X86::Int_CVTTSD2SI64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000822 { X86::Int_CVTTSD2SIrr, X86::Int_CVTTSD2SIrm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000823 { X86::Int_CVTTSS2SI64rr,X86::Int_CVTTSS2SI64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000824 { X86::Int_CVTTSS2SIrr, X86::Int_CVTTSS2SIrm },
825 { X86::Int_UCOMISDrr, X86::Int_UCOMISDrm },
826 { X86::Int_UCOMISSrr, X86::Int_UCOMISSrm },
827 { X86::MOV16rr, X86::MOV16rm },
828 { X86::MOV32rr, X86::MOV32rm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000829 { X86::MOV64rr, X86::MOV64rm },
Evan Chengebf01d62006-11-16 23:33:25 +0000830 { X86::MOV64toPQIrr, X86::MOV64toPQIrm },
Evan Cheng21b76122006-12-14 21:55:39 +0000831 { X86::MOV64toSDrr, X86::MOV64toSDrm },
Chris Lattner29268692006-09-05 02:12:02 +0000832 { X86::MOV8rr, X86::MOV8rm },
833 { X86::MOVAPDrr, X86::MOVAPDrm },
834 { X86::MOVAPSrr, X86::MOVAPSrm },
835 { X86::MOVDDUPrr, X86::MOVDDUPrm },
836 { X86::MOVDI2PDIrr, X86::MOVDI2PDIrm },
Evan Cheng0e8dbc62006-12-14 19:44:45 +0000837 { X86::MOVDI2SSrr, X86::MOVDI2SSrm },
Chris Lattner29268692006-09-05 02:12:02 +0000838 { X86::MOVSD2PDrr, X86::MOVSD2PDrm },
839 { X86::MOVSDrr, X86::MOVSDrm },
840 { X86::MOVSHDUPrr, X86::MOVSHDUPrm },
841 { X86::MOVSLDUPrr, X86::MOVSLDUPrm },
842 { X86::MOVSS2PSrr, X86::MOVSS2PSrm },
843 { X86::MOVSSrr, X86::MOVSSrm },
844 { X86::MOVSX16rr8, X86::MOVSX16rm8 },
845 { X86::MOVSX32rr16, X86::MOVSX32rm16 },
846 { X86::MOVSX32rr8, X86::MOVSX32rm8 },
Evan Cheng25ab6902006-09-08 06:48:29 +0000847 { X86::MOVSX64rr16, X86::MOVSX64rm16 },
848 { X86::MOVSX64rr32, X86::MOVSX64rm32 },
849 { X86::MOVSX64rr8, X86::MOVSX64rm8 },
Chris Lattner29268692006-09-05 02:12:02 +0000850 { X86::MOVUPDrr, X86::MOVUPDrm },
851 { X86::MOVUPSrr, X86::MOVUPSrm },
852 { X86::MOVZX16rr8, X86::MOVZX16rm8 },
853 { X86::MOVZX32rr16, X86::MOVZX32rm16 },
854 { X86::MOVZX32rr8, X86::MOVZX32rm8 },
Evan Cheng25ab6902006-09-08 06:48:29 +0000855 { X86::MOVZX64rr16, X86::MOVZX64rm16 },
856 { X86::MOVZX64rr8, X86::MOVZX64rm8 },
Evan Cheng0488db92007-09-25 01:57:46 +0000857
858 // TEMPORARY
Evan Chengfdd08372007-09-25 22:10:43 +0000859 { X86::NEW_CMP16rr, X86::NEW_CMP16rm },
860 { X86::NEW_CMP32rr, X86::NEW_CMP32rm },
861 { X86::NEW_CMP64rr, X86::NEW_CMP64rm },
862 { X86::NEW_CMP8rr, X86::NEW_CMP8rm },
Evan Cheng0488db92007-09-25 01:57:46 +0000863 { X86::NEW_Int_COMISDrr, X86::NEW_Int_COMISDrm },
864 { X86::NEW_Int_COMISSrr, X86::NEW_Int_COMISSrm },
865 { X86::NEW_Int_UCOMISDrr, X86::NEW_Int_UCOMISDrm },
866 { X86::NEW_Int_UCOMISSrr, X86::NEW_Int_UCOMISSrm },
867 { X86::NEW_TEST16rr, X86::NEW_TEST16rm },
868 { X86::NEW_TEST32rr, X86::NEW_TEST32rm },
869 { X86::NEW_TEST64rr, X86::NEW_TEST64rm },
870 { X86::NEW_TEST8rr, X86::NEW_TEST8rm },
871 { X86::NEW_UCOMISDrr, X86::NEW_UCOMISDrm },
872 { X86::NEW_UCOMISSrr, X86::NEW_UCOMISSrm },
873
Evan Chengfab7eff2007-09-19 19:02:47 +0000874 { X86::PSHUFDri, X86::PSHUFDmi },
875 { X86::PSHUFHWri, X86::PSHUFHWmi },
876 { X86::PSHUFLWri, X86::PSHUFLWmi },
Evan Cheng25ab6902006-09-08 06:48:29 +0000877 { X86::PsMOVZX64rr32, X86::PsMOVZX64rm32 },
Dan Gohmana4ddacf2007-09-20 14:17:21 +0000878 { X86::RCPPSr, X86::RCPPSm },
879 { X86::RCPPSr_Int, X86::RCPPSm_Int },
880 { X86::RSQRTPSr, X86::RSQRTPSm },
881 { X86::RSQRTPSr_Int, X86::RSQRTPSm_Int },
882 { X86::RSQRTSSr, X86::RSQRTSSm },
883 { X86::RSQRTSSr_Int, X86::RSQRTSSm_Int },
884 { X86::SQRTPDr, X86::SQRTPDm },
885 { X86::SQRTPDr_Int, X86::SQRTPDm_Int },
886 { X86::SQRTPSr, X86::SQRTPSm },
887 { X86::SQRTPSr_Int, X86::SQRTPSm_Int },
888 { X86::SQRTSDr, X86::SQRTSDm },
889 { X86::SQRTSDr_Int, X86::SQRTSDm_Int },
890 { X86::SQRTSSr, X86::SQRTSSm },
891 { X86::SQRTSSr_Int, X86::SQRTSSm_Int },
Chris Lattner29268692006-09-05 02:12:02 +0000892 { X86::TEST16rr, X86::TEST16rm },
893 { X86::TEST32rr, X86::TEST32rm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000894 { X86::TEST64rr, X86::TEST64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000895 { X86::TEST8rr, X86::TEST8rm },
Chris Lattnerb14ca602006-09-07 20:32:01 +0000896 // FIXME: TEST*rr EAX,EAX ---> CMP [mem], 0
Chris Lattner29268692006-09-05 02:12:02 +0000897 { X86::UCOMISDrr, X86::UCOMISDrm },
898 { X86::UCOMISSrr, X86::UCOMISSrm },
899 { X86::XCHG16rr, X86::XCHG16rm },
900 { X86::XCHG32rr, X86::XCHG32rm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000901 { X86::XCHG64rr, X86::XCHG64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000902 { X86::XCHG8rr, X86::XCHG8rm }
903 };
Evan Chengf4c3a592007-08-30 05:54:07 +0000904
Chris Lattner29268692006-09-05 02:12:02 +0000905 ASSERT_SORTED(OpcodeTable);
906 OpcodeTablePtr = OpcodeTable;
Owen Anderson718cb662007-09-07 04:06:50 +0000907 OpcodeTableSize = array_lengthof(OpcodeTable);
Chris Lattner29268692006-09-05 02:12:02 +0000908 } else if (i == 2) {
909 static const TableEntry OpcodeTable[] = {
910 { X86::ADC32rr, X86::ADC32rm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000911 { X86::ADC64rr, X86::ADC64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000912 { X86::ADD16rr, X86::ADD16rm },
913 { X86::ADD32rr, X86::ADD32rm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000914 { X86::ADD64rr, X86::ADD64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000915 { X86::ADD8rr, X86::ADD8rm },
916 { X86::ADDPDrr, X86::ADDPDrm },
917 { X86::ADDPSrr, X86::ADDPSrm },
918 { X86::ADDSDrr, X86::ADDSDrm },
919 { X86::ADDSSrr, X86::ADDSSrm },
920 { X86::ADDSUBPDrr, X86::ADDSUBPDrm },
921 { X86::ADDSUBPSrr, X86::ADDSUBPSrm },
922 { X86::AND16rr, X86::AND16rm },
923 { X86::AND32rr, X86::AND32rm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000924 { X86::AND64rr, X86::AND64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000925 { X86::AND8rr, X86::AND8rm },
926 { X86::ANDNPDrr, X86::ANDNPDrm },
927 { X86::ANDNPSrr, X86::ANDNPSrm },
928 { X86::ANDPDrr, X86::ANDPDrm },
929 { X86::ANDPSrr, X86::ANDPSrm },
930 { X86::CMOVA16rr, X86::CMOVA16rm },
931 { X86::CMOVA32rr, X86::CMOVA32rm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000932 { X86::CMOVA64rr, X86::CMOVA64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000933 { X86::CMOVAE16rr, X86::CMOVAE16rm },
934 { X86::CMOVAE32rr, X86::CMOVAE32rm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000935 { X86::CMOVAE64rr, X86::CMOVAE64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000936 { X86::CMOVB16rr, X86::CMOVB16rm },
937 { X86::CMOVB32rr, X86::CMOVB32rm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000938 { X86::CMOVB64rr, X86::CMOVB64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000939 { X86::CMOVBE16rr, X86::CMOVBE16rm },
940 { X86::CMOVBE32rr, X86::CMOVBE32rm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000941 { X86::CMOVBE64rr, X86::CMOVBE64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000942 { X86::CMOVE16rr, X86::CMOVE16rm },
943 { X86::CMOVE32rr, X86::CMOVE32rm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000944 { X86::CMOVE64rr, X86::CMOVE64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000945 { X86::CMOVG16rr, X86::CMOVG16rm },
946 { X86::CMOVG32rr, X86::CMOVG32rm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000947 { X86::CMOVG64rr, X86::CMOVG64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000948 { X86::CMOVGE16rr, X86::CMOVGE16rm },
949 { X86::CMOVGE32rr, X86::CMOVGE32rm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000950 { X86::CMOVGE64rr, X86::CMOVGE64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000951 { X86::CMOVL16rr, X86::CMOVL16rm },
952 { X86::CMOVL32rr, X86::CMOVL32rm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000953 { X86::CMOVL64rr, X86::CMOVL64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000954 { X86::CMOVLE16rr, X86::CMOVLE16rm },
955 { X86::CMOVLE32rr, X86::CMOVLE32rm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000956 { X86::CMOVLE64rr, X86::CMOVLE64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000957 { X86::CMOVNE16rr, X86::CMOVNE16rm },
958 { X86::CMOVNE32rr, X86::CMOVNE32rm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000959 { X86::CMOVNE64rr, X86::CMOVNE64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000960 { X86::CMOVNP16rr, X86::CMOVNP16rm },
961 { X86::CMOVNP32rr, X86::CMOVNP32rm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000962 { X86::CMOVNP64rr, X86::CMOVNP64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000963 { X86::CMOVNS16rr, X86::CMOVNS16rm },
964 { X86::CMOVNS32rr, X86::CMOVNS32rm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000965 { X86::CMOVNS64rr, X86::CMOVNS64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000966 { X86::CMOVP16rr, X86::CMOVP16rm },
967 { X86::CMOVP32rr, X86::CMOVP32rm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000968 { X86::CMOVP64rr, X86::CMOVP64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000969 { X86::CMOVS16rr, X86::CMOVS16rm },
970 { X86::CMOVS32rr, X86::CMOVS32rm },
Evan Cheng25ab6902006-09-08 06:48:29 +0000971 { X86::CMOVS64rr, X86::CMOVS64rm },
Dan Gohmana4ddacf2007-09-20 14:17:21 +0000972 { X86::CMPPDrri, X86::CMPPDrmi },
973 { X86::CMPPSrri, X86::CMPPSrmi },
974 { X86::CMPSDrr, X86::CMPSDrm },
975 { X86::CMPSSrr, X86::CMPSSrm },
Chris Lattner29268692006-09-05 02:12:02 +0000976 { X86::DIVPDrr, X86::DIVPDrm },
977 { X86::DIVPSrr, X86::DIVPSrm },
978 { X86::DIVSDrr, X86::DIVSDrm },
979 { X86::DIVSSrr, X86::DIVSSrm },
980 { X86::HADDPDrr, X86::HADDPDrm },
981 { X86::HADDPSrr, X86::HADDPSrm },
982 { X86::HSUBPDrr, X86::HSUBPDrm },
983 { X86::HSUBPSrr, X86::HSUBPSrm },
984 { X86::IMUL16rr, X86::IMUL16rm },
985 { X86::IMUL32rr, X86::IMUL32rm },
Dan Gohman869b2b22007-09-18 14:59:14 +0000986 { X86::IMUL64rr, X86::IMUL64rm },
Chris Lattner29268692006-09-05 02:12:02 +0000987 { X86::MAXPDrr, X86::MAXPDrm },
Dan Gohman20382522007-07-10 00:05:58 +0000988 { X86::MAXPDrr_Int, X86::MAXPDrm_Int },
Chris Lattner29268692006-09-05 02:12:02 +0000989 { X86::MAXPSrr, X86::MAXPSrm },
Dan Gohman20382522007-07-10 00:05:58 +0000990 { X86::MAXPSrr_Int, X86::MAXPSrm_Int },
991 { X86::MAXSDrr, X86::MAXSDrm },
992 { X86::MAXSDrr_Int, X86::MAXSDrm_Int },
993 { X86::MAXSSrr, X86::MAXSSrm },
994 { X86::MAXSSrr_Int, X86::MAXSSrm_Int },
Chris Lattner29268692006-09-05 02:12:02 +0000995 { X86::MINPDrr, X86::MINPDrm },
Dan Gohman20382522007-07-10 00:05:58 +0000996 { X86::MINPDrr_Int, X86::MINPDrm_Int },
Chris Lattner29268692006-09-05 02:12:02 +0000997 { X86::MINPSrr, X86::MINPSrm },
Dan Gohman20382522007-07-10 00:05:58 +0000998 { X86::MINPSrr_Int, X86::MINPSrm_Int },
999 { X86::MINSDrr, X86::MINSDrm },
1000 { X86::MINSDrr_Int, X86::MINSDrm_Int },
1001 { X86::MINSSrr, X86::MINSSrm },
1002 { X86::MINSSrr_Int, X86::MINSSrm_Int },
Chris Lattner29268692006-09-05 02:12:02 +00001003 { X86::MULPDrr, X86::MULPDrm },
1004 { X86::MULPSrr, X86::MULPSrm },
1005 { X86::MULSDrr, X86::MULSDrm },
1006 { X86::MULSSrr, X86::MULSSrm },
Evan Cheng0488db92007-09-25 01:57:46 +00001007
1008 // TEMPORARY
1009 { X86::NEW_CMOVA16rr, X86::NEW_CMOVA16rm },
1010 { X86::NEW_CMOVA32rr, X86::NEW_CMOVA32rm },
1011 { X86::NEW_CMOVA64rr, X86::NEW_CMOVA64rm },
1012 { X86::NEW_CMOVAE16rr, X86::NEW_CMOVAE16rm },
1013 { X86::NEW_CMOVAE32rr, X86::NEW_CMOVAE32rm },
1014 { X86::NEW_CMOVAE64rr, X86::NEW_CMOVAE64rm },
1015 { X86::NEW_CMOVB16rr, X86::NEW_CMOVB16rm },
1016 { X86::NEW_CMOVB32rr, X86::NEW_CMOVB32rm },
1017 { X86::NEW_CMOVB64rr, X86::NEW_CMOVB64rm },
1018 { X86::NEW_CMOVBE16rr, X86::NEW_CMOVBE16rm },
1019 { X86::NEW_CMOVBE32rr, X86::NEW_CMOVBE32rm },
1020 { X86::NEW_CMOVBE64rr, X86::NEW_CMOVBE64rm },
1021 { X86::NEW_CMOVE16rr, X86::NEW_CMOVE16rm },
1022 { X86::NEW_CMOVE32rr, X86::NEW_CMOVE32rm },
1023 { X86::NEW_CMOVE64rr, X86::NEW_CMOVE64rm },
1024 { X86::NEW_CMOVG16rr, X86::NEW_CMOVG16rm },
1025 { X86::NEW_CMOVG32rr, X86::NEW_CMOVG32rm },
1026 { X86::NEW_CMOVG64rr, X86::NEW_CMOVG64rm },
1027 { X86::NEW_CMOVGE16rr, X86::NEW_CMOVGE16rm },
1028 { X86::NEW_CMOVGE32rr, X86::NEW_CMOVGE32rm },
1029 { X86::NEW_CMOVGE64rr, X86::NEW_CMOVGE64rm },
1030 { X86::NEW_CMOVL16rr, X86::NEW_CMOVL16rm },
1031 { X86::NEW_CMOVL32rr, X86::NEW_CMOVL32rm },
1032 { X86::NEW_CMOVL64rr, X86::NEW_CMOVL64rm },
1033 { X86::NEW_CMOVLE16rr, X86::NEW_CMOVLE16rm },
1034 { X86::NEW_CMOVLE32rr, X86::NEW_CMOVLE32rm },
1035 { X86::NEW_CMOVLE64rr, X86::NEW_CMOVLE64rm },
1036 { X86::NEW_CMOVNE16rr, X86::NEW_CMOVNE16rm },
1037 { X86::NEW_CMOVNE32rr, X86::NEW_CMOVNE32rm },
1038 { X86::NEW_CMOVNE64rr, X86::NEW_CMOVNE64rm },
1039 { X86::NEW_CMOVNP16rr, X86::NEW_CMOVNP16rm },
1040 { X86::NEW_CMOVNP32rr, X86::NEW_CMOVNP32rm },
1041 { X86::NEW_CMOVNP64rr, X86::NEW_CMOVNP64rm },
1042 { X86::NEW_CMOVNS16rr, X86::NEW_CMOVNS16rm },
1043 { X86::NEW_CMOVNS32rr, X86::NEW_CMOVNS32rm },
1044 { X86::NEW_CMOVNS64rr, X86::NEW_CMOVNS64rm },
1045 { X86::NEW_CMOVP16rr, X86::NEW_CMOVP16rm },
1046 { X86::NEW_CMOVP32rr, X86::NEW_CMOVP32rm },
1047 { X86::NEW_CMOVP64rr, X86::NEW_CMOVP64rm },
1048 { X86::NEW_CMOVS16rr, X86::NEW_CMOVS16rm },
1049 { X86::NEW_CMOVS32rr, X86::NEW_CMOVS32rm },
1050 { X86::NEW_CMOVS64rr, X86::NEW_CMOVS64rm },
1051
Chris Lattner29268692006-09-05 02:12:02 +00001052 { X86::OR16rr, X86::OR16rm },
1053 { X86::OR32rr, X86::OR32rm },
Evan Cheng25ab6902006-09-08 06:48:29 +00001054 { X86::OR64rr, X86::OR64rm },
Chris Lattner29268692006-09-05 02:12:02 +00001055 { X86::OR8rr, X86::OR8rm },
1056 { X86::ORPDrr, X86::ORPDrm },
1057 { X86::ORPSrr, X86::ORPSrm },
1058 { X86::PACKSSDWrr, X86::PACKSSDWrm },
1059 { X86::PACKSSWBrr, X86::PACKSSWBrm },
1060 { X86::PACKUSWBrr, X86::PACKUSWBrm },
1061 { X86::PADDBrr, X86::PADDBrm },
1062 { X86::PADDDrr, X86::PADDDrm },
Bill Wendlingb53e98e2007-04-24 21:19:14 +00001063 { X86::PADDQrr, X86::PADDQrm },
Chris Lattner29268692006-09-05 02:12:02 +00001064 { X86::PADDSBrr, X86::PADDSBrm },
1065 { X86::PADDSWrr, X86::PADDSWrm },
1066 { X86::PADDWrr, X86::PADDWrm },
1067 { X86::PANDNrr, X86::PANDNrm },
1068 { X86::PANDrr, X86::PANDrm },
1069 { X86::PAVGBrr, X86::PAVGBrm },
1070 { X86::PAVGWrr, X86::PAVGWrm },
1071 { X86::PCMPEQBrr, X86::PCMPEQBrm },
1072 { X86::PCMPEQDrr, X86::PCMPEQDrm },
1073 { X86::PCMPEQWrr, X86::PCMPEQWrm },
1074 { X86::PCMPGTBrr, X86::PCMPGTBrm },
1075 { X86::PCMPGTDrr, X86::PCMPGTDrm },
1076 { X86::PCMPGTWrr, X86::PCMPGTWrm },
1077 { X86::PINSRWrri, X86::PINSRWrmi },
1078 { X86::PMADDWDrr, X86::PMADDWDrm },
1079 { X86::PMAXSWrr, X86::PMAXSWrm },
1080 { X86::PMAXUBrr, X86::PMAXUBrm },
1081 { X86::PMINSWrr, X86::PMINSWrm },
1082 { X86::PMINUBrr, X86::PMINUBrm },
1083 { X86::PMULHUWrr, X86::PMULHUWrm },
1084 { X86::PMULHWrr, X86::PMULHWrm },
1085 { X86::PMULLWrr, X86::PMULLWrm },
1086 { X86::PMULUDQrr, X86::PMULUDQrm },
1087 { X86::PORrr, X86::PORrm },
1088 { X86::PSADBWrr, X86::PSADBWrm },
1089 { X86::PSLLDrr, X86::PSLLDrm },
1090 { X86::PSLLQrr, X86::PSLLQrm },
1091 { X86::PSLLWrr, X86::PSLLWrm },
1092 { X86::PSRADrr, X86::PSRADrm },
1093 { X86::PSRAWrr, X86::PSRAWrm },
1094 { X86::PSRLDrr, X86::PSRLDrm },
1095 { X86::PSRLQrr, X86::PSRLQrm },
1096 { X86::PSRLWrr, X86::PSRLWrm },
1097 { X86::PSUBBrr, X86::PSUBBrm },
1098 { X86::PSUBDrr, X86::PSUBDrm },
1099 { X86::PSUBSBrr, X86::PSUBSBrm },
1100 { X86::PSUBSWrr, X86::PSUBSWrm },
1101 { X86::PSUBWrr, X86::PSUBWrm },
1102 { X86::PUNPCKHBWrr, X86::PUNPCKHBWrm },
1103 { X86::PUNPCKHDQrr, X86::PUNPCKHDQrm },
1104 { X86::PUNPCKHQDQrr, X86::PUNPCKHQDQrm },
1105 { X86::PUNPCKHWDrr, X86::PUNPCKHWDrm },
1106 { X86::PUNPCKLBWrr, X86::PUNPCKLBWrm },
1107 { X86::PUNPCKLDQrr, X86::PUNPCKLDQrm },
1108 { X86::PUNPCKLQDQrr, X86::PUNPCKLQDQrm },
1109 { X86::PUNPCKLWDrr, X86::PUNPCKLWDrm },
1110 { X86::PXORrr, X86::PXORrm },
Chris Lattner29268692006-09-05 02:12:02 +00001111 { X86::SBB32rr, X86::SBB32rm },
Evan Cheng25ab6902006-09-08 06:48:29 +00001112 { X86::SBB64rr, X86::SBB64rm },
Chris Lattner29268692006-09-05 02:12:02 +00001113 { X86::SHUFPDrri, X86::SHUFPDrmi },
1114 { X86::SHUFPSrri, X86::SHUFPSrmi },
Chris Lattner29268692006-09-05 02:12:02 +00001115 { X86::SUB16rr, X86::SUB16rm },
1116 { X86::SUB32rr, X86::SUB32rm },
Evan Cheng25ab6902006-09-08 06:48:29 +00001117 { X86::SUB64rr, X86::SUB64rm },
Chris Lattner29268692006-09-05 02:12:02 +00001118 { X86::SUB8rr, X86::SUB8rm },
1119 { X86::SUBPDrr, X86::SUBPDrm },
1120 { X86::SUBPSrr, X86::SUBPSrm },
1121 { X86::SUBSDrr, X86::SUBSDrm },
1122 { X86::SUBSSrr, X86::SUBSSrm },
Chris Lattnerb14ca602006-09-07 20:32:01 +00001123 // FIXME: TEST*rr -> swapped operand of TEST*mr.
Chris Lattner29268692006-09-05 02:12:02 +00001124 { X86::UNPCKHPDrr, X86::UNPCKHPDrm },
1125 { X86::UNPCKHPSrr, X86::UNPCKHPSrm },
1126 { X86::UNPCKLPDrr, X86::UNPCKLPDrm },
1127 { X86::UNPCKLPSrr, X86::UNPCKLPSrm },
1128 { X86::XOR16rr, X86::XOR16rm },
1129 { X86::XOR32rr, X86::XOR32rm },
Evan Cheng25ab6902006-09-08 06:48:29 +00001130 { X86::XOR64rr, X86::XOR64rm },
Chris Lattner29268692006-09-05 02:12:02 +00001131 { X86::XOR8rr, X86::XOR8rm },
1132 { X86::XORPDrr, X86::XORPDrm },
1133 { X86::XORPSrr, X86::XORPSrm }
Jim Laskeyf19807c2006-07-19 17:53:32 +00001134 };
Evan Chengf4c3a592007-08-30 05:54:07 +00001135
Jim Laskey613f1f82006-07-19 19:32:06 +00001136 ASSERT_SORTED(OpcodeTable);
Jim Laskeyf19807c2006-07-19 17:53:32 +00001137 OpcodeTablePtr = OpcodeTable;
Owen Anderson718cb662007-09-07 04:06:50 +00001138 OpcodeTableSize = array_lengthof(OpcodeTable);
Jim Laskeyf19807c2006-07-19 17:53:32 +00001139 }
1140
Chris Lattner29268692006-09-05 02:12:02 +00001141 // If table selected...
Jim Laskeyf19807c2006-07-19 17:53:32 +00001142 if (OpcodeTablePtr) {
Chris Lattner29268692006-09-05 02:12:02 +00001143 // Find the Opcode to fuse
Jim Laskeyf19807c2006-07-19 17:53:32 +00001144 unsigned fromOpcode = MI->getOpcode();
Jim Laskeyf19807c2006-07-19 17:53:32 +00001145 // Lookup fromOpcode in table
Chris Lattner29268692006-09-05 02:12:02 +00001146 if (const TableEntry *Entry = TableLookup(OpcodeTablePtr, OpcodeTableSize,
1147 fromOpcode)) {
1148 if (isTwoAddrFold)
Evan Chengf4c3a592007-08-30 05:54:07 +00001149 NewMI = FuseTwoAddrInst(Entry->to, MOs, MI, TII);
Evan Cheng6ce7dc22006-11-15 20:58:11 +00001150 else
Evan Chengf4c3a592007-08-30 05:54:07 +00001151 NewMI = FuseInst(Entry->to, i, MOs, MI, TII);
Evan Cheng6ce7dc22006-11-15 20:58:11 +00001152 NewMI->copyKillDeadInfo(MI);
1153 return NewMI;
Chris Lattner7c035b72004-02-17 05:35:13 +00001154 }
Alkis Evlogimenosb4998662004-02-17 04:33:18 +00001155 }
Jim Laskeyf19807c2006-07-19 17:53:32 +00001156
1157 // No fusion
Alkis Evlogimenos39354c92004-03-14 07:19:51 +00001158 if (PrintFailedFusing)
Bill Wendlingf5da1332006-12-07 22:21:48 +00001159 cerr << "We failed to fuse ("
1160 << ((i == 1) ? "r" : "s") << "): " << *MI;
Alkis Evlogimenos39354c92004-03-14 07:19:51 +00001161 return NULL;
Alkis Evlogimenosb4998662004-02-17 04:33:18 +00001162}
1163
Jim Laskeyf19807c2006-07-19 17:53:32 +00001164
Evan Chengf4c3a592007-08-30 05:54:07 +00001165MachineInstr* X86RegisterInfo::foldMemoryOperand(MachineInstr *MI, unsigned OpNum,
1166 int FrameIndex) const {
1167 // Check switch flag
1168 if (NoFusing) return NULL;
1169 SmallVector<MachineOperand,4> MOs;
1170 MOs.push_back(MachineOperand::CreateFrameIndex(FrameIndex));
1171 return foldMemoryOperand(MI, OpNum, MOs);
1172}
1173
1174MachineInstr* X86RegisterInfo::foldMemoryOperand(MachineInstr *MI, unsigned OpNum,
1175 MachineInstr *LoadMI) const {
1176 // Check switch flag
1177 if (NoFusing) return NULL;
1178 SmallVector<MachineOperand,4> MOs;
1179 unsigned NumOps = TII.getNumOperands(LoadMI->getOpcode());
1180 for (unsigned i = NumOps - 4; i != NumOps; ++i)
1181 MOs.push_back(LoadMI->getOperand(i));
1182 return foldMemoryOperand(MI, OpNum, MOs);
1183}
1184
Evan Cheng64d80e32007-07-19 01:14:50 +00001185const unsigned *
1186X86RegisterInfo::getCalleeSavedRegs(const MachineFunction *MF) const {
Evan Chengc2b861d2007-01-02 21:33:40 +00001187 static const unsigned CalleeSavedRegs32Bit[] = {
Evan Cheng0f3ac8d2006-05-18 00:12:58 +00001188 X86::ESI, X86::EDI, X86::EBX, X86::EBP, 0
1189 };
Anton Korobeynikov2365f512007-07-14 14:06:15 +00001190
1191 static const unsigned CalleeSavedRegs32EHRet[] = {
1192 X86::EAX, X86::EDX, X86::ESI, X86::EDI, X86::EBX, X86::EBP, 0
1193 };
1194
Evan Chengc2b861d2007-01-02 21:33:40 +00001195 static const unsigned CalleeSavedRegs64Bit[] = {
Evan Cheng25ab6902006-09-08 06:48:29 +00001196 X86::RBX, X86::R12, X86::R13, X86::R14, X86::R15, X86::RBP, 0
1197 };
1198
Anton Korobeynikov2365f512007-07-14 14:06:15 +00001199 if (Is64Bit)
1200 return CalleeSavedRegs64Bit;
1201 else {
1202 if (MF) {
1203 MachineFrameInfo *MFI = MF->getFrameInfo();
1204 MachineModuleInfo *MMI = MFI->getMachineModuleInfo();
1205 if (MMI && MMI->callsEHReturn())
1206 return CalleeSavedRegs32EHRet;
1207 }
1208 return CalleeSavedRegs32Bit;
1209 }
Evan Cheng0f3ac8d2006-05-18 00:12:58 +00001210}
1211
1212const TargetRegisterClass* const*
Anton Korobeynikov2365f512007-07-14 14:06:15 +00001213X86RegisterInfo::getCalleeSavedRegClasses(const MachineFunction *MF) const {
Evan Chengc2b861d2007-01-02 21:33:40 +00001214 static const TargetRegisterClass * const CalleeSavedRegClasses32Bit[] = {
Evan Cheng0f3ac8d2006-05-18 00:12:58 +00001215 &X86::GR32RegClass, &X86::GR32RegClass,
1216 &X86::GR32RegClass, &X86::GR32RegClass, 0
1217 };
Anton Korobeynikov2365f512007-07-14 14:06:15 +00001218 static const TargetRegisterClass * const CalleeSavedRegClasses32EHRet[] = {
1219 &X86::GR32RegClass, &X86::GR32RegClass,
1220 &X86::GR32RegClass, &X86::GR32RegClass,
1221 &X86::GR32RegClass, &X86::GR32RegClass, 0
1222 };
Evan Chengc2b861d2007-01-02 21:33:40 +00001223 static const TargetRegisterClass * const CalleeSavedRegClasses64Bit[] = {
Evan Cheng25ab6902006-09-08 06:48:29 +00001224 &X86::GR64RegClass, &X86::GR64RegClass,
1225 &X86::GR64RegClass, &X86::GR64RegClass,
1226 &X86::GR64RegClass, &X86::GR64RegClass, 0
1227 };
1228
Anton Korobeynikov2365f512007-07-14 14:06:15 +00001229 if (Is64Bit)
1230 return CalleeSavedRegClasses64Bit;
1231 else {
1232 if (MF) {
1233 MachineFrameInfo *MFI = MF->getFrameInfo();
1234 MachineModuleInfo *MMI = MFI->getMachineModuleInfo();
1235 if (MMI && MMI->callsEHReturn())
1236 return CalleeSavedRegClasses32EHRet;
1237 }
1238 return CalleeSavedRegClasses32Bit;
1239 }
1240
Evan Cheng0f3ac8d2006-05-18 00:12:58 +00001241}
1242
Evan Chengb371f452007-02-19 21:49:54 +00001243BitVector X86RegisterInfo::getReservedRegs(const MachineFunction &MF) const {
1244 BitVector Reserved(getNumRegs());
1245 Reserved.set(X86::RSP);
1246 Reserved.set(X86::ESP);
1247 Reserved.set(X86::SP);
1248 Reserved.set(X86::SPL);
1249 if (hasFP(MF)) {
1250 Reserved.set(X86::RBP);
1251 Reserved.set(X86::EBP);
1252 Reserved.set(X86::BP);
1253 Reserved.set(X86::BPL);
1254 }
1255 return Reserved;
1256}
1257
Chris Lattner3c1c03d2002-12-28 20:32:28 +00001258//===----------------------------------------------------------------------===//
1259// Stack Frame Processing methods
1260//===----------------------------------------------------------------------===//
1261
1262// hasFP - Return true if the specified function should have a dedicated frame
1263// pointer register. This is true if the function has variable sized allocas or
1264// if frame pointer elimination is disabled.
1265//
Evan Chengdc775402007-01-23 00:57:47 +00001266bool X86RegisterInfo::hasFP(const MachineFunction &MF) const {
Anton Korobeynikov2365f512007-07-14 14:06:15 +00001267 MachineFrameInfo *MFI = MF.getFrameInfo();
1268 MachineModuleInfo *MMI = MFI->getMachineModuleInfo();
1269
Evan Cheng3649b0e2006-06-02 22:38:37 +00001270 return (NoFramePointerElim ||
Evan Cheng7e7bbf82007-07-19 00:42:05 +00001271 MFI->hasVarSizedObjects() ||
Anton Korobeynikov2365f512007-07-14 14:06:15 +00001272 MF.getInfo<X86MachineFunctionInfo>()->getForceFramePointer() ||
1273 (MMI && MMI->callsUnwindInit()));
Misha Brukman03c6faf2002-12-03 23:11:21 +00001274}
Misha Brukman2adb3952002-12-04 23:57:03 +00001275
Evan Cheng7e7bbf82007-07-19 00:42:05 +00001276bool X86RegisterInfo::hasReservedCallFrame(MachineFunction &MF) const {
1277 return !MF.getFrameInfo()->hasVarSizedObjects();
1278}
1279
Chris Lattnerbb07ef92004-02-14 19:49:54 +00001280void X86RegisterInfo::
1281eliminateCallFramePseudoInstr(MachineFunction &MF, MachineBasicBlock &MBB,
1282 MachineBasicBlock::iterator I) const {
Evan Cheng7e7bbf82007-07-19 00:42:05 +00001283 if (!hasReservedCallFrame(MF)) {
1284 // If the stack pointer can be changed after prologue, turn the
1285 // adjcallstackup instruction into a 'sub ESP, <amt>' and the
1286 // adjcallstackdown instruction into 'add ESP, <amt>'
1287 // TODO: consider using push / pop instead of sub + store / add
Chris Lattnerbb07ef92004-02-14 19:49:54 +00001288 MachineInstr *Old = I;
Chris Lattner61807802007-04-25 04:25:10 +00001289 uint64_t Amount = Old->getOperand(0).getImm();
Chris Lattner3c1c03d2002-12-28 20:32:28 +00001290 if (Amount != 0) {
Chris Lattnerf158da22003-01-16 02:20:12 +00001291 // We need to keep the stack aligned properly. To do this, we round the
1292 // amount of space needed for the outgoing arguments up to the next
1293 // alignment boundary.
Chris Lattnerd029cd22004-06-02 05:55:25 +00001294 unsigned Align = MF.getTarget().getFrameInfo()->getStackAlignment();
Chris Lattnerf158da22003-01-16 02:20:12 +00001295 Amount = (Amount+Align-1)/Align*Align;
1296
Chris Lattner3648c672005-05-13 21:44:04 +00001297 MachineInstr *New = 0;
Chris Lattner3c1c03d2002-12-28 20:32:28 +00001298 if (Old->getOpcode() == X86::ADJCALLSTACKDOWN) {
Evan Chengc0f64ff2006-11-27 23:37:22 +00001299 New=BuildMI(TII.get(Is64Bit ? X86::SUB64ri32 : X86::SUB32ri), StackPtr)
Evan Cheng25ab6902006-09-08 06:48:29 +00001300 .addReg(StackPtr).addImm(Amount);
Chris Lattner3c1c03d2002-12-28 20:32:28 +00001301 } else {
Jeff Cohen00b168892005-07-27 06:12:32 +00001302 assert(Old->getOpcode() == X86::ADJCALLSTACKUP);
Chris Lattner3648c672005-05-13 21:44:04 +00001303 // factor out the amount the callee already popped.
Chris Lattner61807802007-04-25 04:25:10 +00001304 uint64_t CalleeAmt = Old->getOperand(1).getImm();
Chris Lattner3648c672005-05-13 21:44:04 +00001305 Amount -= CalleeAmt;
Chris Lattnerd77525d2006-02-03 18:20:04 +00001306 if (Amount) {
Evan Cheng25ab6902006-09-08 06:48:29 +00001307 unsigned Opc = (Amount < 128) ?
1308 (Is64Bit ? X86::ADD64ri8 : X86::ADD32ri8) :
1309 (Is64Bit ? X86::ADD64ri32 : X86::ADD32ri);
Jim Laskey7ac947d2007-01-24 18:50:57 +00001310 New = BuildMI(TII.get(Opc), StackPtr)
1311 .addReg(StackPtr).addImm(Amount);
Chris Lattnerd77525d2006-02-03 18:20:04 +00001312 }
Chris Lattner3c1c03d2002-12-28 20:32:28 +00001313 }
Chris Lattnerbb07ef92004-02-14 19:49:54 +00001314
1315 // Replace the pseudo instruction with a new instruction...
Chris Lattner3648c672005-05-13 21:44:04 +00001316 if (New) MBB.insert(I, New);
1317 }
1318 } else if (I->getOpcode() == X86::ADJCALLSTACKUP) {
1319 // If we are performing frame pointer elimination and if the callee pops
1320 // something off the stack pointer, add it back. We do this until we have
1321 // more advanced stack pointer tracking ability.
Chris Lattner61807802007-04-25 04:25:10 +00001322 if (uint64_t CalleeAmt = I->getOperand(1).getImm()) {
Evan Cheng25ab6902006-09-08 06:48:29 +00001323 unsigned Opc = (CalleeAmt < 128) ?
1324 (Is64Bit ? X86::SUB64ri8 : X86::SUB32ri8) :
1325 (Is64Bit ? X86::SUB64ri32 : X86::SUB32ri);
Jeff Cohen00b168892005-07-27 06:12:32 +00001326 MachineInstr *New =
Evan Chengc0f64ff2006-11-27 23:37:22 +00001327 BuildMI(TII.get(Opc), StackPtr).addReg(StackPtr).addImm(CalleeAmt);
Chris Lattnerbb07ef92004-02-14 19:49:54 +00001328 MBB.insert(I, New);
Chris Lattner3c1c03d2002-12-28 20:32:28 +00001329 }
1330 }
1331
Chris Lattnerbb07ef92004-02-14 19:49:54 +00001332 MBB.erase(I);
Chris Lattner3c1c03d2002-12-28 20:32:28 +00001333}
1334
Evan Cheng5e6df462007-02-28 00:21:17 +00001335void X86RegisterInfo::eliminateFrameIndex(MachineBasicBlock::iterator II,
Evan Cheng97de9132007-05-01 09:13:03 +00001336 int SPAdj, RegScavenger *RS) const{
1337 assert(SPAdj == 0 && "Unexpected");
1338
Chris Lattnerd264bec2003-01-13 00:50:33 +00001339 unsigned i = 0;
Alkis Evlogimenosc0b9dc52004-02-12 02:27:10 +00001340 MachineInstr &MI = *II;
Nate Begemanf8be5e92004-08-14 22:05:10 +00001341 MachineFunction &MF = *MI.getParent()->getParent();
Chris Lattner3c1c03d2002-12-28 20:32:28 +00001342 while (!MI.getOperand(i).isFrameIndex()) {
1343 ++i;
1344 assert(i < MI.getNumOperands() && "Instr doesn't have FrameIndex operand!");
1345 }
1346
Chris Lattner3c1c03d2002-12-28 20:32:28 +00001347 int FrameIndex = MI.getOperand(i).getFrameIndex();
Chris Lattnerd264bec2003-01-13 00:50:33 +00001348 // This must be part of a four operand memory reference. Replace the
Evan Cheng25ab6902006-09-08 06:48:29 +00001349 // FrameIndex with base register with EBP. Add an offset to the offset.
1350 MI.getOperand(i).ChangeToRegister(hasFP(MF) ? FramePtr : StackPtr, false);
Chris Lattnerd264bec2003-01-13 00:50:33 +00001351
1352 // Now add the frame object offset to the offset from EBP.
Chris Lattner61807802007-04-25 04:25:10 +00001353 int64_t Offset = MF.getFrameInfo()->getObjectOffset(FrameIndex) +
1354 MI.getOperand(i+3).getImm()+SlotSize;
Chris Lattner3c1c03d2002-12-28 20:32:28 +00001355
Chris Lattnerd5b7c472003-10-14 18:52:41 +00001356 if (!hasFP(MF))
1357 Offset += MF.getFrameInfo()->getStackSize();
Chris Lattner96c3d2e2004-02-15 00:15:37 +00001358 else
Evan Cheng25ab6902006-09-08 06:48:29 +00001359 Offset += SlotSize; // Skip the saved EBP
Chris Lattner3c1c03d2002-12-28 20:32:28 +00001360
Chris Lattnere53f4a02006-05-04 17:52:23 +00001361 MI.getOperand(i+3).ChangeToImmediate(Offset);
Chris Lattner3c1c03d2002-12-28 20:32:28 +00001362}
1363
Chris Lattnerbb07ef92004-02-14 19:49:54 +00001364void
1365X86RegisterInfo::processFunctionBeforeFrameFinalized(MachineFunction &MF) const{
Chris Lattner3c1c03d2002-12-28 20:32:28 +00001366 if (hasFP(MF)) {
1367 // Create a frame entry for the EBP register that must be saved.
Chris Lattner7c6eefa2007-04-25 17:23:53 +00001368 int FrameIdx = MF.getFrameInfo()->CreateFixedObject(SlotSize,
1369 (int)SlotSize * -2);
Chris Lattner96c3d2e2004-02-15 00:15:37 +00001370 assert(FrameIdx == MF.getFrameInfo()->getObjectIndexBegin() &&
1371 "Slot for EBP register must be last in order to be found!");
Chris Lattner3c1c03d2002-12-28 20:32:28 +00001372 }
1373}
1374
Evan Chenga24dddd2007-04-26 01:09:28 +00001375/// emitSPUpdate - Emit a series of instructions to increment / decrement the
1376/// stack pointer by a constant value.
1377static
1378void emitSPUpdate(MachineBasicBlock &MBB, MachineBasicBlock::iterator &MBBI,
1379 unsigned StackPtr, int64_t NumBytes, bool Is64Bit,
1380 const TargetInstrInfo &TII) {
1381 bool isSub = NumBytes < 0;
1382 uint64_t Offset = isSub ? -NumBytes : NumBytes;
1383 unsigned Opc = isSub
1384 ? ((Offset < 128) ?
1385 (Is64Bit ? X86::SUB64ri8 : X86::SUB32ri8) :
1386 (Is64Bit ? X86::SUB64ri32 : X86::SUB32ri))
1387 : ((Offset < 128) ?
1388 (Is64Bit ? X86::ADD64ri8 : X86::ADD32ri8) :
1389 (Is64Bit ? X86::ADD64ri32 : X86::ADD32ri));
1390 uint64_t Chunk = (1LL << 31) - 1;
1391
1392 while (Offset) {
1393 uint64_t ThisVal = (Offset > Chunk) ? Chunk : Offset;
1394 BuildMI(MBB, MBBI, TII.get(Opc), StackPtr).addReg(StackPtr).addImm(ThisVal);
1395 Offset -= ThisVal;
1396 }
1397}
1398
Chris Lattnerbb07ef92004-02-14 19:49:54 +00001399void X86RegisterInfo::emitPrologue(MachineFunction &MF) const {
Chris Lattner198ab642002-12-15 20:06:35 +00001400 MachineBasicBlock &MBB = MF.front(); // Prolog goes in entry BB
Chris Lattnereafa4232003-01-15 22:57:35 +00001401 MachineFrameInfo *MFI = MF.getFrameInfo();
Evan Cheng3649b0e2006-06-02 22:38:37 +00001402 unsigned Align = MF.getTarget().getFrameInfo()->getStackAlignment();
1403 const Function* Fn = MF.getFunction();
1404 const X86Subtarget* Subtarget = &MF.getTarget().getSubtarget<X86Subtarget>();
Jim Laskey44c3b9f2007-01-26 21:22:28 +00001405 MachineModuleInfo *MMI = MFI->getMachineModuleInfo();
Evan Cheng89d16592007-07-17 07:59:08 +00001406 X86MachineFunctionInfo *X86FI = MF.getInfo<X86MachineFunctionInfo>();
1407 MachineBasicBlock::iterator MBBI = MBB.begin();
Jim Laskey0e410942007-01-24 19:15:24 +00001408
Jim Laskey072200c2007-01-29 18:51:14 +00001409 // Prepare for frame info.
Dan Gohman5e6e93e2007-09-24 16:44:26 +00001410 unsigned FrameLabelId = 0;
Evan Cheng004fb922006-06-13 05:14:44 +00001411
Chris Lattner3c1c03d2002-12-28 20:32:28 +00001412 // Get the number of bytes to allocate from the FrameInfo
Evan Cheng89d16592007-07-17 07:59:08 +00001413 uint64_t StackSize = MFI->getStackSize();
1414 uint64_t NumBytes = StackSize - X86FI->getCalleeSavedFrameSize();
Evan Chengd9245ca2006-04-14 07:26:43 +00001415
Evan Cheng89d16592007-07-17 07:59:08 +00001416 if (hasFP(MF)) {
1417 // Get the offset of the stack slot for the EBP register... which is
1418 // guaranteed to be the last slot by processFunctionBeforeFrameFinalized.
1419 // Update the frame offset adjustment.
1420 MFI->setOffsetAdjustment(SlotSize-NumBytes);
1421
1422 // Save EBP into the appropriate stack slot...
1423 BuildMI(MBB, MBBI, TII.get(Is64Bit ? X86::PUSH64r : X86::PUSH32r))
1424 .addReg(FramePtr);
1425 NumBytes -= SlotSize;
1426
1427 if (MMI && MMI->needsFrameInfo()) {
1428 // Mark effective beginning of when frame pointer becomes valid.
1429 FrameLabelId = MMI->NextLabelID();
1430 BuildMI(MBB, MBBI, TII.get(X86::LABEL)).addImm(FrameLabelId);
1431 }
1432
1433 // Update EBP with the new base value...
1434 BuildMI(MBB, MBBI, TII.get(Is64Bit ? X86::MOV64rr : X86::MOV32rr), FramePtr)
1435 .addReg(StackPtr);
1436 }
1437
1438 unsigned ReadyLabelId = 0;
1439 if (MMI && MMI->needsFrameInfo()) {
1440 // Mark effective beginning of when frame pointer is ready.
1441 ReadyLabelId = MMI->NextLabelID();
1442 BuildMI(MBB, MBBI, TII.get(X86::LABEL)).addImm(ReadyLabelId);
1443 }
1444
1445 // Skip the callee-saved push instructions.
1446 while (MBBI != MBB.end() &&
1447 (MBBI->getOpcode() == X86::PUSH32r ||
1448 MBBI->getOpcode() == X86::PUSH64r))
1449 ++MBBI;
1450
Evan Chengd9245ca2006-04-14 07:26:43 +00001451 if (NumBytes) { // adjust stack pointer: ESP -= numbytes
Anton Korobeynikov317848f2007-01-03 11:43:14 +00001452 if (NumBytes >= 4096 && Subtarget->isTargetCygMing()) {
Anton Korobeynikov57fc00d2007-04-17 09:20:00 +00001453 // Check, whether EAX is livein for this function
1454 bool isEAXAlive = false;
1455 for (MachineFunction::livein_iterator II = MF.livein_begin(),
1456 EE = MF.livein_end(); (II != EE) && !isEAXAlive; ++II) {
1457 unsigned Reg = II->first;
1458 isEAXAlive = (Reg == X86::EAX || Reg == X86::AX ||
1459 Reg == X86::AH || Reg == X86::AL);
1460 }
1461
Evan Cheng004fb922006-06-13 05:14:44 +00001462 // Function prologue calls _alloca to probe the stack when allocating
1463 // more than 4k bytes in one go. Touching the stack at 4K increments is
1464 // necessary to ensure that the guard pages used by the OS virtual memory
1465 // manager are allocated in correct sequence.
Anton Korobeynikov57fc00d2007-04-17 09:20:00 +00001466 if (!isEAXAlive) {
Evan Cheng89d16592007-07-17 07:59:08 +00001467 BuildMI(MBB, MBBI, TII.get(X86::MOV32ri), X86::EAX).addImm(NumBytes);
1468 BuildMI(MBB, MBBI, TII.get(X86::CALLpcrel32))
1469 .addExternalSymbol("_alloca");
Anton Korobeynikov57fc00d2007-04-17 09:20:00 +00001470 } else {
1471 // Save EAX
Evan Cheng89d16592007-07-17 07:59:08 +00001472 BuildMI(MBB, MBBI, TII.get(X86::PUSH32r), X86::EAX);
Anton Korobeynikov57fc00d2007-04-17 09:20:00 +00001473 // Allocate NumBytes-4 bytes on stack. We'll also use 4 already
1474 // allocated bytes for EAX.
Evan Cheng89d16592007-07-17 07:59:08 +00001475 BuildMI(MBB, MBBI, TII.get(X86::MOV32ri), X86::EAX).addImm(NumBytes-4);
1476 BuildMI(MBB, MBBI, TII.get(X86::CALLpcrel32))
1477 .addExternalSymbol("_alloca");
Anton Korobeynikov57fc00d2007-04-17 09:20:00 +00001478 // Restore EAX
Evan Cheng89d16592007-07-17 07:59:08 +00001479 MachineInstr *MI = addRegOffset(BuildMI(TII.get(X86::MOV32rm),X86::EAX),
1480 StackPtr, NumBytes-4);
Anton Korobeynikov57fc00d2007-04-17 09:20:00 +00001481 MBB.insert(MBBI, MI);
1482 }
Evan Cheng004fb922006-06-13 05:14:44 +00001483 } else {
Evan Cheng9b8c6742007-07-17 21:26:42 +00001484 // If there is an ADD32ri or SUB32ri of ESP immediately after this
1485 // instruction, merge the two instructions.
1486 if (MBBI != MBB.end()) {
1487 MachineBasicBlock::iterator NI = next(MBBI);
1488 unsigned Opc = MBBI->getOpcode();
1489 if ((Opc == X86::ADD64ri32 || Opc == X86::ADD64ri8 ||
1490 Opc == X86::ADD32ri || Opc == X86::ADD32ri8) &&
1491 MBBI->getOperand(0).getReg() == StackPtr) {
1492 NumBytes -= MBBI->getOperand(2).getImm();
1493 MBB.erase(MBBI);
1494 MBBI = NI;
1495 } else if ((Opc == X86::SUB64ri32 || Opc == X86::SUB64ri8 ||
1496 Opc == X86::SUB32ri || Opc == X86::SUB32ri8) &&
1497 MBBI->getOperand(0).getReg() == StackPtr) {
1498 NumBytes += MBBI->getOperand(2).getImm();
1499 MBB.erase(MBBI);
1500 MBBI = NI;
1501 }
1502 }
1503
1504 if (NumBytes)
1505 emitSPUpdate(MBB, MBBI, StackPtr, -(int64_t)NumBytes, Is64Bit, TII);
Evan Cheng004fb922006-06-13 05:14:44 +00001506 }
Evan Chengd9245ca2006-04-14 07:26:43 +00001507 }
1508
Jim Laskeye078d1a2007-01-29 23:20:22 +00001509 if (MMI && MMI->needsFrameInfo()) {
Jim Laskey44c3b9f2007-01-26 21:22:28 +00001510 std::vector<MachineMove> &Moves = MMI->getFrameMoves();
Dan Gohman82482942007-09-27 23:12:31 +00001511 const TargetData *TD = MF.getTarget().getTargetData();
Anton Korobeynikovce3b4652007-05-02 19:53:33 +00001512
1513 // Calculate amount of bytes used for return address storing
1514 int stackGrowth =
1515 (MF.getTarget().getFrameInfo()->getStackGrowthDirection() ==
1516 TargetFrameInfo::StackGrowsUp ?
Dan Gohman82482942007-09-27 23:12:31 +00001517 TD->getPointerSize() : -TD->getPointerSize());
Anton Korobeynikovce3b4652007-05-02 19:53:33 +00001518
Evan Cheng89d16592007-07-17 07:59:08 +00001519 if (StackSize) {
Jim Laskey0e410942007-01-24 19:15:24 +00001520 // Show update of SP.
Anton Korobeynikovce3b4652007-05-02 19:53:33 +00001521 if (hasFP(MF)) {
1522 // Adjust SP
1523 MachineLocation SPDst(MachineLocation::VirtualFP);
1524 MachineLocation SPSrc(MachineLocation::VirtualFP, 2*stackGrowth);
1525 Moves.push_back(MachineMove(FrameLabelId, SPDst, SPSrc));
1526 } else {
1527 MachineLocation SPDst(MachineLocation::VirtualFP);
Evan Cheng89d16592007-07-17 07:59:08 +00001528 MachineLocation SPSrc(MachineLocation::VirtualFP, -StackSize+stackGrowth);
Anton Korobeynikovce3b4652007-05-02 19:53:33 +00001529 Moves.push_back(MachineMove(FrameLabelId, SPDst, SPSrc));
1530 }
Jim Laskey0e410942007-01-24 19:15:24 +00001531 } else {
Anton Korobeynikovce3b4652007-05-02 19:53:33 +00001532 //FIXME: Verify & implement for FP
1533 MachineLocation SPDst(StackPtr);
1534 MachineLocation SPSrc(StackPtr, stackGrowth);
1535 Moves.push_back(MachineMove(FrameLabelId, SPDst, SPSrc));
Jim Laskey0e410942007-01-24 19:15:24 +00001536 }
Anton Korobeynikovce3b4652007-05-02 19:53:33 +00001537
Jim Laskey0e410942007-01-24 19:15:24 +00001538 // Add callee saved registers to move list.
1539 const std::vector<CalleeSavedInfo> &CSI = MFI->getCalleeSavedInfo();
Anton Korobeynikovd97b8cd2007-07-24 21:07:39 +00001540
1541 // FIXME: This is dirty hack. The code itself is pretty mess right now.
1542 // It should be rewritten from scratch and generalized sometimes.
1543
1544 // Determine maximum offset (minumum due to stack growth)
1545 int64_t MaxOffset = 0;
1546 for (unsigned I = 0, E = CSI.size(); I!=E; ++I)
1547 MaxOffset = std::min(MaxOffset,
1548 MFI->getObjectOffset(CSI[I].getFrameIdx()));
1549
1550 // Calculate offsets
1551 for (unsigned I = 0, E = CSI.size(); I!=E; ++I) {
Chris Lattnerea84c5e2007-04-25 04:30:24 +00001552 int64_t Offset = MFI->getObjectOffset(CSI[I].getFrameIdx());
Jim Laskey0e410942007-01-24 19:15:24 +00001553 unsigned Reg = CSI[I].getReg();
Anton Korobeynikovd97b8cd2007-07-24 21:07:39 +00001554 Offset = (MaxOffset-Offset+3*stackGrowth);
Jim Laskey0e410942007-01-24 19:15:24 +00001555 MachineLocation CSDst(MachineLocation::VirtualFP, Offset);
1556 MachineLocation CSSrc(Reg);
1557 Moves.push_back(MachineMove(FrameLabelId, CSDst, CSSrc));
1558 }
1559
Anton Korobeynikovce3b4652007-05-02 19:53:33 +00001560 if (hasFP(MF)) {
1561 // Save FP
1562 MachineLocation FPDst(MachineLocation::VirtualFP, 2*stackGrowth);
1563 MachineLocation FPSrc(FramePtr);
1564 Moves.push_back(MachineMove(ReadyLabelId, FPDst, FPSrc));
1565 }
Jim Laskey0e410942007-01-24 19:15:24 +00001566
1567 MachineLocation FPDst(hasFP(MF) ? FramePtr : StackPtr);
1568 MachineLocation FPSrc(MachineLocation::VirtualFP);
1569 Moves.push_back(MachineMove(ReadyLabelId, FPDst, FPSrc));
1570 }
1571
Evan Cheng3649b0e2006-06-02 22:38:37 +00001572 // If it's main() on Cygwin\Mingw32 we should align stack as well
1573 if (Fn->hasExternalLinkage() && Fn->getName() == "main" &&
Anton Korobeynikov317848f2007-01-03 11:43:14 +00001574 Subtarget->isTargetCygMing()) {
Evan Cheng89d16592007-07-17 07:59:08 +00001575 BuildMI(MBB, MBBI, TII.get(X86::AND32ri), X86::ESP)
Jim Laskey7ac947d2007-01-24 18:50:57 +00001576 .addReg(X86::ESP).addImm(-Align);
Evan Cheng004fb922006-06-13 05:14:44 +00001577
1578 // Probe the stack
Evan Cheng89d16592007-07-17 07:59:08 +00001579 BuildMI(MBB, MBBI, TII.get(X86::MOV32ri), X86::EAX).addImm(Align);
1580 BuildMI(MBB, MBBI, TII.get(X86::CALLpcrel32)).addExternalSymbol("_alloca");
Evan Cheng3649b0e2006-06-02 22:38:37 +00001581 }
Misha Brukman2adb3952002-12-04 23:57:03 +00001582}
1583
Chris Lattnerbb07ef92004-02-14 19:49:54 +00001584void X86RegisterInfo::emitEpilogue(MachineFunction &MF,
1585 MachineBasicBlock &MBB) const {
Chris Lattneraa09b752002-12-28 21:08:28 +00001586 const MachineFrameInfo *MFI = MF.getFrameInfo();
Anton Korobeynikov29be8482007-09-26 00:13:34 +00001587 const Function* Fn = MF.getFunction();
Evan Cheng89d16592007-07-17 07:59:08 +00001588 X86MachineFunctionInfo *X86FI = MF.getInfo<X86MachineFunctionInfo>();
Anton Korobeynikov29be8482007-09-26 00:13:34 +00001589 const X86Subtarget* Subtarget = &MF.getTarget().getSubtarget<X86Subtarget>();
Alkis Evlogimenosf81af212004-02-14 01:18:34 +00001590 MachineBasicBlock::iterator MBBI = prior(MBB.end());
Anton Korobeynikov2365f512007-07-14 14:06:15 +00001591 unsigned RetOpcode = MBBI->getOpcode();
Chris Lattner2b3d56e2005-05-14 23:35:21 +00001592
Anton Korobeynikov2365f512007-07-14 14:06:15 +00001593 switch (RetOpcode) {
Chris Lattner2b3d56e2005-05-14 23:35:21 +00001594 case X86::RET:
1595 case X86::RETI:
Anton Korobeynikov2365f512007-07-14 14:06:15 +00001596 case X86::EH_RETURN:
Chris Lattner2b3d56e2005-05-14 23:35:21 +00001597 case X86::TAILJMPd:
1598 case X86::TAILJMPr:
1599 case X86::TAILJMPm: break; // These are ok
1600 default:
1601 assert(0 && "Can only insert epilog into returning blocks");
1602 }
Misha Brukman2adb3952002-12-04 23:57:03 +00001603
Evan Cheng89d16592007-07-17 07:59:08 +00001604 // Get the number of bytes to allocate from the FrameInfo
1605 uint64_t StackSize = MFI->getStackSize();
1606 unsigned CSSize = X86FI->getCalleeSavedFrameSize();
1607 uint64_t NumBytes = StackSize - CSSize;
1608
Chris Lattner3c1c03d2002-12-28 20:32:28 +00001609 if (hasFP(MF)) {
Evan Cheng89d16592007-07-17 07:59:08 +00001610 // pop EBP.
Evan Chengc0f64ff2006-11-27 23:37:22 +00001611 BuildMI(MBB, MBBI, TII.get(Is64Bit ? X86::POP64r : X86::POP32r), FramePtr);
Evan Cheng89d16592007-07-17 07:59:08 +00001612 NumBytes -= SlotSize;
Chris Lattner3c1c03d2002-12-28 20:32:28 +00001613 }
Anton Korobeynikov2365f512007-07-14 14:06:15 +00001614
Evan Chengf27795d2007-07-17 18:03:34 +00001615 // Skip the callee-saved pop instructions.
1616 while (MBBI != MBB.begin()) {
Evan Chengfcc87932007-07-26 17:45:41 +00001617 MachineBasicBlock::iterator PI = prior(MBBI);
1618 unsigned Opc = PI->getOpcode();
1619 if (Opc != X86::POP32r && Opc != X86::POP64r && !TII.isTerminatorInstr(Opc))
Evan Chengf27795d2007-07-17 18:03:34 +00001620 break;
1621 --MBBI;
1622 }
1623
Evan Cheng3c46eef2007-07-18 21:26:06 +00001624 if (NumBytes || MFI->hasVarSizedObjects()) {
Evan Cheng89d16592007-07-17 07:59:08 +00001625 // If there is an ADD32ri or SUB32ri of ESP immediately before this
1626 // instruction, merge the two instructions.
1627 if (MBBI != MBB.begin()) {
1628 MachineBasicBlock::iterator PI = prior(MBBI);
1629 unsigned Opc = PI->getOpcode();
1630 if ((Opc == X86::ADD64ri32 || Opc == X86::ADD64ri8 ||
1631 Opc == X86::ADD32ri || Opc == X86::ADD32ri8) &&
1632 PI->getOperand(0).getReg() == StackPtr) {
1633 NumBytes += PI->getOperand(2).getImm();
1634 MBB.erase(PI);
1635 } else if ((Opc == X86::SUB64ri32 || Opc == X86::SUB64ri8 ||
1636 Opc == X86::SUB32ri || Opc == X86::SUB32ri8) &&
1637 PI->getOperand(0).getReg() == StackPtr) {
1638 NumBytes -= PI->getOperand(2).getImm();
1639 MBB.erase(PI);
1640 }
1641 }
Evan Cheng89d16592007-07-17 07:59:08 +00001642 }
Evan Cheng5b3332c2007-07-17 18:40:47 +00001643
Evan Cheng3c46eef2007-07-18 21:26:06 +00001644 // If dynamic alloca is used, then reset esp to point to the last
1645 // callee-saved slot before popping them off!
Anton Korobeynikov29be8482007-09-26 00:13:34 +00001646 // Also, if it's main() on Cygwin/Mingw32 we aligned stack in the prologue, - revert
1647 // stack changes back. Note: we're assuming, that frame pointer was forced
1648 // for main()
1649 if (MFI->hasVarSizedObjects() ||
1650 (Fn->hasExternalLinkage() && Fn->getName() == "main" &&
1651 Subtarget->isTargetCygMing())) {
Evan Cheng3c46eef2007-07-18 21:26:06 +00001652 unsigned Opc = Is64Bit ? X86::LEA64r : X86::LEA32r;
1653 if (CSSize) {
1654 MachineInstr *MI = addRegOffset(BuildMI(TII.get(Opc), StackPtr),
1655 FramePtr, -CSSize);
1656 MBB.insert(MBBI, MI);
1657 } else
1658 BuildMI(MBB, MBBI, TII.get(Is64Bit ? X86::MOV64rr : X86::MOV32rr),StackPtr).
1659 addReg(FramePtr);
1660
1661 NumBytes = 0;
1662 }
1663
1664 // adjust stack pointer back: ESP += numbytes
1665 if (NumBytes)
1666 emitSPUpdate(MBB, MBBI, StackPtr, NumBytes, Is64Bit, TII);
1667
Evan Cheng5b3332c2007-07-17 18:40:47 +00001668 // We're returning from function via eh_return.
1669 if (RetOpcode == X86::EH_RETURN) {
1670 MBBI = prior(MBB.end());
1671 MachineOperand &DestAddr = MBBI->getOperand(0);
Dan Gohman92dfe202007-09-14 20:33:02 +00001672 assert(DestAddr.isRegister() && "Offset should be in register!");
Evan Cheng5b3332c2007-07-17 18:40:47 +00001673 BuildMI(MBB, MBBI, TII.get(Is64Bit ? X86::MOV64rr : X86::MOV32rr),StackPtr).
1674 addReg(DestAddr.getReg());
1675 }
Chris Lattner3c1c03d2002-12-28 20:32:28 +00001676}
1677
Jim Laskey41886992006-04-07 16:34:46 +00001678unsigned X86RegisterInfo::getRARegister() const {
Anton Korobeynikov038082d2007-05-02 08:46:03 +00001679 if (Is64Bit)
1680 return X86::RIP; // Should have dwarf #16
1681 else
1682 return X86::EIP; // Should have dwarf #8
Jim Laskey41886992006-04-07 16:34:46 +00001683}
1684
Jim Laskeya9979182006-03-28 13:48:33 +00001685unsigned X86RegisterInfo::getFrameRegister(MachineFunction &MF) const {
Evan Cheng25ab6902006-09-08 06:48:29 +00001686 return hasFP(MF) ? FramePtr : StackPtr;
Jim Laskeyf1d78e82006-03-23 18:12:57 +00001687}
1688
Jim Laskey0e410942007-01-24 19:15:24 +00001689void X86RegisterInfo::getInitialFrameState(std::vector<MachineMove> &Moves)
1690 const {
Anton Korobeynikov0ff3ca42007-05-12 22:36:25 +00001691 // Calculate amount of bytes used for return address storing
1692 int stackGrowth = (Is64Bit ? -8 : -4);
1693
1694 // Initial state of the frame pointer is esp+4.
Jim Laskey0e410942007-01-24 19:15:24 +00001695 MachineLocation Dst(MachineLocation::VirtualFP);
Anton Korobeynikov0ff3ca42007-05-12 22:36:25 +00001696 MachineLocation Src(StackPtr, stackGrowth);
Jim Laskey0e410942007-01-24 19:15:24 +00001697 Moves.push_back(MachineMove(0, Dst, Src));
Anton Korobeynikov0ff3ca42007-05-12 22:36:25 +00001698
1699 // Add return address to move list
1700 MachineLocation CSDst(StackPtr, stackGrowth);
1701 MachineLocation CSSrc(getRARegister());
1702 Moves.push_back(MachineMove(0, CSDst, CSSrc));
Jim Laskey0e410942007-01-24 19:15:24 +00001703}
1704
Jim Laskey62819f32007-02-21 22:54:50 +00001705unsigned X86RegisterInfo::getEHExceptionRegister() const {
1706 assert(0 && "What is the exception register");
1707 return 0;
1708}
1709
// getEHHandlerRegister - Register holding the selector/handler value on
// landing pads.  Not implemented for x86 yet; aborts if ever queried.
unsigned X86RegisterInfo::getEHHandlerRegister() const {
  assert(0 && "What is the exception handler register");
  return 0;
}
1714
Evan Cheng8f7f7122006-05-05 05:40:20 +00001715namespace llvm {
1716unsigned getX86SubSuperRegister(unsigned Reg, MVT::ValueType VT, bool High) {
1717 switch (VT) {
1718 default: return Reg;
1719 case MVT::i8:
1720 if (High) {
1721 switch (Reg) {
Evan Cheng25ab6902006-09-08 06:48:29 +00001722 default: return 0;
1723 case X86::AH: case X86::AL: case X86::AX: case X86::EAX: case X86::RAX:
Evan Cheng8f7f7122006-05-05 05:40:20 +00001724 return X86::AH;
Evan Cheng25ab6902006-09-08 06:48:29 +00001725 case X86::DH: case X86::DL: case X86::DX: case X86::EDX: case X86::RDX:
Evan Cheng8f7f7122006-05-05 05:40:20 +00001726 return X86::DH;
Evan Cheng25ab6902006-09-08 06:48:29 +00001727 case X86::CH: case X86::CL: case X86::CX: case X86::ECX: case X86::RCX:
Evan Cheng8f7f7122006-05-05 05:40:20 +00001728 return X86::CH;
Evan Cheng25ab6902006-09-08 06:48:29 +00001729 case X86::BH: case X86::BL: case X86::BX: case X86::EBX: case X86::RBX:
Evan Cheng8f7f7122006-05-05 05:40:20 +00001730 return X86::BH;
1731 }
1732 } else {
1733 switch (Reg) {
Evan Cheng25ab6902006-09-08 06:48:29 +00001734 default: return 0;
1735 case X86::AH: case X86::AL: case X86::AX: case X86::EAX: case X86::RAX:
Evan Cheng8f7f7122006-05-05 05:40:20 +00001736 return X86::AL;
Evan Cheng25ab6902006-09-08 06:48:29 +00001737 case X86::DH: case X86::DL: case X86::DX: case X86::EDX: case X86::RDX:
Evan Cheng8f7f7122006-05-05 05:40:20 +00001738 return X86::DL;
Evan Cheng25ab6902006-09-08 06:48:29 +00001739 case X86::CH: case X86::CL: case X86::CX: case X86::ECX: case X86::RCX:
Evan Cheng8f7f7122006-05-05 05:40:20 +00001740 return X86::CL;
Evan Cheng25ab6902006-09-08 06:48:29 +00001741 case X86::BH: case X86::BL: case X86::BX: case X86::EBX: case X86::RBX:
Evan Cheng8f7f7122006-05-05 05:40:20 +00001742 return X86::BL;
Evan Cheng25ab6902006-09-08 06:48:29 +00001743 case X86::SIL: case X86::SI: case X86::ESI: case X86::RSI:
1744 return X86::SIL;
1745 case X86::DIL: case X86::DI: case X86::EDI: case X86::RDI:
1746 return X86::DIL;
1747 case X86::BPL: case X86::BP: case X86::EBP: case X86::RBP:
1748 return X86::BPL;
1749 case X86::SPL: case X86::SP: case X86::ESP: case X86::RSP:
1750 return X86::SPL;
1751 case X86::R8B: case X86::R8W: case X86::R8D: case X86::R8:
1752 return X86::R8B;
1753 case X86::R9B: case X86::R9W: case X86::R9D: case X86::R9:
1754 return X86::R9B;
1755 case X86::R10B: case X86::R10W: case X86::R10D: case X86::R10:
1756 return X86::R10B;
1757 case X86::R11B: case X86::R11W: case X86::R11D: case X86::R11:
1758 return X86::R11B;
1759 case X86::R12B: case X86::R12W: case X86::R12D: case X86::R12:
1760 return X86::R12B;
1761 case X86::R13B: case X86::R13W: case X86::R13D: case X86::R13:
1762 return X86::R13B;
1763 case X86::R14B: case X86::R14W: case X86::R14D: case X86::R14:
1764 return X86::R14B;
1765 case X86::R15B: case X86::R15W: case X86::R15D: case X86::R15:
1766 return X86::R15B;
Evan Cheng8f7f7122006-05-05 05:40:20 +00001767 }
1768 }
1769 case MVT::i16:
1770 switch (Reg) {
1771 default: return Reg;
Evan Cheng25ab6902006-09-08 06:48:29 +00001772 case X86::AH: case X86::AL: case X86::AX: case X86::EAX: case X86::RAX:
Evan Cheng8f7f7122006-05-05 05:40:20 +00001773 return X86::AX;
Evan Cheng25ab6902006-09-08 06:48:29 +00001774 case X86::DH: case X86::DL: case X86::DX: case X86::EDX: case X86::RDX:
Evan Cheng8f7f7122006-05-05 05:40:20 +00001775 return X86::DX;
Evan Cheng25ab6902006-09-08 06:48:29 +00001776 case X86::CH: case X86::CL: case X86::CX: case X86::ECX: case X86::RCX:
Evan Cheng8f7f7122006-05-05 05:40:20 +00001777 return X86::CX;
Evan Cheng25ab6902006-09-08 06:48:29 +00001778 case X86::BH: case X86::BL: case X86::BX: case X86::EBX: case X86::RBX:
Evan Cheng8f7f7122006-05-05 05:40:20 +00001779 return X86::BX;
Evan Cheng25ab6902006-09-08 06:48:29 +00001780 case X86::SIL: case X86::SI: case X86::ESI: case X86::RSI:
Evan Cheng8f7f7122006-05-05 05:40:20 +00001781 return X86::SI;
Evan Cheng25ab6902006-09-08 06:48:29 +00001782 case X86::DIL: case X86::DI: case X86::EDI: case X86::RDI:
Evan Cheng8f7f7122006-05-05 05:40:20 +00001783 return X86::DI;
Evan Cheng25ab6902006-09-08 06:48:29 +00001784 case X86::BPL: case X86::BP: case X86::EBP: case X86::RBP:
Evan Cheng8f7f7122006-05-05 05:40:20 +00001785 return X86::BP;
Evan Cheng25ab6902006-09-08 06:48:29 +00001786 case X86::SPL: case X86::SP: case X86::ESP: case X86::RSP:
Evan Cheng8f7f7122006-05-05 05:40:20 +00001787 return X86::SP;
Evan Cheng25ab6902006-09-08 06:48:29 +00001788 case X86::R8B: case X86::R8W: case X86::R8D: case X86::R8:
1789 return X86::R8W;
1790 case X86::R9B: case X86::R9W: case X86::R9D: case X86::R9:
1791 return X86::R9W;
1792 case X86::R10B: case X86::R10W: case X86::R10D: case X86::R10:
1793 return X86::R10W;
1794 case X86::R11B: case X86::R11W: case X86::R11D: case X86::R11:
1795 return X86::R11W;
1796 case X86::R12B: case X86::R12W: case X86::R12D: case X86::R12:
1797 return X86::R12W;
1798 case X86::R13B: case X86::R13W: case X86::R13D: case X86::R13:
1799 return X86::R13W;
1800 case X86::R14B: case X86::R14W: case X86::R14D: case X86::R14:
1801 return X86::R14W;
1802 case X86::R15B: case X86::R15W: case X86::R15D: case X86::R15:
1803 return X86::R15W;
Evan Cheng8f7f7122006-05-05 05:40:20 +00001804 }
1805 case MVT::i32:
1806 switch (Reg) {
Evan Cheng25ab6902006-09-08 06:48:29 +00001807 default: return Reg;
1808 case X86::AH: case X86::AL: case X86::AX: case X86::EAX: case X86::RAX:
Evan Cheng8f7f7122006-05-05 05:40:20 +00001809 return X86::EAX;
Evan Cheng25ab6902006-09-08 06:48:29 +00001810 case X86::DH: case X86::DL: case X86::DX: case X86::EDX: case X86::RDX:
Evan Cheng8f7f7122006-05-05 05:40:20 +00001811 return X86::EDX;
Evan Cheng25ab6902006-09-08 06:48:29 +00001812 case X86::CH: case X86::CL: case X86::CX: case X86::ECX: case X86::RCX:
Evan Cheng8f7f7122006-05-05 05:40:20 +00001813 return X86::ECX;
Evan Cheng25ab6902006-09-08 06:48:29 +00001814 case X86::BH: case X86::BL: case X86::BX: case X86::EBX: case X86::RBX:
Evan Cheng8f7f7122006-05-05 05:40:20 +00001815 return X86::EBX;
Evan Cheng25ab6902006-09-08 06:48:29 +00001816 case X86::SIL: case X86::SI: case X86::ESI: case X86::RSI:
Evan Cheng8f7f7122006-05-05 05:40:20 +00001817 return X86::ESI;
Evan Cheng25ab6902006-09-08 06:48:29 +00001818 case X86::DIL: case X86::DI: case X86::EDI: case X86::RDI:
Evan Cheng8f7f7122006-05-05 05:40:20 +00001819 return X86::EDI;
Evan Cheng25ab6902006-09-08 06:48:29 +00001820 case X86::BPL: case X86::BP: case X86::EBP: case X86::RBP:
Evan Cheng8f7f7122006-05-05 05:40:20 +00001821 return X86::EBP;
Evan Cheng25ab6902006-09-08 06:48:29 +00001822 case X86::SPL: case X86::SP: case X86::ESP: case X86::RSP:
Evan Cheng8f7f7122006-05-05 05:40:20 +00001823 return X86::ESP;
Evan Cheng25ab6902006-09-08 06:48:29 +00001824 case X86::R8B: case X86::R8W: case X86::R8D: case X86::R8:
1825 return X86::R8D;
1826 case X86::R9B: case X86::R9W: case X86::R9D: case X86::R9:
1827 return X86::R9D;
1828 case X86::R10B: case X86::R10W: case X86::R10D: case X86::R10:
1829 return X86::R10D;
1830 case X86::R11B: case X86::R11W: case X86::R11D: case X86::R11:
1831 return X86::R11D;
1832 case X86::R12B: case X86::R12W: case X86::R12D: case X86::R12:
1833 return X86::R12D;
1834 case X86::R13B: case X86::R13W: case X86::R13D: case X86::R13:
1835 return X86::R13D;
1836 case X86::R14B: case X86::R14W: case X86::R14D: case X86::R14:
1837 return X86::R14D;
1838 case X86::R15B: case X86::R15W: case X86::R15D: case X86::R15:
1839 return X86::R15D;
1840 }
1841 case MVT::i64:
1842 switch (Reg) {
1843 default: return Reg;
1844 case X86::AH: case X86::AL: case X86::AX: case X86::EAX: case X86::RAX:
1845 return X86::RAX;
1846 case X86::DH: case X86::DL: case X86::DX: case X86::EDX: case X86::RDX:
1847 return X86::RDX;
1848 case X86::CH: case X86::CL: case X86::CX: case X86::ECX: case X86::RCX:
1849 return X86::RCX;
1850 case X86::BH: case X86::BL: case X86::BX: case X86::EBX: case X86::RBX:
1851 return X86::RBX;
1852 case X86::SIL: case X86::SI: case X86::ESI: case X86::RSI:
1853 return X86::RSI;
1854 case X86::DIL: case X86::DI: case X86::EDI: case X86::RDI:
1855 return X86::RDI;
1856 case X86::BPL: case X86::BP: case X86::EBP: case X86::RBP:
1857 return X86::RBP;
1858 case X86::SPL: case X86::SP: case X86::ESP: case X86::RSP:
1859 return X86::RSP;
1860 case X86::R8B: case X86::R8W: case X86::R8D: case X86::R8:
1861 return X86::R8;
1862 case X86::R9B: case X86::R9W: case X86::R9D: case X86::R9:
1863 return X86::R9;
1864 case X86::R10B: case X86::R10W: case X86::R10D: case X86::R10:
1865 return X86::R10;
1866 case X86::R11B: case X86::R11W: case X86::R11D: case X86::R11:
1867 return X86::R11;
1868 case X86::R12B: case X86::R12W: case X86::R12D: case X86::R12:
1869 return X86::R12;
1870 case X86::R13B: case X86::R13W: case X86::R13D: case X86::R13:
1871 return X86::R13;
1872 case X86::R14B: case X86::R14W: case X86::R14D: case X86::R14:
1873 return X86::R14;
1874 case X86::R15B: case X86::R15W: case X86::R15D: case X86::R15:
1875 return X86::R15;
Evan Cheng8f7f7122006-05-05 05:40:20 +00001876 }
1877 }
1878
1879 return Reg;
1880}
1881}
1882
Chris Lattner7ad3e062003-08-03 15:48:14 +00001883#include "X86GenRegisterInfo.inc"
Chris Lattner3c1c03d2002-12-28 20:32:28 +00001884