//===-- X86InstrInfo.h - X86 Instruction Information ------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file contains the X86 implementation of the TargetInstrInfo class.
//
//===----------------------------------------------------------------------===//

#ifndef X86INSTRUCTIONINFO_H
#define X86INSTRUCTIONINFO_H

#include "X86.h"
#include "X86RegisterInfo.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/Target/TargetInstrInfo.h"

#define GET_INSTRINFO_HEADER
#include "X86GenInstrInfo.inc"

namespace llvm {
  class X86RegisterInfo;
  class X86TargetMachine;

namespace X86 {
  // X86 specific condition code. These correspond to X86_*_COND in
  // X86InstrInfo.td. They must be kept in sync.
  enum CondCode {
    COND_A = 0,
    COND_AE = 1,
    COND_B = 2,
    COND_BE = 3,
    COND_E = 4,
    COND_G = 5,
    COND_GE = 6,
    COND_L = 7,
    COND_LE = 8,
    COND_NE = 9,
    COND_NO = 10,
    COND_NP = 11,
    COND_NS = 12,
    COND_O = 13,
    COND_P = 14,
    COND_S = 15,

    // Artificial condition codes. These are used by AnalyzeBranch
    // to indicate a block terminated with two conditional branches to
    // the same location. This occurs in code using FCMP_OEQ or FCMP_UNE,
    // which can't be represented on x86 with a single condition. These
    // are never used in MachineInstrs.
    COND_NE_OR_P,
    COND_NP_OR_E,

    COND_INVALID
  };

  // Turn condition code into conditional branch opcode.
  unsigned GetCondBranchFromCond(CondCode CC);

  /// GetOppositeBranchCondition - Return the inverse of the specified cond,
  /// e.g. turning COND_E to COND_NE.
  CondCode GetOppositeBranchCondition(X86::CondCode CC);
} // end namespace X86
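
// Illustrative sketch (not part of this interface): a pass that needs to
// invert a conditional branch can combine the helpers above, e.g.
//
//   X86::CondCode CC  = X86::COND_E;                         // je
//   X86::CondCode Inv = X86::GetOppositeBranchCondition(CC); // X86::COND_NE
//   unsigned JmpOpc   = X86::GetCondBranchFromCond(Inv);     // a jne-style opcode
//
// The exact opcode returned depends on the branch forms defined in
// X86InstrInfo.td.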

/// isGlobalStubReference - Return true if the specified TargetFlag operand is
/// a reference to a stub for a global, not the global itself.
inline static bool isGlobalStubReference(unsigned char TargetFlag) {
  switch (TargetFlag) {
  case X86II::MO_DLLIMPORT:  // dllimport stub.
  case X86II::MO_GOTPCREL:   // rip-relative GOT reference.
  case X86II::MO_GOT:        // normal GOT reference.
  case X86II::MO_DARWIN_NONLAZY_PIC_BASE:        // Normal $non_lazy_ptr ref.
  case X86II::MO_DARWIN_NONLAZY:                 // Normal $non_lazy_ptr ref.
  case X86II::MO_DARWIN_HIDDEN_NONLAZY_PIC_BASE: // Hidden $non_lazy_ptr ref.
    return true;
  default:
    return false;
  }
}
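
// Illustrative sketch (assumes a MachineOperand MO that refers to a global):
//
//   if (MO.isGlobal() && isGlobalStubReference(MO.getTargetFlags()))
//     ...; // MO refers to a GOT / $non_lazy_ptr stub, not the global's own
//          // address.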

/// isGlobalRelativeToPICBase - Return true if the specified global value
/// reference is relative to a 32-bit PIC base (X86ISD::GlobalBaseReg). If this
/// is true, the addressing mode has the PIC base register added in (e.g. EBX).
inline static bool isGlobalRelativeToPICBase(unsigned char TargetFlag) {
  switch (TargetFlag) {
  case X86II::MO_GOTOFF:          // isPICStyleGOT: local global.
  case X86II::MO_GOT:             // isPICStyleGOT: other global.
  case X86II::MO_PIC_BASE_OFFSET: // Darwin local global.
  case X86II::MO_DARWIN_NONLAZY_PIC_BASE:        // Darwin/32 external global.
  case X86II::MO_DARWIN_HIDDEN_NONLAZY_PIC_BASE: // Darwin/32 hidden global.
  case X86II::MO_TLVP:            // ??? Pretty sure..
    return true;
  default:
    return false;
  }
}

inline static bool isScale(const MachineOperand &MO) {
  return MO.isImm() &&
    (MO.getImm() == 1 || MO.getImm() == 2 ||
     MO.getImm() == 4 || MO.getImm() == 8);
}

inline static bool isLeaMem(const MachineInstr *MI, unsigned Op) {
  if (MI->getOperand(Op).isFI()) return true;
  return Op+4 <= MI->getNumOperands() &&
    MI->getOperand(Op  ).isReg() && isScale(MI->getOperand(Op+1)) &&
    MI->getOperand(Op+2).isReg() &&
    (MI->getOperand(Op+3).isImm() ||
     MI->getOperand(Op+3).isGlobal() ||
     MI->getOperand(Op+3).isCPI() ||
     MI->getOperand(Op+3).isJTI());
}

inline static bool isMem(const MachineInstr *MI, unsigned Op) {
  if (MI->getOperand(Op).isFI()) return true;
  return Op+5 <= MI->getNumOperands() &&
    MI->getOperand(Op+4).isReg() &&
    isLeaMem(MI, Op);
}
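
// For reference, the predicates above assume the standard X86 memory reference
// layout of five consecutive operands starting at index Op:
//
//   Op+0: base register (or a frame index)
//   Op+1: scale immediate (1, 2, 4 or 8)
//   Op+2: index register
//   Op+3: displacement (immediate, global, constant-pool or jump-table entry)
//   Op+4: segment register
//
// isLeaMem checks the first four (LEA takes no segment operand); isMem also
// requires the segment register operand.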
126
Evan Cheng4db3cff2011-07-01 17:57:27 +0000127class X86InstrInfo : public X86GenInstrInfo {
Evan Chengaa3c1412006-05-30 21:45:53 +0000128 X86TargetMachine &TM;
Chris Lattner72614082002-10-25 22:55:53 +0000129 const X86RegisterInfo RI;
Andrew Trick8d4a4222011-03-05 06:31:54 +0000130
Elena Demikhovsky177cf1e2012-05-31 09:20:20 +0000131 /// RegOp2MemOpTable3Addr, RegOp2MemOpTable2Addr,
132 /// RegOp2MemOpTable0, RegOp2MemOpTable1,
Owen Anderson43dbe052008-01-07 01:35:02 +0000133 /// RegOp2MemOpTable2 - Load / store folding opcode maps.
134 ///
Bruno Cardoso Lopescbf479d2011-09-08 18:35:57 +0000135 typedef DenseMap<unsigned,
136 std::pair<unsigned, unsigned> > RegOp2MemOpTableType;
137 RegOp2MemOpTableType RegOp2MemOpTable2Addr;
138 RegOp2MemOpTableType RegOp2MemOpTable0;
139 RegOp2MemOpTableType RegOp2MemOpTable1;
140 RegOp2MemOpTableType RegOp2MemOpTable2;
Elena Demikhovsky177cf1e2012-05-31 09:20:20 +0000141 RegOp2MemOpTableType RegOp2MemOpTable3;
Andrew Trick8d4a4222011-03-05 06:31:54 +0000142
Owen Anderson43dbe052008-01-07 01:35:02 +0000143 /// MemOp2RegOpTable - Load / store unfolding opcode map.
144 ///
Bruno Cardoso Lopescbf479d2011-09-08 18:35:57 +0000145 typedef DenseMap<unsigned,
146 std::pair<unsigned, unsigned> > MemOp2RegOpTableType;
147 MemOp2RegOpTableType MemOp2RegOpTable;
148
149 void AddTableEntry(RegOp2MemOpTableType &R2MTable,
150 MemOp2RegOpTableType &M2RTable,
151 unsigned RegOp, unsigned MemOp, unsigned Flags);
Jakob Stoklund Olesen352aa502010-03-25 17:25:00 +0000152
public:
  explicit X86InstrInfo(X86TargetMachine &tm);

  /// getRegisterInfo - TargetInstrInfo is a superset of MRegisterInfo. As
  /// such, whenever a client has an instance of instruction info, it should
  /// always be able to get register info as well (through this method).
  ///
  virtual const X86RegisterInfo &getRegisterInfo() const { return RI; }

  /// isCoalescableExtInstr - Return true if the instruction is a "coalescable"
  /// extension instruction. That is, it's like a copy where it's legal for the
  /// source to overlap the destination, e.g. X86::MOVSX64rr32. If this returns
  /// true, then it's expected the pre-extension value is available as a subreg
  /// of the result register. This also returns the sub-register index in
  /// SubIdx.
  virtual bool isCoalescableExtInstr(const MachineInstr &MI,
                                     unsigned &SrcReg, unsigned &DstReg,
                                     unsigned &SubIdx) const;
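
  // Illustrative sketch: for a coalescable extension such as
  //   %rax = MOVSX64rr32 %ecx
  // this is expected to report SrcReg = %ecx, DstReg = %rax and a SubIdx
  // naming the low 32-bit sub-register (X86::sub_32bit in the current register
  // definitions), since the pre-extension value is the low 32 bits of the
  // result.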

  unsigned isLoadFromStackSlot(const MachineInstr *MI, int &FrameIndex) const;
  /// isLoadFromStackSlotPostFE - Check for post-frame ptr elimination
  /// stack locations as well. This uses a heuristic so it isn't
  /// reliable for correctness.
  unsigned isLoadFromStackSlotPostFE(const MachineInstr *MI,
                                     int &FrameIndex) const;

  unsigned isStoreToStackSlot(const MachineInstr *MI, int &FrameIndex) const;
  /// isStoreToStackSlotPostFE - Check for post-frame ptr elimination
  /// stack locations as well. This uses a heuristic so it isn't
  /// reliable for correctness.
  unsigned isStoreToStackSlotPostFE(const MachineInstr *MI,
                                    int &FrameIndex) const;

  bool isReallyTriviallyReMaterializable(const MachineInstr *MI,
                                         AliasAnalysis *AA) const;
  void reMaterialize(MachineBasicBlock &MBB, MachineBasicBlock::iterator MI,
                     unsigned DestReg, unsigned SubIdx,
                     const MachineInstr *Orig,
                     const TargetRegisterInfo &TRI) const;

  /// convertToThreeAddress - This method must be implemented by targets that
  /// set the M_CONVERTIBLE_TO_3_ADDR flag. When this flag is set, the target
  /// may be able to convert a two-address instruction into a true
  /// three-address instruction on demand. This allows the X86 target (for
  /// example) to convert ADD and SHL instructions into LEA instructions if they
  /// would require register copies due to two-addressness.
  ///
  /// This method returns a null pointer if the transformation cannot be
  /// performed, otherwise it returns the new instruction.
  ///
  virtual MachineInstr *convertToThreeAddress(MachineFunction::iterator &MFI,
                                              MachineBasicBlock::iterator &MBBI,
                                              LiveVariables *LV) const;

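  // Illustrative sketch: a two-address add such as
  //   %ebx = ADD32rr %ebx, %ecx      (destination tied to the first source)
  // can be rewritten into the three-address form
  //   %edx = LEA32r %ebx, 1, %ecx, 0, %noreg
  // so the register allocator need not insert a copy when the first source is
  // still live. The opcodes shown are only illustrative; the actual choice
  // depends on operand sizes and the subtarget.
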
  /// commuteInstruction - We have a few instructions that need special
  /// handling in order to commute them.
  ///
  virtual MachineInstr *commuteInstruction(MachineInstr *MI, bool NewMI) const;

  // Branch analysis.
  virtual bool isUnpredicatedTerminator(const MachineInstr* MI) const;
  virtual bool AnalyzeBranch(MachineBasicBlock &MBB, MachineBasicBlock *&TBB,
                             MachineBasicBlock *&FBB,
                             SmallVectorImpl<MachineOperand> &Cond,
                             bool AllowModify) const;
  virtual unsigned RemoveBranch(MachineBasicBlock &MBB) const;
  virtual unsigned InsertBranch(MachineBasicBlock &MBB, MachineBasicBlock *TBB,
                                MachineBasicBlock *FBB,
                                const SmallVectorImpl<MachineOperand> &Cond,
                                DebugLoc DL) const;
  virtual void copyPhysReg(MachineBasicBlock &MBB,
                           MachineBasicBlock::iterator MI, DebugLoc DL,
                           unsigned DestReg, unsigned SrcReg,
                           bool KillSrc) const;
  virtual void storeRegToStackSlot(MachineBasicBlock &MBB,
                                   MachineBasicBlock::iterator MI,
                                   unsigned SrcReg, bool isKill, int FrameIndex,
                                   const TargetRegisterClass *RC,
                                   const TargetRegisterInfo *TRI) const;

  virtual void storeRegToAddr(MachineFunction &MF, unsigned SrcReg, bool isKill,
                              SmallVectorImpl<MachineOperand> &Addr,
                              const TargetRegisterClass *RC,
                              MachineInstr::mmo_iterator MMOBegin,
                              MachineInstr::mmo_iterator MMOEnd,
                              SmallVectorImpl<MachineInstr*> &NewMIs) const;

  virtual void loadRegFromStackSlot(MachineBasicBlock &MBB,
                                    MachineBasicBlock::iterator MI,
                                    unsigned DestReg, int FrameIndex,
                                    const TargetRegisterClass *RC,
                                    const TargetRegisterInfo *TRI) const;

  virtual void loadRegFromAddr(MachineFunction &MF, unsigned DestReg,
                               SmallVectorImpl<MachineOperand> &Addr,
                               const TargetRegisterClass *RC,
                               MachineInstr::mmo_iterator MMOBegin,
                               MachineInstr::mmo_iterator MMOEnd,
                               SmallVectorImpl<MachineInstr*> &NewMIs) const;

  virtual bool expandPostRAPseudo(MachineBasicBlock::iterator MI) const;

  virtual
  MachineInstr *emitFrameIndexDebugValue(MachineFunction &MF,
                                         int FrameIx, uint64_t Offset,
                                         const MDNode *MDPtr,
                                         DebugLoc DL) const;

  /// foldMemoryOperand - If this target supports it, fold a load or store of
  /// the specified stack slot into the specified machine instruction for the
  /// specified operand(s). If this is possible, the target should perform the
  /// folding and return the new instruction; otherwise it should return a null
  /// pointer. If it folds the instruction, it is likely that the MachineInstr
  /// the iterator references has been changed.
  virtual MachineInstr* foldMemoryOperandImpl(MachineFunction &MF,
                                              MachineInstr* MI,
                                              const SmallVectorImpl<unsigned> &Ops,
                                              int FrameIndex) const;

  /// foldMemoryOperand - Same as the previous version except it allows folding
  /// of any load and store from / to any address, not just from a specific
  /// stack slot.
  virtual MachineInstr* foldMemoryOperandImpl(MachineFunction &MF,
                                              MachineInstr* MI,
                                              const SmallVectorImpl<unsigned> &Ops,
                                              MachineInstr* LoadMI) const;

  /// canFoldMemoryOperand - Returns true if folding of the specified load /
  /// store is possible.
  virtual bool canFoldMemoryOperand(const MachineInstr*,
                                    const SmallVectorImpl<unsigned> &) const;
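
  // Illustrative sketch of what the fold tables encode: when one operand of a
  // register-register instruction is available in a stack slot, the register
  // form can be rewritten into its memory form, e.g.
  //   %eax = ADD32rr %eax, %ebx      (plus a reload of %ebx from <fi#N>)
  // becomes
  //   %eax = ADD32rm %eax, <fi#N>, 1, %noreg, 0, %noreg
  // The RegOp2MemOpTable* maps above drive this direction; MemOp2RegOpTable
  // drives the reverse (unfolding) direction.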

  /// unfoldMemoryOperand - Separate a single instruction which folded a load or
  /// a store or a load and a store into two or more instructions. If this is
  /// possible, returns true as well as the new instructions by reference.
  virtual bool unfoldMemoryOperand(MachineFunction &MF, MachineInstr *MI,
                                   unsigned Reg, bool UnfoldLoad, bool UnfoldStore,
                                   SmallVectorImpl<MachineInstr*> &NewMIs) const;

  virtual bool unfoldMemoryOperand(SelectionDAG &DAG, SDNode *N,
                                   SmallVectorImpl<SDNode*> &NewNodes) const;

  /// getOpcodeAfterMemoryUnfold - Returns the opcode of the would-be new
  /// instruction after the load / store is unfolded from an instruction of the
  /// specified opcode. It returns zero if the specified unfolding is not
  /// possible. If LoadRegIndex is non-null, it is filled in with the operand
  /// index of the operand which will hold the register holding the loaded
  /// value.
  virtual unsigned getOpcodeAfterMemoryUnfold(unsigned Opc,
                                              bool UnfoldLoad, bool UnfoldStore,
                                              unsigned *LoadRegIndex = 0) const;

  /// areLoadsFromSameBasePtr - This is used by the pre-regalloc scheduler
  /// to determine if two loads are loading from the same base address. It
  /// should only return true if the base pointers are the same and the
  /// only difference between the two addresses is the offset. It also returns
  /// the offsets by reference.
  virtual bool areLoadsFromSameBasePtr(SDNode *Load1, SDNode *Load2,
                                       int64_t &Offset1, int64_t &Offset2) const;

  /// shouldScheduleLoadsNear - This is used by the pre-regalloc scheduler to
  /// determine (in conjunction with areLoadsFromSameBasePtr) if two loads
  /// should be scheduled together. On some targets, if two loads are loading
  /// from addresses in the same cache line, it's better if they are scheduled
  /// together. This function takes two integers that represent the load
  /// offsets from the common base address. It returns true if it decides it's
  /// desirable to schedule the two loads together. "NumLoads" is the number
  /// of loads that have already been scheduled after Load1.
  virtual bool shouldScheduleLoadsNear(SDNode *Load1, SDNode *Load2,
                                       int64_t Offset1, int64_t Offset2,
                                       unsigned NumLoads) const;

  virtual void getNoopForMachoTarget(MCInst &NopInst) const;

  virtual
  bool ReverseBranchCondition(SmallVectorImpl<MachineOperand> &Cond) const;

  /// isSafeToMoveRegClassDefs - Return true if it's safe to move a machine
  /// instruction that defines the specified register class.
  bool isSafeToMoveRegClassDefs(const TargetRegisterClass *RC) const;

  static bool isX86_64ExtendedReg(const MachineOperand &MO) {
    if (!MO.isReg()) return false;
    return X86II::isX86_64ExtendedReg(MO.getReg());
  }

  /// getGlobalBaseReg - Return a virtual register initialized with the
  /// global base register value. Output instructions required to
  /// initialize the register in the function entry block, if necessary.
  ///
  unsigned getGlobalBaseReg(MachineFunction *MF) const;

  std::pair<uint16_t, uint16_t>
  getExecutionDomain(const MachineInstr *MI) const;

  void setExecutionDomain(MachineInstr *MI, unsigned Domain) const;
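
  // Illustrative note: on SSE-capable subtargets several opcodes differ only
  // in their execution domain (e.g. MOVAPS / MOVAPD / MOVDQA register moves).
  // getExecutionDomain reports the instruction's current domain and the set of
  // domains it could legally use; setExecutionDomain rewrites the opcode into
  // the requested domain so the domain-fixup pass can avoid cross-domain
  // bypass delays.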

  unsigned getPartialRegUpdateClearance(const MachineInstr *MI, unsigned OpNum,
                                        const TargetRegisterInfo *TRI) const;
  void breakPartialRegDependency(MachineBasicBlock::iterator MI, unsigned OpNum,
                                 const TargetRegisterInfo *TRI) const;

  MachineInstr* foldMemoryOperandImpl(MachineFunction &MF,
                                      MachineInstr* MI,
                                      unsigned OpNum,
                                      const SmallVectorImpl<MachineOperand> &MOs,
                                      unsigned Size, unsigned Alignment) const;

  bool isHighLatencyDef(int opc) const;

  bool hasHighOperandLatency(const InstrItineraryData *ItinData,
                             const MachineRegisterInfo *MRI,
                             const MachineInstr *DefMI, unsigned DefIdx,
                             const MachineInstr *UseMI, unsigned UseIdx) const;

private:
  MachineInstr *convertToThreeAddressWithLEA(unsigned MIOpc,
                                             MachineFunction::iterator &MFI,
                                             MachineBasicBlock::iterator &MBBI,
                                             LiveVariables *LV) const;

  /// isFrameOperand - Return true and the FrameIndex if the specified
  /// operand and following operands form a reference to the stack frame.
  bool isFrameOperand(const MachineInstr *MI, unsigned int Op,
                      int &FrameIndex) const;
};

} // End llvm namespace

#endif