//===- X86InstrInfo.h - X86 Instruction Information ------------*- C++ -*- ===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file contains the X86 implementation of the TargetInstrInfo class.
//
//===----------------------------------------------------------------------===//

#ifndef X86INSTRUCTIONINFO_H
#define X86INSTRUCTIONINFO_H

#include "llvm/Target/TargetInstrInfo.h"
#include "X86.h"
#include "X86RegisterInfo.h"
#include "llvm/ADT/DenseMap.h"

#define GET_INSTRINFO_HEADER
#include "X86GenInstrInfo.inc"

namespace llvm {
  class X86RegisterInfo;
  class X86TargetMachine;

namespace X86 {
  // X86 specific condition codes. These correspond to X86_*_COND in
  // X86InstrInfo.td. They must be kept in synch.
  enum CondCode {
    COND_A  = 0,
    COND_AE = 1,
    COND_B  = 2,
    COND_BE = 3,
    COND_E  = 4,
    COND_G  = 5,
    COND_GE = 6,
    COND_L  = 7,
    COND_LE = 8,
    COND_NE = 9,
    COND_NO = 10,
    COND_NP = 11,
    COND_NS = 12,
    COND_O  = 13,
    COND_P  = 14,
    COND_S  = 15,

    // Artificial condition codes. These are used by AnalyzeBranch
    // to indicate a block terminated with two conditional branches to
    // the same location. This occurs in code using FCMP_OEQ or FCMP_UNE,
    // which can't be represented on x86 with a single condition. These
    // are never used in MachineInstrs.
    COND_NE_OR_P,
    COND_NP_OR_E,

    COND_INVALID
  };

  // Turn condition code into conditional branch opcode.
  unsigned GetCondBranchFromCond(CondCode CC);

  /// GetOppositeBranchCondition - Return the inverse of the specified cond,
  /// e.g. turning COND_E to COND_NE.
  CondCode GetOppositeBranchCondition(X86::CondCode CC);
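
  // Illustrative usage (a sketch for clarity, not part of this interface):
  // inverting a condition simply flips its sense, so code that needs the
  // negated branch can do e.g.
  //
  //   X86::CondCode CC  = X86::COND_E;
  //   X86::CondCode Inv = X86::GetOppositeBranchCondition(CC); // == COND_NE
  //
  // and then rebuild the branch using GetCondBranchFromCond(Inv).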
} // end namespace X86;


/// isGlobalStubReference - Return true if the specified TargetFlag operand is
/// a reference to a stub for a global, not the global itself.
inline static bool isGlobalStubReference(unsigned char TargetFlag) {
  switch (TargetFlag) {
  case X86II::MO_DLLIMPORT: // dllimport stub.
  case X86II::MO_GOTPCREL:  // rip-relative GOT reference.
  case X86II::MO_GOT:       // normal GOT reference.
  case X86II::MO_DARWIN_NONLAZY_PIC_BASE:        // Normal $non_lazy_ptr ref.
  case X86II::MO_DARWIN_NONLAZY:                 // Normal $non_lazy_ptr ref.
  case X86II::MO_DARWIN_HIDDEN_NONLAZY_PIC_BASE: // Hidden $non_lazy_ptr ref.
    return true;
  default:
    return false;
  }
}
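
// Illustrative note (an assumption for clarity, not stated in this file):
// when one of the flags above is set, the operand names an indirection cell
// rather than the global itself. For example, a reference to a dllimport'ed
// global "foo" goes through the import-table pointer __imp_foo, and a Darwin
// non-lazy reference goes through a foo$non_lazy_ptr pointer stub.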

/// isGlobalRelativeToPICBase - Return true if the specified global value
/// reference is relative to a 32-bit PIC base (X86ISD::GlobalBaseReg). If this
/// is true, the addressing mode has the PIC base register added in (e.g. EBX).
inline static bool isGlobalRelativeToPICBase(unsigned char TargetFlag) {
  switch (TargetFlag) {
  case X86II::MO_GOTOFF:                         // isPICStyleGOT: local global.
  case X86II::MO_GOT:                            // isPICStyleGOT: other global.
  case X86II::MO_PIC_BASE_OFFSET:                // Darwin local global.
  case X86II::MO_DARWIN_NONLAZY_PIC_BASE:        // Darwin/32 external global.
  case X86II::MO_DARWIN_HIDDEN_NONLAZY_PIC_BASE: // Darwin/32 hidden global.
  case X86II::MO_TLVP:                           // ??? Pretty sure..
    return true;
  default:
    return false;
  }
}
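
// Illustrative note (a sketch of the addressing, assumed for clarity): with
// one of these flags the global's address is formed relative to the PIC base
// register rather than as an absolute 32-bit address, e.g. for MO_GOTOFF
// something like
//
//   leal foo@GOTOFF(%ebx), %eax
//
// where %ebx holds the GOT/PIC base.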

inline static bool isScale(const MachineOperand &MO) {
  return MO.isImm() &&
    (MO.getImm() == 1 || MO.getImm() == 2 ||
     MO.getImm() == 4 || MO.getImm() == 8);
}

inline static bool isLeaMem(const MachineInstr *MI, unsigned Op) {
  if (MI->getOperand(Op).isFI()) return true;
  return Op+4 <= MI->getNumOperands() &&
    MI->getOperand(Op  ).isReg() && isScale(MI->getOperand(Op+1)) &&
    MI->getOperand(Op+2).isReg() &&
    (MI->getOperand(Op+3).isImm() ||
     MI->getOperand(Op+3).isGlobal() ||
     MI->getOperand(Op+3).isCPI() ||
     MI->getOperand(Op+3).isJTI());
}

inline static bool isMem(const MachineInstr *MI, unsigned Op) {
  if (MI->getOperand(Op).isFI()) return true;
  return Op+5 <= MI->getNumOperands() &&
    MI->getOperand(Op+4).isReg() &&
    isLeaMem(MI, Op);
}
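
// Illustrative note on the operand layout these checks assume (deduced from
// the predicates above; the .td files remain the authoritative description):
//
//   Op+0  base register
//   Op+1  scale immediate (1, 2, 4 or 8)
//   Op+2  index register
//   Op+3  displacement (immediate, global, constant-pool or jump-table index)
//   Op+4  segment register (checked only by isMem; LEA takes no segment)
//
// e.g. the x86 reference 8(%esi,%edi,4) has base %esi, scale 4, index %edi
// and displacement 8.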

class X86InstrInfo : public X86GenInstrInfo {
  X86TargetMachine &TM;
  const X86RegisterInfo RI;

  /// RegOp2MemOpTable2Addr, RegOp2MemOpTable0, RegOp2MemOpTable1,
  /// RegOp2MemOpTable2 - Load / store folding opcode maps.
  ///
  DenseMap<unsigned, std::pair<unsigned,unsigned> > RegOp2MemOpTable2Addr;
  DenseMap<unsigned, std::pair<unsigned,unsigned> > RegOp2MemOpTable0;
  DenseMap<unsigned, std::pair<unsigned,unsigned> > RegOp2MemOpTable1;
  DenseMap<unsigned, std::pair<unsigned,unsigned> > RegOp2MemOpTable2;

  /// MemOp2RegOpTable - Load / store unfolding opcode map.
  ///
  DenseMap<unsigned, std::pair<unsigned, unsigned> > MemOp2RegOpTable;

public:
  explicit X86InstrInfo(X86TargetMachine &tm);

  /// getRegisterInfo - TargetInstrInfo is a superset of MRegister info.  As
  /// such, whenever a client has an instance of instruction info, it should
  /// always be able to get register info as well (through this method).
  ///
  virtual const X86RegisterInfo &getRegisterInfo() const { return RI; }

  /// isCoalescableExtInstr - Return true if the instruction is a "coalescable"
  /// extension instruction. That is, it's like a copy where it's legal for the
  /// source to overlap the destination. e.g. X86::MOVSX64rr32. If this returns
  /// true, then it's expected the pre-extension value is available as a subreg
  /// of the result register. This also returns the sub-register index in
  /// SubIdx.
  virtual bool isCoalescableExtInstr(const MachineInstr &MI,
                                     unsigned &SrcReg, unsigned &DstReg,
                                     unsigned &SubIdx) const;
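
  // Illustrative example (an assumption based on the comment above): for
  //   %rax = MOVSX64rr32 %ecx
  // the low 32 bits of %rax equal %ecx after the sign extension, so the
  // pre-extension value is available as the 32-bit sub-register of the
  // destination and SubIdx would name that sub-register index.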

  unsigned isLoadFromStackSlot(const MachineInstr *MI, int &FrameIndex) const;
  /// isLoadFromStackSlotPostFE - Check for post-frame ptr elimination
  /// stack locations as well.  This uses a heuristic so it isn't
  /// reliable for correctness.
  unsigned isLoadFromStackSlotPostFE(const MachineInstr *MI,
                                     int &FrameIndex) const;

  unsigned isStoreToStackSlot(const MachineInstr *MI, int &FrameIndex) const;
  /// isStoreToStackSlotPostFE - Check for post-frame ptr elimination
  /// stack locations as well.  This uses a heuristic so it isn't
  /// reliable for correctness.
  unsigned isStoreToStackSlotPostFE(const MachineInstr *MI,
                                    int &FrameIndex) const;

  bool isReallyTriviallyReMaterializable(const MachineInstr *MI,
                                         AliasAnalysis *AA) const;
  void reMaterialize(MachineBasicBlock &MBB, MachineBasicBlock::iterator MI,
                     unsigned DestReg, unsigned SubIdx,
                     const MachineInstr *Orig,
                     const TargetRegisterInfo &TRI) const;

  /// convertToThreeAddress - This method must be implemented by targets that
  /// set the M_CONVERTIBLE_TO_3_ADDR flag.  When this flag is set, the target
  /// may be able to convert a two-address instruction into a true
  /// three-address instruction on demand.  This allows the X86 target (for
  /// example) to convert ADD and SHL instructions into LEA instructions if they
  /// would require register copies due to two-addressness.
  ///
  /// This method returns a null pointer if the transformation cannot be
  /// performed, otherwise it returns the new instruction.
  ///
  virtual MachineInstr *convertToThreeAddress(MachineFunction::iterator &MFI,
                                              MachineBasicBlock::iterator &MBBI,
                                              LiveVariables *LV) const;
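
  // Illustrative sketch of the transformation (the MI syntax below is an
  // assumption for clarity, not taken from this file): the two-address form
  //
  //   %eax = ADD32rr %eax, %ebx       ; destination tied to the first source
  //
  // can be rewritten as the three-address
  //
  //   %ecx = LEA32r %eax, 1, %ebx, 0  ; base + 1*index + 0
  //
  // which lets the result land in a different register and avoids an extra
  // copy when %eax is still live afterwards.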

  /// commuteInstruction - We have a few instructions that must be hacked on to
  /// commute them.
  ///
  virtual MachineInstr *commuteInstruction(MachineInstr *MI, bool NewMI) const;

  // Branch analysis.
  virtual bool isUnpredicatedTerminator(const MachineInstr* MI) const;
  virtual bool AnalyzeBranch(MachineBasicBlock &MBB, MachineBasicBlock *&TBB,
                             MachineBasicBlock *&FBB,
                             SmallVectorImpl<MachineOperand> &Cond,
                             bool AllowModify) const;
  virtual unsigned RemoveBranch(MachineBasicBlock &MBB) const;
  virtual unsigned InsertBranch(MachineBasicBlock &MBB, MachineBasicBlock *TBB,
                                MachineBasicBlock *FBB,
                                const SmallVectorImpl<MachineOperand> &Cond,
                                DebugLoc DL) const;
  virtual void copyPhysReg(MachineBasicBlock &MBB,
                           MachineBasicBlock::iterator MI, DebugLoc DL,
                           unsigned DestReg, unsigned SrcReg,
                           bool KillSrc) const;
  virtual void storeRegToStackSlot(MachineBasicBlock &MBB,
                                   MachineBasicBlock::iterator MI,
                                   unsigned SrcReg, bool isKill, int FrameIndex,
                                   const TargetRegisterClass *RC,
                                   const TargetRegisterInfo *TRI) const;

  virtual void storeRegToAddr(MachineFunction &MF, unsigned SrcReg, bool isKill,
                              SmallVectorImpl<MachineOperand> &Addr,
                              const TargetRegisterClass *RC,
                              MachineInstr::mmo_iterator MMOBegin,
                              MachineInstr::mmo_iterator MMOEnd,
                              SmallVectorImpl<MachineInstr*> &NewMIs) const;

  virtual void loadRegFromStackSlot(MachineBasicBlock &MBB,
                                    MachineBasicBlock::iterator MI,
                                    unsigned DestReg, int FrameIndex,
                                    const TargetRegisterClass *RC,
                                    const TargetRegisterInfo *TRI) const;

  virtual void loadRegFromAddr(MachineFunction &MF, unsigned DestReg,
                               SmallVectorImpl<MachineOperand> &Addr,
                               const TargetRegisterClass *RC,
                               MachineInstr::mmo_iterator MMOBegin,
                               MachineInstr::mmo_iterator MMOEnd,
                               SmallVectorImpl<MachineInstr*> &NewMIs) const;
  virtual
  MachineInstr *emitFrameIndexDebugValue(MachineFunction &MF,
                                         int FrameIx, uint64_t Offset,
                                         const MDNode *MDPtr,
                                         DebugLoc DL) const;

  /// foldMemoryOperand - If this target supports it, fold a load or store of
  /// the specified stack slot into the specified machine instruction for the
  /// specified operand(s).  If this is possible, the target should perform the
  /// folding and return true, otherwise it should return false.  If it folds
  /// the instruction, it is likely that the MachineInstruction the iterator
  /// references has been changed.
  virtual MachineInstr* foldMemoryOperandImpl(MachineFunction &MF,
                                              MachineInstr* MI,
                                              const SmallVectorImpl<unsigned> &Ops,
                                              int FrameIndex) const;
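
  // Illustrative example (an assumption for clarity; the register/memory
  // opcode pairing is the kind of entry the RegOp2MemOpTable* maps hold):
  // folding a reload of a spilled value into
  //
  //   %eax = ADD32rr %eax, %ebx
  //
  // when %ebx lives in a stack slot produces the memory form
  //
  //   %eax = ADD32rm %eax, <fi#N>, 1, %noreg, 0, %noreg
  //
  // so the separate load instruction disappears.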

  /// foldMemoryOperand - Same as the previous version except it allows folding
  /// of any load and store from / to any address, not just from a specific
  /// stack slot.
  virtual MachineInstr* foldMemoryOperandImpl(MachineFunction &MF,
                                              MachineInstr* MI,
                                              const SmallVectorImpl<unsigned> &Ops,
                                              MachineInstr* LoadMI) const;

  /// canFoldMemoryOperand - Returns true if the specified load / store
  /// folding is possible.
  virtual bool canFoldMemoryOperand(const MachineInstr*,
                                    const SmallVectorImpl<unsigned> &) const;

  /// unfoldMemoryOperand - Separate a single instruction which folded a load or
  /// a store or a load and a store into two or more instructions. If this is
  /// possible, returns true as well as the new instructions by reference.
  virtual bool unfoldMemoryOperand(MachineFunction &MF, MachineInstr *MI,
                                   unsigned Reg, bool UnfoldLoad, bool UnfoldStore,
                                   SmallVectorImpl<MachineInstr*> &NewMIs) const;

  virtual bool unfoldMemoryOperand(SelectionDAG &DAG, SDNode *N,
                                   SmallVectorImpl<SDNode*> &NewNodes) const;

  /// getOpcodeAfterMemoryUnfold - Returns the opcode of the would-be new
  /// instruction after load / store are unfolded from an instruction of the
  /// specified opcode.  It returns zero if the specified unfolding is not
  /// possible.  If LoadRegIndex is non-null, it is filled in with the operand
  /// index of the operand which will hold the register holding the loaded
  /// value.
  virtual unsigned getOpcodeAfterMemoryUnfold(unsigned Opc,
                                              bool UnfoldLoad, bool UnfoldStore,
                                              unsigned *LoadRegIndex = 0) const;

  /// areLoadsFromSameBasePtr - This is used by the pre-regalloc scheduler
  /// to determine if two loads are loading from the same base address. It
  /// should only return true if the base pointers are the same and the
  /// only difference between the two addresses is the offset. It also returns
  /// the offsets by reference.
  virtual bool areLoadsFromSameBasePtr(SDNode *Load1, SDNode *Load2,
                                       int64_t &Offset1, int64_t &Offset2) const;

  /// shouldScheduleLoadsNear - This is used by the pre-regalloc scheduler to
  /// determine (in conjunction with areLoadsFromSameBasePtr) if two loads
  /// should be scheduled together. On some targets, if two loads are loading
  /// from addresses in the same cache line, it's better if they are scheduled
  /// together. This function takes two integers that represent the load offsets
  /// from the common base address. It returns true if it decides it's desirable
  /// to schedule the two loads together. "NumLoads" is the number of loads that
  /// have already been scheduled after Load1.
  virtual bool shouldScheduleLoadsNear(SDNode *Load1, SDNode *Load2,
                                       int64_t Offset1, int64_t Offset2,
                                       unsigned NumLoads) const;

  virtual void getNoopForMachoTarget(MCInst &NopInst) const;

  virtual
  bool ReverseBranchCondition(SmallVectorImpl<MachineOperand> &Cond) const;

  /// isSafeToMoveRegClassDefs - Return true if it's safe to move a machine
  /// instruction that defines the specified register class.
  bool isSafeToMoveRegClassDefs(const TargetRegisterClass *RC) const;

  static bool isX86_64ExtendedReg(const MachineOperand &MO) {
    if (!MO.isReg()) return false;
    return X86II::isX86_64ExtendedReg(MO.getReg());
  }
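
  // Illustrative note (general x86-64 background, assumed rather than stated
  // in this file): the "extended" registers are those only reachable with a
  // REX prefix, e.g. R8-R15 and XMM8-XMM15 (and their sub-registers), so this
  // predicate is useful when deciding whether an encoding needs REX.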

  /// getGlobalBaseReg - Return a virtual register initialized with the
  /// global base register value. Output instructions required to
  /// initialize the register in the function entry block, if necessary.
  ///
  unsigned getGlobalBaseReg(MachineFunction *MF) const;

  /// GetSSEDomain - Return the SSE execution domain of MI as the first element,
  /// and a bitmask of possible arguments to SetSSEDomain as the second.
  std::pair<uint16_t, uint16_t> GetSSEDomain(const MachineInstr *MI) const;

  /// SetSSEDomain - Set the SSEDomain of MI.
  void SetSSEDomain(MachineInstr *MI, unsigned Domain) const;

  MachineInstr* foldMemoryOperandImpl(MachineFunction &MF,
                                      MachineInstr* MI,
                                      unsigned OpNum,
                                      const SmallVectorImpl<MachineOperand> &MOs,
                                      unsigned Size, unsigned Alignment) const;

  bool isHighLatencyDef(int opc) const;

  bool hasHighOperandLatency(const InstrItineraryData *ItinData,
                             const MachineRegisterInfo *MRI,
                             const MachineInstr *DefMI, unsigned DefIdx,
                             const MachineInstr *UseMI, unsigned UseIdx) const;

private:
  MachineInstr * convertToThreeAddressWithLEA(unsigned MIOpc,
                                              MachineFunction::iterator &MFI,
                                              MachineBasicBlock::iterator &MBBI,
                                              LiveVariables *LV) const;

  /// isFrameOperand - Return true and the FrameIndex if the specified
  /// operand and the following operands form a reference to the stack frame.
  bool isFrameOperand(const MachineInstr *MI, unsigned int Op,
                      int &FrameIndex) const;
};

} // End llvm namespace

#endif