//===- X86InstrInfo.h - X86 Instruction Information ------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file contains the X86 implementation of the TargetInstrInfo class.
//
//===----------------------------------------------------------------------===//

#ifndef X86INSTRUCTIONINFO_H
#define X86INSTRUCTIONINFO_H

#include "llvm/Target/TargetInstrInfo.h"
#include "X86.h"
#include "X86RegisterInfo.h"
#include "llvm/ADT/DenseMap.h"

#define GET_INSTRINFO_HEADER
#include "X86GenInstrInfo.inc"

namespace llvm {
  class X86RegisterInfo;
  class X86TargetMachine;

namespace X86 {
  // X86-specific condition codes. These correspond to X86_*_COND in
  // X86InstrInfo.td. They must be kept in sync.
  enum CondCode {
    COND_A  = 0,
    COND_AE = 1,
    COND_B  = 2,
    COND_BE = 3,
    COND_E  = 4,
    COND_G  = 5,
    COND_GE = 6,
    COND_L  = 7,
    COND_LE = 8,
    COND_NE = 9,
    COND_NO = 10,
    COND_NP = 11,
    COND_NS = 12,
    COND_O  = 13,
    COND_P  = 14,
    COND_S  = 15,

    // Artificial condition codes. These are used by AnalyzeBranch
    // to indicate a block terminated with two conditional branches to
    // the same location. This occurs in code using FCMP_OEQ or FCMP_UNE,
    // which can't be represented on x86 with a single condition. These
    // are never used in MachineInstrs.
    COND_NE_OR_P,
    COND_NP_OR_E,

    COND_INVALID
  };

  // Turn condition code into conditional branch opcode.
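  // For example, COND_E yields the opcode of a "je" conditional branch and
  // COND_NE that of "jne"; the mapping is implemented in X86InstrInfo.cpp.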
  unsigned GetCondBranchFromCond(CondCode CC);

  /// GetOppositeBranchCondition - Return the inverse of the specified cond,
  /// e.g. turning COND_E to COND_NE.
  CondCode GetOppositeBranchCondition(X86::CondCode CC);
}  // end namespace X86


/// isGlobalStubReference - Return true if the specified TargetFlag operand is
/// a reference to a stub for a global, not the global itself.
inline static bool isGlobalStubReference(unsigned char TargetFlag) {
  switch (TargetFlag) {
  case X86II::MO_DLLIMPORT: // dllimport stub.
  case X86II::MO_GOTPCREL: // rip-relative GOT reference.
  case X86II::MO_GOT: // normal GOT reference.
  case X86II::MO_DARWIN_NONLAZY_PIC_BASE: // Normal $non_lazy_ptr ref.
  case X86II::MO_DARWIN_NONLAZY: // Normal $non_lazy_ptr ref.
  case X86II::MO_DARWIN_HIDDEN_NONLAZY_PIC_BASE: // Hidden $non_lazy_ptr ref.
    return true;
  default:
    return false;
  }
}
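
// Illustrative, hypothetical call site: callers typically test a global
// operand's target flags, e.g.
//   if (isGlobalStubReference(MO.getTargetFlags())) { ... }
// to detect that the operand refers to the stub rather than the global.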

/// isGlobalRelativeToPICBase - Return true if the specified global value
/// reference is relative to a 32-bit PIC base (X86ISD::GlobalBaseReg). If this
/// is true, the addressing mode has the PIC base register added in (e.g. EBX).
inline static bool isGlobalRelativeToPICBase(unsigned char TargetFlag) {
  switch (TargetFlag) {
  case X86II::MO_GOTOFF: // isPICStyleGOT: local global.
  case X86II::MO_GOT: // isPICStyleGOT: other global.
  case X86II::MO_PIC_BASE_OFFSET: // Darwin local global.
  case X86II::MO_DARWIN_NONLAZY_PIC_BASE: // Darwin/32 external global.
  case X86II::MO_DARWIN_HIDDEN_NONLAZY_PIC_BASE: // Darwin/32 hidden global.
  case X86II::MO_TLVP: // Darwin TLS reference (assumed PIC-base relative).
    return true;
  default:
    return false;
  }
}

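/// isScale - Return true if the operand is an immediate holding one of the
/// scale factors an x86 SIB byte can encode (1, 2, 4 or 8).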
inline static bool isScale(const MachineOperand &MO) {
  return MO.isImm() &&
    (MO.getImm() == 1 || MO.getImm() == 2 ||
     MO.getImm() == 4 || MO.getImm() == 8);
}

inline static bool isLeaMem(const MachineInstr *MI, unsigned Op) {
  if (MI->getOperand(Op).isFI()) return true;
  return Op+4 <= MI->getNumOperands() &&
    MI->getOperand(Op  ).isReg() && isScale(MI->getOperand(Op+1)) &&
    MI->getOperand(Op+2).isReg() &&
    (MI->getOperand(Op+3).isImm() ||
     MI->getOperand(Op+3).isGlobal() ||
     MI->getOperand(Op+3).isCPI() ||
     MI->getOperand(Op+3).isJTI());
}

inline static bool isMem(const MachineInstr *MI, unsigned Op) {
  if (MI->getOperand(Op).isFI()) return true;
  return Op+5 <= MI->getNumOperands() &&
    MI->getOperand(Op+4).isReg() &&
    isLeaMem(MI, Op);
}
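
// Note: a full x86 memory reference is modeled as five machine operands
// (base register, scale immediate, index register, displacement, segment
// register). isLeaMem checks only the first four, since LEA takes no segment
// operand; isMem additionally requires the segment-register operand.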

class X86InstrInfo : public X86GenInstrInfo {
  X86TargetMachine &TM;
  const X86RegisterInfo RI;

  /// RegOp2MemOpTable2Addr, RegOp2MemOpTable0, RegOp2MemOpTable1,
  /// RegOp2MemOpTable2 - Load / store folding opcode maps.
  ///
  typedef DenseMap<unsigned,
                   std::pair<unsigned, unsigned> > RegOp2MemOpTableType;
  RegOp2MemOpTableType RegOp2MemOpTable2Addr;
  RegOp2MemOpTableType RegOp2MemOpTable0;
  RegOp2MemOpTableType RegOp2MemOpTable1;
  RegOp2MemOpTableType RegOp2MemOpTable2;

  /// MemOp2RegOpTable - Load / store unfolding opcode map.
  ///
  typedef DenseMap<unsigned,
                   std::pair<unsigned, unsigned> > MemOp2RegOpTableType;
  MemOp2RegOpTableType MemOp2RegOpTable;

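  /// AddTableEntry - Populate the folding maps above: record the
  /// register-form to memory-form mapping (with its folding flags) in
  /// R2MTable and, where the entry is reversible, the inverse mapping in
  /// M2RTable.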
  void AddTableEntry(RegOp2MemOpTableType &R2MTable,
                     MemOp2RegOpTableType &M2RTable,
                     unsigned RegOp, unsigned MemOp, unsigned Flags);

public:
  explicit X86InstrInfo(X86TargetMachine &tm);

  /// getRegisterInfo - TargetInstrInfo is a superset of MRegisterInfo. As
  /// such, whenever a client has an instance of instruction info, it should
  /// always be able to get register info as well (through this method).
  ///
  virtual const X86RegisterInfo &getRegisterInfo() const { return RI; }

  /// isCoalescableExtInstr - Return true if the instruction is a "coalescable"
  /// extension instruction. That is, it's like a copy where it's legal for the
  /// source to overlap the destination. e.g. X86::MOVSX64rr32. If this returns
  /// true, then it's expected the pre-extension value is available as a subreg
  /// of the result register. This also returns the sub-register index in
  /// SubIdx.
  virtual bool isCoalescableExtInstr(const MachineInstr &MI,
                                     unsigned &SrcReg, unsigned &DstReg,
                                     unsigned &SubIdx) const;

  unsigned isLoadFromStackSlot(const MachineInstr *MI, int &FrameIndex) const;
  /// isLoadFromStackSlotPostFE - Check for post-frame ptr elimination
  /// stack locations as well. This uses a heuristic so it isn't
  /// reliable for correctness.
  unsigned isLoadFromStackSlotPostFE(const MachineInstr *MI,
                                     int &FrameIndex) const;

  unsigned isStoreToStackSlot(const MachineInstr *MI, int &FrameIndex) const;
  /// isStoreToStackSlotPostFE - Check for post-frame ptr elimination
  /// stack locations as well. This uses a heuristic so it isn't
  /// reliable for correctness.
  unsigned isStoreToStackSlotPostFE(const MachineInstr *MI,
                                    int &FrameIndex) const;

  bool isReallyTriviallyReMaterializable(const MachineInstr *MI,
                                         AliasAnalysis *AA) const;
  void reMaterialize(MachineBasicBlock &MBB, MachineBasicBlock::iterator MI,
                     unsigned DestReg, unsigned SubIdx,
                     const MachineInstr *Orig,
                     const TargetRegisterInfo &TRI) const;

  /// convertToThreeAddress - This method must be implemented by targets that
  /// set the M_CONVERTIBLE_TO_3_ADDR flag. When this flag is set, the target
  /// may be able to convert a two-address instruction into a true
  /// three-address instruction on demand. This allows the X86 target (for
  /// example) to convert ADD and SHL instructions into LEA instructions if
  /// they would require register copies due to two-addressness.
  ///
  /// This method returns a null pointer if the transformation cannot be
  /// performed, otherwise it returns the new instruction.
  ///
  virtual MachineInstr *convertToThreeAddress(MachineFunction::iterator &MFI,
                                              MachineBasicBlock::iterator &MBBI,
                                              LiveVariables *LV) const;

  /// commuteInstruction - We have a few instructions that must be hacked on to
  /// commute them.
  ///
  virtual MachineInstr *commuteInstruction(MachineInstr *MI, bool NewMI) const;

  // Branch analysis.
  virtual bool isUnpredicatedTerminator(const MachineInstr* MI) const;
  virtual bool AnalyzeBranch(MachineBasicBlock &MBB, MachineBasicBlock *&TBB,
                             MachineBasicBlock *&FBB,
                             SmallVectorImpl<MachineOperand> &Cond,
                             bool AllowModify) const;
  virtual unsigned RemoveBranch(MachineBasicBlock &MBB) const;
  virtual unsigned InsertBranch(MachineBasicBlock &MBB, MachineBasicBlock *TBB,
                                MachineBasicBlock *FBB,
                                const SmallVectorImpl<MachineOperand> &Cond,
                                DebugLoc DL) const;
  virtual void copyPhysReg(MachineBasicBlock &MBB,
                           MachineBasicBlock::iterator MI, DebugLoc DL,
                           unsigned DestReg, unsigned SrcReg,
                           bool KillSrc) const;
  virtual void storeRegToStackSlot(MachineBasicBlock &MBB,
                                   MachineBasicBlock::iterator MI,
                                   unsigned SrcReg, bool isKill, int FrameIndex,
                                   const TargetRegisterClass *RC,
                                   const TargetRegisterInfo *TRI) const;

  virtual void storeRegToAddr(MachineFunction &MF, unsigned SrcReg, bool isKill,
                              SmallVectorImpl<MachineOperand> &Addr,
                              const TargetRegisterClass *RC,
                              MachineInstr::mmo_iterator MMOBegin,
                              MachineInstr::mmo_iterator MMOEnd,
                              SmallVectorImpl<MachineInstr*> &NewMIs) const;

  virtual void loadRegFromStackSlot(MachineBasicBlock &MBB,
                                    MachineBasicBlock::iterator MI,
                                    unsigned DestReg, int FrameIndex,
                                    const TargetRegisterClass *RC,
                                    const TargetRegisterInfo *TRI) const;

  virtual void loadRegFromAddr(MachineFunction &MF, unsigned DestReg,
                               SmallVectorImpl<MachineOperand> &Addr,
                               const TargetRegisterClass *RC,
                               MachineInstr::mmo_iterator MMOBegin,
                               MachineInstr::mmo_iterator MMOEnd,
                               SmallVectorImpl<MachineInstr*> &NewMIs) const;

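  /// expandPostRAPseudo - TargetInstrInfo hook: lower target pseudo
  /// instructions that survive until after register allocation into real
  /// machine instructions.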
  virtual bool expandPostRAPseudo(MachineBasicBlock::iterator MI) const;

  virtual
  MachineInstr *emitFrameIndexDebugValue(MachineFunction &MF,
                                         int FrameIx, uint64_t Offset,
                                         const MDNode *MDPtr,
                                         DebugLoc DL) const;

  /// foldMemoryOperand - If this target supports it, fold a load or store of
  /// the specified stack slot into the specified machine instruction for the
  /// specified operand(s). If this is possible, the target should perform the
  /// folding and return true, otherwise it should return false. If it folds
  /// the instruction, it is likely that the MachineInstruction the iterator
  /// references has been changed.
  virtual MachineInstr* foldMemoryOperandImpl(MachineFunction &MF,
                                              MachineInstr* MI,
                                           const SmallVectorImpl<unsigned> &Ops,
                                              int FrameIndex) const;

  /// foldMemoryOperand - Same as the previous version except it allows folding
  /// of any load and store from / to any address, not just from a specific
  /// stack slot.
  virtual MachineInstr* foldMemoryOperandImpl(MachineFunction &MF,
                                              MachineInstr* MI,
                                           const SmallVectorImpl<unsigned> &Ops,
                                              MachineInstr* LoadMI) const;

  /// canFoldMemoryOperand - Returns true if the specified load / store
  /// folding is possible.
  virtual bool canFoldMemoryOperand(const MachineInstr*,
                                    const SmallVectorImpl<unsigned> &) const;

  /// unfoldMemoryOperand - Separate a single instruction which folded a load
  /// or a store or a load and a store into two or more instructions. If this
  /// is possible, returns true as well as the new instructions by reference.
  virtual bool unfoldMemoryOperand(MachineFunction &MF, MachineInstr *MI,
                                   unsigned Reg, bool UnfoldLoad,
                                   bool UnfoldStore,
                                   SmallVectorImpl<MachineInstr*> &NewMIs) const;

  virtual bool unfoldMemoryOperand(SelectionDAG &DAG, SDNode *N,
                                   SmallVectorImpl<SDNode*> &NewNodes) const;

  /// getOpcodeAfterMemoryUnfold - Returns the opcode of the would-be new
  /// instruction after load / store are unfolded from an instruction of the
  /// specified opcode. It returns zero if the specified unfolding is not
  /// possible. If LoadRegIndex is non-null, it is filled in with the operand
  /// index of the operand which will hold the register holding the loaded
  /// value.
  virtual unsigned getOpcodeAfterMemoryUnfold(unsigned Opc,
                                      bool UnfoldLoad, bool UnfoldStore,
                                      unsigned *LoadRegIndex = 0) const;

  /// areLoadsFromSameBasePtr - This is used by the pre-regalloc scheduler
  /// to determine if two loads are loading from the same base address. It
  /// should only return true if the base pointers are the same and the
  /// only difference between the two addresses is the offset. It also returns
  /// the offsets by reference.
  virtual bool areLoadsFromSameBasePtr(SDNode *Load1, SDNode *Load2,
                                       int64_t &Offset1,
                                       int64_t &Offset2) const;

  /// shouldScheduleLoadsNear - This is used by the pre-regalloc scheduler to
  /// determine (in conjunction with areLoadsFromSameBasePtr) if two loads
  /// should be scheduled together. On some targets, if two loads are loading
  /// from addresses in the same cache line, it's better if they are scheduled
  /// together. This function takes two integers that represent the load
  /// offsets from the common base address. It returns true if it decides it's
  /// desirable to schedule the two loads together. "NumLoads" is the number
  /// of loads that have already been scheduled after Load1.
  virtual bool shouldScheduleLoadsNear(SDNode *Load1, SDNode *Load2,
                                       int64_t Offset1, int64_t Offset2,
                                       unsigned NumLoads) const;

  virtual void getNoopForMachoTarget(MCInst &NopInst) const;

  virtual
  bool ReverseBranchCondition(SmallVectorImpl<MachineOperand> &Cond) const;

  /// isSafeToMoveRegClassDefs - Return true if it's safe to move a machine
  /// instruction that defines the specified register class.
  bool isSafeToMoveRegClassDefs(const TargetRegisterClass *RC) const;

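  /// isX86_64ExtendedReg - Return true if the operand is a register that can
  /// only be encoded with an x86-64 REX prefix (R8-R15, XMM8-XMM15, etc.).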
  static bool isX86_64ExtendedReg(const MachineOperand &MO) {
    if (!MO.isReg()) return false;
    return X86II::isX86_64ExtendedReg(MO.getReg());
  }

  /// getGlobalBaseReg - Return a virtual register initialized with the
  /// global base register value. Output instructions required to
  /// initialize the register in the function entry block, if necessary.
  ///
  unsigned getGlobalBaseReg(MachineFunction *MF) const;

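  /// getExecutionDomain / setExecutionDomain - TargetInstrInfo hooks for the
  /// execution-domain fixup pass: the first returns the current SSE execution
  /// domain of MI together with a bitmask of the domains it has equivalent
  /// opcodes in; the second rewrites MI into the equivalent opcode for the
  /// requested domain.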
  std::pair<uint16_t, uint16_t>
  getExecutionDomain(const MachineInstr *MI) const;

  void setExecutionDomain(MachineInstr *MI, unsigned Domain) const;

  MachineInstr* foldMemoryOperandImpl(MachineFunction &MF,
                                      MachineInstr* MI,
                                      unsigned OpNum,
                                   const SmallVectorImpl<MachineOperand> &MOs,
                                      unsigned Size, unsigned Alignment) const;

  bool isHighLatencyDef(int opc) const;

  bool hasHighOperandLatency(const InstrItineraryData *ItinData,
                             const MachineRegisterInfo *MRI,
                             const MachineInstr *DefMI, unsigned DefIdx,
                             const MachineInstr *UseMI, unsigned UseIdx) const;

private:
  MachineInstr *convertToThreeAddressWithLEA(unsigned MIOpc,
                                             MachineFunction::iterator &MFI,
                                             MachineBasicBlock::iterator &MBBI,
                                             LiveVariables *LV) const;

  /// isFrameOperand - Return true and the FrameIndex if the specified
  /// operand and the following operands form a reference to the stack frame.
  bool isFrameOperand(const MachineInstr *MI, unsigned int Op,
                      int &FrameIndex) const;
};

} // End llvm namespace

#endif