//===-- X86InstrInfo.h - X86 Instruction Information ------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file contains the X86 implementation of the TargetInstrInfo class.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_LIB_TARGET_X86_X86INSTRINFO_H
#define LLVM_LIB_TARGET_X86_X86INSTRINFO_H

#include "MCTargetDesc/X86BaseInfo.h"
#include "X86RegisterInfo.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/Target/TargetInstrInfo.h"

#define GET_INSTRINFO_HEADER
#include "X86GenInstrInfo.inc"

namespace llvm {
  class X86RegisterInfo;
  class X86Subtarget;

namespace MachineCombinerPattern {
enum MC_PATTERN : int {
  // These are commutative variants for reassociating a computation chain
  // of the form:
  //   B = A op X (Prev)
  //   C = B op Y (Root)
  MC_REASSOC_AX_BY = 0,
  MC_REASSOC_AX_YB = 1,
  MC_REASSOC_XA_BY = 2,
  MC_REASSOC_XA_YB = 3,
};
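
// Naming sketch (informal, not normative): the suffix encodes the operand
// order matched in Prev and Root respectively, e.g. MC_REASSOC_AX_BY matches
// "B = A op X; C = B op Y" as written above, while MC_REASSOC_XA_YB matches
// the commuted form "B = X op A; C = Y op B". The authoritative mapping lives
// with the MachineCombiner hooks in X86InstrInfo.cpp.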
} // end namespace MachineCombinerPattern

namespace X86 {
  // X86 specific condition code. These correspond to X86_*_COND in
  // X86InstrInfo.td. They must be kept in sync.
  enum CondCode {
    COND_A  = 0,
    COND_AE = 1,
    COND_B  = 2,
    COND_BE = 3,
    COND_E  = 4,
    COND_G  = 5,
    COND_GE = 6,
    COND_L  = 7,
    COND_LE = 8,
    COND_NE = 9,
    COND_NO = 10,
    COND_NP = 11,
    COND_NS = 12,
    COND_O  = 13,
    COND_P  = 14,
    COND_S  = 15,
    LAST_VALID_COND = COND_S,

    // Artificial condition codes. These are used by AnalyzeBranch
    // to indicate a block terminated with two conditional branches to
    // the same location. This occurs in code using FCMP_OEQ or FCMP_UNE,
    // which can't be represented on x86 with a single condition. These
    // are never used in MachineInstrs.
    COND_NE_OR_P,
    COND_NP_OR_E,

    COND_INVALID
  };

  // Turn condition code into conditional branch opcode.
  unsigned GetCondBranchFromCond(CondCode CC);

  /// \brief Return a set opcode for the given condition and whether it has
  /// a memory operand.
  unsigned getSETFromCond(CondCode CC, bool HasMemoryOperand = false);

  /// \brief Return a cmov opcode for the given condition, register size in
  /// bytes, and operand type.
  unsigned getCMovFromCond(CondCode CC, unsigned RegBytes,
                           bool HasMemoryOperand = false);

  // Turn CMov opcode into condition code.
  CondCode getCondFromCMovOpc(unsigned Opc);

  /// GetOppositeBranchCondition - Return the inverse of the specified cond,
  /// e.g. turning COND_E to COND_NE.
  CondCode GetOppositeBranchCondition(CondCode CC);
} // end namespace X86

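// Illustrative usage of the helpers above (a sketch; the concrete opcode
// values returned are defined by the X86 instruction tables, not here):
//   X86::CondCode CC  = X86::COND_E;
//   unsigned BrOpc    = X86::GetCondBranchFromCond(CC);      // a JE branch
//   unsigned SetOpc   = X86::getSETFromCond(CC);             // a SETE form
//   X86::CondCode Inv = X86::GetOppositeBranchCondition(CC); // X86::COND_NE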

/// isGlobalStubReference - Return true if the specified TargetFlag operand is
/// a reference to a stub for a global, not the global itself.
inline static bool isGlobalStubReference(unsigned char TargetFlag) {
  switch (TargetFlag) {
  case X86II::MO_DLLIMPORT:   // dllimport stub.
  case X86II::MO_GOTPCREL:    // rip-relative GOT reference.
  case X86II::MO_GOT:         // normal GOT reference.
  case X86II::MO_DARWIN_NONLAZY_PIC_BASE:        // Normal $non_lazy_ptr ref.
  case X86II::MO_DARWIN_NONLAZY:                 // Normal $non_lazy_ptr ref.
  case X86II::MO_DARWIN_HIDDEN_NONLAZY_PIC_BASE: // Hidden $non_lazy_ptr ref.
    return true;
  default:
    return false;
  }
}
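
// Typical use (a sketch): this predicate and the one below are queried with
// the target flags of a global-address machine operand, e.g.
//   if (isGlobalStubReference(MO.getTargetFlags()))
//     // ... the operand refers to a stub, not to the global itself.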

/// isGlobalRelativeToPICBase - Return true if the specified global value
/// reference is relative to a 32-bit PIC base (X86ISD::GlobalBaseReg). If this
/// is true, the addressing mode has the PIC base register added in (e.g. EBX).
inline static bool isGlobalRelativeToPICBase(unsigned char TargetFlag) {
  switch (TargetFlag) {
  case X86II::MO_GOTOFF:                         // isPICStyleGOT: local global.
  case X86II::MO_GOT:                            // isPICStyleGOT: other global.
  case X86II::MO_PIC_BASE_OFFSET:                // Darwin local global.
  case X86II::MO_DARWIN_NONLAZY_PIC_BASE:        // Darwin/32 external global.
  case X86II::MO_DARWIN_HIDDEN_NONLAZY_PIC_BASE: // Darwin/32 hidden global.
  case X86II::MO_TLVP:                           // ??? Pretty sure..
    return true;
  default:
    return false;
  }
}

inline static bool isScale(const MachineOperand &MO) {
  return MO.isImm() &&
    (MO.getImm() == 1 || MO.getImm() == 2 ||
     MO.getImm() == 4 || MO.getImm() == 8);
}

inline static bool isLeaMem(const MachineInstr *MI, unsigned Op) {
  if (MI->getOperand(Op).isFI()) return true;
  return Op+X86::AddrSegmentReg <= MI->getNumOperands() &&
    MI->getOperand(Op+X86::AddrBaseReg).isReg() &&
    isScale(MI->getOperand(Op+X86::AddrScaleAmt)) &&
    MI->getOperand(Op+X86::AddrIndexReg).isReg() &&
    (MI->getOperand(Op+X86::AddrDisp).isImm() ||
     MI->getOperand(Op+X86::AddrDisp).isGlobal() ||
     MI->getOperand(Op+X86::AddrDisp).isCPI() ||
     MI->getOperand(Op+X86::AddrDisp).isJTI());
}

inline static bool isMem(const MachineInstr *MI, unsigned Op) {
  if (MI->getOperand(Op).isFI()) return true;
  return Op+X86::AddrNumOperands <= MI->getNumOperands() &&
    MI->getOperand(Op+X86::AddrSegmentReg).isReg() &&
    isLeaMem(MI, Op);
}
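
// Expected operand layout (an informal note): a full x86 memory reference
// occupies X86::AddrNumOperands (five) consecutive operands, in the order
// AddrBaseReg, AddrScaleAmt, AddrIndexReg, AddrDisp, AddrSegmentReg. So
//   isMem(MI, Op)
// asks whether operands [Op, Op + X86::AddrNumOperands) form such a
// base + scale*index + disp (+ segment) address.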

class X86InstrInfo final : public X86GenInstrInfo {
  X86Subtarget &Subtarget;
  const X86RegisterInfo RI;

  /// RegOp2MemOpTable2Addr, RegOp2MemOpTable0, RegOp2MemOpTable1,
  /// RegOp2MemOpTable2, RegOp2MemOpTable3, RegOp2MemOpTable4 - Load / store
  /// folding opcode maps.
  ///
  typedef DenseMap<unsigned,
                   std::pair<unsigned, unsigned> > RegOp2MemOpTableType;
  RegOp2MemOpTableType RegOp2MemOpTable2Addr;
  RegOp2MemOpTableType RegOp2MemOpTable0;
  RegOp2MemOpTableType RegOp2MemOpTable1;
  RegOp2MemOpTableType RegOp2MemOpTable2;
  RegOp2MemOpTableType RegOp2MemOpTable3;
  RegOp2MemOpTableType RegOp2MemOpTable4;

  /// MemOp2RegOpTable - Load / store unfolding opcode map.
  ///
  typedef DenseMap<unsigned,
                   std::pair<unsigned, unsigned> > MemOp2RegOpTableType;
  MemOp2RegOpTableType MemOp2RegOpTable;

  static void AddTableEntry(RegOp2MemOpTableType &R2MTable,
                            MemOp2RegOpTableType &M2RTable,
                            unsigned RegOp, unsigned MemOp, unsigned Flags);

  virtual void anchor();

  bool AnalyzeBranchImpl(MachineBasicBlock &MBB, MachineBasicBlock *&TBB,
                         MachineBasicBlock *&FBB,
                         SmallVectorImpl<MachineOperand> &Cond,
                         SmallVectorImpl<MachineInstr *> &CondBranches,
                         bool AllowModify) const;

public:
  explicit X86InstrInfo(X86Subtarget &STI);

  /// getRegisterInfo - TargetInstrInfo is a superset of MRegister info. As
  /// such, whenever a client has an instance of instruction info, it should
  /// always be able to get register info as well (through this method).
  ///
  const X86RegisterInfo &getRegisterInfo() const { return RI; }

  /// getSPAdjust - This returns the stack pointer adjustment made by
  /// this instruction. For x86, we need to handle more complex call
  /// sequences involving PUSHes.
  int getSPAdjust(const MachineInstr *MI) const override;

  /// isCoalescableExtInstr - Return true if the instruction is a "coalescable"
  /// extension instruction. That is, it's like a copy where it's legal for the
  /// source to overlap the destination. e.g. X86::MOVSX64rr32. If this returns
  /// true, then it's expected the pre-extension value is available as a subreg
  /// of the result register. This also returns the sub-register index in
  /// SubIdx.
  bool isCoalescableExtInstr(const MachineInstr &MI,
                             unsigned &SrcReg, unsigned &DstReg,
                             unsigned &SubIdx) const override;

  unsigned isLoadFromStackSlot(const MachineInstr *MI,
                               int &FrameIndex) const override;
  /// isLoadFromStackSlotPostFE - Check for post-frame ptr elimination
  /// stack locations as well. This uses a heuristic so it isn't
  /// reliable for correctness.
  unsigned isLoadFromStackSlotPostFE(const MachineInstr *MI,
                                     int &FrameIndex) const override;

  unsigned isStoreToStackSlot(const MachineInstr *MI,
                              int &FrameIndex) const override;
  /// isStoreToStackSlotPostFE - Check for post-frame ptr elimination
  /// stack locations as well. This uses a heuristic so it isn't
  /// reliable for correctness.
  unsigned isStoreToStackSlotPostFE(const MachineInstr *MI,
                                    int &FrameIndex) const override;

  bool isReallyTriviallyReMaterializable(const MachineInstr *MI,
                                         AliasAnalysis *AA) const override;
  void reMaterialize(MachineBasicBlock &MBB, MachineBasicBlock::iterator MI,
                     unsigned DestReg, unsigned SubIdx,
                     const MachineInstr *Orig,
                     const TargetRegisterInfo &TRI) const override;

  /// Given an operand within a MachineInstr, insert preceding code to put it
  /// into the right format for a particular kind of LEA instruction. This may
  /// involve using an appropriate super-register instead (with an implicit use
  /// of the original) or creating a new virtual register and inserting COPY
  /// instructions to get the data into the right class.
  ///
  /// Reference parameters are set to indicate how caller should add this
  /// operand to the LEA instruction.
  bool classifyLEAReg(MachineInstr *MI, const MachineOperand &Src,
                      unsigned LEAOpcode, bool AllowSP,
                      unsigned &NewSrc, bool &isKill,
                      bool &isUndef, MachineOperand &ImplicitOp) const;

  /// convertToThreeAddress - This method must be implemented by targets that
  /// set the M_CONVERTIBLE_TO_3_ADDR flag. When this flag is set, the target
  /// may be able to convert a two-address instruction into a true
  /// three-address instruction on demand. This allows the X86 target (for
  /// example) to convert ADD and SHL instructions into LEA instructions if they
  /// would require register copies due to two-addressness.
  ///
  /// This method returns a null pointer if the transformation cannot be
  /// performed, otherwise it returns the new instruction.
  ///
  MachineInstr *convertToThreeAddress(MachineFunction::iterator &MFI,
                                      MachineBasicBlock::iterator &MBBI,
                                      LiveVariables *LV) const override;
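
  // Illustrative example (informal): a two-address add such as
  //   %dst = ADD32rr %dst(tied), %src
  // can become an LEA that reads both registers and defines a fresh
  // destination, avoiding the copy the tied-operand constraint would
  // otherwise force.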

  /// commuteInstruction - We have a few instructions that must be hacked on to
  /// commute them.
  ///
  MachineInstr *commuteInstruction(MachineInstr *MI, bool NewMI) const override;

  bool findCommutedOpIndices(MachineInstr *MI, unsigned &SrcOpIdx1,
                             unsigned &SrcOpIdx2) const override;

  // Branch analysis.
  bool isUnpredicatedTerminator(const MachineInstr* MI) const override;
  bool AnalyzeBranch(MachineBasicBlock &MBB, MachineBasicBlock *&TBB,
                     MachineBasicBlock *&FBB,
                     SmallVectorImpl<MachineOperand> &Cond,
                     bool AllowModify) const override;

  bool getMemOpBaseRegImmOfs(MachineInstr *LdSt, unsigned &BaseReg,
                             unsigned &Offset,
                             const TargetRegisterInfo *TRI) const override;
  bool AnalyzeBranchPredicate(MachineBasicBlock &MBB,
                              TargetInstrInfo::MachineBranchPredicate &MBP,
                              bool AllowModify = false) const override;

  unsigned RemoveBranch(MachineBasicBlock &MBB) const override;
  unsigned InsertBranch(MachineBasicBlock &MBB, MachineBasicBlock *TBB,
                        MachineBasicBlock *FBB, ArrayRef<MachineOperand> Cond,
                        DebugLoc DL) const override;
  bool canInsertSelect(const MachineBasicBlock&, ArrayRef<MachineOperand> Cond,
                       unsigned, unsigned, int&, int&, int&) const override;
  void insertSelect(MachineBasicBlock &MBB,
                    MachineBasicBlock::iterator MI, DebugLoc DL,
                    unsigned DstReg, ArrayRef<MachineOperand> Cond,
                    unsigned TrueReg, unsigned FalseReg) const override;
  void copyPhysReg(MachineBasicBlock &MBB,
                   MachineBasicBlock::iterator MI, DebugLoc DL,
                   unsigned DestReg, unsigned SrcReg,
                   bool KillSrc) const override;
  void storeRegToStackSlot(MachineBasicBlock &MBB,
                           MachineBasicBlock::iterator MI,
                           unsigned SrcReg, bool isKill, int FrameIndex,
                           const TargetRegisterClass *RC,
                           const TargetRegisterInfo *TRI) const override;

  void storeRegToAddr(MachineFunction &MF, unsigned SrcReg, bool isKill,
                      SmallVectorImpl<MachineOperand> &Addr,
                      const TargetRegisterClass *RC,
                      MachineInstr::mmo_iterator MMOBegin,
                      MachineInstr::mmo_iterator MMOEnd,
                      SmallVectorImpl<MachineInstr*> &NewMIs) const;

  void loadRegFromStackSlot(MachineBasicBlock &MBB,
                            MachineBasicBlock::iterator MI,
                            unsigned DestReg, int FrameIndex,
                            const TargetRegisterClass *RC,
                            const TargetRegisterInfo *TRI) const override;

  void loadRegFromAddr(MachineFunction &MF, unsigned DestReg,
                       SmallVectorImpl<MachineOperand> &Addr,
                       const TargetRegisterClass *RC,
                       MachineInstr::mmo_iterator MMOBegin,
                       MachineInstr::mmo_iterator MMOEnd,
                       SmallVectorImpl<MachineInstr*> &NewMIs) const;

  bool expandPostRAPseudo(MachineBasicBlock::iterator MI) const override;

  /// foldMemoryOperand - If this target supports it, fold a load or store of
  /// the specified stack slot into the specified machine instruction for the
  /// specified operand(s). If this is possible, the target should perform the
  /// folding and return true, otherwise it should return false. If it folds
  /// the instruction, it is likely that the MachineInstruction the iterator
  /// references has been changed.
  MachineInstr *foldMemoryOperandImpl(MachineFunction &MF, MachineInstr *MI,
                                      ArrayRef<unsigned> Ops,
                                      MachineBasicBlock::iterator InsertPt,
                                      int FrameIndex) const override;

  /// foldMemoryOperand - Same as the previous version except it allows folding
  /// of any load and store from / to any address, not just from a specific
  /// stack slot.
  MachineInstr *foldMemoryOperandImpl(MachineFunction &MF, MachineInstr *MI,
                                      ArrayRef<unsigned> Ops,
                                      MachineBasicBlock::iterator InsertPt,
                                      MachineInstr *LoadMI) const override;

| 345 | /// canFoldMemoryOperand - Returns true if the specified load / store is |
| 346 | /// folding is possible. |
Benjamin Kramer | f1362f6 | 2015-02-28 12:04:00 +0000 | [diff] [blame] | 347 | bool canFoldMemoryOperand(const MachineInstr *, |
| 348 | ArrayRef<unsigned>) const override; |
Owen Anderson | 2a3be7b | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 349 | |
| 350 | /// unfoldMemoryOperand - Separate a single instruction which folded a load or |
| 351 | /// a store or a load and a store into two or more instruction. If this is |
| 352 | /// possible, returns true as well as the new instructions by reference. |
Craig Topper | 2d9361e | 2014-03-09 07:44:38 +0000 | [diff] [blame] | 353 | bool unfoldMemoryOperand(MachineFunction &MF, MachineInstr *MI, |
| 354 | unsigned Reg, bool UnfoldLoad, bool UnfoldStore, |
| 355 | SmallVectorImpl<MachineInstr*> &NewMIs) const override; |
Owen Anderson | 2a3be7b | 2008-01-07 01:35:02 +0000 | [diff] [blame] | 356 | |
Craig Topper | 2d9361e | 2014-03-09 07:44:38 +0000 | [diff] [blame] | 357 | bool unfoldMemoryOperand(SelectionDAG &DAG, SDNode *N, |
| 358 | SmallVectorImpl<SDNode*> &NewNodes) const override; |

  /// getOpcodeAfterMemoryUnfold - Returns the opcode of the would-be new
  /// instruction after load / store are unfolded from an instruction of the
  /// specified opcode. It returns zero if the specified unfolding is not
  /// possible. If LoadRegIndex is non-null, it is filled in with the operand
  /// index of the operand which will hold the register holding the loaded
  /// value.
  unsigned getOpcodeAfterMemoryUnfold(unsigned Opc,
                                      bool UnfoldLoad, bool UnfoldStore,
                                      unsigned *LoadRegIndex = nullptr) const override;

  /// areLoadsFromSameBasePtr - This is used by the pre-regalloc scheduler
  /// to determine if two loads are loading from the same base address. It
  /// should only return true if the base pointers are the same and the
  /// only differences between the two addresses are the offset. It also returns
  /// the offsets by reference.
  bool areLoadsFromSameBasePtr(SDNode *Load1, SDNode *Load2, int64_t &Offset1,
                               int64_t &Offset2) const override;

| 378 | /// shouldScheduleLoadsNear - This is a used by the pre-regalloc scheduler to |
Chris Lattner | 0ab5e2c | 2011-04-15 05:18:47 +0000 | [diff] [blame] | 379 | /// determine (in conjunction with areLoadsFromSameBasePtr) if two loads should |
Evan Cheng | 4f026f3 | 2010-01-22 03:34:51 +0000 | [diff] [blame] | 380 | /// be scheduled togther. On some targets if two loads are loading from |
| 381 | /// addresses in the same cache line, it's better if they are scheduled |
| 382 | /// together. This function takes two integers that represent the load offsets |
| 383 | /// from the common base address. It returns true if it decides it's desirable |
| 384 | /// to schedule the two loads together. "NumLoads" is the number of loads that |
| 385 | /// have already been scheduled after Load1. |
Craig Topper | 2d9361e | 2014-03-09 07:44:38 +0000 | [diff] [blame] | 386 | bool shouldScheduleLoadsNear(SDNode *Load1, SDNode *Load2, |
| 387 | int64_t Offset1, int64_t Offset2, |
| 388 | unsigned NumLoads) const override; |
Evan Cheng | 4f026f3 | 2010-01-22 03:34:51 +0000 | [diff] [blame] | 389 | |
Craig Topper | 2d9361e | 2014-03-09 07:44:38 +0000 | [diff] [blame] | 390 | bool shouldScheduleAdjacent(MachineInstr* First, |
| 391 | MachineInstr *Second) const override; |
Andrew Trick | 47740de | 2013-06-23 09:00:28 +0000 | [diff] [blame] | 392 | |
  void getNoopForMachoTarget(MCInst &NopInst) const override;

  bool
  ReverseBranchCondition(SmallVectorImpl<MachineOperand> &Cond) const override;

  /// isSafeToMoveRegClassDefs - Return true if it's safe to move a machine
  /// instruction that defines the specified register class.
  bool isSafeToMoveRegClassDefs(const TargetRegisterClass *RC) const override;

  /// isSafeToClobberEFLAGS - Return true if it's safe to insert an instruction
  /// that would clobber the EFLAGS condition register. Note the result may be
  /// conservative. If it cannot definitely determine the safety after visiting
  /// a few instructions in each direction it assumes it's not safe.
  bool isSafeToClobberEFLAGS(MachineBasicBlock &MBB,
                             MachineBasicBlock::iterator I) const;

  static bool isX86_64ExtendedReg(const MachineOperand &MO) {
    if (!MO.isReg()) return false;
    return X86II::isX86_64ExtendedReg(MO.getReg());
  }

  /// getGlobalBaseReg - Return a virtual register initialized with the
  /// global base register value. Output instructions required to
  /// initialize the register in the function entry block, if necessary.
  ///
  unsigned getGlobalBaseReg(MachineFunction *MF) const;

  std::pair<uint16_t, uint16_t>
  getExecutionDomain(const MachineInstr *MI) const override;

  void setExecutionDomain(MachineInstr *MI, unsigned Domain) const override;

  unsigned
  getPartialRegUpdateClearance(const MachineInstr *MI, unsigned OpNum,
                               const TargetRegisterInfo *TRI) const override;
  unsigned getUndefRegClearance(const MachineInstr *MI, unsigned &OpNum,
                                const TargetRegisterInfo *TRI) const override;
  void breakPartialRegDependency(MachineBasicBlock::iterator MI, unsigned OpNum,
                                 const TargetRegisterInfo *TRI) const override;

  MachineInstr *foldMemoryOperandImpl(MachineFunction &MF, MachineInstr *MI,
                                      unsigned OpNum,
                                      ArrayRef<MachineOperand> MOs,
                                      MachineBasicBlock::iterator InsertPt,
                                      unsigned Size, unsigned Alignment,
                                      bool AllowCommute) const;

  void
  getUnconditionalBranch(MCInst &Branch,
                         const MCSymbolRefExpr *BranchTarget) const override;

  void getTrap(MCInst &MI) const override;

  unsigned getJumpInstrTableEntryBound() const override;

  bool isHighLatencyDef(int opc) const override;

  bool hasHighOperandLatency(const TargetSchedModel &SchedModel,
                             const MachineRegisterInfo *MRI,
                             const MachineInstr *DefMI, unsigned DefIdx,
                             const MachineInstr *UseMI,
                             unsigned UseIdx) const override;

  bool useMachineCombiner() const override {
    return true;
  }

  /// Return true when there is potentially a faster code sequence
  /// for an instruction chain ending in <Root>. All potential patterns are
  /// output in the <Pattern> array.
  bool hasPattern(
      MachineInstr &Root,
      SmallVectorImpl<MachineCombinerPattern::MC_PATTERN> &P) const override;

  /// When hasPattern() finds a pattern, this function generates the
  /// instructions that could replace the original code sequence.
  void genAlternativeCodeSequence(
      MachineInstr &Root, MachineCombinerPattern::MC_PATTERN P,
      SmallVectorImpl<MachineInstr *> &InsInstrs,
      SmallVectorImpl<MachineInstr *> &DelInstrs,
      DenseMap<unsigned, unsigned> &InstrIdxForVirtReg) const override;
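
  // For the reassociation patterns above, this conceptually rewrites
  //   B = A op X ; C = B op Y
  // into
  //   B' = X op Y ; C = A op B'
  // shortening the critical path when A is the late-arriving value (an
  // informal sketch; the opcodes actually handled are listed in
  // X86InstrInfo.cpp).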

  /// analyzeCompare - For a comparison instruction, return the source registers
  /// in SrcReg and SrcReg2 if it has two register operands, and the value it
  /// compares against in CmpValue. Return true if the comparison instruction
  /// can be analyzed.
  bool analyzeCompare(const MachineInstr *MI, unsigned &SrcReg,
                      unsigned &SrcReg2, int &CmpMask,
                      int &CmpValue) const override;

  /// optimizeCompareInstr - Check if there exists an earlier instruction that
  /// operates on the same source operands and sets flags in the same way as
  /// Compare; remove Compare if possible.
  bool optimizeCompareInstr(MachineInstr *CmpInstr, unsigned SrcReg,
                            unsigned SrcReg2, int CmpMask, int CmpValue,
                            const MachineRegisterInfo *MRI) const override;

  /// optimizeLoadInstr - Try to remove the load by folding it to a register
  /// operand at the use. We fold the load instructions if and only if the
  /// def and use are in the same BB. We only look at one load and see
  /// whether it can be folded into MI. FoldAsLoadDefReg is the virtual register
  /// defined by the load we are trying to fold. DefMI returns the machine
  /// instruction that defines FoldAsLoadDefReg, and the function returns
  /// the machine instruction generated due to folding.
  MachineInstr* optimizeLoadInstr(MachineInstr *MI,
                                  const MachineRegisterInfo *MRI,
                                  unsigned &FoldAsLoadDefReg,
                                  MachineInstr *&DefMI) const override;

private:
  MachineInstr * convertToThreeAddressWithLEA(unsigned MIOpc,
                                              MachineFunction::iterator &MFI,
                                              MachineBasicBlock::iterator &MBBI,
                                              LiveVariables *LV) const;

  /// isFrameOperand - Return true and the FrameIndex if the specified
  /// operand and following operands form a reference to the stack frame.
  bool isFrameOperand(const MachineInstr *MI, unsigned int Op,
                      int &FrameIndex) const;
};

} // End llvm namespace

#endif