//===-- X86InstrInfo.h - X86 Instruction Information ------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file contains the X86 implementation of the TargetInstrInfo class.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_LIB_TARGET_X86_X86INSTRINFO_H
#define LLVM_LIB_TARGET_X86_X86INSTRINFO_H

#include "MCTargetDesc/X86BaseInfo.h"
#include "X86RegisterInfo.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/Target/TargetInstrInfo.h"

#define GET_INSTRINFO_HEADER
#include "X86GenInstrInfo.inc"

namespace llvm {
  class X86RegisterInfo;
  class X86Subtarget;

namespace X86 {
  // X86 specific condition code. These correspond to X86_*_COND in
  // X86InstrInfo.td. They must be kept in synch.
  enum CondCode {
    COND_A = 0,
    COND_AE = 1,
    COND_B = 2,
    COND_BE = 3,
    COND_E = 4,
    COND_G = 5,
    COND_GE = 6,
    COND_L = 7,
    COND_LE = 8,
    COND_NE = 9,
    COND_NO = 10,
    COND_NP = 11,
    COND_NS = 12,
    COND_O = 13,
    COND_P = 14,
    COND_S = 15,
    LAST_VALID_COND = COND_S,

    // Artificial condition codes. These are used by AnalyzeBranch to indicate
    // a block terminated with two conditional branches that together form a
    // compound condition. They occur in code using FCMP_OEQ or FCMP_UNE,
    // which can't be represented on x86 with a single condition. These are
    // never used in MachineInstrs and are inverses of one another.
    COND_NE_OR_P,
    COND_E_AND_NP,

    COND_INVALID
  };

  // Turn condition code into conditional branch opcode.
  unsigned GetCondBranchFromCond(CondCode CC);

  /// \brief Return a set opcode for the given condition and whether it has
  /// a memory operand.
  unsigned getSETFromCond(CondCode CC, bool HasMemoryOperand = false);

  /// \brief Return a cmov opcode for the given condition, register size in
  /// bytes, and operand type.
  unsigned getCMovFromCond(CondCode CC, unsigned RegBytes,
                           bool HasMemoryOperand = false);

  // Turn CMov opcode into condition code.
  CondCode getCondFromCMovOpc(unsigned Opc);

  /// GetOppositeBranchCondition - Return the inverse of the specified cond,
  /// e.g. turning COND_E to COND_NE.
  CondCode GetOppositeBranchCondition(CondCode CC);
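
  // Illustrative sketch only (not part of this header's API surface): a pass
  // that wants to rewrite a conditional branch as its inverse could combine
  // the helpers above roughly like this, where `CC` is assumed to hold the
  // condition of an existing conditional branch terminator:
  //
  //   X86::CondCode Inverted = X86::GetOppositeBranchCondition(CC);
  //   unsigned BranchOpc = X86::GetCondBranchFromCond(Inverted);
  //   // e.g. COND_E inverts to COND_NE, which maps to a JNE branch opcode.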
} // end namespace X86

/// isGlobalStubReference - Return true if the specified TargetFlag operand is
/// a reference to a stub for a global, not the global itself.
inline static bool isGlobalStubReference(unsigned char TargetFlag) {
  switch (TargetFlag) {
  case X86II::MO_DLLIMPORT:                      // dllimport stub.
  case X86II::MO_GOTPCREL:                       // rip-relative GOT reference.
  case X86II::MO_GOT:                            // normal GOT reference.
  case X86II::MO_DARWIN_NONLAZY_PIC_BASE:        // Normal $non_lazy_ptr ref.
  case X86II::MO_DARWIN_NONLAZY:                 // Normal $non_lazy_ptr ref.
  case X86II::MO_DARWIN_HIDDEN_NONLAZY_PIC_BASE: // Hidden $non_lazy_ptr ref.
    return true;
  default:
    return false;
  }
}

/// isGlobalRelativeToPICBase - Return true if the specified global value
/// reference is relative to a 32-bit PIC base (X86ISD::GlobalBaseReg). If this
/// is true, the addressing mode has the PIC base register added in (e.g. EBX).
inline static bool isGlobalRelativeToPICBase(unsigned char TargetFlag) {
  switch (TargetFlag) {
  case X86II::MO_GOTOFF:                         // isPICStyleGOT: local global.
  case X86II::MO_GOT:                            // isPICStyleGOT: other global.
  case X86II::MO_PIC_BASE_OFFSET:                // Darwin local global.
  case X86II::MO_DARWIN_NONLAZY_PIC_BASE:        // Darwin/32 external global.
  case X86II::MO_DARWIN_HIDDEN_NONLAZY_PIC_BASE: // Darwin/32 hidden global.
  case X86II::MO_TLVP:                           // ??? Pretty sure..
    return true;
  default:
    return false;
  }
}

inline static bool isScale(const MachineOperand &MO) {
  return MO.isImm() &&
         (MO.getImm() == 1 || MO.getImm() == 2 ||
          MO.getImm() == 4 || MO.getImm() == 8);
}

inline static bool isLeaMem(const MachineInstr *MI, unsigned Op) {
  if (MI->getOperand(Op).isFI()) return true;
  return Op+X86::AddrSegmentReg <= MI->getNumOperands() &&
         MI->getOperand(Op+X86::AddrBaseReg).isReg() &&
         isScale(MI->getOperand(Op+X86::AddrScaleAmt)) &&
         MI->getOperand(Op+X86::AddrIndexReg).isReg() &&
         (MI->getOperand(Op+X86::AddrDisp).isImm() ||
          MI->getOperand(Op+X86::AddrDisp).isGlobal() ||
          MI->getOperand(Op+X86::AddrDisp).isCPI() ||
          MI->getOperand(Op+X86::AddrDisp).isJTI());
}

inline static bool isMem(const MachineInstr *MI, unsigned Op) {
  if (MI->getOperand(Op).isFI()) return true;
  return Op+X86::AddrNumOperands <= MI->getNumOperands() &&
         MI->getOperand(Op+X86::AddrSegmentReg).isReg() &&
         isLeaMem(MI, Op);
}
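
// A minimal reference sketch (illustrative, not normative) of the memory
// operand group the checks above walk. The Addr* offsets come from
// X86BaseInfo.h and are added to `Op`, the index of the first address operand:
//
//   MI->getOperand(Op + X86::AddrBaseReg)    // base register
//   MI->getOperand(Op + X86::AddrScaleAmt)   // scale immediate: 1, 2, 4 or 8
//   MI->getOperand(Op + X86::AddrIndexReg)   // index register
//   MI->getOperand(Op + X86::AddrDisp)       // displacement (imm/global/CPI/JTI)
//   MI->getOperand(Op + X86::AddrSegmentReg) // segment register
//
// isLeaMem accepts the shorter form without the segment operand, since LEA
// only computes an address and takes no segment override.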

class X86InstrInfo final : public X86GenInstrInfo {
  X86Subtarget &Subtarget;
  const X86RegisterInfo RI;

  /// RegOp2MemOpTable2Addr, RegOp2MemOpTable0, RegOp2MemOpTable1,
  /// RegOp2MemOpTable2, RegOp2MemOpTable3, RegOp2MemOpTable4 - Load / store
  /// folding opcode maps.
  ///
  typedef DenseMap<unsigned,
                   std::pair<unsigned, unsigned> > RegOp2MemOpTableType;
  RegOp2MemOpTableType RegOp2MemOpTable2Addr;
  RegOp2MemOpTableType RegOp2MemOpTable0;
  RegOp2MemOpTableType RegOp2MemOpTable1;
  RegOp2MemOpTableType RegOp2MemOpTable2;
  RegOp2MemOpTableType RegOp2MemOpTable3;
  RegOp2MemOpTableType RegOp2MemOpTable4;

  /// MemOp2RegOpTable - Load / store unfolding opcode map.
  ///
  typedef DenseMap<unsigned,
                   std::pair<unsigned, unsigned> > MemOp2RegOpTableType;
  MemOp2RegOpTableType MemOp2RegOpTable;
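
  // Rough sketch of how these maps are keyed (the opcode pair below is an
  // assumed example for illustration, not a guaranteed table entry): each
  // folding table maps a register-form opcode to its memory-form counterpart
  // plus packed flags, and MemOp2RegOpTable records the reverse direction
  // used for unfolding, e.g.
  //
  //   RegOp2MemOpTable2[X86::ADD32rr] ~ {X86::ADD32rm, Flags}
  //   MemOp2RegOpTable[X86::ADD32rm]  ~ {X86::ADD32rr, Flags}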

  static void AddTableEntry(RegOp2MemOpTableType &R2MTable,
                            MemOp2RegOpTableType &M2RTable,
                            unsigned RegOp, unsigned MemOp, unsigned Flags);

  virtual void anchor();

  bool AnalyzeBranchImpl(MachineBasicBlock &MBB, MachineBasicBlock *&TBB,
                         MachineBasicBlock *&FBB,
                         SmallVectorImpl<MachineOperand> &Cond,
                         SmallVectorImpl<MachineInstr *> &CondBranches,
                         bool AllowModify) const;

public:
  explicit X86InstrInfo(X86Subtarget &STI);

  /// getRegisterInfo - TargetInstrInfo is a superset of MRegister info. As
  /// such, whenever a client has an instance of instruction info, it should
  /// always be able to get register info as well (through this method).
  ///
  const X86RegisterInfo &getRegisterInfo() const { return RI; }

  /// getSPAdjust - This returns the stack pointer adjustment made by
  /// this instruction. For x86, we need to handle more complex call
  /// sequences involving PUSHes.
  int getSPAdjust(const MachineInstr *MI) const override;

  /// isCoalescableExtInstr - Return true if the instruction is a "coalescable"
  /// extension instruction. That is, it's like a copy where it's legal for the
  /// source to overlap the destination. e.g. X86::MOVSX64rr32. If this returns
  /// true, then it's expected the pre-extension value is available as a subreg
  /// of the result register. This also returns the sub-register index in
  /// SubIdx.
  bool isCoalescableExtInstr(const MachineInstr &MI,
                             unsigned &SrcReg, unsigned &DstReg,
                             unsigned &SubIdx) const override;

  unsigned isLoadFromStackSlot(const MachineInstr *MI,
                               int &FrameIndex) const override;
  /// isLoadFromStackSlotPostFE - Check for post-frame ptr elimination
  /// stack locations as well. This uses a heuristic so it isn't
  /// reliable for correctness.
  unsigned isLoadFromStackSlotPostFE(const MachineInstr *MI,
                                     int &FrameIndex) const override;

  unsigned isStoreToStackSlot(const MachineInstr *MI,
                              int &FrameIndex) const override;
  /// isStoreToStackSlotPostFE - Check for post-frame ptr elimination
  /// stack locations as well. This uses a heuristic so it isn't
  /// reliable for correctness.
  unsigned isStoreToStackSlotPostFE(const MachineInstr *MI,
                                    int &FrameIndex) const override;

  bool isReallyTriviallyReMaterializable(const MachineInstr *MI,
                                         AliasAnalysis *AA) const override;
  void reMaterialize(MachineBasicBlock &MBB, MachineBasicBlock::iterator MI,
                     unsigned DestReg, unsigned SubIdx,
                     const MachineInstr *Orig,
                     const TargetRegisterInfo &TRI) const override;

  /// Given an operand within a MachineInstr, insert preceding code to put it
  /// into the right format for a particular kind of LEA instruction. This may
  /// involve using an appropriate super-register instead (with an implicit use
  /// of the original) or creating a new virtual register and inserting COPY
  /// instructions to get the data into the right class.
  ///
  /// Reference parameters are set to indicate how caller should add this
  /// operand to the LEA instruction.
  bool classifyLEAReg(MachineInstr *MI, const MachineOperand &Src,
                      unsigned LEAOpcode, bool AllowSP,
                      unsigned &NewSrc, bool &isKill,
                      bool &isUndef, MachineOperand &ImplicitOp) const;

  /// convertToThreeAddress - This method must be implemented by targets that
  /// set the M_CONVERTIBLE_TO_3_ADDR flag. When this flag is set, the target
  /// may be able to convert a two-address instruction into a true
  /// three-address instruction on demand. This allows the X86 target (for
  /// example) to convert ADD and SHL instructions into LEA instructions if they
  /// would require register copies due to two-addressness.
  ///
  /// This method returns a null pointer if the transformation cannot be
  /// performed, otherwise it returns the new instruction.
  ///
  MachineInstr *convertToThreeAddress(MachineFunction::iterator &MFI,
                                      MachineBasicBlock::iterator &MBBI,
                                      LiveVariables *LV) const override;

  /// Returns true iff the routine could find two commutable operands in the
  /// given machine instruction.
  /// The 'SrcOpIdx1' and 'SrcOpIdx2' are INPUT and OUTPUT arguments. Their
  /// input values can be redefined by this method only when they are not
  /// pre-defined, which is indicated by the special value
  /// 'CommuteAnyOperandIndex' assigned to them.
  /// If both indices are pre-defined and refer to some operands, then the
  /// method simply returns true if the corresponding operands are commutable
  /// and returns false otherwise.
  ///
  /// For example, calling this method this way:
  ///     unsigned Op1 = 1, Op2 = CommuteAnyOperandIndex;
  ///     findCommutedOpIndices(MI, Op1, Op2);
  /// can be interpreted as a query asking to find an operand that would be
  /// commutable with operand #1.
  bool findCommutedOpIndices(MachineInstr *MI, unsigned &SrcOpIdx1,
                             unsigned &SrcOpIdx2) const override;

  /// Returns true if the routine could find two commutable operands
  /// in the given FMA instruction. Otherwise, returns false.
  ///
  /// \p SrcOpIdx1 and \p SrcOpIdx2 are INPUT and OUTPUT arguments.
  /// The output indices of the commuted operands are returned in these
  /// arguments. Also, the input values of these arguments may be preset either
  /// to indices of operands that must be commuted or be equal to the special
  /// value 'CommuteAnyOperandIndex', which means that the corresponding
  /// operand index is not set and this method is free to pick any of the
  /// available commutable operands.
  ///
  /// For example, calling this method this way:
  ///     unsigned Idx1 = 1, Idx2 = CommuteAnyOperandIndex;
  ///     findFMA3CommutedOpIndices(MI, Idx1, Idx2);
  /// can be interpreted as a query asking if operand #1 can be swapped
  /// with any other available operand (e.g. operand #2, operand #3, etc.).
  ///
  /// The returned FMA opcode may differ from the opcode in the given MI.
  /// For example, commuting the operands #1 and #3 in the following FMA
  ///     FMA213 #1, #2, #3
  /// results in an instruction with the adjusted opcode:
  ///     FMA231 #3, #2, #1
  bool findFMA3CommutedOpIndices(MachineInstr *MI,
                                 unsigned &SrcOpIdx1,
                                 unsigned &SrcOpIdx2) const;

  /// Returns an adjusted FMA opcode that must be used in the FMA instruction
  /// that performs the same computations as the given \p MI but which has the
  /// operands \p SrcOpIdx1 and \p SrcOpIdx2 commuted.
  /// It may return 0 if it is unsafe to commute the operands.
  ///
  /// The returned FMA opcode may differ from the opcode in the given \p MI.
  /// For example, commuting the operands #1 and #3 in the following FMA
  ///     FMA213 #1, #2, #3
  /// results in an instruction with the adjusted opcode:
  ///     FMA231 #3, #2, #1
  unsigned getFMA3OpcodeToCommuteOperands(MachineInstr *MI,
                                          unsigned SrcOpIdx1,
                                          unsigned SrcOpIdx2) const;

  // Branch analysis.
  bool isUnpredicatedTerminator(const MachineInstr &MI) const override;
  bool AnalyzeBranch(MachineBasicBlock &MBB, MachineBasicBlock *&TBB,
                     MachineBasicBlock *&FBB,
                     SmallVectorImpl<MachineOperand> &Cond,
                     bool AllowModify) const override;

  bool getMemOpBaseRegImmOfs(MachineInstr *LdSt, unsigned &BaseReg,
                             int64_t &Offset,
                             const TargetRegisterInfo *TRI) const override;
  bool AnalyzeBranchPredicate(MachineBasicBlock &MBB,
                              TargetInstrInfo::MachineBranchPredicate &MBP,
                              bool AllowModify = false) const override;

  unsigned RemoveBranch(MachineBasicBlock &MBB) const override;
  unsigned InsertBranch(MachineBasicBlock &MBB, MachineBasicBlock *TBB,
                        MachineBasicBlock *FBB, ArrayRef<MachineOperand> Cond,
                        DebugLoc DL) const override;
  bool canInsertSelect(const MachineBasicBlock&, ArrayRef<MachineOperand> Cond,
                       unsigned, unsigned, int&, int&, int&) const override;
  void insertSelect(MachineBasicBlock &MBB,
                    MachineBasicBlock::iterator MI, DebugLoc DL,
                    unsigned DstReg, ArrayRef<MachineOperand> Cond,
                    unsigned TrueReg, unsigned FalseReg) const override;
  void copyPhysReg(MachineBasicBlock &MBB,
                   MachineBasicBlock::iterator MI, DebugLoc DL,
                   unsigned DestReg, unsigned SrcReg,
                   bool KillSrc) const override;
  void storeRegToStackSlot(MachineBasicBlock &MBB,
                           MachineBasicBlock::iterator MI,
                           unsigned SrcReg, bool isKill, int FrameIndex,
                           const TargetRegisterClass *RC,
                           const TargetRegisterInfo *TRI) const override;

  void storeRegToAddr(MachineFunction &MF, unsigned SrcReg, bool isKill,
                      SmallVectorImpl<MachineOperand> &Addr,
                      const TargetRegisterClass *RC,
                      MachineInstr::mmo_iterator MMOBegin,
                      MachineInstr::mmo_iterator MMOEnd,
                      SmallVectorImpl<MachineInstr*> &NewMIs) const;

  void loadRegFromStackSlot(MachineBasicBlock &MBB,
                            MachineBasicBlock::iterator MI,
                            unsigned DestReg, int FrameIndex,
                            const TargetRegisterClass *RC,
                            const TargetRegisterInfo *TRI) const override;

  void loadRegFromAddr(MachineFunction &MF, unsigned DestReg,
                       SmallVectorImpl<MachineOperand> &Addr,
                       const TargetRegisterClass *RC,
                       MachineInstr::mmo_iterator MMOBegin,
                       MachineInstr::mmo_iterator MMOEnd,
                       SmallVectorImpl<MachineInstr*> &NewMIs) const;

  bool expandPostRAPseudo(MachineBasicBlock::iterator MI) const override;

  /// foldMemoryOperand - If this target supports it, fold a load or store of
  /// the specified stack slot into the specified machine instruction for the
  /// specified operand(s). If this is possible, the target should perform the
  /// folding and return the new instruction; otherwise it should return null.
  /// If it folds the instruction, it is likely that the MachineInstruction the
  /// iterator references has been changed.
  MachineInstr *foldMemoryOperandImpl(MachineFunction &MF, MachineInstr *MI,
                                      ArrayRef<unsigned> Ops,
                                      MachineBasicBlock::iterator InsertPt,
                                      int FrameIndex) const override;

  /// foldMemoryOperand - Same as the previous version except it allows folding
  /// of any load and store from / to any address, not just from a specific
  /// stack slot.
  MachineInstr *foldMemoryOperandImpl(MachineFunction &MF, MachineInstr *MI,
                                      ArrayRef<unsigned> Ops,
                                      MachineBasicBlock::iterator InsertPt,
                                      MachineInstr *LoadMI) const override;

  /// unfoldMemoryOperand - Separate a single instruction which folded a load
  /// or a store or a load and a store into two or more instructions. If this
  /// is possible, returns true as well as the new instructions by reference.
  bool unfoldMemoryOperand(MachineFunction &MF, MachineInstr *MI,
                           unsigned Reg, bool UnfoldLoad, bool UnfoldStore,
                           SmallVectorImpl<MachineInstr*> &NewMIs) const override;

  bool unfoldMemoryOperand(SelectionDAG &DAG, SDNode *N,
                           SmallVectorImpl<SDNode*> &NewNodes) const override;

  /// getOpcodeAfterMemoryUnfold - Returns the opcode of the would-be new
  /// instruction after load / store are unfolded from an instruction of the
  /// specified opcode. It returns zero if the specified unfolding is not
  /// possible. If LoadRegIndex is non-null, it is filled in with the index of
  /// the operand which will hold the register holding the loaded value.
  unsigned getOpcodeAfterMemoryUnfold(unsigned Opc,
                                      bool UnfoldLoad, bool UnfoldStore,
                                      unsigned *LoadRegIndex = nullptr) const override;

  /// areLoadsFromSameBasePtr - This is used by the pre-regalloc scheduler
  /// to determine if two loads are loading from the same base address. It
  /// should only return true if the base pointers are the same and the
  /// only difference between the two addresses is the offset. It also returns
  /// the offsets by reference.
  bool areLoadsFromSameBasePtr(SDNode *Load1, SDNode *Load2, int64_t &Offset1,
                               int64_t &Offset2) const override;

  /// shouldScheduleLoadsNear - This is used by the pre-regalloc scheduler to
  /// determine (in conjunction with areLoadsFromSameBasePtr) if two loads
  /// should be scheduled together. On some targets, if two loads are loading
  /// from addresses in the same cache line, it's better if they are scheduled
  /// together. This function takes two integers that represent the load
  /// offsets from the common base address. It returns true if it decides it's
  /// desirable to schedule the two loads together. "NumLoads" is the number
  /// of loads that have already been scheduled after Load1.
  bool shouldScheduleLoadsNear(SDNode *Load1, SDNode *Load2,
                               int64_t Offset1, int64_t Offset2,
                               unsigned NumLoads) const override;

  bool shouldScheduleAdjacent(MachineInstr* First,
                              MachineInstr *Second) const override;

  void getNoopForMachoTarget(MCInst &NopInst) const override;

  bool
  ReverseBranchCondition(SmallVectorImpl<MachineOperand> &Cond) const override;

  /// isSafeToMoveRegClassDefs - Return true if it's safe to move a machine
  /// instruction that defines the specified register class.
  bool isSafeToMoveRegClassDefs(const TargetRegisterClass *RC) const override;

  /// isSafeToClobberEFLAGS - Return true if it's safe to insert an instruction
  /// that would clobber the EFLAGS condition register. Note the result may be
  /// conservative. If it cannot definitely determine the safety after visiting
  /// a few instructions in each direction it assumes it's not safe.
  bool isSafeToClobberEFLAGS(MachineBasicBlock &MBB,
                             MachineBasicBlock::iterator I) const;

  /// True if MI has a condition code def, e.g. EFLAGS, that is
  /// not marked dead.
  bool hasLiveCondCodeDef(MachineInstr *MI) const;

  /// getGlobalBaseReg - Return a virtual register initialized with the
  /// global base register value. Output instructions required to
  /// initialize the register in the function entry block, if necessary.
  ///
  unsigned getGlobalBaseReg(MachineFunction *MF) const;

  std::pair<uint16_t, uint16_t>
  getExecutionDomain(const MachineInstr *MI) const override;

  void setExecutionDomain(MachineInstr *MI, unsigned Domain) const override;

  unsigned
  getPartialRegUpdateClearance(const MachineInstr *MI, unsigned OpNum,
                               const TargetRegisterInfo *TRI) const override;
  unsigned getUndefRegClearance(const MachineInstr *MI, unsigned &OpNum,
                                const TargetRegisterInfo *TRI) const override;
  void breakPartialRegDependency(MachineBasicBlock::iterator MI, unsigned OpNum,
                                 const TargetRegisterInfo *TRI) const override;

  MachineInstr *foldMemoryOperandImpl(MachineFunction &MF, MachineInstr *MI,
                                      unsigned OpNum,
                                      ArrayRef<MachineOperand> MOs,
                                      MachineBasicBlock::iterator InsertPt,
                                      unsigned Size, unsigned Alignment,
                                      bool AllowCommute) const;

  void
  getUnconditionalBranch(MCInst &Branch,
                         const MCSymbolRefExpr *BranchTarget) const override;

  void getTrap(MCInst &MI) const override;

  unsigned getJumpInstrTableEntryBound() const override;

  bool isHighLatencyDef(int opc) const override;

  bool hasHighOperandLatency(const TargetSchedModel &SchedModel,
                             const MachineRegisterInfo *MRI,
                             const MachineInstr *DefMI, unsigned DefIdx,
                             const MachineInstr *UseMI,
                             unsigned UseIdx) const override;

  bool useMachineCombiner() const override {
    return true;
  }

  bool isAssociativeAndCommutative(const MachineInstr &Inst) const override;

  bool hasReassociableOperands(const MachineInstr &Inst,
                               const MachineBasicBlock *MBB) const override;

  void setSpecialOperandAttr(MachineInstr &OldMI1, MachineInstr &OldMI2,
                             MachineInstr &NewMI1,
                             MachineInstr &NewMI2) const override;

  /// analyzeCompare - For a comparison instruction, return the source
  /// registers in SrcReg and SrcReg2 if it has two register operands, and the
  /// value it compares against in CmpValue. Return true if the comparison
  /// instruction can be analyzed.
  bool analyzeCompare(const MachineInstr *MI, unsigned &SrcReg,
                      unsigned &SrcReg2, int &CmpMask,
                      int &CmpValue) const override;

  /// optimizeCompareInstr - Check if there exists an earlier instruction that
  /// operates on the same source operands and sets flags in the same way as
  /// Compare; remove Compare if possible.
  bool optimizeCompareInstr(MachineInstr *CmpInstr, unsigned SrcReg,
                            unsigned SrcReg2, int CmpMask, int CmpValue,
                            const MachineRegisterInfo *MRI) const override;

  /// optimizeLoadInstr - Try to remove the load by folding it to a register
  /// operand at the use. We fold the load instructions if and only if the
  /// def and use are in the same BB. We only look at one load and see
  /// whether it can be folded into MI. FoldAsLoadDefReg is the virtual
  /// register defined by the load we are trying to fold. DefMI returns the
  /// machine instruction that defines FoldAsLoadDefReg, and the function
  /// returns the machine instruction generated due to folding.
  MachineInstr *optimizeLoadInstr(MachineInstr *MI,
                                  const MachineRegisterInfo *MRI,
                                  unsigned &FoldAsLoadDefReg,
                                  MachineInstr *&DefMI) const override;

  std::pair<unsigned, unsigned>
  decomposeMachineOperandsTargetFlags(unsigned TF) const override;

  ArrayRef<std::pair<unsigned, const char *>>
  getSerializableDirectMachineOperandTargetFlags() const override;

protected:
  /// Commutes the operands in the given instruction by changing the operand
  /// order and/or changing the instruction's opcode and/or the immediate value
  /// operand.
  ///
  /// The arguments 'CommuteOpIdx1' and 'CommuteOpIdx2' specify the operands
  /// to be commuted.
  ///
  /// Do not call this method for a non-commutable instruction or
  /// non-commutable operands.
  /// Even though the instruction is commutable, the method may still
  /// fail to commute the operands; a null pointer is returned in such cases.
  MachineInstr *commuteInstructionImpl(MachineInstr *MI, bool NewMI,
                                       unsigned CommuteOpIdx1,
                                       unsigned CommuteOpIdx2) const override;

private:
  MachineInstr *convertToThreeAddressWithLEA(unsigned MIOpc,
                                             MachineFunction::iterator &MFI,
                                             MachineBasicBlock::iterator &MBBI,
                                             LiveVariables *LV) const;

  /// Handles memory folding for special case instructions, for instance those
  /// requiring custom manipulation of the address.
  MachineInstr *foldMemoryOperandCustom(MachineFunction &MF, MachineInstr *MI,
                                        unsigned OpNum,
                                        ArrayRef<MachineOperand> MOs,
                                        MachineBasicBlock::iterator InsertPt,
                                        unsigned Size, unsigned Align) const;

  /// isFrameOperand - Return true and the FrameIndex if the specified
  /// operand and the following operands form a reference to the stack frame.
  bool isFrameOperand(const MachineInstr *MI, unsigned int Op,
                      int &FrameIndex) const;
};

} // End llvm namespace

#endif