//===-- PeepholeOptimizer.cpp - Peephole Optimizations -------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// Perform peephole optimizations on the machine code:
//
// - Optimize Extensions
//
//     Optimization of sign / zero extension instructions. It may be extended
//     to handle other instructions with similar properties.
//
//     On some targets, some instructions, e.g. X86 sign / zero extension, may
//     leave the source value in the lower part of the result. This
//     optimization will replace some uses of the pre-extension value with
//     uses of the sub-register of the result.
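//
//     For illustration only (the opcode and register names below are
//     hypothetical, x86-like), the rewrite has roughly this shape:
//
//       %reg1027 = MOVSX32rr16 %reg1026
//       ...      = use %reg1026
//     =>
//       %reg1027 = MOVSX32rr16 %reg1026
//       ...      = use %reg1027:sub_16bit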
//
// - Optimize Comparisons
//
//     Optimization of comparison instructions. For instance, in this code:
//
//       sub r1, 1
//       cmp r1, 0
//       bz  L1
//
//     If the "sub" instruction already sets (or could be modified to set) the
//     same flag that the "cmp" instruction sets and that "bz" uses, then we
//     can eliminate the "cmp" instruction.
//
//     Another instance, in this code:
//
//       sub r1, r3 | sub r1, imm
//       cmp r3, r1 or cmp r1, r3 | cmp r1, imm
//       bge L1
//
//     If the branch instruction can use the flag from "sub", then we can
//     replace "sub" with "subs" and eliminate the "cmp" instruction.
//
// - Optimize Loads:
//
//     Loads that can be folded into a later instruction. A load is foldable
//     if it loads into a virtual register and the defined virtual register
//     has a single use.
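//
//     For illustration only (x86-like, with hypothetical virtual register
//     names), folding such a single-use load into its user looks roughly
//     like:
//
//       %v1 = MOV32rm <mem>
//       %v2 = ADD32rr %v0, %v1
//     =>
//       %v2 = ADD32rm %v0, <mem>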
//
// - Optimize Copies and Bitcasts (more generally, target specific copies):
//
//     Rewrite copies and bitcasts to avoid cross register bank copies
//     when possible.
//     E.g., consider the following example, where capital and lowercase
//     letters denote different register files:
//       b = copy A <-- cross-bank copy
//       C = copy b <-- cross-bank copy
//     =>
//       b = copy A <-- cross-bank copy
//       C = copy A <-- same-bank copy
//
//     E.g., for bitcast:
//       b = bitcast A <-- cross-bank copy
//       C = bitcast b <-- cross-bank copy
//     =>
//       b = bitcast A <-- cross-bank copy
//       C = copy A    <-- same-bank copy
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/Passes.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetRegisterInfo.h"
#include "llvm/Target/TargetSubtargetInfo.h"
#include <utility>
using namespace llvm;

#define DEBUG_TYPE "peephole-opt"

// Optimize Extensions
static cl::opt<bool>
Aggressive("aggressive-ext-opt", cl::Hidden,
           cl::desc("Aggressive extension optimization"));

static cl::opt<bool>
DisablePeephole("disable-peephole", cl::Hidden, cl::init(false),
                cl::desc("Disable the peephole optimizer"));

static cl::opt<bool>
DisableAdvCopyOpt("disable-adv-copy-opt", cl::Hidden, cl::init(false),
                  cl::desc("Disable advanced copy optimization"));

STATISTIC(NumReuse,      "Number of extension results reused");
STATISTIC(NumCmps,       "Number of compares eliminated");
STATISTIC(NumImmFold,    "Number of move immediate folded");
STATISTIC(NumLoadFold,   "Number of loads folded");
STATISTIC(NumSelects,    "Number of selects optimized");
STATISTIC(NumUncoalescableCopies, "Number of uncoalescable copies optimized");
STATISTIC(NumRewrittenCopies, "Number of copies rewritten");

namespace {
class PeepholeOptimizer : public MachineFunctionPass {
  MachineFunction *MF;
  const TargetInstrInfo *TII;
  const TargetRegisterInfo *TRI;
  MachineRegisterInfo *MRI;
  MachineDominatorTree *DT;  // Machine dominator tree

public:
  static char ID; // Pass identification
  PeepholeOptimizer() : MachineFunctionPass(ID) {
    initializePeepholeOptimizerPass(*PassRegistry::getPassRegistry());
  }

  bool runOnMachineFunction(MachineFunction &MF) override;

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    MachineFunctionPass::getAnalysisUsage(AU);
    if (Aggressive) {
      AU.addRequired<MachineDominatorTree>();
      AU.addPreserved<MachineDominatorTree>();
    }
  }

private:
  bool optimizeCmpInstr(MachineInstr *MI, MachineBasicBlock *MBB);
  bool optimizeExtInstr(MachineInstr *MI, MachineBasicBlock *MBB,
                        SmallPtrSetImpl<MachineInstr*> &LocalMIs);
  bool optimizeSelect(MachineInstr *MI);
  bool optimizeCondBranch(MachineInstr *MI);
  bool optimizeCopyOrBitcast(MachineInstr *MI);
  bool optimizeCoalescableCopy(MachineInstr *MI);
  bool optimizeUncoalescableCopy(MachineInstr *MI,
                                 SmallPtrSetImpl<MachineInstr *> &LocalMIs);
  bool findNextSource(unsigned &Reg, unsigned &SubReg);
  bool isMoveImmediate(MachineInstr *MI,
                       SmallSet<unsigned, 4> &ImmDefRegs,
                       DenseMap<unsigned, MachineInstr*> &ImmDefMIs);
  bool foldImmediate(MachineInstr *MI, MachineBasicBlock *MBB,
                     SmallSet<unsigned, 4> &ImmDefRegs,
                     DenseMap<unsigned, MachineInstr*> &ImmDefMIs);
  bool isLoadFoldable(MachineInstr *MI,
                      SmallSet<unsigned, 16> &FoldAsLoadDefCandidates);

  /// \brief Check whether \p MI is understood by the register coalescer
  /// but may require some rewriting.
  bool isCoalescableCopy(const MachineInstr &MI) {
    // SubregToRegs are not interesting, because they are already register
    // coalescer friendly.
    return MI.isCopy() || (!DisableAdvCopyOpt &&
                           (MI.isRegSequence() || MI.isInsertSubreg() ||
                            MI.isExtractSubreg()));
  }

  /// \brief Check whether \p MI is a copy-like instruction that is
  /// not recognized by the register coalescer.
  bool isUncoalescableCopy(const MachineInstr &MI) {
    return MI.isBitcast() ||
           (!DisableAdvCopyOpt &&
            (MI.isRegSequenceLike() || MI.isInsertSubregLike() ||
             MI.isExtractSubregLike()));
  }
};

/// \brief Helper class to track the possible sources of a value defined by
/// a (chain of) copy related instructions.
/// Given a definition (instruction and definition index), this class
/// follows the use-def chain to find successive suitable sources.
/// The given source can be used to rewrite the definition into
/// def = COPY src.
///
/// For instance, let us consider the following snippet:
/// v0 =
/// v2 = INSERT_SUBREG v1, v0, sub0
/// def = COPY v2.sub0
///
/// Using a ValueTracker for def = COPY v2.sub0 will give the following
/// suitable sources:
/// v2.sub0 and v0.
/// Then, def can be rewritten into def = COPY v0.
class ValueTracker {
private:
  /// The current point into the use-def chain.
  const MachineInstr *Def;
  /// The index of the definition in Def.
  unsigned DefIdx;
  /// The sub register index of the definition.
  unsigned DefSubReg;
  /// The register where the value can be found.
  unsigned Reg;
  /// Specify whether or not the value tracking looks through
  /// complex instructions. When this is false, the value tracker
  /// bails on everything that is not a copy or a bitcast.
  ///
  /// Note: This could have been implemented as a specialized version of
  /// the ValueTracker class but that would have complicated the code of
  /// the users of this class.
  bool UseAdvancedTracking;
  /// MachineRegisterInfo used to perform tracking.
  const MachineRegisterInfo &MRI;
  /// Optional TargetInstrInfo used to perform some complex
  /// tracking.
  const TargetInstrInfo *TII;

  /// \brief Dispatcher to the right underlying implementation of
  /// getNextSource.
  bool getNextSourceImpl(unsigned &SrcReg, unsigned &SrcSubReg);
  /// \brief Specialized version of getNextSource for Copy instructions.
  bool getNextSourceFromCopy(unsigned &SrcReg, unsigned &SrcSubReg);
  /// \brief Specialized version of getNextSource for Bitcast instructions.
  bool getNextSourceFromBitcast(unsigned &SrcReg, unsigned &SrcSubReg);
  /// \brief Specialized version of getNextSource for RegSequence
  /// instructions.
  bool getNextSourceFromRegSequence(unsigned &SrcReg, unsigned &SrcSubReg);
  /// \brief Specialized version of getNextSource for InsertSubreg
  /// instructions.
  bool getNextSourceFromInsertSubreg(unsigned &SrcReg, unsigned &SrcSubReg);
  /// \brief Specialized version of getNextSource for ExtractSubreg
  /// instructions.
  bool getNextSourceFromExtractSubreg(unsigned &SrcReg, unsigned &SrcSubReg);
  /// \brief Specialized version of getNextSource for SubregToReg
  /// instructions.
  bool getNextSourceFromSubregToReg(unsigned &SrcReg, unsigned &SrcSubReg);

public:
  /// \brief Create a ValueTracker instance for the value defined by \p Reg.
  /// \p DefSubReg represents the sub register index the value tracker will
  /// track. It does not need to match the sub register index used in the
  /// definition of \p Reg.
  /// \p UseAdvancedTracking specifies whether or not the value tracker looks
  /// through complex instructions. By default (false), it handles only copy
  /// and bitcast instructions.
  /// If \p Reg is a physical register, a value tracker constructed with
  /// this constructor will not find any alternative source.
  /// Indeed, when \p Reg is a physical register, that constructor does not
  /// know which definition of \p Reg it should track.
  /// Use the next constructor to track a physical register.
  ValueTracker(unsigned Reg, unsigned DefSubReg,
               const MachineRegisterInfo &MRI,
               bool UseAdvancedTracking = false,
               const TargetInstrInfo *TII = nullptr)
      : Def(nullptr), DefIdx(0), DefSubReg(DefSubReg), Reg(Reg),
        UseAdvancedTracking(UseAdvancedTracking), MRI(MRI), TII(TII) {
    if (!TargetRegisterInfo::isPhysicalRegister(Reg)) {
      Def = MRI.getVRegDef(Reg);
      DefIdx = MRI.def_begin(Reg).getOperandNo();
    }
  }

  /// \brief Create a ValueTracker instance for the value defined by
  /// the pair \p MI, \p DefIdx.
  /// Unlike the other constructor, the value tracker produced by this one
  /// may be able to find a new source when the definition is a physical
  /// register.
  /// This could be useful to rewrite target specific instructions into
  /// generic copy instructions.
  ValueTracker(const MachineInstr &MI, unsigned DefIdx, unsigned DefSubReg,
               const MachineRegisterInfo &MRI,
               bool UseAdvancedTracking = false,
               const TargetInstrInfo *TII = nullptr)
      : Def(&MI), DefIdx(DefIdx), DefSubReg(DefSubReg),
        UseAdvancedTracking(UseAdvancedTracking), MRI(MRI), TII(TII) {
    assert(DefIdx < Def->getDesc().getNumDefs() &&
           Def->getOperand(DefIdx).isReg() && "Invalid definition");
    Reg = Def->getOperand(DefIdx).getReg();
  }

  /// \brief Following the use-def chain, get the next available source
  /// for the tracked value.
  /// When the returned value is not nullptr, \p SrcReg gives the register
  /// that contains the tracked value.
  /// \note The sub register index returned in \p SrcSubReg must be used
  /// on \p SrcReg to access the actual value.
  /// \return Unless the returned value is nullptr (i.e., no source found),
  /// \p SrcReg gives the register of the next source used in the returned
  /// instruction and \p SrcSubReg the sub-register index to be used on that
  /// source to get the tracked value. When nullptr is returned, no
  /// alternative source has been found.
  const MachineInstr *getNextSource(unsigned &SrcReg, unsigned &SrcSubReg);

  /// \brief Get the last register where the initial value can be found.
  /// Initially this is the register of the definition.
  /// Then, after each successful call to getNextSource, this is the
  /// register of the last source.
  unsigned getReg() const { return Reg; }
};
}

char PeepholeOptimizer::ID = 0;
char &llvm::PeepholeOptimizerID = PeepholeOptimizer::ID;
INITIALIZE_PASS_BEGIN(PeepholeOptimizer, "peephole-opts",
                "Peephole Optimizations", false, false)
INITIALIZE_PASS_DEPENDENCY(MachineDominatorTree)
INITIALIZE_PASS_END(PeepholeOptimizer, "peephole-opts",
                "Peephole Optimizations", false, false)

/// optimizeExtInstr - If the instruction is a copy-like instruction, i.e. it
/// reads a single register and writes a single register and it does not
/// modify the source, and if the source value is preserved as a sub-register
/// of the result, then replace all reachable uses of the source with the
/// subreg of the result.
///
/// Do not generate an EXTRACT that is used only in a debug use, as this
/// changes the code. Since this code does not currently share EXTRACTs, just
/// ignore all debug uses.
bool PeepholeOptimizer::
optimizeExtInstr(MachineInstr *MI, MachineBasicBlock *MBB,
                 SmallPtrSetImpl<MachineInstr*> &LocalMIs) {
  unsigned SrcReg, DstReg, SubIdx;
  if (!TII->isCoalescableExtInstr(*MI, SrcReg, DstReg, SubIdx))
    return false;

  if (TargetRegisterInfo::isPhysicalRegister(DstReg) ||
      TargetRegisterInfo::isPhysicalRegister(SrcReg))
    return false;

  if (MRI->hasOneNonDBGUse(SrcReg))
    // No other uses.
    return false;

  // Ensure DstReg can get a register class that actually supports
  // sub-registers. Don't change the class until we commit.
  const TargetRegisterClass *DstRC = MRI->getRegClass(DstReg);
  DstRC = TRI->getSubClassWithSubReg(DstRC, SubIdx);
  if (!DstRC)
    return false;

  // The ext instr may be operating on a sub-register of SrcReg as well.
  // PPC::EXTSW is a 32 -> 64-bit sign extension, but it reads a 64-bit
  // register.
  // If UseSrcSubIdx is set, SubIdx also applies to SrcReg, and only uses of
  // SrcReg:SubIdx should be replaced.
  bool UseSrcSubIdx =
      TRI->getSubClassWithSubReg(MRI->getRegClass(SrcReg), SubIdx) != nullptr;

  // The source has other uses. See if we can replace the other uses with uses
  // of the result of the extension.
  SmallPtrSet<MachineBasicBlock*, 4> ReachedBBs;
  for (MachineInstr &UI : MRI->use_nodbg_instructions(DstReg))
    ReachedBBs.insert(UI.getParent());

  // Uses that are in the same BB as uses of the result of the instruction.
  SmallVector<MachineOperand*, 8> Uses;

  // Uses that the result of the instruction can reach.
  SmallVector<MachineOperand*, 8> ExtendedUses;

  bool ExtendLife = true;
  for (MachineOperand &UseMO : MRI->use_nodbg_operands(SrcReg)) {
    MachineInstr *UseMI = UseMO.getParent();
    if (UseMI == MI)
      continue;

    if (UseMI->isPHI()) {
      ExtendLife = false;
      continue;
    }

    // Only accept uses of SrcReg:SubIdx.
    if (UseSrcSubIdx && UseMO.getSubReg() != SubIdx)
      continue;

    // It's an error to translate this:
    //
    //    %reg1025 = <sext> %reg1024
    //     ...
    //    %reg1026 = SUBREG_TO_REG 0, %reg1024, 4
    //
    // into this:
    //
    //    %reg1025 = <sext> %reg1024
    //     ...
    //    %reg1027 = COPY %reg1025:4
    //    %reg1026 = SUBREG_TO_REG 0, %reg1027, 4
    //
    // The problem here is that SUBREG_TO_REG is there to assert that an
    // implicit zext occurs. It doesn't insert a zext instruction. If we allow
    // the COPY here, it will give us the value after the <sext>, not the
    // original value of %reg1024 before <sext>.
    if (UseMI->getOpcode() == TargetOpcode::SUBREG_TO_REG)
      continue;

    MachineBasicBlock *UseMBB = UseMI->getParent();
    if (UseMBB == MBB) {
      // Local uses that come after the extension.
      if (!LocalMIs.count(UseMI))
        Uses.push_back(&UseMO);
    } else if (ReachedBBs.count(UseMBB)) {
      // Non-local uses where the result of the extension is used. Always
      // replace these unless it's a PHI.
      Uses.push_back(&UseMO);
    } else if (Aggressive && DT->dominates(MBB, UseMBB)) {
      // We may want to extend the live range of the extension result in order
      // to replace these uses.
      ExtendedUses.push_back(&UseMO);
    } else {
      // Both will be live out of the def MBB anyway. Don't extend the live
      // range of the extension result.
      ExtendLife = false;
      break;
    }
  }

  if (ExtendLife && !ExtendedUses.empty())
    // Extend the liveness of the extension result.
    std::copy(ExtendedUses.begin(), ExtendedUses.end(),
              std::back_inserter(Uses));

  // Now replace all uses.
  bool Changed = false;
  if (!Uses.empty()) {
    SmallPtrSet<MachineBasicBlock*, 4> PHIBBs;

    // Look for PHI uses of the extended result, we don't want to extend the
    // liveness of a PHI input. It breaks all kinds of assumptions downstream.
    // A PHI use is expected to be the kill of its source values.
    for (MachineInstr &UI : MRI->use_nodbg_instructions(DstReg))
      if (UI.isPHI())
        PHIBBs.insert(UI.getParent());

    const TargetRegisterClass *RC = MRI->getRegClass(SrcReg);
    for (unsigned i = 0, e = Uses.size(); i != e; ++i) {
      MachineOperand *UseMO = Uses[i];
      MachineInstr *UseMI = UseMO->getParent();
      MachineBasicBlock *UseMBB = UseMI->getParent();
      if (PHIBBs.count(UseMBB))
        continue;

      // About to add uses of DstReg, clear DstReg's kill flags.
      if (!Changed) {
        MRI->clearKillFlags(DstReg);
        MRI->constrainRegClass(DstReg, DstRC);
      }

      unsigned NewVR = MRI->createVirtualRegister(RC);
      MachineInstr *Copy = BuildMI(*UseMBB, UseMI, UseMI->getDebugLoc(),
                                   TII->get(TargetOpcode::COPY), NewVR)
        .addReg(DstReg, 0, SubIdx);
      // SubIdx applies to both SrcReg and DstReg when UseSrcSubIdx is set.
      if (UseSrcSubIdx) {
        Copy->getOperand(0).setSubReg(SubIdx);
        Copy->getOperand(0).setIsUndef();
      }
      UseMO->setReg(NewVR);
      ++NumReuse;
      Changed = true;
    }
  }

  return Changed;
}

/// optimizeCmpInstr - If the instruction is a compare and the previous
/// instruction it's comparing against already sets (or could be modified to
/// set) the same flag as the compare, then we can remove the comparison and
/// use the flag from the previous instruction.
bool PeepholeOptimizer::optimizeCmpInstr(MachineInstr *MI,
                                         MachineBasicBlock *MBB) {
  // If this instruction is a comparison against zero and isn't comparing a
  // physical register, we can try to optimize it.
  unsigned SrcReg, SrcReg2;
  int CmpMask, CmpValue;
  if (!TII->analyzeCompare(MI, SrcReg, SrcReg2, CmpMask, CmpValue) ||
      TargetRegisterInfo::isPhysicalRegister(SrcReg) ||
      (SrcReg2 != 0 && TargetRegisterInfo::isPhysicalRegister(SrcReg2)))
    return false;

  // Attempt to optimize the comparison instruction.
  if (TII->optimizeCompareInstr(MI, SrcReg, SrcReg2, CmpMask, CmpValue, MRI)) {
    ++NumCmps;
    return true;
  }

  return false;
}

/// Optimize a select instruction.
bool PeepholeOptimizer::optimizeSelect(MachineInstr *MI) {
  unsigned TrueOp = 0;
  unsigned FalseOp = 0;
  bool Optimizable = false;
  SmallVector<MachineOperand, 4> Cond;
  if (TII->analyzeSelect(MI, Cond, TrueOp, FalseOp, Optimizable))
    return false;
  if (!Optimizable)
    return false;
  if (!TII->optimizeSelect(MI))
    return false;
  MI->eraseFromParent();
  ++NumSelects;
  return true;
}

/// \brief Check if a simpler conditional branch can be generated.
bool PeepholeOptimizer::optimizeCondBranch(MachineInstr *MI) {
  return TII->optimizeCondBranch(MI);
}

/// \brief Check if the registers defined by the pair (RegisterClass, SubReg)
/// share the same register file.
static bool shareSameRegisterFile(const TargetRegisterInfo &TRI,
                                  const TargetRegisterClass *DefRC,
                                  unsigned DefSubReg,
                                  const TargetRegisterClass *SrcRC,
                                  unsigned SrcSubReg) {
  // Same register class.
  if (DefRC == SrcRC)
    return true;

  // Both operands are sub registers. Check if they share a register class.
  unsigned SrcIdx, DefIdx;
  if (SrcSubReg && DefSubReg)
    return TRI.getCommonSuperRegClass(SrcRC, SrcSubReg, DefRC, DefSubReg,
                                      SrcIdx, DefIdx) != nullptr;
  // At most one of the registers is a sub register, make it Src to avoid
  // duplicating the test.
  if (!SrcSubReg) {
    std::swap(DefSubReg, SrcSubReg);
    std::swap(DefRC, SrcRC);
  }

  // One of the registers is a sub register, check if we can get a superclass.
  if (SrcSubReg)
    return TRI.getMatchingSuperRegClass(SrcRC, DefRC, SrcSubReg) != nullptr;
  // Plain copy.
  return TRI.getCommonSubClass(DefRC, SrcRC) != nullptr;
}

/// \brief Try to find the next source that shares the same register file
/// for the value defined by \p Reg and \p SubReg.
/// When true is returned, \p Reg and \p SubReg are updated with the
/// register number and sub-register index of the new source.
/// \return False if no alternative sources are available. True otherwise.
bool PeepholeOptimizer::findNextSource(unsigned &Reg, unsigned &SubReg) {
  // Do not try to find a new source for a physical register.
  // So far we do not have any motivating example for doing that.
  // Thus, instead of maintaining untested code, we will revisit that if
  // that changes at some point.
  if (TargetRegisterInfo::isPhysicalRegister(Reg))
    return false;

  const TargetRegisterClass *DefRC = MRI->getRegClass(Reg);
  unsigned DefSubReg = SubReg;

  unsigned Src;
  unsigned SrcSubReg;
  bool ShouldRewrite = false;

  // Follow the chain of copies until we reach the top of the use-def chain
  // or find a more suitable source.
  ValueTracker ValTracker(Reg, DefSubReg, *MRI, !DisableAdvCopyOpt, TII);
  do {
    unsigned CopySrcReg, CopySrcSubReg;
    if (!ValTracker.getNextSource(CopySrcReg, CopySrcSubReg))
      break;
    Src = CopySrcReg;
    SrcSubReg = CopySrcSubReg;

    // Do not extend the live-ranges of physical registers as they add
    // constraints to the register allocator.
    // Moreover, if we wanted to extend the live-range of a physical register,
    // unlike an SSA virtual register, we would have to check that it is not
    // redefined before the related use.
    if (TargetRegisterInfo::isPhysicalRegister(Src))
      break;

    const TargetRegisterClass *SrcRC = MRI->getRegClass(Src);

    // If this source does not incur a cross register bank copy, use it.
    ShouldRewrite = shareSameRegisterFile(*TRI, DefRC, DefSubReg, SrcRC,
                                          SrcSubReg);
  } while (!ShouldRewrite);

  // If we did not find a more suitable source, there is nothing to optimize.
  if (!ShouldRewrite || Src == Reg)
    return false;

  Reg = Src;
  SubReg = SrcSubReg;
  return true;
}

namespace {
/// \brief Helper class to rewrite the arguments of a copy-like instruction.
class CopyRewriter {
protected:
  /// The copy-like instruction.
  MachineInstr &CopyLike;
  /// The index of the source being rewritten.
  unsigned CurrentSrcIdx;

public:
  CopyRewriter(MachineInstr &MI) : CopyLike(MI), CurrentSrcIdx(0) {}

  virtual ~CopyRewriter() {}

  /// \brief Get the next rewritable source (SrcReg, SrcSubReg) and
  /// the related value that it affects (TrackReg, TrackSubReg).
  /// A source is considered rewritable if its register class and the
  /// register class of the related TrackReg may not be register
  /// coalescer friendly. In other words, given a copy-like instruction,
  /// not all of its arguments may be returned as rewritable sources, since
  /// some arguments are already known to be register coalescer friendly.
  ///
  /// Each call of this method moves the current source to the next
  /// rewritable source.
  /// For instance, let CopyLike be the instruction to rewrite.
  /// CopyLike has one definition and one source:
  /// dst.dstSubIdx = CopyLike src.srcSubIdx.
  ///
  /// The first call will give the first rewritable source, i.e.,
  /// the only source this instruction has:
  /// (SrcReg, SrcSubReg) = (src, srcSubIdx).
  /// This source defines the whole definition, i.e.,
  /// (TrackReg, TrackSubReg) = (dst, dstSubIdx).
  ///
  /// The second and subsequent calls will return false, as there is only one
  /// rewritable source.
  ///
  /// \return True if a rewritable source has been found, false otherwise.
  /// The output arguments are valid if and only if true is returned.
  virtual bool getNextRewritableSource(unsigned &SrcReg, unsigned &SrcSubReg,
                                       unsigned &TrackReg,
                                       unsigned &TrackSubReg) {
    // If CurrentSrcIdx == 1, this means this function has already been
    // called once. CopyLike has one definition and one argument, thus,
    // there is nothing else to rewrite.
    if (!CopyLike.isCopy() || CurrentSrcIdx == 1)
      return false;
    // This is the first call to getNextRewritableSource.
    // Move the CurrentSrcIdx to remember that we made that call.
    CurrentSrcIdx = 1;
    // The rewritable source is the argument.
    const MachineOperand &MOSrc = CopyLike.getOperand(1);
    SrcReg = MOSrc.getReg();
    SrcSubReg = MOSrc.getSubReg();
    // What we track are the alternative sources of the definition.
    const MachineOperand &MODef = CopyLike.getOperand(0);
    TrackReg = MODef.getReg();
    TrackSubReg = MODef.getSubReg();
    return true;
  }

  /// \brief Rewrite the current source with \p NewReg and \p NewSubReg
  /// if possible.
  /// \return True if the rewriting was possible, false otherwise.
  virtual bool RewriteCurrentSource(unsigned NewReg, unsigned NewSubReg) {
    if (!CopyLike.isCopy() || CurrentSrcIdx != 1)
      return false;
    MachineOperand &MOSrc = CopyLike.getOperand(CurrentSrcIdx);
    MOSrc.setReg(NewReg);
    MOSrc.setSubReg(NewSubReg);
    return true;
  }
};

/// \brief Specialized rewriter for INSERT_SUBREG instruction.
class InsertSubregRewriter : public CopyRewriter {
public:
  InsertSubregRewriter(MachineInstr &MI) : CopyRewriter(MI) {
    assert(MI.isInsertSubreg() && "Invalid instruction");
  }

  /// \brief See CopyRewriter::getNextRewritableSource.
  /// Here CopyLike has the following form:
  /// dst = INSERT_SUBREG Src1, Src2.src2SubIdx, subIdx.
  /// Src1 has the same register class as dst, hence, there is
  /// nothing to rewrite.
  /// Src2.src2SubIdx may not be register coalescer friendly.
  /// Therefore, the first call to this method returns:
  /// (SrcReg, SrcSubReg) = (Src2, src2SubIdx).
  /// (TrackReg, TrackSubReg) = (dst, subIdx).
  ///
  /// Subsequent calls will return false.
  bool getNextRewritableSource(unsigned &SrcReg, unsigned &SrcSubReg,
                               unsigned &TrackReg,
                               unsigned &TrackSubReg) override {
    // If we have already got the only source we can rewrite, return false.
    if (CurrentSrcIdx == 2)
      return false;
    // We are looking at v2 = INSERT_SUBREG v0, v1, sub0.
    CurrentSrcIdx = 2;
    const MachineOperand &MOInsertedReg = CopyLike.getOperand(2);
    SrcReg = MOInsertedReg.getReg();
    SrcSubReg = MOInsertedReg.getSubReg();
    const MachineOperand &MODef = CopyLike.getOperand(0);

    // We want to track something that is compatible with the
    // partial definition.
    TrackReg = MODef.getReg();
    if (MODef.getSubReg())
      // Bail if we have to compose sub-register indices.
      return false;
    TrackSubReg = (unsigned)CopyLike.getOperand(3).getImm();
    return true;
  }
  bool RewriteCurrentSource(unsigned NewReg, unsigned NewSubReg) override {
    if (CurrentSrcIdx != 2)
      return false;
    // We are rewriting the inserted reg.
    MachineOperand &MO = CopyLike.getOperand(CurrentSrcIdx);
    MO.setReg(NewReg);
    MO.setSubReg(NewSubReg);
    return true;
  }
};

/// \brief Specialized rewriter for EXTRACT_SUBREG instruction.
class ExtractSubregRewriter : public CopyRewriter {
  const TargetInstrInfo &TII;

public:
  ExtractSubregRewriter(MachineInstr &MI, const TargetInstrInfo &TII)
      : CopyRewriter(MI), TII(TII) {
    assert(MI.isExtractSubreg() && "Invalid instruction");
  }

  /// \brief See CopyRewriter::getNextRewritableSource.
  /// Here CopyLike has the following form:
  /// dst.dstSubIdx = EXTRACT_SUBREG Src, subIdx.
  /// There is only one rewritable source: Src.subIdx,
  /// which defines dst.dstSubIdx.
  bool getNextRewritableSource(unsigned &SrcReg, unsigned &SrcSubReg,
                               unsigned &TrackReg,
                               unsigned &TrackSubReg) override {
    // If we have already got the only source we can rewrite, return false.
    if (CurrentSrcIdx == 1)
      return false;
    // We are looking at v1 = EXTRACT_SUBREG v0, sub0.
    CurrentSrcIdx = 1;
    const MachineOperand &MOExtractedReg = CopyLike.getOperand(1);
    SrcReg = MOExtractedReg.getReg();
    // If we have to compose sub-register indices, bail out.
    if (MOExtractedReg.getSubReg())
      return false;

    SrcSubReg = CopyLike.getOperand(2).getImm();

    // We want to track something that is compatible with the definition.
    const MachineOperand &MODef = CopyLike.getOperand(0);
    TrackReg = MODef.getReg();
    TrackSubReg = MODef.getSubReg();
    return true;
  }

  bool RewriteCurrentSource(unsigned NewReg, unsigned NewSubReg) override {
    // The only source we can rewrite is the input register.
    if (CurrentSrcIdx != 1)
      return false;

    CopyLike.getOperand(CurrentSrcIdx).setReg(NewReg);

    // If we find a source that does not require extracting anything,
    // rewrite the operation with a copy.
    if (!NewSubReg) {
      // Move the current index to an invalid position.
      // We do not want another call to this method to be able
      // to do any change.
      CurrentSrcIdx = -1;
      // Rewrite the operation as a COPY.
      // Get rid of the sub-register index.
      CopyLike.RemoveOperand(2);
      // Morph the operation into a COPY.
      CopyLike.setDesc(TII.get(TargetOpcode::COPY));
      return true;
    }
    CopyLike.getOperand(CurrentSrcIdx + 1).setImm(NewSubReg);
    return true;
  }
};

/// \brief Specialized rewriter for REG_SEQUENCE instruction.
class RegSequenceRewriter : public CopyRewriter {
public:
  RegSequenceRewriter(MachineInstr &MI) : CopyRewriter(MI) {
    assert(MI.isRegSequence() && "Invalid instruction");
  }

  /// \brief See CopyRewriter::getNextRewritableSource.
  /// Here CopyLike has the following form:
  /// dst = REG_SEQUENCE Src1.src1SubIdx, subIdx1, Src2.src2SubIdx, subIdx2.
  /// Each call will return a different source, walking through all the
  /// available sources.
  ///
  /// The first call returns:
  /// (SrcReg, SrcSubReg) = (Src1, src1SubIdx).
  /// (TrackReg, TrackSubReg) = (dst, subIdx1).
  ///
  /// The second call returns:
  /// (SrcReg, SrcSubReg) = (Src2, src2SubIdx).
  /// (TrackReg, TrackSubReg) = (dst, subIdx2).
  ///
  /// And so on, until all the sources have been traversed, then
  /// it returns false.
  bool getNextRewritableSource(unsigned &SrcReg, unsigned &SrcSubReg,
                               unsigned &TrackReg,
                               unsigned &TrackSubReg) override {
    // We are looking at v0 = REG_SEQUENCE v1, sub1, v2, sub2, etc.

    // If this is the first call, move to the first argument.
    if (CurrentSrcIdx == 0) {
      CurrentSrcIdx = 1;
    } else {
      // Otherwise, move to the next argument and check that it is valid.
      CurrentSrcIdx += 2;
      if (CurrentSrcIdx >= CopyLike.getNumOperands())
        return false;
    }
    const MachineOperand &MOInsertedReg = CopyLike.getOperand(CurrentSrcIdx);
    SrcReg = MOInsertedReg.getReg();
    // If we have to compose sub-register indices, bail out.
    if ((SrcSubReg = MOInsertedReg.getSubReg()))
      return false;

    // We want to track something that is compatible with the related
    // partial definition.
    TrackSubReg = CopyLike.getOperand(CurrentSrcIdx + 1).getImm();

    const MachineOperand &MODef = CopyLike.getOperand(0);
    TrackReg = MODef.getReg();
    // If we have to compose sub-registers, bail.
    return MODef.getSubReg() == 0;
  }

  bool RewriteCurrentSource(unsigned NewReg, unsigned NewSubReg) override {
    // We cannot rewrite out of bound operands.
    // Moreover, rewritable sources are at odd positions.
    if ((CurrentSrcIdx & 1) != 1 || CurrentSrcIdx > CopyLike.getNumOperands())
      return false;

    MachineOperand &MO = CopyLike.getOperand(CurrentSrcIdx);
    MO.setReg(NewReg);
    MO.setSubReg(NewSubReg);
    return true;
  }
};
} // End namespace.

/// \brief Get the appropriate CopyRewriter for \p MI.
/// \return A pointer to a dynamically allocated CopyRewriter or nullptr
/// if no rewriter works for \p MI.
static CopyRewriter *getCopyRewriter(MachineInstr &MI,
                                     const TargetInstrInfo &TII) {
  switch (MI.getOpcode()) {
  default:
    return nullptr;
  case TargetOpcode::COPY:
    return new CopyRewriter(MI);
  case TargetOpcode::INSERT_SUBREG:
    return new InsertSubregRewriter(MI);
  case TargetOpcode::EXTRACT_SUBREG:
    return new ExtractSubregRewriter(MI, TII);
  case TargetOpcode::REG_SEQUENCE:
    return new RegSequenceRewriter(MI);
  }
  llvm_unreachable(nullptr);
}

/// \brief Optimize generic copy instructions to avoid cross
/// register bank copies. The optimization looks through a chain of
/// copies and tries to find a source that has a compatible register
/// class.
/// Two register classes are considered to be compatible if they share
/// the same register bank.
/// New copies issued by this optimization are register allocator
/// friendly. This optimization does not remove any copy as it may
/// overconstrain the register allocator, but replaces some operands
/// when possible.
/// \pre isCoalescableCopy(*MI) is true.
/// \return True, when \p MI has been rewritten. False otherwise.
bool PeepholeOptimizer::optimizeCoalescableCopy(MachineInstr *MI) {
  assert(MI && isCoalescableCopy(*MI) && "Invalid argument");
  assert(MI->getDesc().getNumDefs() == 1 &&
         "Coalescer can understand multiple defs?!");
  const MachineOperand &MODef = MI->getOperand(0);
  // Do not rewrite physical definitions.
  if (TargetRegisterInfo::isPhysicalRegister(MODef.getReg()))
    return false;

  bool Changed = false;
  // Get the right rewriter for the current copy.
  std::unique_ptr<CopyRewriter> CpyRewriter(getCopyRewriter(*MI, *TII));
  // If none exists, bail out.
  if (!CpyRewriter)
    return false;
  // Rewrite each rewritable source.
  unsigned SrcReg, SrcSubReg, TrackReg, TrackSubReg;
  while (CpyRewriter->getNextRewritableSource(SrcReg, SrcSubReg, TrackReg,
                                              TrackSubReg)) {
    unsigned NewSrc = TrackReg;
    unsigned NewSubReg = TrackSubReg;
    // Try to find a more suitable source.
    // If we fail to do so, or we get back the original source,
    // move on to the next source.
    if (!findNextSource(NewSrc, NewSubReg) || SrcReg == NewSrc)
      continue;
    // Rewrite source.
    if (CpyRewriter->RewriteCurrentSource(NewSrc, NewSubReg)) {
      // We may have extended the live-range of NewSrc, account for that.
      MRI->clearKillFlags(NewSrc);
      Changed = true;
    }
  }
  // TODO: We could have a clean-up method to tidy the instruction.
  // E.g., v0 = INSERT_SUBREG v1, v1.sub0, sub0
  //    => v0 = COPY v1
  // Currently we haven't seen a motivating example for that and we
  // want to avoid untested code.
  NumRewrittenCopies += Changed == true;
  return Changed;
}
| 921 | |
| 922 | /// \brief Optimize copy-like instructions to create |
| 923 | /// register coalescer friendly instruction. |
| 924 | /// The optimization tries to kill-off the \p MI by looking |
| 925 | /// through a chain of copies to find a source that has a compatible |
| 926 | /// register class. |
| 927 | /// If such a source is found, it replace \p MI by a generic COPY |
| 928 | /// operation. |
| 929 | /// \pre isUncoalescableCopy(*MI) is true. |
| 930 | /// \return True, when \p MI has been optimized. In that case, \p MI has |
| 931 | /// been removed from its parent. |
| 932 | /// All COPY instructions created, are inserted in \p LocalMIs. |
| 933 | bool PeepholeOptimizer::optimizeUncoalescableCopy( |
| 934 | MachineInstr *MI, SmallPtrSetImpl<MachineInstr *> &LocalMIs) { |
| 935 | assert(MI && isUncoalescableCopy(*MI) && "Invalid argument"); |
| 936 | |
| 937 | // Check if we can rewrite all the values defined by this instruction. |
| 938 | SmallVector< |
| 939 | std::pair<TargetInstrInfo::RegSubRegPair, TargetInstrInfo::RegSubRegPair>, |
| 940 | 4> RewritePairs; |
| 941 | for (const MachineOperand &MODef : MI->defs()) { |
| 942 | if (MODef.isDead()) |
| 943 | // We can ignore those. |
| 944 | continue; |
| 945 | |
| 946 | // If a physical register is here, this is probably for a good reason. |
| 947 | // Do not rewrite that. |
| 948 | if (TargetRegisterInfo::isPhysicalRegister(MODef.getReg())) |
| 949 | return false; |
| 950 | |
| 951 | // If we do not know how to rewrite this definition, there is no point |
| 952 | // in trying to kill this instruction. |
| 953 | TargetInstrInfo::RegSubRegPair Def(MODef.getReg(), MODef.getSubReg()); |
| 954 | TargetInstrInfo::RegSubRegPair Src = Def; |
| 955 | if (!findNextSource(Src.Reg, Src.SubReg)) |
| 956 | return false; |
| 957 | RewritePairs.push_back(std::make_pair(Def, Src)); |
| 958 | } |
| 959 | // The change is possible for all defs, do it. |
| 960 | for (const auto &PairDefSrc : RewritePairs) { |
| 961 | const auto &Def = PairDefSrc.first; |
| 962 | const auto &Src = PairDefSrc.second; |
| 963 | // Rewrite the "copy" in a way the register coalescer understands. |
| 964 | assert(!TargetRegisterInfo::isPhysicalRegister(Def.Reg) && |
| 965 | "We do not rewrite physical registers"); |
| 966 | const TargetRegisterClass *DefRC = MRI->getRegClass(Def.Reg); |
| 967 | unsigned NewVR = MRI->createVirtualRegister(DefRC); |
| 968 | MachineInstr *NewCopy = BuildMI(*MI->getParent(), MI, MI->getDebugLoc(), |
| 969 | TII->get(TargetOpcode::COPY), |
| 970 | NewVR).addReg(Src.Reg, 0, Src.SubReg); |
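| | // Carry over the sub-register index of the original definition. A
| | // sub-register def only writes part of NewVR, so mark the operand undef:
| | // the lanes that are not written do not hold a value that must be preserved.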
| 971 | NewCopy->getOperand(0).setSubReg(Def.SubReg); |
| 972 | if (Def.SubReg) |
| 973 | NewCopy->getOperand(0).setIsUndef(); |
| 974 | LocalMIs.insert(NewCopy); |
| 975 | MRI->replaceRegWith(Def.Reg, NewVR); |
| 976 | MRI->clearKillFlags(NewVR); |
| 977 | // We extended the lifetime of Src. |
| 978 | // Clear the kill flags to account for that. |
| 979 | MRI->clearKillFlags(Src.Reg); |
| 980 | } |
| 981 | // MI is now dead. |
Quentin Colombet | cf71c63 | 2013-09-13 18:26:31 +0000 | [diff] [blame] | 982 | MI->eraseFromParent(); |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 983 | ++NumUncoalescableCopies; |
Quentin Colombet | cf71c63 | 2013-09-13 18:26:31 +0000 | [diff] [blame] | 984 | return true; |
| 985 | } |
| 986 | |
Manman Ren | 5759d01 | 2012-08-02 00:56:42 +0000 | [diff] [blame] | 987 | /// isLoadFoldable - Check whether MI is a candidate for folding into a later |
| 988 | /// instruction. We only fold loads that define a virtual register, and
| 989 | /// only when that virtual register has a single use.
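| | /// For illustration only (X86-flavored machine instructions; the actual
| | /// folding is target specific and performed later via optimizeLoadInstr):
| | ///   %vreg1 = MOV32rm <mem>          ; load with a single use
| | ///   %vreg2 = ADD32rr %vreg0, %vreg1
| | /// may be rewritten as
| | ///   %vreg2 = ADD32rm %vreg0, <mem>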
Lang Hames | 5dc14bd | 2014-04-02 22:59:58 +0000 | [diff] [blame] | 990 | bool PeepholeOptimizer::isLoadFoldable( |
| 991 | MachineInstr *MI, |
| 992 | SmallSet<unsigned, 16> &FoldAsLoadDefCandidates) { |
Manman Ren | ba8122c | 2012-08-02 19:37:32 +0000 | [diff] [blame] | 993 | if (!MI->canFoldAsLoad() || !MI->mayLoad()) |
| 994 | return false; |
| 995 | const MCInstrDesc &MCID = MI->getDesc(); |
| 996 | if (MCID.getNumDefs() != 1) |
| 997 | return false; |
| 998 | |
| 999 | unsigned Reg = MI->getOperand(0).getReg(); |
Ekaterina Romanova | 8d62008 | 2014-03-13 18:47:12 +0000 | [diff] [blame] | 1000 | // To reduce compilation time, we check MRI->hasOneNonDBGUse when inserting
Manman Ren | ba8122c | 2012-08-02 19:37:32 +0000 | [diff] [blame] | 1001 | // loads. It should be checked again when processing uses of the load, since
| 1002 | // uses can be removed during the peephole pass.
| 1003 | if (!MI->getOperand(0).getSubReg() && |
| 1004 | TargetRegisterInfo::isVirtualRegister(Reg) && |
Ekaterina Romanova | 8d62008 | 2014-03-13 18:47:12 +0000 | [diff] [blame] | 1005 | MRI->hasOneNonDBGUse(Reg)) { |
Lang Hames | 5dc14bd | 2014-04-02 22:59:58 +0000 | [diff] [blame] | 1006 | FoldAsLoadDefCandidates.insert(Reg); |
Manman Ren | ba8122c | 2012-08-02 19:37:32 +0000 | [diff] [blame] | 1007 | return true; |
Manman Ren | 5759d01 | 2012-08-02 00:56:42 +0000 | [diff] [blame] | 1008 | } |
| 1009 | return false; |
| 1010 | } |
| 1011 | |
Evan Cheng | 7f8ab6e | 2010-11-17 20:13:28 +0000 | [diff] [blame] | 1012 | bool PeepholeOptimizer::isMoveImmediate(MachineInstr *MI, |
| 1013 | SmallSet<unsigned, 4> &ImmDefRegs, |
| 1014 | DenseMap<unsigned, MachineInstr*> &ImmDefMIs) { |
Evan Cheng | 6cc775f | 2011-06-28 19:10:37 +0000 | [diff] [blame] | 1015 | const MCInstrDesc &MCID = MI->getDesc(); |
Evan Cheng | 7f8e563 | 2011-12-07 07:15:52 +0000 | [diff] [blame] | 1016 | if (!MI->isMoveImmediate()) |
Evan Cheng | 7f8ab6e | 2010-11-17 20:13:28 +0000 | [diff] [blame] | 1017 | return false; |
Evan Cheng | 6cc775f | 2011-06-28 19:10:37 +0000 | [diff] [blame] | 1018 | if (MCID.getNumDefs() != 1) |
Evan Cheng | 7f8ab6e | 2010-11-17 20:13:28 +0000 | [diff] [blame] | 1019 | return false; |
| 1020 | unsigned Reg = MI->getOperand(0).getReg(); |
| 1021 | if (TargetRegisterInfo::isVirtualRegister(Reg)) { |
| 1022 | ImmDefMIs.insert(std::make_pair(Reg, MI)); |
| 1023 | ImmDefRegs.insert(Reg); |
| 1024 | return true; |
| 1025 | } |
Andrew Trick | 9e76199 | 2012-02-08 21:22:43 +0000 | [diff] [blame] | 1026 | |
Evan Cheng | 7f8ab6e | 2010-11-17 20:13:28 +0000 | [diff] [blame] | 1027 | return false; |
| 1028 | } |
| 1029 | |
Jim Grosbach | edcb868 | 2012-05-01 23:21:41 +0000 | [diff] [blame] | 1030 | /// foldImmediate - Try folding register operands that are defined by move |
Evan Cheng | 7f8ab6e | 2010-11-17 20:13:28 +0000 | [diff] [blame] | 1031 | /// immediate instructions, i.e. a trivial constant folding optimization, if |
| 1032 | /// and only if the def and use are in the same BB. |
Jim Grosbach | edcb868 | 2012-05-01 23:21:41 +0000 | [diff] [blame] | 1033 | bool PeepholeOptimizer::foldImmediate(MachineInstr *MI, MachineBasicBlock *MBB, |
Evan Cheng | 7f8ab6e | 2010-11-17 20:13:28 +0000 | [diff] [blame] | 1034 | SmallSet<unsigned, 4> &ImmDefRegs, |
| 1035 | DenseMap<unsigned, MachineInstr*> &ImmDefMIs) { |
| 1036 | for (unsigned i = 0, e = MI->getDesc().getNumOperands(); i != e; ++i) { |
| 1037 | MachineOperand &MO = MI->getOperand(i); |
| 1038 | if (!MO.isReg() || MO.isDef()) |
| 1039 | continue; |
| 1040 | unsigned Reg = MO.getReg(); |
Jakob Stoklund Olesen | 2fb5b31 | 2011-01-10 02:58:51 +0000 | [diff] [blame] | 1041 | if (!TargetRegisterInfo::isVirtualRegister(Reg)) |
Evan Cheng | 7f8ab6e | 2010-11-17 20:13:28 +0000 | [diff] [blame] | 1042 | continue; |
| 1043 | if (ImmDefRegs.count(Reg) == 0) |
| 1044 | continue; |
| 1045 | DenseMap<unsigned, MachineInstr*>::iterator II = ImmDefMIs.find(Reg); |
| 1046 | assert(II != ImmDefMIs.end()); |
| 1047 | if (TII->FoldImmediate(MI, II->second, Reg, MRI)) { |
| 1048 | ++NumImmFold; |
| 1049 | return true; |
| 1050 | } |
| 1051 | } |
| 1052 | return false; |
| 1053 | } |
| 1054 | |
Eric Christopher | 92b4bcb | 2014-10-14 07:17:20 +0000 | [diff] [blame] | 1055 | bool PeepholeOptimizer::runOnMachineFunction(MachineFunction &mf) { |
| 1056 | if (skipOptnoneFunction(*mf.getFunction())) |
Paul Robinson | 7c99ec5 | 2014-03-31 17:43:35 +0000 | [diff] [blame] | 1057 | return false; |
| 1058 | |
Craig Topper | 588ceec | 2012-12-17 03:56:00 +0000 | [diff] [blame] | 1059 | DEBUG(dbgs() << "********** PEEPHOLE OPTIMIZER **********\n"); |
Eric Christopher | 92b4bcb | 2014-10-14 07:17:20 +0000 | [diff] [blame] | 1060 | DEBUG(dbgs() << "********** Function: " << mf.getName() << '\n'); |
Craig Topper | 588ceec | 2012-12-17 03:56:00 +0000 | [diff] [blame] | 1061 | |
Evan Cheng | 2ce016c | 2010-11-15 21:20:45 +0000 | [diff] [blame] | 1062 | if (DisablePeephole) |
| 1063 | return false; |
Andrew Trick | 9e76199 | 2012-02-08 21:22:43 +0000 | [diff] [blame] | 1064 | |
Eric Christopher | 92b4bcb | 2014-10-14 07:17:20 +0000 | [diff] [blame] | 1065 | MF = &mf; |
| 1066 | TII = MF->getSubtarget().getInstrInfo(); |
| 1067 | TRI = MF->getSubtarget().getRegisterInfo(); |
| 1068 | MRI = &MF->getRegInfo(); |
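| | // Dominator information is only needed in aggressive mode, presumably to
| | // reason about uses that live outside the defining block.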
Craig Topper | c0196b1 | 2014-04-14 00:51:57 +0000 | [diff] [blame] | 1069 | DT = Aggressive ? &getAnalysis<MachineDominatorTree>() : nullptr; |
Bill Wendling | ca67835 | 2010-08-09 23:59:04 +0000 | [diff] [blame] | 1070 | |
| 1071 | bool Changed = false; |
| 1072 | |
Eric Christopher | 92b4bcb | 2014-10-14 07:17:20 +0000 | [diff] [blame] | 1073 | for (MachineFunction::iterator I = MF->begin(), E = MF->end(); I != E; ++I) { |
Bill Wendling | ca67835 | 2010-08-09 23:59:04 +0000 | [diff] [blame] | 1074 | MachineBasicBlock *MBB = &*I; |
Andrew Trick | 9e76199 | 2012-02-08 21:22:43 +0000 | [diff] [blame] | 1075 | |
Evan Cheng | 7f8ab6e | 2010-11-17 20:13:28 +0000 | [diff] [blame] | 1076 | bool SeenMoveImm = false; |
Hans Wennborg | 941a570 | 2014-08-11 02:50:43 +0000 | [diff] [blame] | 1077 | SmallPtrSet<MachineInstr*, 16> LocalMIs; |
Lang Hames | 5dc14bd | 2014-04-02 22:59:58 +0000 | [diff] [blame] | 1078 | SmallSet<unsigned, 4> ImmDefRegs; |
| 1079 | DenseMap<unsigned, MachineInstr*> ImmDefMIs; |
| 1080 | SmallSet<unsigned, 16> FoldAsLoadDefCandidates; |
Bill Wendling | ca67835 | 2010-08-09 23:59:04 +0000 | [diff] [blame] | 1081 | |
| 1082 | for (MachineBasicBlock::iterator |
Bill Wendling | aee679b | 2010-09-10 21:55:43 +0000 | [diff] [blame] | 1083 | MII = I->begin(), MIE = I->end(); MII != MIE; ) { |
Evan Cheng | 9bf3f8e | 2011-02-14 21:50:37 +0000 | [diff] [blame] | 1084 | MachineInstr *MI = &*MII; |
Jakob Stoklund Olesen | 714f595 | 2012-08-17 14:38:59 +0000 | [diff] [blame] | 1085 | // We may be erasing MI below, increment MII now. |
| 1086 | ++MII; |
Evan Cheng | 2ce016c | 2010-11-15 21:20:45 +0000 | [diff] [blame] | 1087 | LocalMIs.insert(MI); |
Bill Wendling | ca67835 | 2010-08-09 23:59:04 +0000 | [diff] [blame] | 1088 | |
Ekaterina Romanova | 8d62008 | 2014-03-13 18:47:12 +0000 | [diff] [blame] | 1089 | // Skip debug values. They should not affect this peephole optimization. |
| 1090 | if (MI->isDebugValue()) |
| 1091 | continue; |
| 1092 | |
Manman Ren | 5759d01 | 2012-08-02 00:56:42 +0000 | [diff] [blame] | 1093 | // If we run into an instruction belonging to one of the following
Lang Hames | 5dc14bd | 2014-04-02 22:59:58 +0000 | [diff] [blame] | 1094 | // categories, discard all pending load candidates.
Rafael Espindola | b1f25f1 | 2014-03-07 06:08:31 +0000 | [diff] [blame] | 1095 | if (MI->isPosition() || MI->isPHI() || MI->isImplicitDef() || |
Ekaterina Romanova | 8d62008 | 2014-03-13 18:47:12 +0000 | [diff] [blame] | 1096 | MI->isKill() || MI->isInlineAsm() || |
Evan Cheng | 9bf3f8e | 2011-02-14 21:50:37 +0000 | [diff] [blame] | 1097 | MI->hasUnmodeledSideEffects()) { |
Lang Hames | 5dc14bd | 2014-04-02 22:59:58 +0000 | [diff] [blame] | 1098 | FoldAsLoadDefCandidates.clear(); |
Evan Cheng | 2ce016c | 2010-11-15 21:20:45 +0000 | [diff] [blame] | 1099 | continue; |
Evan Cheng | 9bf3f8e | 2011-02-14 21:50:37 +0000 | [diff] [blame] | 1100 | } |
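| | // A store or a call may change the memory a candidate load reads from,
| | // so pending candidates cannot be folded past this point.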
Manman Ren | 5759d01 | 2012-08-02 00:56:42 +0000 | [diff] [blame] | 1101 | if (MI->mayStore() || MI->isCall()) |
Lang Hames | 5dc14bd | 2014-04-02 22:59:58 +0000 | [diff] [blame] | 1102 | FoldAsLoadDefCandidates.clear(); |
Evan Cheng | 2ce016c | 2010-11-15 21:20:45 +0000 | [diff] [blame] | 1103 | |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1104 | if ((isUncoalescableCopy(*MI) && |
| 1105 | optimizeUncoalescableCopy(MI, LocalMIs)) || |
Jakob Stoklund Olesen | 2382d32 | 2012-08-16 23:11:47 +0000 | [diff] [blame] | 1106 | (MI->isCompare() && optimizeCmpInstr(MI, MBB)) || |
| 1107 | (MI->isSelect() && optimizeSelect(MI))) { |
| 1108 | // MI is deleted. |
| 1109 | LocalMIs.erase(MI); |
| 1110 | Changed = true; |
Jakob Stoklund Olesen | 2382d32 | 2012-08-16 23:11:47 +0000 | [diff] [blame] | 1111 | continue; |
Evan Cheng | 9bf3f8e | 2011-02-14 21:50:37 +0000 | [diff] [blame] | 1112 | } |
| 1113 | |
Gerolf Hoflehner | a4c96d0 | 2014-10-14 23:07:53 +0000 | [diff] [blame^] | 1114 | if (MI->isConditionalBranch() && optimizeCondBranch(MI)) { |
| 1115 | Changed = true; |
| 1116 | continue; |
| 1117 | } |
| 1118 | |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1119 | if (isCoalescableCopy(*MI) && optimizeCoalescableCopy(MI)) { |
| 1120 | // MI is just rewritten. |
| 1121 | Changed = true; |
| 1122 | continue; |
| 1123 | } |
| 1124 | |
Evan Cheng | 9bf3f8e | 2011-02-14 21:50:37 +0000 | [diff] [blame] | 1125 | if (isMoveImmediate(MI, ImmDefRegs, ImmDefMIs)) { |
Evan Cheng | 7f8ab6e | 2010-11-17 20:13:28 +0000 | [diff] [blame] | 1126 | SeenMoveImm = true; |
Bill Wendling | ca67835 | 2010-08-09 23:59:04 +0000 | [diff] [blame] | 1127 | } else { |
Jim Grosbach | edcb868 | 2012-05-01 23:21:41 +0000 | [diff] [blame] | 1128 | Changed |= optimizeExtInstr(MI, MBB, LocalMIs); |
Rafael Espindola | 048405f | 2012-10-15 18:21:07 +0000 | [diff] [blame] | 1129 | // optimizeExtInstr might have created new instructions after MI |
| 1130 | // and before the already incremented MII. Adjust MII so that the |
| 1131 | // next iteration sees the new instructions. |
| 1132 | MII = MI; |
| 1133 | ++MII; |
Evan Cheng | 7f8ab6e | 2010-11-17 20:13:28 +0000 | [diff] [blame] | 1134 | if (SeenMoveImm) |
Jim Grosbach | edcb868 | 2012-05-01 23:21:41 +0000 | [diff] [blame] | 1135 | Changed |= foldImmediate(MI, MBB, ImmDefRegs, ImmDefMIs); |
Bill Wendling | ca67835 | 2010-08-09 23:59:04 +0000 | [diff] [blame] | 1136 | } |
Evan Cheng | 98196b4 | 2011-02-15 05:00:24 +0000 | [diff] [blame] | 1137 | |
Manman Ren | 5759d01 | 2012-08-02 00:56:42 +0000 | [diff] [blame] | 1138 | // Check whether MI is a load candidate for folding into a later |
| 1139 | // instruction. If MI is not a candidate, check whether we can fold an |
| 1140 | // earlier load into MI. |
Lang Hames | 5dc14bd | 2014-04-02 22:59:58 +0000 | [diff] [blame] | 1141 | if (!isLoadFoldable(MI, FoldAsLoadDefCandidates) && |
| 1142 | !FoldAsLoadDefCandidates.empty()) { |
Lang Hames | 5dc14bd | 2014-04-02 22:59:58 +0000 | [diff] [blame] | 1143 | const MCInstrDesc &MIDesc = MI->getDesc(); |
| 1144 | for (unsigned i = MIDesc.getNumDefs(); i != MIDesc.getNumOperands(); |
| 1145 | ++i) { |
| 1146 | const MachineOperand &MOp = MI->getOperand(i); |
| 1147 | if (!MOp.isReg()) |
| 1148 | continue; |
Lang Hames | 3c0dc2a | 2014-04-03 05:03:20 +0000 | [diff] [blame] | 1149 | unsigned FoldAsLoadDefReg = MOp.getReg(); |
| 1150 | if (FoldAsLoadDefCandidates.count(FoldAsLoadDefReg)) { |
| 1151 | // We need to fold the load after optimizeCmpInstr, since
| 1152 | // optimizeCmpInstr can enable folding by converting SUB to CMP. |
| 1153 | // Save FoldAsLoadDefReg because optimizeLoadInstr() resets it and |
| 1154 | // we need it for markUsesInDebugValueAsUndef(). |
| 1155 | unsigned FoldedReg = FoldAsLoadDefReg; |
Craig Topper | c0196b1 | 2014-04-14 00:51:57 +0000 | [diff] [blame] | 1156 | MachineInstr *DefMI = nullptr; |
Lang Hames | 3c0dc2a | 2014-04-03 05:03:20 +0000 | [diff] [blame] | 1157 | MachineInstr *FoldMI = TII->optimizeLoadInstr(MI, MRI, |
| 1158 | FoldAsLoadDefReg, |
Lang Hames | 5dc14bd | 2014-04-02 22:59:58 +0000 | [diff] [blame] | 1159 | DefMI); |
| 1160 | if (FoldMI) { |
| 1161 | // Update LocalMIs since we replaced MI with FoldMI and deleted |
| 1162 | // DefMI. |
| 1163 | DEBUG(dbgs() << "Replacing: " << *MI); |
| 1164 | DEBUG(dbgs() << " With: " << *FoldMI); |
| 1165 | LocalMIs.erase(MI); |
| 1166 | LocalMIs.erase(DefMI); |
| 1167 | LocalMIs.insert(FoldMI); |
| 1168 | MI->eraseFromParent(); |
| 1169 | DefMI->eraseFromParent(); |
Lang Hames | 3c0dc2a | 2014-04-03 05:03:20 +0000 | [diff] [blame] | 1170 | MRI->markUsesInDebugValueAsUndef(FoldedReg); |
| 1171 | FoldAsLoadDefCandidates.erase(FoldedReg); |
Lang Hames | 5dc14bd | 2014-04-02 22:59:58 +0000 | [diff] [blame] | 1172 | ++NumLoadFold; |
| 1173 | // MI is replaced with FoldMI. |
| 1174 | Changed = true; |
| 1175 | break; |
| 1176 | } |
| 1177 | } |
Manman Ren | 5759d01 | 2012-08-02 00:56:42 +0000 | [diff] [blame] | 1178 | } |
| 1179 | } |
Bill Wendling | ca67835 | 2010-08-09 23:59:04 +0000 | [diff] [blame] | 1180 | } |
| 1181 | } |
| 1182 | |
| 1183 | return Changed; |
| 1184 | } |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1185 | |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1186 | bool ValueTracker::getNextSourceFromCopy(unsigned &SrcReg, |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1187 | unsigned &SrcSubReg) { |
| 1188 | assert(Def->isCopy() && "Invalid definition"); |
| 1189 | // Copy instructions are supposed to be: Def = Src.
| 1190 | // If someone breaks this assumption, bad things will happen everywhere. |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1191 | assert(Def->getNumOperands() == 2 && "Invalid number of operands"); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1192 | |
| 1193 | if (Def->getOperand(DefIdx).getSubReg() != DefSubReg) |
| 1194 | // If we look for a different subreg, it means we want a subreg of the src.
| 1195 | // Bail out, as we do not support composing subregs yet.
| 1196 | return false; |
| 1197 | // Otherwise, we want the whole source. |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1198 | const MachineOperand &Src = Def->getOperand(1); |
| 1199 | SrcReg = Src.getReg(); |
| 1200 | SrcSubReg = Src.getSubReg(); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1201 | return true; |
| 1202 | } |
| 1203 | |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1204 | bool ValueTracker::getNextSourceFromBitcast(unsigned &SrcReg, |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1205 | unsigned &SrcSubReg) { |
| 1206 | assert(Def->isBitcast() && "Invalid definition"); |
| 1207 | |
| 1208 | // Bail if there are effects that a plain copy will not expose. |
| 1209 | if (Def->hasUnmodeledSideEffects()) |
| 1210 | return false; |
| 1211 | |
| 1212 | // Bitcasts with more than one def are not supported. |
| 1213 | if (Def->getDesc().getNumDefs() != 1) |
| 1214 | return false; |
| 1215 | if (Def->getOperand(DefIdx).getSubReg() != DefSubReg) |
| 1216 | // If we look for a different subreg, it means we want a subreg of the src. |
| 1217 | // Bail out, as we do not support composing subregs yet.
| 1218 | return false; |
| 1219 | |
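| | // Find the lone register source of the bitcast; if there is more than one
| | // register operand besides the def, we cannot rewrite it as a plain copy.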
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1220 | unsigned SrcIdx = Def->getNumOperands(); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1221 | for (unsigned OpIdx = DefIdx + 1, EndOpIdx = SrcIdx; OpIdx != EndOpIdx; |
| 1222 | ++OpIdx) { |
| 1223 | const MachineOperand &MO = Def->getOperand(OpIdx); |
| 1224 | if (!MO.isReg() || !MO.getReg()) |
| 1225 | continue; |
| 1226 | assert(!MO.isDef() && "We should have skipped all the definitions by now"); |
| 1227 | if (SrcIdx != EndOpIdx) |
| 1228 | // Multiple sources? |
| 1229 | return false; |
| 1230 | SrcIdx = OpIdx; |
| 1231 | } |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1232 | const MachineOperand &Src = Def->getOperand(SrcIdx); |
| 1233 | SrcReg = Src.getReg(); |
| 1234 | SrcSubReg = Src.getSubReg(); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1235 | return true; |
| 1236 | } |
| 1237 | |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1238 | bool ValueTracker::getNextSourceFromRegSequence(unsigned &SrcReg, |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1239 | unsigned &SrcSubReg) { |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1240 | assert((Def->isRegSequence() || Def->isRegSequenceLike()) && |
| 1241 | "Invalid definition"); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1242 | |
| 1243 | if (Def->getOperand(DefIdx).getSubReg()) |
| 1244 | // If we are composing a subreg, bail out.
| 1245 | // The case we are checking is Def.<subreg> = REG_SEQUENCE. |
| 1246 | // This should almost never happen as the SSA property is tracked at |
| 1247 | // the register level (as opposed to the subreg level). |
| 1248 | // I.e., |
| 1249 | // Def.sub0 = |
| 1250 | // Def.sub1 = |
| 1251 | // is a valid SSA representation for Def.sub0 and Def.sub1, but not for |
| 1252 | // Def. Thus, it must not be generated. |
Quentin Colombet | 6d590d5 | 2014-07-01 16:23:44 +0000 | [diff] [blame] | 1253 | // However, some code could theoretically generate a single
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1254 | // Def.sub0 (i.e., not defining the other subregs) and we would
| 1255 | // have this case. |
| 1256 | // If we can ascertain (or force) that this never happens, we could |
| 1257 | // turn that into an assertion. |
| 1258 | return false; |
| 1259 | |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1260 | if (!TII) |
| 1261 | // We could handle the REG_SEQUENCE here, but we do not want to |
| 1262 | // duplicate the code from the generic TII. |
| 1263 | return false; |
| 1264 | |
| 1265 | SmallVector<TargetInstrInfo::RegSubRegPairAndIdx, 8> RegSeqInputRegs; |
| 1266 | if (!TII->getRegSequenceInputs(*Def, DefIdx, RegSeqInputRegs)) |
| 1267 | return false; |
| 1268 | |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1269 | // We are looking at: |
| 1270 | // Def = REG_SEQUENCE v0, sub0, v1, sub1, ... |
| 1271 | // Check if one of the operand defines the subreg we are interested in. |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1272 | for (auto &RegSeqInput : RegSeqInputRegs) { |
| 1273 | if (RegSeqInput.SubIdx == DefSubReg) { |
| 1274 | if (RegSeqInput.SubReg) |
| 1275 | // Bail out if we have to compose sub-registers.
| 1276 | return false; |
| 1277 | |
| 1278 | SrcReg = RegSeqInput.Reg; |
| 1279 | SrcSubReg = RegSeqInput.SubReg; |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1280 | return true; |
| 1281 | } |
| 1282 | } |
| 1283 | |
| 1284 | // If the subreg we are tracking is super-defined by another subreg, |
| 1285 | // we could follow this value. However, this would require composing
| 1286 | // the subregs, and we do not do that for now.
| 1287 | return false; |
| 1288 | } |
| 1289 | |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1290 | bool ValueTracker::getNextSourceFromInsertSubreg(unsigned &SrcReg, |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1291 | unsigned &SrcSubReg) { |
Quentin Colombet | 6896230 | 2014-08-21 00:19:16 +0000 | [diff] [blame] | 1292 | assert((Def->isInsertSubreg() || Def->isInsertSubregLike()) && |
| 1293 | "Invalid definition"); |
| 1294 | |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1295 | if (Def->getOperand(DefIdx).getSubReg()) |
| 1296 | // If we are composing a subreg, bail out.
| 1297 | // Same remark as getNextSourceFromRegSequence. |
| 1298 | // I.e., this may be turned into an assert. |
| 1299 | return false; |
| 1300 | |
Quentin Colombet | 6896230 | 2014-08-21 00:19:16 +0000 | [diff] [blame] | 1301 | if (!TII) |
| 1302 | // We could handle the INSERT_SUBREG here, but we do not want to
| 1303 | // duplicate the code from the generic TII. |
| 1304 | return false; |
| 1305 | |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1306 | TargetInstrInfo::RegSubRegPair BaseReg; |
| 1307 | TargetInstrInfo::RegSubRegPairAndIdx InsertedReg; |
Quentin Colombet | 6896230 | 2014-08-21 00:19:16 +0000 | [diff] [blame] | 1308 | if (!TII->getInsertSubregInputs(*Def, DefIdx, BaseReg, InsertedReg)) |
| 1309 | return false; |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1310 | |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1311 | // We are looking at: |
| 1312 | // Def = INSERT_SUBREG v0, v1, sub1 |
| 1313 | // There are two cases: |
| 1314 | // 1. DefSubReg == sub1, get v1. |
| 1315 | // 2. DefSubReg != sub1, the value may be available through v0. |
| 1316 | |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1317 | // #1 Check if the inserted register matches the required sub index. |
| 1318 | if (InsertedReg.SubIdx == DefSubReg) { |
| 1319 | SrcReg = InsertedReg.Reg; |
| 1320 | SrcSubReg = InsertedReg.SubReg; |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1321 | return true; |
| 1322 | } |
| 1323 | // #2 Otherwise, if the sub-register we are looking for is not partially
| 1324 | // defined by the inserted element, we can look through the main |
| 1325 | // register (v0). |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1326 | const MachineOperand &MODef = Def->getOperand(DefIdx); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1327 | // If the result register (Def) and the base register (v0) do not |
| 1328 | // have the same register class or if we have to compose |
| 1329 | // subregisters, bail out.
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1330 | if (MRI.getRegClass(MODef.getReg()) != MRI.getRegClass(BaseReg.Reg) || |
| 1331 | BaseReg.SubReg) |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1332 | return false; |
| 1333 | |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1334 | // Get the TRI and check whether the inserted sub-register overlaps with the
| 1335 | // sub-register we are tracking; if it does, part of the tracked value comes
| | // from the inserted element and we cannot simply look through v0.
| 1336 | const TargetRegisterInfo *TRI = MRI.getTargetRegisterInfo(); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1337 | if (!TRI || |
| 1338 | (TRI->getSubRegIndexLaneMask(DefSubReg) & |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1339 | TRI->getSubRegIndexLaneMask(InsertedReg.SubIdx)) != 0) |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1340 | return false; |
| 1341 | // At this point, the value is available in v0 via the same subreg |
| 1342 | // we used for Def. |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1343 | SrcReg = BaseReg.Reg; |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1344 | SrcSubReg = DefSubReg; |
| 1345 | return true; |
| 1346 | } |
| 1347 | |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1348 | bool ValueTracker::getNextSourceFromExtractSubreg(unsigned &SrcReg, |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1349 | unsigned &SrcSubReg) { |
Quentin Colombet | 67639df | 2014-08-20 23:13:02 +0000 | [diff] [blame] | 1350 | assert((Def->isExtractSubreg() || |
| 1351 | Def->isExtractSubregLike()) && "Invalid definition"); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1352 | // We are looking at: |
| 1353 | // Def = EXTRACT_SUBREG v0, sub0 |
| 1354 | |
| 1355 | // Bail out if we have to compose sub-registers.
| 1356 | // Indeed, if DefSubReg != 0, we would have to compose it with sub0. |
| 1357 | if (DefSubReg) |
| 1358 | return false; |
| 1359 | |
Quentin Colombet | 67639df | 2014-08-20 23:13:02 +0000 | [diff] [blame] | 1360 | if (!TII) |
| 1361 | // We could handle the EXTRACT_SUBREG here, but we do not want to |
| 1362 | // duplicate the code from the generic TII. |
| 1363 | return false; |
| 1364 | |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1365 | TargetInstrInfo::RegSubRegPairAndIdx ExtractSubregInputReg; |
Quentin Colombet | 67639df | 2014-08-20 23:13:02 +0000 | [diff] [blame] | 1366 | if (!TII->getExtractSubregInputs(*Def, DefIdx, ExtractSubregInputReg)) |
| 1367 | return false; |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1368 | |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1369 | // Bails if we have to compose sub registers. |
| 1370 | // Likewise, if v0.subreg != 0, we would have to compose v0.subreg with sub0. |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1371 | if (ExtractSubregInputReg.SubReg) |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1372 | return false; |
| 1373 | // Otherwise, the value is available in v0.sub0.
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1374 | SrcReg = ExtractSubregInputReg.Reg; |
| 1375 | SrcSubReg = ExtractSubregInputReg.SubIdx; |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1376 | return true; |
| 1377 | } |
| 1378 | |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1379 | bool ValueTracker::getNextSourceFromSubregToReg(unsigned &SrcReg, |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1380 | unsigned &SrcSubReg) { |
| 1381 | assert(Def->isSubregToReg() && "Invalid definition"); |
| 1382 | // We are looking at: |
| 1383 | // Def = SUBREG_TO_REG Imm, v0, sub0 |
| 1384 | |
| 1385 | // Bail out if we have to compose sub-registers.
| 1386 | // If DefSubReg != sub0, we would have to check that all the bits |
| 1387 | // we track are included in sub0 and if yes, we would have to |
| 1388 | // determine the right subreg in v0. |
| 1389 | if (DefSubReg != Def->getOperand(3).getImm()) |
| 1390 | return false; |
| 1391 | // Bail out if we have to compose sub-registers.
| 1392 | // Likewise, if v0.subreg != 0, we would have to compose it with sub0. |
| 1393 | if (Def->getOperand(2).getSubReg()) |
| 1394 | return false; |
| 1395 | |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1396 | SrcReg = Def->getOperand(2).getReg(); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1397 | SrcSubReg = Def->getOperand(3).getImm(); |
| 1398 | return true; |
| 1399 | } |
| 1400 | |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1401 | bool ValueTracker::getNextSourceImpl(unsigned &SrcReg, unsigned &SrcSubReg) { |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1402 | assert(Def && "This method needs a valid definition"); |
| 1403 | |
| 1404 | assert( |
| 1405 | (DefIdx < Def->getDesc().getNumDefs() || Def->getDesc().isVariadic()) && |
| 1406 | Def->getOperand(DefIdx).isDef() && "Invalid DefIdx"); |
| 1407 | if (Def->isCopy()) |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1408 | return getNextSourceFromCopy(SrcReg, SrcSubReg); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1409 | if (Def->isBitcast()) |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1410 | return getNextSourceFromBitcast(SrcReg, SrcSubReg); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1411 | // All the remaining cases involve "complex" instructions. |
| 1412 | // Bail out if we did not ask for advanced tracking.
| 1413 | if (!UseAdvancedTracking) |
| 1414 | return false; |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1415 | if (Def->isRegSequence() || Def->isRegSequenceLike()) |
| 1416 | return getNextSourceFromRegSequence(SrcReg, SrcSubReg); |
Quentin Colombet | 6896230 | 2014-08-21 00:19:16 +0000 | [diff] [blame] | 1417 | if (Def->isInsertSubreg() || Def->isInsertSubregLike()) |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1418 | return getNextSourceFromInsertSubreg(SrcReg, SrcSubReg); |
Quentin Colombet | 67639df | 2014-08-20 23:13:02 +0000 | [diff] [blame] | 1419 | if (Def->isExtractSubreg() || Def->isExtractSubregLike()) |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1420 | return getNextSourceFromExtractSubreg(SrcReg, SrcSubReg); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1421 | if (Def->isSubregToReg()) |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1422 | return getNextSourceFromSubregToReg(SrcReg, SrcSubReg); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1423 | return false; |
| 1424 | } |
| 1425 | |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1426 | const MachineInstr *ValueTracker::getNextSource(unsigned &SrcReg, |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1427 | unsigned &SrcSubReg) { |
| 1428 | // If we reach a point where we cannot move up in the use-def chain, |
| 1429 | // there is nothing we can get. |
| 1430 | if (!Def) |
| 1431 | return nullptr; |
| 1432 | |
| 1433 | const MachineInstr *PrevDef = nullptr; |
| 1434 | // Try to find the next source. |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1435 | if (getNextSourceImpl(SrcReg, SrcSubReg)) { |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1436 | // Update definition, definition index, and subregister for the |
| 1437 | // next call of getNextSource. |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1438 | // Update the current register. |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1439 | Reg = SrcReg; |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1440 | // Update the return value before moving up in the use-def chain. |
| 1441 | PrevDef = Def; |
| 1442 | // If we can still move up in the use-def chain, move to the next |
| 1443 | // definition.
| 1444 | if (!TargetRegisterInfo::isPhysicalRegister(Reg)) { |
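| | // The code is in machine SSA form here, so a virtual register has a single
| | // definition; def_begin() therefore hands us its defining operand.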
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1445 | Def = MRI.getVRegDef(Reg); |
| 1446 | DefIdx = MRI.def_begin(Reg).getOperandNo(); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1447 | DefSubReg = SrcSubReg; |
| 1448 | return PrevDef; |
| 1449 | } |
| 1450 | } |
| 1451 | // If we end up here, this means we will not be able to find another source |
| 1452 | // for the next iteration. |
| 1453 | // Make sure any new call to getNextSource bails out early by cutting the |
| 1454 | // use-def chain. |
| 1455 | Def = nullptr; |
| 1456 | return PrevDef; |
| 1457 | } |