//===-- PeepholeOptimizer.cpp - Peephole Optimizations --------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// Perform peephole optimizations on the machine code:
//
// - Optimize Extensions
//
//     Optimization of sign / zero extension instructions. It may be extended
//     to handle other instructions with similar properties.
//
//     On some targets, some instructions, e.g. X86 sign / zero extension, may
//     leave the source value in the lower part of the result. This
//     optimization will replace some uses of the pre-extension value with
//     uses of the sub-register of the result.
//
// - Optimize Comparisons
//
//     Optimization of comparison instructions. For instance, in this code:
//
//       sub r1, 1
//       cmp r1, 0
//       bz  L1
//
//     If the "sub" instruction already sets (or could be modified to set) the
//     same flag that the "cmp" instruction sets and that "bz" uses, then we
//     can eliminate the "cmp" instruction.
//
//     Another instance, in this code:
//
//       sub r1, r3 | sub r1, imm
//       cmp r3, r1 or cmp r1, r3 | cmp r1, imm
//       bge L1
//
//     If the branch instruction can use the flag from "sub", then we can
//     replace "sub" with "subs" and eliminate the "cmp" instruction.
//
// - Optimize Loads:
//
//     Loads that can be folded into a later instruction. A load is foldable
//     if it loads into a virtual register and the virtual register defined
//     has a single use.
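//
//     For instance (an illustrative sketch; the mnemonics below are made-up
//     X86-style names, and whether a fold happens is decided per target):
//
//       %vreg1 = MOV32rm <mem>            ; load with a single use
//       %vreg2 = ADD32rr %vreg0, %vreg1
//       =>
//       %vreg2 = ADD32rm %vreg0, <mem>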
//
// - Optimize Copies and Bitcasts:
//
//     Rewrite copies and bitcasts to avoid cross register bank copies
//     when possible.
//     E.g., consider the following example, where capital and lower
//     letters denote different register files:
//     b = copy A <-- cross-bank copy
//     C = copy b <-- cross-bank copy
//     =>
//     b = copy A <-- cross-bank copy
//     C = copy A <-- same-bank copy
//
//     E.g., for bitcast:
//     b = bitcast A <-- cross-bank copy
//     C = bitcast b <-- cross-bank copy
//     =>
//     b = bitcast A <-- cross-bank copy
//     C = copy A    <-- same-bank copy
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/Passes.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetRegisterInfo.h"
using namespace llvm;

#define DEBUG_TYPE "peephole-opt"

// Optimize Extensions
static cl::opt<bool>
Aggressive("aggressive-ext-opt", cl::Hidden,
           cl::desc("Aggressive extension optimization"));

static cl::opt<bool>
DisablePeephole("disable-peephole", cl::Hidden, cl::init(false),
                cl::desc("Disable the peephole optimizer"));

static cl::opt<bool>
DisableAdvCopyOpt("disable-adv-copy-opt", cl::Hidden, cl::init(true),
                  cl::desc("Disable advanced copy optimization"));

STATISTIC(NumReuse,          "Number of extension results reused");
STATISTIC(NumCmps,           "Number of compares eliminated");
STATISTIC(NumImmFold,        "Number of move immediate folded");
STATISTIC(NumLoadFold,       "Number of loads folded");
STATISTIC(NumSelects,        "Number of selects optimized");
STATISTIC(NumCopiesBitcasts, "Number of copies/bitcasts optimized");

namespace {
  class PeepholeOptimizer : public MachineFunctionPass {
    const TargetMachine   *TM;
    const TargetInstrInfo *TII;
    MachineRegisterInfo   *MRI;
    MachineDominatorTree  *DT;  // Machine dominator tree

  public:
    static char ID; // Pass identification
    PeepholeOptimizer() : MachineFunctionPass(ID) {
      initializePeepholeOptimizerPass(*PassRegistry::getPassRegistry());
    }

    bool runOnMachineFunction(MachineFunction &MF) override;

    void getAnalysisUsage(AnalysisUsage &AU) const override {
      AU.setPreservesCFG();
      MachineFunctionPass::getAnalysisUsage(AU);
      if (Aggressive) {
        AU.addRequired<MachineDominatorTree>();
        AU.addPreserved<MachineDominatorTree>();
      }
    }

  private:
    bool optimizeCmpInstr(MachineInstr *MI, MachineBasicBlock *MBB);
    bool optimizeExtInstr(MachineInstr *MI, MachineBasicBlock *MBB,
                          SmallPtrSet<MachineInstr*, 8> &LocalMIs);
    bool optimizeSelect(MachineInstr *MI);
    bool optimizeCopyOrBitcast(MachineInstr *MI);
    bool isMoveImmediate(MachineInstr *MI,
                         SmallSet<unsigned, 4> &ImmDefRegs,
                         DenseMap<unsigned, MachineInstr*> &ImmDefMIs);
    bool foldImmediate(MachineInstr *MI, MachineBasicBlock *MBB,
                       SmallSet<unsigned, 4> &ImmDefRegs,
                       DenseMap<unsigned, MachineInstr*> &ImmDefMIs);
    bool isLoadFoldable(MachineInstr *MI,
                        SmallSet<unsigned, 16> &FoldAsLoadDefCandidates);
  };

  /// \brief Helper class to track the possible sources of a value defined by
  /// a (chain of) copy-related instructions.
  /// Given a definition (instruction and definition index), this class
  /// follows the use-def chain to find successive suitable sources.
  /// The given source can be used to rewrite the definition into
  /// def = COPY src.
  ///
  /// For instance, let us consider the following snippet:
  /// v0 =
  /// v2 = INSERT_SUBREG v1, v0, sub0
  /// def = COPY v2.sub0
  ///
  /// Using a ValueTracker for def = COPY v2.sub0 will give the following
  /// suitable sources:
  /// v2.sub0 and v0.
  /// Then, def can be rewritten into def = COPY v0.
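  ///
  /// A typical walk over a chain looks like the following sketch (the real
  /// driver is PeepholeOptimizer::optimizeCopyOrBitcast below):
  ///   ValueTracker VT(*MI, DefIdx, DefSubReg,
  ///                   /*UseAdvancedTracking=*/true, MRI);
  ///   unsigned SrcIdx, SrcSubReg;
  ///   while (VT.getNextSource(SrcIdx, SrcSubReg))
  ///     /* examine VT.getReg() and SrcSubReg */;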
  class ValueTracker {
  private:
    /// The current point into the use-def chain.
    const MachineInstr *Def;
    /// The index of the definition in Def.
    unsigned DefIdx;
    /// The sub register index of the definition.
    unsigned DefSubReg;
    /// The register where the value can be found.
    unsigned Reg;
    /// Specify whether or not the value tracking looks through
    /// complex instructions. When this is false, the value tracker
    /// bails on everything that is not a copy or a bitcast.
    ///
    /// Note: This could have been implemented as a specialized version of
    /// the ValueTracker class but that would have complicated the code of
    /// the users of this class.
    bool UseAdvancedTracking;
    /// Optional MachineRegisterInfo used to perform some complex
    /// tracking.
    const MachineRegisterInfo *MRI;

    /// \brief Dispatcher to the right underlying implementation of
    /// getNextSource.
    bool getNextSourceImpl(unsigned &SrcIdx, unsigned &SrcSubReg);
    /// \brief Specialized version of getNextSource for Copy instructions.
    bool getNextSourceFromCopy(unsigned &SrcIdx, unsigned &SrcSubReg);
    /// \brief Specialized version of getNextSource for Bitcast instructions.
    bool getNextSourceFromBitcast(unsigned &SrcIdx, unsigned &SrcSubReg);
    /// \brief Specialized version of getNextSource for RegSequence
    /// instructions.
    bool getNextSourceFromRegSequence(unsigned &SrcIdx, unsigned &SrcSubReg);
    /// \brief Specialized version of getNextSource for InsertSubreg
    /// instructions.
    bool getNextSourceFromInsertSubreg(unsigned &SrcIdx, unsigned &SrcSubReg);
    /// \brief Specialized version of getNextSource for ExtractSubreg
    /// instructions.
    bool getNextSourceFromExtractSubreg(unsigned &SrcIdx, unsigned &SrcSubReg);
    /// \brief Specialized version of getNextSource for SubregToReg
    /// instructions.
    bool getNextSourceFromSubregToReg(unsigned &SrcIdx, unsigned &SrcSubReg);

  public:
    /// \brief Create a ValueTracker instance for the value defined by \p MI
    /// at the operand index \p DefIdx.
    /// \p DefSubReg represents the sub register index the value tracker will
    /// track. It does not need to match the sub register index used in \p MI.
    /// \p UseAdvancedTracking specifies whether or not the value tracker looks
    /// through complex instructions. By default (false), it handles only copy
    /// and bitcast instructions.
    /// \p MRI is useful to perform some complex checks.
    ValueTracker(const MachineInstr &MI, unsigned DefIdx, unsigned DefSubReg,
                 bool UseAdvancedTracking = false,
                 const MachineRegisterInfo *MRI = nullptr)
        : Def(&MI), DefIdx(DefIdx), DefSubReg(DefSubReg),
          UseAdvancedTracking(UseAdvancedTracking), MRI(MRI) {
      assert(Def->getOperand(DefIdx).isDef() &&
             Def->getOperand(DefIdx).isReg() &&
             "Definition does not match machine instruction");
      // Initially the value is in the defined register.
      Reg = Def->getOperand(DefIdx).getReg();
    }

    /// \brief Following the use-def chain, get the next available source
    /// for the tracked value.
    /// When the returned value is not nullptr, getReg() gives the register
    /// that contains the tracked value.
    /// \note The sub register index returned in \p SrcSubReg must be used
    /// on that getReg() to access the actual value.
    /// \return Unless the returned value is nullptr (i.e., no source found),
    /// \p SrcIdx gives the index of the next source in the returned
    /// instruction and \p SrcSubReg the index to be used on that source to
    /// get the tracked value. When nullptr is returned, no alternative source
    /// has been found.
    const MachineInstr *getNextSource(unsigned &SrcIdx, unsigned &SrcSubReg);

    /// \brief Get the last register where the initial value can be found.
    /// Initially this is the register of the definition.
    /// Then, after each successful call to getNextSource, this is the
    /// register of the last source.
    unsigned getReg() const { return Reg; }
  };
}

char PeepholeOptimizer::ID = 0;
char &llvm::PeepholeOptimizerID = PeepholeOptimizer::ID;
INITIALIZE_PASS_BEGIN(PeepholeOptimizer, "peephole-opts",
                      "Peephole Optimizations", false, false)
INITIALIZE_PASS_DEPENDENCY(MachineDominatorTree)
INITIALIZE_PASS_END(PeepholeOptimizer, "peephole-opts",
                    "Peephole Optimizations", false, false)

/// optimizeExtInstr - If instruction is a copy-like instruction, i.e. it reads
/// a single register and writes a single register and it does not modify the
/// source, and if the source value is preserved as a sub-register of the
/// result, then replace all reachable uses of the source with the subreg of
/// the result.
///
/// Do not generate an EXTRACT that is used only in a debug use, as this
/// changes the code. Since this code does not currently share EXTRACTs, just
/// ignore all debug uses.
bool PeepholeOptimizer::
optimizeExtInstr(MachineInstr *MI, MachineBasicBlock *MBB,
                 SmallPtrSet<MachineInstr*, 8> &LocalMIs) {
  unsigned SrcReg, DstReg, SubIdx;
  if (!TII->isCoalescableExtInstr(*MI, SrcReg, DstReg, SubIdx))
    return false;

  if (TargetRegisterInfo::isPhysicalRegister(DstReg) ||
      TargetRegisterInfo::isPhysicalRegister(SrcReg))
    return false;

  if (MRI->hasOneNonDBGUse(SrcReg))
    // No other uses.
    return false;

  // Ensure DstReg can get a register class that actually supports
  // sub-registers. Don't change the class until we commit.
  const TargetRegisterClass *DstRC = MRI->getRegClass(DstReg);
  DstRC = TM->getRegisterInfo()->getSubClassWithSubReg(DstRC, SubIdx);
  if (!DstRC)
    return false;

  // The ext instr may be operating on a sub-register of SrcReg as well.
  // PPC::EXTSW is a 32 -> 64-bit sign extension, but it reads a 64-bit
  // register.
  // If UseSrcSubIdx is Set, SubIdx also applies to SrcReg, and only uses of
  // SrcReg:SubIdx should be replaced.
  bool UseSrcSubIdx = TM->getRegisterInfo()->
    getSubClassWithSubReg(MRI->getRegClass(SrcReg), SubIdx) != nullptr;

  // The source has other uses. See if we can replace the other uses with use
  // of the result of the extension.
  SmallPtrSet<MachineBasicBlock*, 4> ReachedBBs;
  for (MachineInstr &UI : MRI->use_nodbg_instructions(DstReg))
    ReachedBBs.insert(UI.getParent());

  // Uses that are in the same BB of uses of the result of the instruction.
  SmallVector<MachineOperand*, 8> Uses;

  // Uses that the result of the instruction can reach.
  SmallVector<MachineOperand*, 8> ExtendedUses;

  bool ExtendLife = true;
  for (MachineOperand &UseMO : MRI->use_nodbg_operands(SrcReg)) {
    MachineInstr *UseMI = UseMO.getParent();
    if (UseMI == MI)
      continue;

    if (UseMI->isPHI()) {
      ExtendLife = false;
      continue;
    }

    // Only accept uses of SrcReg:SubIdx.
    if (UseSrcSubIdx && UseMO.getSubReg() != SubIdx)
      continue;

    // It's an error to translate this:
    //
    //    %reg1025 = <sext> %reg1024
    //     ...
    //    %reg1026 = SUBREG_TO_REG 0, %reg1024, 4
    //
    // into this:
    //
    //    %reg1025 = <sext> %reg1024
    //     ...
    //    %reg1027 = COPY %reg1025:4
    //    %reg1026 = SUBREG_TO_REG 0, %reg1027, 4
    //
    // The problem here is that SUBREG_TO_REG is there to assert that an
    // implicit zext occurs. It doesn't insert a zext instruction. If we allow
    // the COPY here, it will give us the value after the <sext>, not the
    // original value of %reg1024 before <sext>.
    if (UseMI->getOpcode() == TargetOpcode::SUBREG_TO_REG)
      continue;

    MachineBasicBlock *UseMBB = UseMI->getParent();
    if (UseMBB == MBB) {
      // Local uses that come after the extension.
      if (!LocalMIs.count(UseMI))
        Uses.push_back(&UseMO);
    } else if (ReachedBBs.count(UseMBB)) {
      // Non-local uses where the result of the extension is used. Always
      // replace these unless it's a PHI.
      Uses.push_back(&UseMO);
    } else if (Aggressive && DT->dominates(MBB, UseMBB)) {
      // We may want to extend the live range of the extension result in order
      // to replace these uses.
      ExtendedUses.push_back(&UseMO);
    } else {
      // Both will be live out of the def MBB anyway. Don't extend live range
      // of the extension result.
      ExtendLife = false;
      break;
    }
  }

  if (ExtendLife && !ExtendedUses.empty())
    // Extend the liveness of the extension result.
    std::copy(ExtendedUses.begin(), ExtendedUses.end(),
              std::back_inserter(Uses));

  // Now replace all uses.
  bool Changed = false;
  if (!Uses.empty()) {
    SmallPtrSet<MachineBasicBlock*, 4> PHIBBs;

    // Look for PHI uses of the extended result, we don't want to extend the
    // liveness of a PHI input. It breaks all kinds of assumptions down
    // stream. A PHI use is expected to be the kill of its source values.
    for (MachineInstr &UI : MRI->use_nodbg_instructions(DstReg))
      if (UI.isPHI())
        PHIBBs.insert(UI.getParent());

    const TargetRegisterClass *RC = MRI->getRegClass(SrcReg);
    for (unsigned i = 0, e = Uses.size(); i != e; ++i) {
      MachineOperand *UseMO = Uses[i];
      MachineInstr *UseMI = UseMO->getParent();
      MachineBasicBlock *UseMBB = UseMI->getParent();
      if (PHIBBs.count(UseMBB))
        continue;

      // About to add uses of DstReg, clear DstReg's kill flags.
      if (!Changed) {
        MRI->clearKillFlags(DstReg);
        MRI->constrainRegClass(DstReg, DstRC);
      }

      unsigned NewVR = MRI->createVirtualRegister(RC);
      MachineInstr *Copy = BuildMI(*UseMBB, UseMI, UseMI->getDebugLoc(),
                                   TII->get(TargetOpcode::COPY), NewVR)
        .addReg(DstReg, 0, SubIdx);
      // SubIdx applies to both SrcReg and DstReg when UseSrcSubIdx is set.
      if (UseSrcSubIdx) {
        Copy->getOperand(0).setSubReg(SubIdx);
        Copy->getOperand(0).setIsUndef();
      }
      UseMO->setReg(NewVR);
      ++NumReuse;
      Changed = true;
    }
  }

  return Changed;
}

/// optimizeCmpInstr - If the instruction is a compare and the previous
/// instruction it's comparing against already sets (or could be modified to
/// set) the same flag as the compare, then we can remove the comparison and
/// use the flag from the previous instruction.
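///
/// For instance (an illustrative sketch mirroring the example in the file
/// header; the exact mnemonics are target-specific):
///   subs r1, r1, #1   ; flag-setting subtract
///   cmp  r1, #0       ; redundant: tests the same flags
///   bz   L1
/// becomes:
///   subs r1, r1, #1
///   bz   L1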
bool PeepholeOptimizer::optimizeCmpInstr(MachineInstr *MI,
                                         MachineBasicBlock *MBB) {
  // If this instruction is a comparison against zero and isn't comparing a
  // physical register, we can try to optimize it.
  unsigned SrcReg, SrcReg2;
  int CmpMask, CmpValue;
  if (!TII->analyzeCompare(MI, SrcReg, SrcReg2, CmpMask, CmpValue) ||
      TargetRegisterInfo::isPhysicalRegister(SrcReg) ||
      (SrcReg2 != 0 && TargetRegisterInfo::isPhysicalRegister(SrcReg2)))
    return false;

  // Attempt to optimize the comparison instruction.
  if (TII->optimizeCompareInstr(MI, SrcReg, SrcReg2, CmpMask, CmpValue, MRI)) {
    ++NumCmps;
    return true;
  }

  return false;
}

/// Optimize a select instruction.
bool PeepholeOptimizer::optimizeSelect(MachineInstr *MI) {
  unsigned TrueOp = 0;
  unsigned FalseOp = 0;
  bool Optimizable = false;
  SmallVector<MachineOperand, 4> Cond;
  if (TII->analyzeSelect(MI, Cond, TrueOp, FalseOp, Optimizable))
    return false;
  if (!Optimizable)
    return false;
  if (!TII->optimizeSelect(MI))
    return false;
  MI->eraseFromParent();
  ++NumSelects;
  return true;
}

/// \brief Check if the registers defined by the pair (RegisterClass, SubReg)
/// share the same register file.
static bool shareSameRegisterFile(const TargetRegisterInfo &TRI,
                                  const TargetRegisterClass *DefRC,
                                  unsigned DefSubReg,
                                  const TargetRegisterClass *SrcRC,
                                  unsigned SrcSubReg) {
  // Same register class.
  if (DefRC == SrcRC)
    return true;

  // Both operands are sub registers. Check if they share a register class.
  unsigned SrcIdx, DefIdx;
  if (SrcSubReg && DefSubReg)
    return TRI.getCommonSuperRegClass(SrcRC, SrcSubReg, DefRC, DefSubReg,
                                      SrcIdx, DefIdx) != nullptr;
  // At most one of the registers is a sub register; make it Src to avoid
  // duplicating the test.
  if (!SrcSubReg) {
    std::swap(DefSubReg, SrcSubReg);
    std::swap(DefRC, SrcRC);
  }

  // One of the registers is a sub register. Check if we can get a superclass.
  if (SrcSubReg)
    return TRI.getMatchingSuperRegClass(SrcRC, DefRC, SrcSubReg) != nullptr;
  // Plain copy.
  return TRI.getCommonSubClass(DefRC, SrcRC) != nullptr;
}

/// \brief Get the index of the definition and source for \p Copy
/// instruction.
/// \pre Copy.isCopy() or Copy.isBitcast().
/// \return True if the Copy instruction has only one register source
/// and one register definition. Otherwise, \p DefIdx and \p SrcIdx
/// are invalid.
static bool getCopyOrBitcastDefUseIdx(const MachineInstr &Copy,
                                      unsigned &DefIdx, unsigned &SrcIdx) {
  assert((Copy.isCopy() || Copy.isBitcast()) && "Wrong operation type.");
  if (Copy.isCopy()) {
    // Copy instructions are supposed to be: Def = Src.
    if (Copy.getDesc().getNumOperands() != 2)
      return false;
    DefIdx = 0;
    SrcIdx = 1;
    assert(Copy.getOperand(DefIdx).isDef() && "Use comes before def!");
    return true;
  }
  // Bitcast case.
  // Bitcasts with more than one def are not supported.
  if (Copy.getDesc().getNumDefs() != 1)
    return false;
  // Initialize SrcIdx to an undefined operand.
  SrcIdx = Copy.getDesc().getNumOperands();
  for (unsigned OpIdx = 0, EndOpIdx = SrcIdx; OpIdx != EndOpIdx; ++OpIdx) {
    const MachineOperand &MO = Copy.getOperand(OpIdx);
    if (!MO.isReg() || !MO.getReg())
      continue;
    if (MO.isDef())
      DefIdx = OpIdx;
    else if (SrcIdx != EndOpIdx)
      // Multiple sources?
      return false;
    SrcIdx = OpIdx;
  }
  return true;
}

/// \brief Optimize a copy or bitcast instruction to avoid cross
/// register bank copies. The optimization looks through a chain of
/// copies and tries to find a source that has a compatible register
/// class.
/// Two register classes are considered to be compatible if they share
/// the same register bank.
/// New copies issued by this optimization are register allocator
/// friendly. This optimization does not remove any copy as it may
/// overconstrain the register allocator; instead it replaces some
/// copies when possible.
/// \pre \p MI is a Copy or a Bitcast (MI->isCopy() or MI->isBitcast()
/// is true).
/// \return True, when \p MI has been optimized. In that case, \p MI has
/// been removed from its parent.
bool PeepholeOptimizer::optimizeCopyOrBitcast(MachineInstr *MI) {
  unsigned DefIdx, SrcIdx;
  if (!MI || !getCopyOrBitcastDefUseIdx(*MI, DefIdx, SrcIdx))
    return false;

  const MachineOperand &MODef = MI->getOperand(DefIdx);
  assert(MODef.isReg() && "Copies must be between registers.");
  unsigned Def = MODef.getReg();

  if (TargetRegisterInfo::isPhysicalRegister(Def))
    return false;

  const TargetRegisterClass *DefRC = MRI->getRegClass(Def);
  unsigned DefSubReg = MODef.getSubReg();

  unsigned Src;
  unsigned SrcSubReg;
  bool ShouldRewrite = false;
  const TargetRegisterInfo &TRI = *TM->getRegisterInfo();

  // Follow the chain of copies until we reach the top of the use-def chain
  // or find a more suitable source.
  ValueTracker ValTracker(*MI, DefIdx, DefSubReg, !DisableAdvCopyOpt, MRI);
  do {
    unsigned CopySrcIdx, CopySrcSubReg;
    if (!ValTracker.getNextSource(CopySrcIdx, CopySrcSubReg))
      break;
    Src = ValTracker.getReg();
    SrcSubReg = CopySrcSubReg;

    // Do not extend the live-ranges of physical registers as they add
    // constraints to the register allocator. Moreover, if we wanted to extend
    // the live-range of a physical register, unlike an SSA virtual register,
    // we would have to check that it is not redefined before the related use.
    if (TargetRegisterInfo::isPhysicalRegister(Src))
      break;

    const TargetRegisterClass *SrcRC = MRI->getRegClass(Src);

    // If this source does not incur a cross register bank copy, use it.
    ShouldRewrite = shareSameRegisterFile(TRI, DefRC, DefSubReg, SrcRC,
                                          SrcSubReg);
  } while (!ShouldRewrite);

  // If we did not find a more suitable source, there is nothing to optimize.
  if (!ShouldRewrite || Src == MI->getOperand(SrcIdx).getReg())
    return false;

  // Rewrite the copy to avoid a cross register bank penalty.
  unsigned NewVR = TargetRegisterInfo::isPhysicalRegister(Def) ? Def :
    MRI->createVirtualRegister(DefRC);
  MachineInstr *NewCopy = BuildMI(*MI->getParent(), MI, MI->getDebugLoc(),
                                  TII->get(TargetOpcode::COPY), NewVR)
    .addReg(Src, 0, SrcSubReg);
  NewCopy->getOperand(0).setSubReg(DefSubReg);

  MRI->replaceRegWith(Def, NewVR);
  MRI->clearKillFlags(NewVR);
  // We extended the lifetime of Src.
  // Clear the kill flags to account for that.
  MRI->clearKillFlags(Src);
  MI->eraseFromParent();
  ++NumCopiesBitcasts;
  return true;
}

/// isLoadFoldable - Check whether MI is a candidate for folding into a later
/// instruction. We only fold loads into virtual registers, and the virtual
/// register defined must have a single use.
bool PeepholeOptimizer::isLoadFoldable(
    MachineInstr *MI,
    SmallSet<unsigned, 16> &FoldAsLoadDefCandidates) {
  if (!MI->canFoldAsLoad() || !MI->mayLoad())
    return false;
  const MCInstrDesc &MCID = MI->getDesc();
  if (MCID.getNumDefs() != 1)
    return false;

  unsigned Reg = MI->getOperand(0).getReg();
  // To reduce compilation time, we check MRI->hasOneNonDBGUse when inserting
  // loads. It should be checked when processing uses of the load, since
  // uses can be removed during peephole.
  if (!MI->getOperand(0).getSubReg() &&
      TargetRegisterInfo::isVirtualRegister(Reg) &&
      MRI->hasOneNonDBGUse(Reg)) {
    FoldAsLoadDefCandidates.insert(Reg);
    return true;
  }
  return false;
}

bool PeepholeOptimizer::isMoveImmediate(MachineInstr *MI,
                                        SmallSet<unsigned, 4> &ImmDefRegs,
                                 DenseMap<unsigned, MachineInstr*> &ImmDefMIs) {
  const MCInstrDesc &MCID = MI->getDesc();
  if (!MI->isMoveImmediate())
    return false;
  if (MCID.getNumDefs() != 1)
    return false;
  unsigned Reg = MI->getOperand(0).getReg();
  if (TargetRegisterInfo::isVirtualRegister(Reg)) {
    ImmDefMIs.insert(std::make_pair(Reg, MI));
    ImmDefRegs.insert(Reg);
    return true;
  }

  return false;
}

/// foldImmediate - Try folding register operands that are defined by move
/// immediate instructions, i.e. a trivial constant folding optimization, if
/// and only if the def and use are in the same BB.
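///
/// For example (a hypothetical sketch with made-up opcodes; whether the fold
/// actually happens is decided by the target's FoldImmediate hook):
///   %vreg1 = MOVi 42              ; move-immediate, recorded in ImmDefMIs
///   %vreg2 = ADDrr %vreg0, %vreg1
/// may be rewritten into:
///   %vreg2 = ADDri %vreg0, 42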
bool PeepholeOptimizer::foldImmediate(MachineInstr *MI, MachineBasicBlock *MBB,
                                      SmallSet<unsigned, 4> &ImmDefRegs,
                                 DenseMap<unsigned, MachineInstr*> &ImmDefMIs) {
  for (unsigned i = 0, e = MI->getDesc().getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || MO.isDef())
      continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg))
      continue;
    if (ImmDefRegs.count(Reg) == 0)
      continue;
    DenseMap<unsigned, MachineInstr*>::iterator II = ImmDefMIs.find(Reg);
    assert(II != ImmDefMIs.end());
    if (TII->FoldImmediate(MI, II->second, Reg, MRI)) {
      ++NumImmFold;
      return true;
    }
  }
  return false;
}

bool PeepholeOptimizer::runOnMachineFunction(MachineFunction &MF) {
  if (skipOptnoneFunction(*MF.getFunction()))
    return false;

  DEBUG(dbgs() << "********** PEEPHOLE OPTIMIZER **********\n");
  DEBUG(dbgs() << "********** Function: " << MF.getName() << '\n');

  if (DisablePeephole)
    return false;

  TM  = &MF.getTarget();
  TII = TM->getInstrInfo();
  MRI = &MF.getRegInfo();
  DT  = Aggressive ? &getAnalysis<MachineDominatorTree>() : nullptr;

  bool Changed = false;

  for (MachineFunction::iterator I = MF.begin(), E = MF.end(); I != E; ++I) {
    MachineBasicBlock *MBB = &*I;

    bool SeenMoveImm = false;
    SmallPtrSet<MachineInstr*, 8> LocalMIs;
    SmallSet<unsigned, 4> ImmDefRegs;
    DenseMap<unsigned, MachineInstr*> ImmDefMIs;
    SmallSet<unsigned, 16> FoldAsLoadDefCandidates;

    for (MachineBasicBlock::iterator
           MII = I->begin(), MIE = I->end(); MII != MIE; ) {
      MachineInstr *MI = &*MII;
      // We may be erasing MI below, increment MII now.
      ++MII;
      LocalMIs.insert(MI);

      // Skip debug values. They should not affect this peephole optimization.
      if (MI->isDebugValue())
        continue;

      // If the instruction belongs to one of the following categories,
      // we will discard the load candidates.
      if (MI->isPosition() || MI->isPHI() || MI->isImplicitDef() ||
          MI->isKill() || MI->isInlineAsm() ||
          MI->hasUnmodeledSideEffects()) {
        FoldAsLoadDefCandidates.clear();
        continue;
      }
      if (MI->mayStore() || MI->isCall())
        FoldAsLoadDefCandidates.clear();

      if (((MI->isBitcast() || MI->isCopy()) && optimizeCopyOrBitcast(MI)) ||
          (MI->isCompare() && optimizeCmpInstr(MI, MBB)) ||
          (MI->isSelect() && optimizeSelect(MI))) {
        // MI is deleted.
        LocalMIs.erase(MI);
        Changed = true;
        continue;
      }

      if (isMoveImmediate(MI, ImmDefRegs, ImmDefMIs)) {
        SeenMoveImm = true;
      } else {
        Changed |= optimizeExtInstr(MI, MBB, LocalMIs);
        // optimizeExtInstr might have created new instructions after MI
        // and before the already incremented MII. Adjust MII so that the
        // next iteration sees the new instructions.
        MII = MI;
        ++MII;
        if (SeenMoveImm)
          Changed |= foldImmediate(MI, MBB, ImmDefRegs, ImmDefMIs);
      }

      // Check whether MI is a load candidate for folding into a later
      // instruction. If MI is not a candidate, check whether we can fold an
      // earlier load into MI.
      if (!isLoadFoldable(MI, FoldAsLoadDefCandidates) &&
          !FoldAsLoadDefCandidates.empty()) {
        const MCInstrDesc &MIDesc = MI->getDesc();
        for (unsigned i = MIDesc.getNumDefs(); i != MIDesc.getNumOperands();
             ++i) {
          const MachineOperand &MOp = MI->getOperand(i);
          if (!MOp.isReg())
            continue;
          unsigned FoldAsLoadDefReg = MOp.getReg();
          if (FoldAsLoadDefCandidates.count(FoldAsLoadDefReg)) {
            // We need to fold load after optimizeCmpInstr, since
            // optimizeCmpInstr can enable folding by converting SUB to CMP.
            // Save FoldAsLoadDefReg because optimizeLoadInstr() resets it and
            // we need it for markUsesInDebugValueAsUndef().
            unsigned FoldedReg = FoldAsLoadDefReg;
            MachineInstr *DefMI = nullptr;
            MachineInstr *FoldMI = TII->optimizeLoadInstr(MI, MRI,
                                                          FoldAsLoadDefReg,
                                                          DefMI);
            if (FoldMI) {
              // Update LocalMIs since we replaced MI with FoldMI and deleted
              // DefMI.
              DEBUG(dbgs() << "Replacing: " << *MI);
              DEBUG(dbgs() << "     With: " << *FoldMI);
              LocalMIs.erase(MI);
              LocalMIs.erase(DefMI);
              LocalMIs.insert(FoldMI);
              MI->eraseFromParent();
              DefMI->eraseFromParent();
              MRI->markUsesInDebugValueAsUndef(FoldedReg);
              FoldAsLoadDefCandidates.erase(FoldedReg);
              ++NumLoadFold;
              // MI is replaced with FoldMI.
              Changed = true;
              break;
            }
          }
        }
      }
    }
  }

  return Changed;
}

bool ValueTracker::getNextSourceFromCopy(unsigned &SrcIdx,
                                         unsigned &SrcSubReg) {
  assert(Def->isCopy() && "Invalid definition");
  // Copy instructions are supposed to be: Def = Src.
  // If someone breaks this assumption, bad things will happen everywhere.
  assert(Def->getDesc().getNumOperands() == 2 && "Invalid number of operands");

  if (Def->getOperand(DefIdx).getSubReg() != DefSubReg)
    // If we look for a different subreg, it means we want a subreg of src.
    // Bail out as we do not support composing subregs yet.
    return false;
  // Otherwise, we want the whole source.
  SrcIdx = 1;
  SrcSubReg = Def->getOperand(SrcIdx).getSubReg();
  return true;
}

bool ValueTracker::getNextSourceFromBitcast(unsigned &SrcIdx,
                                            unsigned &SrcSubReg) {
  assert(Def->isBitcast() && "Invalid definition");

  // Bail if there are effects that a plain copy will not expose.
  if (Def->hasUnmodeledSideEffects())
    return false;

  // Bitcasts with more than one def are not supported.
  if (Def->getDesc().getNumDefs() != 1)
    return false;
  if (Def->getOperand(DefIdx).getSubReg() != DefSubReg)
    // If we look for a different subreg, it means we want a subreg of the src.
    // Bail out as we do not support composing subregs yet.
    return false;

  SrcIdx = Def->getDesc().getNumOperands();
  for (unsigned OpIdx = DefIdx + 1, EndOpIdx = SrcIdx; OpIdx != EndOpIdx;
       ++OpIdx) {
    const MachineOperand &MO = Def->getOperand(OpIdx);
    if (!MO.isReg() || !MO.getReg())
      continue;
    assert(!MO.isDef() && "We should have skipped all the definitions by now");
    if (SrcIdx != EndOpIdx)
      // Multiple sources?
      return false;
    SrcIdx = OpIdx;
  }
  SrcSubReg = Def->getOperand(SrcIdx).getSubReg();
  return true;
}

bool ValueTracker::getNextSourceFromRegSequence(unsigned &SrcIdx,
                                                unsigned &SrcSubReg) {
  assert(Def->isRegSequence() && "Invalid definition");

  if (Def->getOperand(DefIdx).getSubReg())
    // If we are composing subregs, bail out.
    // The case we are checking is Def.<subreg> = REG_SEQUENCE.
    // This should almost never happen as the SSA property is tracked at
    // the register level (as opposed to the subreg level).
    // I.e.,
    // Def.sub0 =
    // Def.sub1 =
    // is a valid SSA representation for Def.sub0 and Def.sub1, but not for
    // Def. Thus, it must not be generated.
    // However, some code could theoretically generate a single
    // Def.sub0 (i.e., not defining the other subregs) and we would
    // have this case.
    // If we can ascertain (or force) that this never happens, we could
    // turn that into an assertion.
    return false;

  // We are looking at:
  // Def = REG_SEQUENCE v0, sub0, v1, sub1, ...
  // Check if one of the operands defines the subreg we are interested in.
  for (unsigned OpIdx = DefIdx + 1, EndOpIdx = Def->getNumOperands();
       OpIdx != EndOpIdx; OpIdx += 2) {
    const MachineOperand &MOSubIdx = Def->getOperand(OpIdx + 1);
    assert(MOSubIdx.isImm() &&
           "One of the subindices of the reg_sequence is not an immediate");
    if (MOSubIdx.getImm() == DefSubReg) {
      assert(Def->getOperand(OpIdx).isReg() &&
             "One of the sources of the reg_sequence is not a register");
      SrcIdx = OpIdx;
      SrcSubReg = Def->getOperand(SrcIdx).getSubReg();
      return true;
    }
  }

  // If the subreg we are tracking is super-defined by another subreg,
  // we could follow this value. However, this would require to compose
  // the subreg and we do not do that for now.
  return false;
}

bool ValueTracker::getNextSourceFromInsertSubreg(unsigned &SrcIdx,
                                                 unsigned &SrcSubReg) {
  assert(Def->isInsertSubreg() && "Invalid definition");
  if (Def->getOperand(DefIdx).getSubReg())
    // If we are composing subregs, bail out.
    // Same remark as getNextSourceFromRegSequence.
    // I.e., this may be turned into an assert.
    return false;

  // We are looking at:
  // Def = INSERT_SUBREG v0, v1, sub1
  // There are two cases:
  // 1. DefSubReg == sub1, get v1.
  // 2. DefSubReg != sub1, the value may be available through v0.

  // #1 Check if the inserted register matches the required sub index.
  unsigned InsertedSubReg = Def->getOperand(3).getImm();
  if (InsertedSubReg == DefSubReg) {
    SrcIdx = 2;
    SrcSubReg = Def->getOperand(SrcIdx).getSubReg();
    return true;
  }
  // #2 Otherwise, if the sub register we are looking for is not partially
  // defined by the inserted element, we can look through the main
  // register (v0).
  // To check the overlapping we need a MRI and a TRI.
  if (!MRI)
    return false;

  const MachineOperand &MODef = Def->getOperand(DefIdx);
  const MachineOperand &MOBase = Def->getOperand(1);
  // If the result register (Def) and the base register (v0) do not
  // have the same register class or if we have to compose
  // subregisters, bail out.
  if (MRI->getRegClass(MODef.getReg()) != MRI->getRegClass(MOBase.getReg()) ||
      MOBase.getSubReg())
    return false;

  // Get the TRI and check if the inserted sub register overlaps with the
  // sub register we are tracking.
  const TargetRegisterInfo *TRI = MRI->getTargetRegisterInfo();
  if (!TRI ||
      (TRI->getSubRegIndexLaneMask(DefSubReg) &
       TRI->getSubRegIndexLaneMask(InsertedSubReg)) != 0)
    return false;
  // At this point, the value is available in v0 via the same subreg
  // we used for Def.
  SrcIdx = 1;
  SrcSubReg = DefSubReg;
  return true;
}

bool ValueTracker::getNextSourceFromExtractSubreg(unsigned &SrcIdx,
                                                  unsigned &SrcSubReg) {
  assert(Def->isExtractSubreg() && "Invalid definition");
  // We are looking at:
  // Def = EXTRACT_SUBREG v0, sub0

  // Bail out if we have to compose sub registers.
  // Indeed, if DefSubReg != 0, we would have to compose it with sub0.
  if (DefSubReg)
    return false;

  // Bail out if we have to compose sub registers.
  // Likewise, if v0.subreg != 0, we would have to compose v0.subreg with sub0.
  if (Def->getOperand(1).getSubReg())
    return false;
  // Otherwise, the value is available in v0.sub0.
  SrcIdx = 1;
  SrcSubReg = Def->getOperand(2).getImm();
  return true;
}

bool ValueTracker::getNextSourceFromSubregToReg(unsigned &SrcIdx,
                                                unsigned &SrcSubReg) {
  assert(Def->isSubregToReg() && "Invalid definition");
  // We are looking at:
  // Def = SUBREG_TO_REG Imm, v0, sub0

  // Bail out if we have to compose sub registers.
  // If DefSubReg != sub0, we would have to check that all the bits
  // we track are included in sub0 and if yes, we would have to
  // determine the right subreg in v0.
  if (DefSubReg != Def->getOperand(3).getImm())
    return false;
  // Bail out if we have to compose sub registers.
  // Likewise, if v0.subreg != 0, we would have to compose it with sub0.
  if (Def->getOperand(2).getSubReg())
    return false;

  SrcIdx = 2;
  SrcSubReg = Def->getOperand(3).getImm();
  return true;
}

bool ValueTracker::getNextSourceImpl(unsigned &SrcIdx, unsigned &SrcSubReg) {
  assert(Def && "This method needs a valid definition");

  assert(
      (DefIdx < Def->getDesc().getNumDefs() || Def->getDesc().isVariadic()) &&
      Def->getOperand(DefIdx).isDef() && "Invalid DefIdx");
  if (Def->isCopy())
    return getNextSourceFromCopy(SrcIdx, SrcSubReg);
  if (Def->isBitcast())
    return getNextSourceFromBitcast(SrcIdx, SrcSubReg);
  // All the remaining cases involve "complex" instructions.
  // Bail out if we did not ask for advanced tracking.
  if (!UseAdvancedTracking)
    return false;
  if (Def->isRegSequence())
    return getNextSourceFromRegSequence(SrcIdx, SrcSubReg);
  if (Def->isInsertSubreg())
    return getNextSourceFromInsertSubreg(SrcIdx, SrcSubReg);
  if (Def->isExtractSubreg())
    return getNextSourceFromExtractSubreg(SrcIdx, SrcSubReg);
  if (Def->isSubregToReg())
    return getNextSourceFromSubregToReg(SrcIdx, SrcSubReg);
  return false;
}

const MachineInstr *ValueTracker::getNextSource(unsigned &SrcIdx,
                                                unsigned &SrcSubReg) {
  // If we reach a point where we cannot move up in the use-def chain,
  // there is nothing we can get.
  if (!Def)
    return nullptr;

  const MachineInstr *PrevDef = nullptr;
  // Try to find the next source.
  if (getNextSourceImpl(SrcIdx, SrcSubReg)) {
    // Update definition, definition index, and subregister for the
    // next call of getNextSource.
    const MachineOperand &MO = Def->getOperand(SrcIdx);
    assert(MO.isReg() && !MO.isDef() && "Source is invalid");
    // Update the current register.
    Reg = MO.getReg();
    // Update the return value before moving up in the use-def chain.
    PrevDef = Def;
    // If we can still move up in the use-def chain, move to the next
    // definition.
    if (!TargetRegisterInfo::isPhysicalRegister(Reg)) {
      Def = MRI->getVRegDef(Reg);
      DefIdx = MRI->def_begin(Reg).getOperandNo();
      DefSubReg = SrcSubReg;
      return PrevDef;
    }
  }
  // If we end up here, this means we will not be able to find another source
  // for the next iteration.
  // Make sure any new call to getNextSource bails out early by cutting the
  // use-def chain.
  Def = nullptr;
  return PrevDef;
}