//===-- PeepholeOptimizer.cpp - Peephole Optimizations --------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// Perform peephole optimizations on the machine code:
//
// - Optimize Extensions
//
// Optimization of sign / zero extension instructions. It may be extended to
// handle other instructions with similar properties.
//
// On some targets, some instructions, e.g. X86 sign / zero extension, may
// leave the source value in the lower part of the result. This optimization
// will replace some uses of the pre-extension value with uses of the
// sub-register of the result.
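//
// E.g. (an illustrative sketch, in the notation used further below, where
// ":4" stands for an arbitrary sub-register index):
//
//   %reg1025 = <zext> %reg1024
//   ...
//   use %reg1024
// =>
//   %reg1025 = <zext> %reg1024
//   ...
//   use %reg1025:4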
//
// - Optimize Comparisons
//
// Optimization of comparison instructions. For instance, in this code:
//
//   sub r1, 1
//   cmp r1, 0
//   bz  L1
//
// If the "sub" instruction already sets (or could be modified to set) the
// same flag that the "cmp" instruction sets and that "bz" uses, then we can
// eliminate the "cmp" instruction.
//
// Another instance, in this code:
//
//   sub r1, r3 | sub r1, imm
//   cmp r3, r1 or cmp r1, r3 | cmp r1, imm
//   bge L1
//
// If the branch instruction can use the flag from "sub", then we can replace
// "sub" with "subs" and eliminate the "cmp" instruction.
//
// - Optimize Loads:
//
// Loads that can be folded into a later instruction. A load is foldable
// if it defines a virtual register and that virtual register has a
// single use.
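//
// E.g. (illustrative, X86-like):
//
//   %reg1024 = MOV32rm <mem>
//   %reg1025 = ADD32rr %reg1026, %reg1024
// =>
//   %reg1025 = ADD32rm %reg1026, <mem>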
//
// - Optimize Copies and Bitcasts:
//
// Rewrite copies and bitcasts to avoid cross register bank copies
// when possible.
// E.g., consider the following example, where capital and lowercase
// letters denote different register files:
//   b = copy A <-- cross-bank copy
//   C = copy b <-- cross-bank copy
//   =>
//   b = copy A <-- cross-bank copy
//   C = copy A <-- same-bank copy
//
// E.g., for bitcast:
//   b = bitcast A <-- cross-bank copy
//   C = bitcast b <-- cross-bank copy
//   =>
//   b = bitcast A <-- cross-bank copy
//   C = copy A    <-- same-bank copy
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "peephole-opt"
#include "llvm/CodeGen/Passes.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetRegisterInfo.h"
using namespace llvm;

// Optimize Extensions
static cl::opt<bool>
Aggressive("aggressive-ext-opt", cl::Hidden,
           cl::desc("Aggressive extension optimization"));

static cl::opt<bool>
DisablePeephole("disable-peephole", cl::Hidden, cl::init(false),
                cl::desc("Disable the peephole optimizer"));

STATISTIC(NumReuse, "Number of extension results reused");
STATISTIC(NumCmps, "Number of compares eliminated");
STATISTIC(NumImmFold, "Number of move immediates folded");
STATISTIC(NumLoadFold, "Number of loads folded");
STATISTIC(NumSelects, "Number of selects optimized");
STATISTIC(NumCopiesBitcasts, "Number of copies/bitcasts optimized");

namespace {
  class PeepholeOptimizer : public MachineFunctionPass {
    const TargetMachine *TM;
    const TargetInstrInfo *TII;
    MachineRegisterInfo *MRI;
    MachineDominatorTree *DT; // Machine dominator tree

  public:
    static char ID; // Pass identification
    PeepholeOptimizer() : MachineFunctionPass(ID) {
      initializePeepholeOptimizerPass(*PassRegistry::getPassRegistry());
    }

    virtual bool runOnMachineFunction(MachineFunction &MF);

    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.setPreservesCFG();
      MachineFunctionPass::getAnalysisUsage(AU);
      if (Aggressive) {
        AU.addRequired<MachineDominatorTree>();
        AU.addPreserved<MachineDominatorTree>();
      }
    }

  private:
    bool optimizeCmpInstr(MachineInstr *MI, MachineBasicBlock *MBB);
    bool optimizeExtInstr(MachineInstr *MI, MachineBasicBlock *MBB,
                          SmallPtrSet<MachineInstr*, 8> &LocalMIs);
    bool optimizeSelect(MachineInstr *MI);
    bool optimizeCopyOrBitcast(MachineInstr *MI);
    bool isMoveImmediate(MachineInstr *MI,
                         SmallSet<unsigned, 4> &ImmDefRegs,
                         DenseMap<unsigned, MachineInstr*> &ImmDefMIs);
    bool foldImmediate(MachineInstr *MI, MachineBasicBlock *MBB,
                       SmallSet<unsigned, 4> &ImmDefRegs,
                       DenseMap<unsigned, MachineInstr*> &ImmDefMIs);
    bool isLoadFoldable(MachineInstr *MI, unsigned &FoldAsLoadDefReg);
  };
}

char PeepholeOptimizer::ID = 0;
char &llvm::PeepholeOptimizerID = PeepholeOptimizer::ID;
INITIALIZE_PASS_BEGIN(PeepholeOptimizer, "peephole-opts",
                      "Peephole Optimizations", false, false)
INITIALIZE_PASS_DEPENDENCY(MachineDominatorTree)
INITIALIZE_PASS_END(PeepholeOptimizer, "peephole-opts",
                    "Peephole Optimizations", false, false)

/// optimizeExtInstr - If the instruction is a copy-like instruction, i.e. it
/// reads a single register and writes a single register and it does not
/// modify the source, and if the source value is preserved as a sub-register
/// of the result, then replace all reachable uses of the source with the
/// subreg of the result.
///
/// Do not generate an EXTRACT that is used only by debug uses, as that would
/// make the generated code depend on the presence of debug info. Since this
/// code does not currently share EXTRACTs, just ignore all debug uses.
bool PeepholeOptimizer::
optimizeExtInstr(MachineInstr *MI, MachineBasicBlock *MBB,
                 SmallPtrSet<MachineInstr*, 8> &LocalMIs) {
  unsigned SrcReg, DstReg, SubIdx;
  if (!TII->isCoalescableExtInstr(*MI, SrcReg, DstReg, SubIdx))
    return false;

  if (TargetRegisterInfo::isPhysicalRegister(DstReg) ||
      TargetRegisterInfo::isPhysicalRegister(SrcReg))
    return false;

  if (MRI->hasOneNonDBGUse(SrcReg))
    // No other uses.
    return false;

  // Ensure DstReg can get a register class that actually supports
  // sub-registers. Don't change the class until we commit.
  const TargetRegisterClass *DstRC = MRI->getRegClass(DstReg);
  DstRC = TM->getRegisterInfo()->getSubClassWithSubReg(DstRC, SubIdx);
  if (!DstRC)
    return false;

  // The ext instr may be operating on a sub-register of SrcReg as well.
  // PPC::EXTSW is a 32 -> 64-bit sign extension, but it reads a 64-bit
  // register.
  // If UseSrcSubIdx is set, SubIdx also applies to SrcReg, and only uses of
  // SrcReg:SubIdx should be replaced.
  bool UseSrcSubIdx = TM->getRegisterInfo()->
    getSubClassWithSubReg(MRI->getRegClass(SrcReg), SubIdx) != 0;

  // The source has other uses. See if we can replace the other uses with use
  // of the result of the extension.
  SmallPtrSet<MachineBasicBlock*, 4> ReachedBBs;
  for (MachineRegisterInfo::use_nodbg_iterator
       UI = MRI->use_nodbg_begin(DstReg), UE = MRI->use_nodbg_end();
       UI != UE; ++UI)
    ReachedBBs.insert(UI->getParent());

  // Uses that are in the same BB as the extension, or in a BB that the
  // result of the extension already reaches.
  SmallVector<MachineOperand*, 8> Uses;

  // Uses that can only be reached by extending the live range of the
  // extension result.
  SmallVector<MachineOperand*, 8> ExtendedUses;

  bool ExtendLife = true;
  for (MachineRegisterInfo::use_nodbg_iterator
       UI = MRI->use_nodbg_begin(SrcReg), UE = MRI->use_nodbg_end();
       UI != UE; ++UI) {
    MachineOperand &UseMO = UI.getOperand();
    MachineInstr *UseMI = &*UI;
    if (UseMI == MI)
      continue;

    if (UseMI->isPHI()) {
      ExtendLife = false;
      continue;
    }

    // Only accept uses of SrcReg:SubIdx.
    if (UseSrcSubIdx && UseMO.getSubReg() != SubIdx)
      continue;

    // It's an error to translate this:
    //
    //    %reg1025 = <sext> %reg1024
    //    ...
    //    %reg1026 = SUBREG_TO_REG 0, %reg1024, 4
    //
    // into this:
    //
    //    %reg1025 = <sext> %reg1024
    //    ...
    //    %reg1027 = COPY %reg1025:4
    //    %reg1026 = SUBREG_TO_REG 0, %reg1027, 4
    //
    // The problem here is that SUBREG_TO_REG is there to assert that an
    // implicit zext occurs. It doesn't insert a zext instruction. If we allow
    // the COPY here, it will give us the value after the <sext>, not the
    // original value of %reg1024 before <sext>.
    if (UseMI->getOpcode() == TargetOpcode::SUBREG_TO_REG)
      continue;

    MachineBasicBlock *UseMBB = UseMI->getParent();
    if (UseMBB == MBB) {
      // Local uses that come after the extension.
      if (!LocalMIs.count(UseMI))
        Uses.push_back(&UseMO);
    } else if (ReachedBBs.count(UseMBB)) {
      // Non-local uses where the result of the extension is used. Always
      // replace these unless it's a PHI.
      Uses.push_back(&UseMO);
    } else if (Aggressive && DT->dominates(MBB, UseMBB)) {
      // We may want to extend the live range of the extension result in order
      // to replace these uses.
      ExtendedUses.push_back(&UseMO);
    } else {
      // Both will be live out of the def MBB anyway. Don't extend live range
      // of the extension result.
      ExtendLife = false;
      break;
    }
  }

  if (ExtendLife && !ExtendedUses.empty())
    // Extend the liveness of the extension result.
    std::copy(ExtendedUses.begin(), ExtendedUses.end(),
              std::back_inserter(Uses));

  // Now replace all uses.
  bool Changed = false;
  if (!Uses.empty()) {
    SmallPtrSet<MachineBasicBlock*, 4> PHIBBs;

    // Look for PHI uses of the extended result; we don't want to extend the
    // liveness of a PHI input. It breaks all kinds of assumptions downstream.
    // A PHI use is expected to be the kill of its source values.
    for (MachineRegisterInfo::use_nodbg_iterator
         UI = MRI->use_nodbg_begin(DstReg), UE = MRI->use_nodbg_end();
         UI != UE; ++UI)
      if (UI->isPHI())
        PHIBBs.insert(UI->getParent());

    const TargetRegisterClass *RC = MRI->getRegClass(SrcReg);
    for (unsigned i = 0, e = Uses.size(); i != e; ++i) {
      MachineOperand *UseMO = Uses[i];
      MachineInstr *UseMI = UseMO->getParent();
      MachineBasicBlock *UseMBB = UseMI->getParent();
      if (PHIBBs.count(UseMBB))
        continue;

      // About to add uses of DstReg, clear DstReg's kill flags.
      if (!Changed) {
        MRI->clearKillFlags(DstReg);
        MRI->constrainRegClass(DstReg, DstRC);
      }

      unsigned NewVR = MRI->createVirtualRegister(RC);
      MachineInstr *Copy = BuildMI(*UseMBB, UseMI, UseMI->getDebugLoc(),
                                   TII->get(TargetOpcode::COPY), NewVR)
        .addReg(DstReg, 0, SubIdx);
      // SubIdx applies to both SrcReg and DstReg when UseSrcSubIdx is set.
      if (UseSrcSubIdx) {
        Copy->getOperand(0).setSubReg(SubIdx);
        Copy->getOperand(0).setIsUndef();
      }
      UseMO->setReg(NewVR);
      ++NumReuse;
      Changed = true;
    }
  }

  return Changed;
}

/// optimizeCmpInstr - If the instruction is a compare and the previous
/// instruction it's comparing against already sets (or could be modified to
/// set) the same flag as the compare, then we can remove the comparison and
/// use the flag from the previous instruction.
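///
/// E.g. (illustrative, mirroring the example in the file header):
///   sub r1, 1
///   cmp r1, 0
///   bz  L1
/// becomes, when "sub" can be modified to set the flags "cmp" would set:
///   subs r1, 1
///   bz   L1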
bool PeepholeOptimizer::optimizeCmpInstr(MachineInstr *MI,
                                         MachineBasicBlock *MBB) {
  // If this instruction is a comparison against zero and isn't comparing a
  // physical register, we can try to optimize it.
  unsigned SrcReg, SrcReg2;
  int CmpMask, CmpValue;
  if (!TII->analyzeCompare(MI, SrcReg, SrcReg2, CmpMask, CmpValue) ||
      TargetRegisterInfo::isPhysicalRegister(SrcReg) ||
      (SrcReg2 != 0 && TargetRegisterInfo::isPhysicalRegister(SrcReg2)))
    return false;

  // Attempt to optimize the comparison instruction.
  if (TII->optimizeCompareInstr(MI, SrcReg, SrcReg2, CmpMask, CmpValue, MRI)) {
    ++NumCmps;
    return true;
  }

  return false;
}

/// Optimize a select instruction by asking the target to replace it with a
/// cheaper equivalent sequence; on success the original select is erased.
bool PeepholeOptimizer::optimizeSelect(MachineInstr *MI) {
  unsigned TrueOp = 0;
  unsigned FalseOp = 0;
  bool Optimizable = false;
  SmallVector<MachineOperand, 4> Cond;
  if (TII->analyzeSelect(MI, Cond, TrueOp, FalseOp, Optimizable))
    return false;
  if (!Optimizable)
    return false;
  if (!TII->optimizeSelect(MI))
    return false;
  MI->eraseFromParent();
  ++NumSelects;
  return true;
}

/// \brief Check if the registers defined by the pair (RegisterClass, SubReg)
/// share the same register file.
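/// E.g., on X86 (illustrative), GR32 and GR64 live in the same
/// general-purpose register file, whereas GR32 and FR32 do not.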
static bool shareSameRegisterFile(const TargetRegisterInfo &TRI,
                                  const TargetRegisterClass *DefRC,
                                  unsigned DefSubReg,
                                  const TargetRegisterClass *SrcRC,
                                  unsigned SrcSubReg) {
  // Same register class.
  if (DefRC == SrcRC)
    return true;

  // Both operands are sub-registers. Check if they share a register class.
  unsigned SrcIdx, DefIdx;
  if (SrcSubReg && DefSubReg)
    return TRI.getCommonSuperRegClass(SrcRC, SrcSubReg, DefRC, DefSubReg,
                                      SrcIdx, DefIdx) != NULL;
  // At most one of the registers is a sub-register. Make it Src to avoid
  // duplicating the test.
  if (!SrcSubReg) {
    std::swap(DefSubReg, SrcSubReg);
    std::swap(DefRC, SrcRC);
  }

  // One of the registers is a sub-register. Check if we can get a superclass.
  if (SrcSubReg)
    return TRI.getMatchingSuperRegClass(SrcRC, DefRC, SrcSubReg) != NULL;
  // Plain copy.
  return TRI.getCommonSubClass(DefRC, SrcRC) != NULL;
}

/// \brief Get the index of the definition and source for \p Copy
/// instruction.
/// \pre Copy.isCopy() or Copy.isBitcast().
/// \return True if the Copy instruction has only one register source
/// and one register definition. Otherwise, \p DefIdx and \p SrcIdx
/// are invalid.
static bool getCopyOrBitcastDefUseIdx(const MachineInstr &Copy,
                                      unsigned &DefIdx, unsigned &SrcIdx) {
  assert((Copy.isCopy() || Copy.isBitcast()) && "Wrong operation type.");
  if (Copy.isCopy()) {
    // Copy instructions are supposed to be: Def = Src.
    if (Copy.getDesc().getNumOperands() != 2)
      return false;
    DefIdx = 0;
    SrcIdx = 1;
    assert(Copy.getOperand(DefIdx).isDef() && "Use comes before def!");
    return true;
  }
  // Bitcast case.
  // Bitcasts with more than one def are not supported.
  if (Copy.getDesc().getNumDefs() != 1)
    return false;
  // Initialize SrcIdx to an undefined operand.
  SrcIdx = Copy.getDesc().getNumOperands();
  for (unsigned OpIdx = 0, EndOpIdx = SrcIdx; OpIdx != EndOpIdx; ++OpIdx) {
    const MachineOperand &MO = Copy.getOperand(OpIdx);
    if (!MO.isReg() || !MO.getReg())
      continue;
    if (MO.isDef())
      DefIdx = OpIdx;
    else {
      // Record the source; a second source operand means this is not a
      // simple bitcast.
      if (SrcIdx != EndOpIdx)
        return false;
      SrcIdx = OpIdx;
    }
  }
  return true;
}

/// \brief Optimize a copy or bitcast instruction to avoid cross
/// register bank copies. The optimization looks through a chain of
/// copies and tries to find a source that has a compatible register
/// class.
/// Two register classes are considered to be compatible if they share
/// the same register bank.
/// New copies issued by this optimization are register allocator
/// friendly. This optimization does not remove any copy, as that may
/// overconstrain the register allocator; instead it replaces some
/// copies when possible.
/// \pre \p MI is a Copy or a Bitcast (MI->isCopy() or MI->isBitcast()).
/// \return True when \p MI has been optimized. In that case, \p MI has
/// been removed from its parent.
bool PeepholeOptimizer::optimizeCopyOrBitcast(MachineInstr *MI) {
  unsigned DefIdx, SrcIdx;
  if (!MI || !getCopyOrBitcastDefUseIdx(*MI, DefIdx, SrcIdx))
    return false;

  const MachineOperand &MODef = MI->getOperand(DefIdx);
  assert(MODef.isReg() && "Copies must be between registers.");
  unsigned Def = MODef.getReg();

  if (TargetRegisterInfo::isPhysicalRegister(Def))
    return false;

  const TargetRegisterClass *DefRC = MRI->getRegClass(Def);
  unsigned DefSubReg = MODef.getSubReg();

  unsigned Src;
  unsigned SrcSubReg;
  bool ShouldRewrite = false;
  MachineInstr *Copy = MI;
  const TargetRegisterInfo &TRI = *TM->getRegisterInfo();

  // Follow the chain of copies until we reach the top or find a
  // more suitable source.
  do {
    unsigned CopyDefIdx, CopySrcIdx;
    if (!getCopyOrBitcastDefUseIdx(*Copy, CopyDefIdx, CopySrcIdx))
      break;
    const MachineOperand &MO = Copy->getOperand(CopySrcIdx);
    assert(MO.isReg() && "Copies must be between registers.");
    Src = MO.getReg();

    if (TargetRegisterInfo::isPhysicalRegister(Src))
      break;

    const TargetRegisterClass *SrcRC = MRI->getRegClass(Src);
    SrcSubReg = MO.getSubReg();

    // If this source does not incur a cross register bank copy, use it.
    ShouldRewrite = shareSameRegisterFile(TRI, DefRC, DefSubReg, SrcRC,
                                          SrcSubReg);
    // Follow the chain of copies: get the definition of Src.
    Copy = MRI->getVRegDef(Src);
  } while (!ShouldRewrite && Copy && (Copy->isCopy() || Copy->isBitcast()));

  // If we did not find a more suitable source, there is nothing to optimize.
  if (!ShouldRewrite || Src == MI->getOperand(SrcIdx).getReg())
    return false;

  // Rewrite the copy to avoid a cross register bank penalty.
  unsigned NewVR = TargetRegisterInfo::isPhysicalRegister(Def) ? Def :
    MRI->createVirtualRegister(DefRC);
  MachineInstr *NewCopy = BuildMI(*MI->getParent(), MI, MI->getDebugLoc(),
                                  TII->get(TargetOpcode::COPY), NewVR)
    .addReg(Src, 0, SrcSubReg);
  NewCopy->getOperand(0).setSubReg(DefSubReg);

  MRI->replaceRegWith(Def, NewVR);
  MRI->clearKillFlags(NewVR);
  MI->eraseFromParent();
  ++NumCopiesBitcasts;
  return true;
}

/// isLoadFoldable - Check whether MI is a candidate for folding into a later
/// instruction. We only fold loads that define a virtual register, and only
/// when that register has a single use.
bool PeepholeOptimizer::isLoadFoldable(MachineInstr *MI,
                                       unsigned &FoldAsLoadDefReg) {
  if (!MI->canFoldAsLoad() || !MI->mayLoad())
    return false;
  const MCInstrDesc &MCID = MI->getDesc();
  if (MCID.getNumDefs() != 1)
    return false;

  unsigned Reg = MI->getOperand(0).getReg();
  // To reduce compilation time, we check MRI->hasOneUse when inserting
  // loads. It should really be checked again when processing the uses of
  // the load, since uses can be removed during the peephole pass.
  if (!MI->getOperand(0).getSubReg() &&
      TargetRegisterInfo::isVirtualRegister(Reg) &&
      MRI->hasOneUse(Reg)) {
    FoldAsLoadDefReg = Reg;
    return true;
  }
  return false;
}

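/// isMoveImmediate - Check whether MI is a move-immediate that defines a
/// single virtual register; if so, record the register and the defining
/// instruction for later folding by foldImmediate.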
bool PeepholeOptimizer::isMoveImmediate(MachineInstr *MI,
                                        SmallSet<unsigned, 4> &ImmDefRegs,
                                 DenseMap<unsigned, MachineInstr*> &ImmDefMIs) {
  const MCInstrDesc &MCID = MI->getDesc();
  if (!MI->isMoveImmediate())
    return false;
  if (MCID.getNumDefs() != 1)
    return false;
  unsigned Reg = MI->getOperand(0).getReg();
  if (TargetRegisterInfo::isVirtualRegister(Reg)) {
    ImmDefMIs.insert(std::make_pair(Reg, MI));
    ImmDefRegs.insert(Reg);
    return true;
  }

  return false;
}

/// foldImmediate - Try folding register operands that are defined by move
/// immediate instructions, i.e. a trivial constant folding optimization, if
/// and only if the def and use are in the same BB.
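///
/// E.g. (illustrative, X86-like):
///   %reg1024 = MOV32ri 7
///   %reg1025 = ADD32rr %reg1026, %reg1024
/// =>
///   %reg1025 = ADD32ri %reg1026, 7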
bool PeepholeOptimizer::foldImmediate(MachineInstr *MI, MachineBasicBlock *MBB,
                                      SmallSet<unsigned, 4> &ImmDefRegs,
                                 DenseMap<unsigned, MachineInstr*> &ImmDefMIs) {
  for (unsigned i = 0, e = MI->getDesc().getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || MO.isDef())
      continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg))
      continue;
    if (ImmDefRegs.count(Reg) == 0)
      continue;
    DenseMap<unsigned, MachineInstr*>::iterator II = ImmDefMIs.find(Reg);
    assert(II != ImmDefMIs.end());
    if (TII->FoldImmediate(MI, II->second, Reg, MRI)) {
      ++NumImmFold;
      return true;
    }
  }
  return false;
}

bool PeepholeOptimizer::runOnMachineFunction(MachineFunction &MF) {
  DEBUG(dbgs() << "********** PEEPHOLE OPTIMIZER **********\n");
  DEBUG(dbgs() << "********** Function: " << MF.getName() << '\n');

  if (DisablePeephole)
    return false;

  TM = &MF.getTarget();
  TII = TM->getInstrInfo();
  MRI = &MF.getRegInfo();
  DT = Aggressive ? &getAnalysis<MachineDominatorTree>() : 0;

  bool Changed = false;

  SmallPtrSet<MachineInstr*, 8> LocalMIs;
  SmallSet<unsigned, 4> ImmDefRegs;
  DenseMap<unsigned, MachineInstr*> ImmDefMIs;
  unsigned FoldAsLoadDefReg;
  for (MachineFunction::iterator I = MF.begin(), E = MF.end(); I != E; ++I) {
    MachineBasicBlock *MBB = &*I;

    bool SeenMoveImm = false;
    LocalMIs.clear();
    ImmDefRegs.clear();
    ImmDefMIs.clear();
    FoldAsLoadDefReg = 0;

    for (MachineBasicBlock::iterator
         MII = I->begin(), MIE = I->end(); MII != MIE; ) {
      MachineInstr *MI = &*MII;
      // We may be erasing MI below, increment MII now.
      ++MII;
      LocalMIs.insert(MI);

      // If the instruction is one of the following, discard the current load
      // candidate, since it is not safe to fold a load across it.
      if (MI->isLabel() || MI->isPHI() || MI->isImplicitDef() ||
          MI->isKill() || MI->isInlineAsm() || MI->isDebugValue() ||
          MI->hasUnmodeledSideEffects()) {
        FoldAsLoadDefReg = 0;
        continue;
      }
      if (MI->mayStore() || MI->isCall())
        FoldAsLoadDefReg = 0;

      if (((MI->isBitcast() || MI->isCopy()) && optimizeCopyOrBitcast(MI)) ||
          (MI->isCompare() && optimizeCmpInstr(MI, MBB)) ||
          (MI->isSelect() && optimizeSelect(MI))) {
        // MI is deleted.
        LocalMIs.erase(MI);
        Changed = true;
        continue;
      }

      if (isMoveImmediate(MI, ImmDefRegs, ImmDefMIs)) {
        SeenMoveImm = true;
      } else {
        Changed |= optimizeExtInstr(MI, MBB, LocalMIs);
        // optimizeExtInstr might have created new instructions after MI
        // and before the already incremented MII. Adjust MII so that the
        // next iteration sees the new instructions.
        MII = MI;
        ++MII;
        if (SeenMoveImm)
          Changed |= foldImmediate(MI, MBB, ImmDefRegs, ImmDefMIs);
      }

      // Check whether MI is a load candidate for folding into a later
      // instruction. If MI is not a candidate, check whether we can fold an
      // earlier load into MI.
      if (!isLoadFoldable(MI, FoldAsLoadDefReg) && FoldAsLoadDefReg) {
        // We need to fold load after optimizeCmpInstr, since optimizeCmpInstr
        // can enable folding by converting SUB to CMP.
        MachineInstr *DefMI = 0;
        MachineInstr *FoldMI = TII->optimizeLoadInstr(MI, MRI,
                                                      FoldAsLoadDefReg, DefMI);
        if (FoldMI) {
          // Update LocalMIs since we replaced MI with FoldMI and deleted DefMI.
          DEBUG(dbgs() << "Replacing: " << *MI);
          DEBUG(dbgs() << "     With: " << *FoldMI);
          LocalMIs.erase(MI);
          LocalMIs.erase(DefMI);
          LocalMIs.insert(FoldMI);
          MI->eraseFromParent();
          DefMI->eraseFromParent();
          ++NumLoadFold;

          // MI is replaced with FoldMI.
          Changed = true;
          continue;
        }
      }
    }
  }

  return Changed;
}