//===-- PeepholeOptimizer.cpp - Peephole Optimizations --------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// Perform peephole optimizations on the machine code:
//
// - Optimize Extensions
//
//     Optimization of sign / zero extension instructions. It may be extended
//     to handle other instructions with similar properties.
//
//     On some targets, some instructions, e.g. X86 sign / zero extension, may
//     leave the source value in the lower part of the result. This
//     optimization will replace some uses of the pre-extension value with
//     uses of the sub-register of the result.
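//
//     For instance (an illustrative x86-style sketch; the exact opcode and
//     sub-register names depend on the target):
//
//       %reg1025 = MOVZX32rr8 %reg1024   ; zero-extends the 8-bit value
//       ...      = use %reg1024          ; later uses may be rewritten as
//       ...      = use %reg1025:sub_8bit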
//
// - Optimize Comparisons
//
//     Optimization of comparison instructions. For instance, in this code:
//
//       sub r1, 1
//       cmp r1, 0
//       bz  L1
//
//     If the "sub" instruction already sets (or could be modified to set) the
//     same flag that the "cmp" instruction sets and that "bz" uses, then we
//     can eliminate the "cmp" instruction.
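//
//     Once the "cmp" is removed, this leaves just:
//
//       sub r1, 1
//       bz  L1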
//
//     As another example, in this code:
//
//       sub r1, r3 | sub r1, imm
//       cmp r3, r1 or cmp r1, r3 | cmp r1, imm
//       bge L1
//
//     If the branch instruction can use the flag from "sub", then we can
//     replace "sub" with "subs" and eliminate the "cmp" instruction.
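//
//     For the first column, that yields (using the flag-setting "subs", and
//     keeping the document's two-operand pseudocode form):
//
//       subs r1, r3
//       bge  L1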
//
// - Optimize Bitcast pairs:
//
//       v1 = bitcast v0
//       v2 = bitcast v1
//          = v2
//     =>
//       v1 = bitcast v0
//          = v0
//
// - Optimize Loads:
//
//     Loads that can be folded into a later instruction. A load is foldable
//     if it defines a virtual register and that virtual register has a
//     single use.
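//
//     For instance (an illustrative x86-style sketch; whether the folded form
//     exists depends on the target's memory-operand support):
//
//       %reg1 = MOV32rm <mem>            ; single-use load
//       %reg2 = ADD32rr %reg0, %reg1
//     =>
//       %reg2 = ADD32rm %reg0, <mem>     ; load folded into the add
//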
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "peephole-opt"
#include "llvm/CodeGen/Passes.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetRegisterInfo.h"
using namespace llvm;

// Optimize Extensions
static cl::opt<bool>
Aggressive("aggressive-ext-opt", cl::Hidden,
           cl::desc("Aggressive extension optimization"));

static cl::opt<bool>
DisablePeephole("disable-peephole", cl::Hidden, cl::init(false),
                cl::desc("Disable the peephole optimizer"));

STATISTIC(NumReuse,    "Number of extension results reused");
STATISTIC(NumBitcasts, "Number of bitcasts eliminated");
STATISTIC(NumCmps,     "Number of compares eliminated");
STATISTIC(NumImmFold,  "Number of move immediate folded");
STATISTIC(NumLoadFold, "Number of loads folded");
STATISTIC(NumSelects,  "Number of selects optimized");

namespace {
  class PeepholeOptimizer : public MachineFunctionPass {
    const TargetMachine   *TM;
    const TargetInstrInfo *TII;
    MachineRegisterInfo   *MRI;
    MachineDominatorTree  *DT;  // Machine dominator tree

  public:
    static char ID; // Pass identification
    PeepholeOptimizer() : MachineFunctionPass(ID) {
      initializePeepholeOptimizerPass(*PassRegistry::getPassRegistry());
    }

    virtual bool runOnMachineFunction(MachineFunction &MF);

    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.setPreservesCFG();
      MachineFunctionPass::getAnalysisUsage(AU);
      if (Aggressive) {
        AU.addRequired<MachineDominatorTree>();
        AU.addPreserved<MachineDominatorTree>();
      }
    }

  private:
    bool optimizeBitcastInstr(MachineInstr *MI, MachineBasicBlock *MBB);
    bool optimizeCmpInstr(MachineInstr *MI, MachineBasicBlock *MBB);
    bool optimizeExtInstr(MachineInstr *MI, MachineBasicBlock *MBB,
                          SmallPtrSet<MachineInstr*, 8> &LocalMIs);
    bool optimizeSelect(MachineInstr *MI);
    bool isMoveImmediate(MachineInstr *MI,
                         SmallSet<unsigned, 4> &ImmDefRegs,
                         DenseMap<unsigned, MachineInstr*> &ImmDefMIs);
    bool foldImmediate(MachineInstr *MI, MachineBasicBlock *MBB,
                       SmallSet<unsigned, 4> &ImmDefRegs,
                       DenseMap<unsigned, MachineInstr*> &ImmDefMIs);
    bool isLoadFoldable(MachineInstr *MI, unsigned &FoldAsLoadDefReg);
  };
}

char PeepholeOptimizer::ID = 0;
char &llvm::PeepholeOptimizerID = PeepholeOptimizer::ID;
INITIALIZE_PASS_BEGIN(PeepholeOptimizer, "peephole-opts",
                "Peephole Optimizations", false, false)
INITIALIZE_PASS_DEPENDENCY(MachineDominatorTree)
INITIALIZE_PASS_END(PeepholeOptimizer, "peephole-opts",
                "Peephole Optimizations", false, false)

/// optimizeExtInstr - If the instruction is a copy-like instruction, i.e. it
/// reads a single register and writes a single register and it does not
/// modify the source, and if the source value is preserved as a sub-register
/// of the result, then replace all reachable uses of the source with the
/// subreg of the result.
///
/// Do not generate an EXTRACT that is used only in a debug use, as this
/// changes the code. Since this code does not currently share EXTRACTs, just
/// ignore all debug uses.
bool PeepholeOptimizer::
optimizeExtInstr(MachineInstr *MI, MachineBasicBlock *MBB,
                 SmallPtrSet<MachineInstr*, 8> &LocalMIs) {
  unsigned SrcReg, DstReg, SubIdx;
  if (!TII->isCoalescableExtInstr(*MI, SrcReg, DstReg, SubIdx))
    return false;

  if (TargetRegisterInfo::isPhysicalRegister(DstReg) ||
      TargetRegisterInfo::isPhysicalRegister(SrcReg))
    return false;

  if (MRI->hasOneNonDBGUse(SrcReg))
    // No other uses.
    return false;

  // Ensure DstReg can get a register class that actually supports
  // sub-registers. Don't change the class until we commit.
  const TargetRegisterClass *DstRC = MRI->getRegClass(DstReg);
  DstRC = TM->getRegisterInfo()->getSubClassWithSubReg(DstRC, SubIdx);
  if (!DstRC)
    return false;

  // The ext instr may be operating on a sub-register of SrcReg as well.
  // PPC::EXTSW is a 32 -> 64-bit sign extension, but it reads a 64-bit
  // register.
  // If UseSrcSubIdx is set, SubIdx also applies to SrcReg, and only uses of
  // SrcReg:SubIdx should be replaced.
  bool UseSrcSubIdx = TM->getRegisterInfo()->
    getSubClassWithSubReg(MRI->getRegClass(SrcReg), SubIdx) != 0;

  // The source has other uses. See if we can replace the other uses with uses
  // of the result of the extension.
  SmallPtrSet<MachineBasicBlock*, 4> ReachedBBs;
  for (MachineRegisterInfo::use_nodbg_iterator
       UI = MRI->use_nodbg_begin(DstReg), UE = MRI->use_nodbg_end();
       UI != UE; ++UI)
    ReachedBBs.insert(UI->getParent());

  // Uses of the source register that can be replaced with the extension
  // result.
  SmallVector<MachineOperand*, 8> Uses;

  // Uses that the extension result dominates but does not currently reach;
  // replacing these requires extending its live range.
  SmallVector<MachineOperand*, 8> ExtendedUses;

  bool ExtendLife = true;
  for (MachineRegisterInfo::use_nodbg_iterator
       UI = MRI->use_nodbg_begin(SrcReg), UE = MRI->use_nodbg_end();
       UI != UE; ++UI) {
    MachineOperand &UseMO = UI.getOperand();
    MachineInstr *UseMI = &*UI;
    if (UseMI == MI)
      continue;

    if (UseMI->isPHI()) {
      ExtendLife = false;
      continue;
    }

    // Only accept uses of SrcReg:SubIdx.
    if (UseSrcSubIdx && UseMO.getSubReg() != SubIdx)
      continue;

    // It's an error to translate this:
    //
    //    %reg1025 = <sext> %reg1024
    //    ...
    //    %reg1026 = SUBREG_TO_REG 0, %reg1024, 4
    //
    // into this:
    //
    //    %reg1025 = <sext> %reg1024
    //    ...
    //    %reg1027 = COPY %reg1025:4
    //    %reg1026 = SUBREG_TO_REG 0, %reg1027, 4
    //
    // The problem here is that SUBREG_TO_REG is there to assert that an
    // implicit zext occurs. It doesn't insert a zext instruction. If we allow
    // the COPY here, it will give us the value after the <sext>, not the
    // original value of %reg1024 before <sext>.
    if (UseMI->getOpcode() == TargetOpcode::SUBREG_TO_REG)
      continue;

    MachineBasicBlock *UseMBB = UseMI->getParent();
    if (UseMBB == MBB) {
      // Local uses that come after the extension.
      if (!LocalMIs.count(UseMI))
        Uses.push_back(&UseMO);
    } else if (ReachedBBs.count(UseMBB)) {
      // Non-local uses where the result of the extension is used. Always
      // replace these unless it's a PHI.
      Uses.push_back(&UseMO);
    } else if (Aggressive && DT->dominates(MBB, UseMBB)) {
      // We may want to extend the live range of the extension result in order
      // to replace these uses.
      ExtendedUses.push_back(&UseMO);
    } else {
      // Both will be live out of the def MBB anyway. Don't extend live range
      // of the extension result.
      ExtendLife = false;
      break;
    }
  }

  if (ExtendLife && !ExtendedUses.empty())
    // Extend the liveness of the extension result.
    std::copy(ExtendedUses.begin(), ExtendedUses.end(),
              std::back_inserter(Uses));

  // Now replace all uses.
  bool Changed = false;
  if (!Uses.empty()) {
    SmallPtrSet<MachineBasicBlock*, 4> PHIBBs;

    // Look for PHI uses of the extended result; we don't want to extend the
    // liveness of a PHI input. It breaks all kinds of assumptions downstream.
    // A PHI use is expected to be the kill of its source values.
    for (MachineRegisterInfo::use_nodbg_iterator
         UI = MRI->use_nodbg_begin(DstReg), UE = MRI->use_nodbg_end();
         UI != UE; ++UI)
      if (UI->isPHI())
        PHIBBs.insert(UI->getParent());

    const TargetRegisterClass *RC = MRI->getRegClass(SrcReg);
    for (unsigned i = 0, e = Uses.size(); i != e; ++i) {
      MachineOperand *UseMO = Uses[i];
      MachineInstr *UseMI = UseMO->getParent();
      MachineBasicBlock *UseMBB = UseMI->getParent();
      if (PHIBBs.count(UseMBB))
        continue;

      // About to add uses of DstReg, clear DstReg's kill flags.
      if (!Changed) {
        MRI->clearKillFlags(DstReg);
        MRI->constrainRegClass(DstReg, DstRC);
      }

      unsigned NewVR = MRI->createVirtualRegister(RC);
      MachineInstr *Copy = BuildMI(*UseMBB, UseMI, UseMI->getDebugLoc(),
                                   TII->get(TargetOpcode::COPY), NewVR)
        .addReg(DstReg, 0, SubIdx);
      // SubIdx applies to both SrcReg and DstReg when UseSrcSubIdx is set.
      if (UseSrcSubIdx) {
        Copy->getOperand(0).setSubReg(SubIdx);
        Copy->getOperand(0).setIsUndef();
      }
      UseMO->setReg(NewVR);
      ++NumReuse;
      Changed = true;
    }
  }

  return Changed;
}

/// optimizeBitcastInstr - If the instruction is a bitcast instruction A that
/// cannot be optimized away during isel (e.g. ARM::VMOVSR, which bitcasts a
/// value across register classes), and the source is defined by another
/// bitcast instruction B, and the register class of B's source matches the
/// register class of A's def, then it is legal to replace all uses of the
/// def of A with the source of B. e.g.
///   %vreg0<def> = VMOVSR %vreg1
///   %vreg3<def> = VMOVRS %vreg0
/// Replace all uses of vreg3 with vreg1.
bool PeepholeOptimizer::optimizeBitcastInstr(MachineInstr *MI,
                                             MachineBasicBlock *MBB) {
  unsigned NumDefs = MI->getDesc().getNumDefs();
  unsigned NumSrcs = MI->getDesc().getNumOperands() - NumDefs;
  if (NumDefs != 1)
    return false;

  unsigned Def = 0;
  unsigned Src = 0;
  for (unsigned i = 0, e = NumDefs + NumSrcs; i != e; ++i) {
    const MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg())
      continue;
    unsigned Reg = MO.getReg();
    if (!Reg)
      continue;
    if (MO.isDef())
      Def = Reg;
    else if (Src)
      // Multiple sources?
      return false;
    else
      Src = Reg;
  }

  assert(Def && Src && "Malformed bitcast instruction!");

  MachineInstr *DefMI = MRI->getVRegDef(Src);
  if (!DefMI || !DefMI->isBitcast())
    return false;

  unsigned SrcSrc = 0;
  NumDefs = DefMI->getDesc().getNumDefs();
  NumSrcs = DefMI->getDesc().getNumOperands() - NumDefs;
  if (NumDefs != 1)
    return false;
  for (unsigned i = 0, e = NumDefs + NumSrcs; i != e; ++i) {
    const MachineOperand &MO = DefMI->getOperand(i);
    if (!MO.isReg() || MO.isDef())
      continue;
    unsigned Reg = MO.getReg();
    if (!Reg)
      continue;
    // MO is known to be a use at this point, so Reg is DefMI's source.
    if (SrcSrc)
      // Multiple sources?
      return false;
    SrcSrc = Reg;
  }

  if (MRI->getRegClass(SrcSrc) != MRI->getRegClass(Def))
    return false;

  MRI->replaceRegWith(Def, SrcSrc);
  MRI->clearKillFlags(SrcSrc);
  MI->eraseFromParent();
  ++NumBitcasts;
  return true;
}

/// optimizeCmpInstr - If the instruction is a compare and the previous
/// instruction it's comparing against already sets (or could be modified to
/// set) the same flag as the compare, then we can remove the comparison and
/// use the flag from the previous instruction.
bool PeepholeOptimizer::optimizeCmpInstr(MachineInstr *MI,
                                         MachineBasicBlock *MBB) {
  // If this instruction is a comparison against zero and isn't comparing a
  // physical register, we can try to optimize it.
  unsigned SrcReg, SrcReg2;
  int CmpMask, CmpValue;
  if (!TII->analyzeCompare(MI, SrcReg, SrcReg2, CmpMask, CmpValue) ||
      TargetRegisterInfo::isPhysicalRegister(SrcReg) ||
      (SrcReg2 != 0 && TargetRegisterInfo::isPhysicalRegister(SrcReg2)))
    return false;

  // Attempt to optimize the comparison instruction.
  if (TII->optimizeCompareInstr(MI, SrcReg, SrcReg2, CmpMask, CmpValue, MRI)) {
    ++NumCmps;
    return true;
  }

  return false;
}

/// Optimize a select instruction: ask the target whether this select-like
/// instruction can be rewritten as a cheaper sequence; if so, let the target
/// rewrite it and erase the original instruction.
bool PeepholeOptimizer::optimizeSelect(MachineInstr *MI) {
  unsigned TrueOp = 0;
  unsigned FalseOp = 0;
  bool Optimizable = false;
  SmallVector<MachineOperand, 4> Cond;
  if (TII->analyzeSelect(MI, Cond, TrueOp, FalseOp, Optimizable))
    return false;
  if (!Optimizable)
    return false;
  if (!TII->optimizeSelect(MI))
    return false;
  MI->eraseFromParent();
  ++NumSelects;
  return true;
}

/// isLoadFoldable - Check whether MI is a candidate for folding into a later
/// instruction. We only fold loads that define a virtual register and whose
/// defined register has a single use.
bool PeepholeOptimizer::isLoadFoldable(MachineInstr *MI,
                                       unsigned &FoldAsLoadDefReg) {
  if (!MI->canFoldAsLoad() || !MI->mayLoad())
    return false;
  const MCInstrDesc &MCID = MI->getDesc();
  if (MCID.getNumDefs() != 1)
    return false;

  unsigned Reg = MI->getOperand(0).getReg();
  // To reduce compilation time, we check MRI->hasOneUse when inserting
  // loads. It should be checked when processing uses of the load, since
  // uses can be removed during peephole.
  if (!MI->getOperand(0).getSubReg() &&
      TargetRegisterInfo::isVirtualRegister(Reg) &&
      MRI->hasOneUse(Reg)) {
    FoldAsLoadDefReg = Reg;
    return true;
  }
  return false;
}

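/// isMoveImmediate - Check whether MI is a move-immediate instruction that
/// defines a single virtual register; if so, record the register and the
/// defining instruction in ImmDefRegs / ImmDefMIs so foldImmediate can fold
/// the constant into a later use within the same block.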
bool PeepholeOptimizer::isMoveImmediate(MachineInstr *MI,
                                        SmallSet<unsigned, 4> &ImmDefRegs,
                                 DenseMap<unsigned, MachineInstr*> &ImmDefMIs) {
  const MCInstrDesc &MCID = MI->getDesc();
  if (!MI->isMoveImmediate())
    return false;
  if (MCID.getNumDefs() != 1)
    return false;
  unsigned Reg = MI->getOperand(0).getReg();
  if (TargetRegisterInfo::isVirtualRegister(Reg)) {
    ImmDefMIs.insert(std::make_pair(Reg, MI));
    ImmDefRegs.insert(Reg);
    return true;
  }

  return false;
}

/// foldImmediate - Try folding register operands that are defined by move
/// immediate instructions, i.e. a trivial constant folding optimization, if
/// and only if the def and use are in the same BB.
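///
/// For example (illustrative x86-style opcodes; the actual rewrite is
/// delegated to the target's TII->FoldImmediate hook):
///   %reg1 = MOV32ri 7
///   %reg2 = ADD32rr %reg0, %reg1
/// may become
///   %reg2 = ADD32ri %reg0, 7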
bool PeepholeOptimizer::foldImmediate(MachineInstr *MI, MachineBasicBlock *MBB,
                                      SmallSet<unsigned, 4> &ImmDefRegs,
                                 DenseMap<unsigned, MachineInstr*> &ImmDefMIs) {
  for (unsigned i = 0, e = MI->getDesc().getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || MO.isDef())
      continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg))
      continue;
    if (ImmDefRegs.count(Reg) == 0)
      continue;
    DenseMap<unsigned, MachineInstr*>::iterator II = ImmDefMIs.find(Reg);
    assert(II != ImmDefMIs.end());
    if (TII->FoldImmediate(MI, II->second, Reg, MRI)) {
      ++NumImmFold;
      return true;
    }
  }
  return false;
}

bool PeepholeOptimizer::runOnMachineFunction(MachineFunction &MF) {
  DEBUG(dbgs() << "********** PEEPHOLE OPTIMIZER **********\n");
  DEBUG(dbgs() << "********** Function: " << MF.getName() << '\n');

  if (DisablePeephole)
    return false;

  TM  = &MF.getTarget();
  TII = TM->getInstrInfo();
  MRI = &MF.getRegInfo();
  DT  = Aggressive ? &getAnalysis<MachineDominatorTree>() : 0;

  bool Changed = false;

  SmallPtrSet<MachineInstr*, 8> LocalMIs;
  SmallSet<unsigned, 4> ImmDefRegs;
  DenseMap<unsigned, MachineInstr*> ImmDefMIs;
  unsigned FoldAsLoadDefReg;
  for (MachineFunction::iterator I = MF.begin(), E = MF.end(); I != E; ++I) {
    MachineBasicBlock *MBB = &*I;

    bool SeenMoveImm = false;
    LocalMIs.clear();
    ImmDefRegs.clear();
    ImmDefMIs.clear();
    FoldAsLoadDefReg = 0;

    for (MachineBasicBlock::iterator
           MII = I->begin(), MIE = I->end(); MII != MIE; ) {
      MachineInstr *MI = &*MII;
      // We may be erasing MI below, increment MII now.
      ++MII;
      LocalMIs.insert(MI);

      // If MI is one of the following kinds of instruction, discard the
      // current load-folding candidate.
      if (MI->isLabel() || MI->isPHI() || MI->isImplicitDef() ||
          MI->isKill() || MI->isInlineAsm() || MI->isDebugValue() ||
          MI->hasUnmodeledSideEffects()) {
        FoldAsLoadDefReg = 0;
        continue;
      }
      if (MI->mayStore() || MI->isCall())
        FoldAsLoadDefReg = 0;

      if ((MI->isBitcast() && optimizeBitcastInstr(MI, MBB)) ||
          (MI->isCompare() && optimizeCmpInstr(MI, MBB)) ||
          (MI->isSelect() && optimizeSelect(MI))) {
        // MI is deleted.
        LocalMIs.erase(MI);
        Changed = true;
        continue;
      }

      if (isMoveImmediate(MI, ImmDefRegs, ImmDefMIs)) {
        SeenMoveImm = true;
      } else {
        Changed |= optimizeExtInstr(MI, MBB, LocalMIs);
        // optimizeExtInstr might have created new instructions after MI
        // and before the already incremented MII. Adjust MII so that the
        // next iteration sees the new instructions.
        MII = MI;
        ++MII;
        if (SeenMoveImm)
          Changed |= foldImmediate(MI, MBB, ImmDefRegs, ImmDefMIs);
      }

      // Check whether MI is a load candidate for folding into a later
      // instruction. If MI is not a candidate, check whether we can fold an
      // earlier load into MI.
      if (!isLoadFoldable(MI, FoldAsLoadDefReg) && FoldAsLoadDefReg) {
        // We need to fold load after optimizeCmpInstr, since optimizeCmpInstr
        // can enable folding by converting SUB to CMP.
        MachineInstr *DefMI = 0;
        MachineInstr *FoldMI = TII->optimizeLoadInstr(MI, MRI,
                                                      FoldAsLoadDefReg, DefMI);
        if (FoldMI) {
          // Update LocalMIs since we replaced MI with FoldMI and deleted
          // DefMI.
          DEBUG(dbgs() << "Replacing: " << *MI);
          DEBUG(dbgs() << "     With: " << *FoldMI);
          LocalMIs.erase(MI);
          LocalMIs.erase(DefMI);
          LocalMIs.insert(FoldMI);
          MI->eraseFromParent();
          DefMI->eraseFromParent();
          ++NumLoadFold;

          // MI is replaced with FoldMI.
          Changed = true;
          continue;
        }
      }
    }
  }

  return Changed;
}