//===-- PeepholeOptimizer.cpp - Peephole Optimizations --------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// Perform peephole optimizations on the machine code:
//
// - Optimize Extensions
//
//     Optimization of sign / zero extension instructions. It may be extended to
//     handle other instructions with similar properties.
//
//     On some targets, some instructions, e.g. X86 sign / zero extension, may
//     leave the source value in the lower part of the result. This optimization
//     will replace some uses of the pre-extension value with uses of the
//     sub-register of the result.
//
// - Optimize Comparisons
//
//     Optimization of comparison instructions. For instance, in this code:
//
//       sub r1, 1
//       cmp r1, 0
//       bz  L1
//
//     If the "sub" instruction already sets (or could be modified to set) the
//     same flag that the "cmp" instruction sets and that "bz" uses, then we can
//     eliminate the "cmp" instruction.
//
//     Another instance, in this code:
//
//       sub r1, r3 | sub r1, imm
//       cmp r3, r1 or cmp r1, r3 | cmp r1, imm
//       bge L1
//
//     If the branch instruction can use the flag from "sub", then we can
//     replace "sub" with "subs" and eliminate the "cmp" instruction.
//
// - Optimize Loads:
//
//     Loads that can be folded into a later instruction. A load is foldable
//     if it loads to a virtual register and the virtual register defined has
//     a single use.
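//
//     As an illustrative sketch (X86-style pseudo machine code, not taken
//     from a real compilation):
//
//       %vreg1 = MOV32rm <mem>          ; load
//       %vreg2 = ADD32rr %vreg0, %vreg1 ; single use of %vreg1
//     =>
//       %vreg2 = ADD32rm %vreg0, <mem>  ; load folded into the add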
//
// - Optimize Copies and Bitcasts:
//
//     Rewrite copies and bitcasts to avoid cross register bank copies
//     when possible.
//     E.g., consider the following example, where capital and lowercase
//     letters denote different register files:
//       b = copy A <-- cross-bank copy
//       C = copy b <-- cross-bank copy
//     =>
//       b = copy A <-- cross-bank copy
//       C = copy A <-- same-bank copy
//
//     E.g., for bitcast:
//       b = bitcast A <-- cross-bank copy
//       C = bitcast b <-- cross-bank copy
//     =>
//       b = bitcast A <-- cross-bank copy
//       C = copy A    <-- same-bank copy
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "peephole-opt"
#include "llvm/CodeGen/Passes.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetRegisterInfo.h"
using namespace llvm;

// Optimize Extensions
static cl::opt<bool>
Aggressive("aggressive-ext-opt", cl::Hidden,
           cl::desc("Aggressive extension optimization"));

static cl::opt<bool>
DisablePeephole("disable-peephole", cl::Hidden, cl::init(false),
                cl::desc("Disable the peephole optimizer"));
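
// Both switches above are ordinary cl::opt flags, so they can be passed to a
// tool that runs this pass; for example (illustrative invocations):
//   llc -disable-peephole foo.ll       # turn the pass off entirely
//   llc -aggressive-ext-opt foo.ll     # enable the dominator-based ext opt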

STATISTIC(NumReuse, "Number of extension results reused");
STATISTIC(NumCmps, "Number of compares eliminated");
STATISTIC(NumImmFold, "Number of move immediates folded");
STATISTIC(NumLoadFold, "Number of loads folded");
STATISTIC(NumSelects, "Number of selects optimized");
STATISTIC(NumCopiesBitcasts, "Number of copies/bitcasts optimized");

namespace {
  class PeepholeOptimizer : public MachineFunctionPass {
    const TargetMachine *TM;
    const TargetInstrInfo *TII;
    MachineRegisterInfo *MRI;
    MachineDominatorTree *DT;  // Machine dominator tree

  public:
    static char ID; // Pass identification
    PeepholeOptimizer() : MachineFunctionPass(ID) {
      initializePeepholeOptimizerPass(*PassRegistry::getPassRegistry());
    }

    bool runOnMachineFunction(MachineFunction &MF) override;

    void getAnalysisUsage(AnalysisUsage &AU) const override {
      AU.setPreservesCFG();
      MachineFunctionPass::getAnalysisUsage(AU);
      if (Aggressive) {
        AU.addRequired<MachineDominatorTree>();
        AU.addPreserved<MachineDominatorTree>();
      }
    }

  private:
    bool optimizeCmpInstr(MachineInstr *MI, MachineBasicBlock *MBB);
    bool optimizeExtInstr(MachineInstr *MI, MachineBasicBlock *MBB,
                          SmallPtrSet<MachineInstr*, 8> &LocalMIs);
    bool optimizeSelect(MachineInstr *MI);
    bool optimizeCopyOrBitcast(MachineInstr *MI);
    bool isMoveImmediate(MachineInstr *MI,
                         SmallSet<unsigned, 4> &ImmDefRegs,
                         DenseMap<unsigned, MachineInstr*> &ImmDefMIs);
    bool foldImmediate(MachineInstr *MI, MachineBasicBlock *MBB,
                       SmallSet<unsigned, 4> &ImmDefRegs,
                       DenseMap<unsigned, MachineInstr*> &ImmDefMIs);
    bool isLoadFoldable(MachineInstr *MI,
                        SmallSet<unsigned, 16> &FoldAsLoadDefCandidates);
  };
}

char PeepholeOptimizer::ID = 0;
char &llvm::PeepholeOptimizerID = PeepholeOptimizer::ID;
INITIALIZE_PASS_BEGIN(PeepholeOptimizer, "peephole-opts",
                "Peephole Optimizations", false, false)
INITIALIZE_PASS_DEPENDENCY(MachineDominatorTree)
INITIALIZE_PASS_END(PeepholeOptimizer, "peephole-opts",
                "Peephole Optimizations", false, false)

/// optimizeExtInstr - If the instruction is a copy-like instruction, i.e. it
/// reads a single register and writes a single register and it does not
/// modify the source, and if the source value is preserved as a sub-register
/// of the result, then replace all reachable uses of the source with the
/// subreg of the result.
///
/// Do not generate an EXTRACT that is used only in a debug use, as this changes
/// the code. Since this code does not currently share EXTRACTs, just ignore all
/// debug uses.
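///
/// As an illustrative sketch (X86-style pseudo machine code; the opcode and
/// sub-register names are only examples):
///   %reg1025 = MOVSX32rr16 %reg1024
///   ...      = use %reg1024            ; pre-extension value
/// can become
///   %reg1025 = MOVSX32rr16 %reg1024
///   ...      = use %reg1025:sub_16bit  ; same bits, extension result reused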
bool PeepholeOptimizer::
optimizeExtInstr(MachineInstr *MI, MachineBasicBlock *MBB,
                 SmallPtrSet<MachineInstr*, 8> &LocalMIs) {
  unsigned SrcReg, DstReg, SubIdx;
  if (!TII->isCoalescableExtInstr(*MI, SrcReg, DstReg, SubIdx))
    return false;

  if (TargetRegisterInfo::isPhysicalRegister(DstReg) ||
      TargetRegisterInfo::isPhysicalRegister(SrcReg))
    return false;

  if (MRI->hasOneNonDBGUse(SrcReg))
    // No other uses.
    return false;

  // Ensure DstReg can get a register class that actually supports
  // sub-registers. Don't change the class until we commit.
  const TargetRegisterClass *DstRC = MRI->getRegClass(DstReg);
  DstRC = TM->getRegisterInfo()->getSubClassWithSubReg(DstRC, SubIdx);
  if (!DstRC)
    return false;

  // The ext instr may be operating on a sub-register of SrcReg as well.
  // PPC::EXTSW is a 32 -> 64-bit sign extension, but it reads a 64-bit
  // register.
  // If UseSrcSubIdx is set, SubIdx also applies to SrcReg, and only uses of
  // SrcReg:SubIdx should be replaced.
  bool UseSrcSubIdx = TM->getRegisterInfo()->
    getSubClassWithSubReg(MRI->getRegClass(SrcReg), SubIdx) != 0;

  // The source has other uses. See if we can replace the other uses with use of
  // the result of the extension.
  SmallPtrSet<MachineBasicBlock*, 4> ReachedBBs;
  for (MachineInstr &UI : MRI->use_nodbg_instructions(DstReg))
    ReachedBBs.insert(UI.getParent());

  // Uses that are in the same BB as uses of the result of the instruction.
  SmallVector<MachineOperand*, 8> Uses;

  // Uses that the result of the instruction can reach.
  SmallVector<MachineOperand*, 8> ExtendedUses;

  bool ExtendLife = true;
  for (MachineOperand &UseMO : MRI->use_nodbg_operands(SrcReg)) {
    MachineInstr *UseMI = UseMO.getParent();
    if (UseMI == MI)
      continue;

    if (UseMI->isPHI()) {
      ExtendLife = false;
      continue;
    }

    // Only accept uses of SrcReg:SubIdx.
    if (UseSrcSubIdx && UseMO.getSubReg() != SubIdx)
      continue;

    // It's an error to translate this:
    //
    //    %reg1025 = <sext> %reg1024
    //     ...
    //    %reg1026 = SUBREG_TO_REG 0, %reg1024, 4
    //
    // into this:
    //
    //    %reg1025 = <sext> %reg1024
    //     ...
    //    %reg1027 = COPY %reg1025:4
    //    %reg1026 = SUBREG_TO_REG 0, %reg1027, 4
    //
    // The problem here is that SUBREG_TO_REG is there to assert that an
    // implicit zext occurs. It doesn't insert a zext instruction. If we allow
    // the COPY here, it will give us the value after the <sext>, not the
    // original value of %reg1024 before <sext>.
    if (UseMI->getOpcode() == TargetOpcode::SUBREG_TO_REG)
      continue;

    MachineBasicBlock *UseMBB = UseMI->getParent();
    if (UseMBB == MBB) {
      // Local uses that come after the extension.
      if (!LocalMIs.count(UseMI))
        Uses.push_back(&UseMO);
    } else if (ReachedBBs.count(UseMBB)) {
      // Non-local uses where the result of the extension is used. Always
      // replace these unless it's a PHI.
      Uses.push_back(&UseMO);
    } else if (Aggressive && DT->dominates(MBB, UseMBB)) {
      // We may want to extend the live range of the extension result in order
      // to replace these uses.
      ExtendedUses.push_back(&UseMO);
    } else {
      // Both will be live out of the def MBB anyway. Don't extend live range of
      // the extension result.
      ExtendLife = false;
      break;
    }
  }

  if (ExtendLife && !ExtendedUses.empty())
    // Extend the liveness of the extension result.
    std::copy(ExtendedUses.begin(), ExtendedUses.end(),
              std::back_inserter(Uses));

  // Now replace all uses.
  bool Changed = false;
  if (!Uses.empty()) {
    SmallPtrSet<MachineBasicBlock*, 4> PHIBBs;

    // Look for PHI uses of the extended result; we don't want to extend the
    // liveness of a PHI input. It breaks all kinds of assumptions
    // downstream. A PHI use is expected to be the kill of its source values.
    for (MachineInstr &UI : MRI->use_nodbg_instructions(DstReg))
      if (UI.isPHI())
        PHIBBs.insert(UI.getParent());

    const TargetRegisterClass *RC = MRI->getRegClass(SrcReg);
    for (unsigned i = 0, e = Uses.size(); i != e; ++i) {
      MachineOperand *UseMO = Uses[i];
      MachineInstr *UseMI = UseMO->getParent();
      MachineBasicBlock *UseMBB = UseMI->getParent();
      if (PHIBBs.count(UseMBB))
        continue;

      // About to add uses of DstReg, so clear DstReg's kill flags.
      if (!Changed) {
        MRI->clearKillFlags(DstReg);
        MRI->constrainRegClass(DstReg, DstRC);
      }

      unsigned NewVR = MRI->createVirtualRegister(RC);
      MachineInstr *Copy = BuildMI(*UseMBB, UseMI, UseMI->getDebugLoc(),
                                   TII->get(TargetOpcode::COPY), NewVR)
        .addReg(DstReg, 0, SubIdx);
      // SubIdx applies to both SrcReg and DstReg when UseSrcSubIdx is set.
      if (UseSrcSubIdx) {
        Copy->getOperand(0).setSubReg(SubIdx);
        Copy->getOperand(0).setIsUndef();
      }
      UseMO->setReg(NewVR);
      ++NumReuse;
      Changed = true;
    }
  }

  return Changed;
}

/// optimizeCmpInstr - If the instruction is a compare and the previous
/// instruction it's comparing against already sets (or could be modified to
/// set) the same flag as the compare, then we can remove the comparison and use
/// the flag from the previous instruction.
bool PeepholeOptimizer::optimizeCmpInstr(MachineInstr *MI,
                                         MachineBasicBlock *MBB) {
  // If this instruction is a comparison against zero and isn't comparing a
  // physical register, we can try to optimize it.
  unsigned SrcReg, SrcReg2;
  int CmpMask, CmpValue;
  if (!TII->analyzeCompare(MI, SrcReg, SrcReg2, CmpMask, CmpValue) ||
      TargetRegisterInfo::isPhysicalRegister(SrcReg) ||
      (SrcReg2 != 0 && TargetRegisterInfo::isPhysicalRegister(SrcReg2)))
    return false;

  // Attempt to optimize the comparison instruction.
  if (TII->optimizeCompareInstr(MI, SrcReg, SrcReg2, CmpMask, CmpValue, MRI)) {
    ++NumCmps;
    return true;
  }

  return false;
}

/// Optimize a select instruction.
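/// The target is queried via TII->analyzeSelect and TII->optimizeSelect; if it
/// can rewrite the select into a cheaper target-specific sequence, the
/// original select is erased here (this mirrors the code below, the details of
/// the rewrite are entirely target dependent).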
bool PeepholeOptimizer::optimizeSelect(MachineInstr *MI) {
  unsigned TrueOp = 0;
  unsigned FalseOp = 0;
  bool Optimizable = false;
  SmallVector<MachineOperand, 4> Cond;
  if (TII->analyzeSelect(MI, Cond, TrueOp, FalseOp, Optimizable))
    return false;
  if (!Optimizable)
    return false;
  if (!TII->optimizeSelect(MI))
    return false;
  MI->eraseFromParent();
  ++NumSelects;
  return true;
}

/// \brief Check if the registers defined by the pair (RegisterClass, SubReg)
/// share the same register file.
static bool shareSameRegisterFile(const TargetRegisterInfo &TRI,
                                  const TargetRegisterClass *DefRC,
                                  unsigned DefSubReg,
                                  const TargetRegisterClass *SrcRC,
                                  unsigned SrcSubReg) {
  // Same register class.
  if (DefRC == SrcRC)
    return true;

  // Both operands are sub registers. Check if they share a register class.
  unsigned SrcIdx, DefIdx;
  if (SrcSubReg && DefSubReg)
    return TRI.getCommonSuperRegClass(SrcRC, SrcSubReg, DefRC, DefSubReg,
                                      SrcIdx, DefIdx) != NULL;
  // At most one of the registers is a sub-register. Make it Src to avoid
  // duplicating the test.
  if (!SrcSubReg) {
    std::swap(DefSubReg, SrcSubReg);
    std::swap(DefRC, SrcRC);
  }

  // One of the registers is a sub-register. Check if we can get a superclass.
  if (SrcSubReg)
    return TRI.getMatchingSuperRegClass(SrcRC, DefRC, SrcSubReg) != NULL;
  // Plain copy.
  return TRI.getCommonSubClass(DefRC, SrcRC) != NULL;
}

/// \brief Get the index of the definition and source for \p Copy
/// instruction.
/// \pre Copy.isCopy() or Copy.isBitcast().
/// \return True if the Copy instruction has only one register source
/// and one register definition. Otherwise, \p DefIdx and \p SrcIdx
/// are invalid.
static bool getCopyOrBitcastDefUseIdx(const MachineInstr &Copy,
                                      unsigned &DefIdx, unsigned &SrcIdx) {
  assert((Copy.isCopy() || Copy.isBitcast()) && "Wrong operation type.");
  if (Copy.isCopy()) {
    // Copy instructions are supposed to be: Def = Src.
    if (Copy.getDesc().getNumOperands() != 2)
      return false;
    DefIdx = 0;
    SrcIdx = 1;
    assert(Copy.getOperand(DefIdx).isDef() && "Use comes before def!");
    return true;
  }
  // Bitcast case.
  // Bitcasts with more than one def are not supported.
  if (Copy.getDesc().getNumDefs() != 1)
    return false;
  // Initialize SrcIdx to an undefined operand.
  SrcIdx = Copy.getDesc().getNumOperands();
  for (unsigned OpIdx = 0, EndOpIdx = SrcIdx; OpIdx != EndOpIdx; ++OpIdx) {
    const MachineOperand &MO = Copy.getOperand(OpIdx);
    if (!MO.isReg() || !MO.getReg())
      continue;
    if (MO.isDef())
      DefIdx = OpIdx;
    else if (SrcIdx != EndOpIdx)
      // Multiple sources?
      return false;
    SrcIdx = OpIdx;
  }
  return true;
}

/// \brief Optimize a copy or bitcast instruction to avoid cross
/// register bank copies. The optimization looks through a chain of
/// copies and tries to find a source that has a compatible register
/// class.
/// Two register classes are considered to be compatible if they share
/// the same register bank.
/// New copies issued by this optimization are register allocator
/// friendly. This optimization does not remove any copy as it may
/// overconstrain the register allocator, but replaces some when
/// possible.
/// \pre \p MI is a Copy or a Bitcast (MI->isCopy() or MI->isBitcast() is true).
/// \return True when \p MI has been optimized. In that case, \p MI has
/// been removed from its parent.
bool PeepholeOptimizer::optimizeCopyOrBitcast(MachineInstr *MI) {
  unsigned DefIdx, SrcIdx;
  if (!MI || !getCopyOrBitcastDefUseIdx(*MI, DefIdx, SrcIdx))
    return false;

  const MachineOperand &MODef = MI->getOperand(DefIdx);
  assert(MODef.isReg() && "Copies must be between registers.");
  unsigned Def = MODef.getReg();

  if (TargetRegisterInfo::isPhysicalRegister(Def))
    return false;

  const TargetRegisterClass *DefRC = MRI->getRegClass(Def);
  unsigned DefSubReg = MODef.getSubReg();

  unsigned Src;
  unsigned SrcSubReg;
  bool ShouldRewrite = false;
  MachineInstr *Copy = MI;
  const TargetRegisterInfo &TRI = *TM->getRegisterInfo();

  // Follow the chain of copies until we reach the top or find a
  // more suitable source.
  do {
    unsigned CopyDefIdx, CopySrcIdx;
    if (!getCopyOrBitcastDefUseIdx(*Copy, CopyDefIdx, CopySrcIdx))
      break;
    const MachineOperand &MO = Copy->getOperand(CopySrcIdx);
    assert(MO.isReg() && "Copies must be between registers.");
    Src = MO.getReg();

    if (TargetRegisterInfo::isPhysicalRegister(Src))
      break;

    const TargetRegisterClass *SrcRC = MRI->getRegClass(Src);
    SrcSubReg = MO.getSubReg();

    // If this source does not incur a cross register bank copy, use it.
    ShouldRewrite = shareSameRegisterFile(TRI, DefRC, DefSubReg, SrcRC,
                                          SrcSubReg);
    // Follow the chain of copies: get the definition of Src.
    Copy = MRI->getVRegDef(Src);
  } while (!ShouldRewrite && Copy && (Copy->isCopy() || Copy->isBitcast()));

  // If we did not find a more suitable source, there is nothing to optimize.
  if (!ShouldRewrite || Src == MI->getOperand(SrcIdx).getReg())
    return false;

  // Rewrite the copy to avoid a cross register bank penalty.
  unsigned NewVR = TargetRegisterInfo::isPhysicalRegister(Def) ? Def :
    MRI->createVirtualRegister(DefRC);
  MachineInstr *NewCopy = BuildMI(*MI->getParent(), MI, MI->getDebugLoc(),
                                  TII->get(TargetOpcode::COPY), NewVR)
    .addReg(Src, 0, SrcSubReg);
  NewCopy->getOperand(0).setSubReg(DefSubReg);

  MRI->replaceRegWith(Def, NewVR);
  MRI->clearKillFlags(NewVR);
  MI->eraseFromParent();
  ++NumCopiesBitcasts;
  return true;
}

/// isLoadFoldable - Check whether MI is a candidate for folding into a later
/// instruction. We only fold loads that define a virtual register, and only
/// when that register has a single use.
bool PeepholeOptimizer::isLoadFoldable(
                              MachineInstr *MI,
                              SmallSet<unsigned, 16> &FoldAsLoadDefCandidates) {
  if (!MI->canFoldAsLoad() || !MI->mayLoad())
    return false;
  const MCInstrDesc &MCID = MI->getDesc();
  if (MCID.getNumDefs() != 1)
    return false;

  unsigned Reg = MI->getOperand(0).getReg();
  // To reduce compilation time, we check MRI->hasOneNonDBGUse when inserting
  // loads. It should be checked when processing uses of the load, since
  // uses can be removed during peephole.
  if (!MI->getOperand(0).getSubReg() &&
      TargetRegisterInfo::isVirtualRegister(Reg) &&
      MRI->hasOneNonDBGUse(Reg)) {
    FoldAsLoadDefCandidates.insert(Reg);
    return true;
  }
  return false;
}

bool PeepholeOptimizer::isMoveImmediate(MachineInstr *MI,
                                        SmallSet<unsigned, 4> &ImmDefRegs,
                                 DenseMap<unsigned, MachineInstr*> &ImmDefMIs) {
  const MCInstrDesc &MCID = MI->getDesc();
  if (!MI->isMoveImmediate())
    return false;
  if (MCID.getNumDefs() != 1)
    return false;
  unsigned Reg = MI->getOperand(0).getReg();
  if (TargetRegisterInfo::isVirtualRegister(Reg)) {
    ImmDefMIs.insert(std::make_pair(Reg, MI));
    ImmDefRegs.insert(Reg);
    return true;
  }

  return false;
}

/// foldImmediate - Try folding register operands that are defined by move
/// immediate instructions, i.e. a trivial constant folding optimization, if
/// and only if the def and use are in the same BB.
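///
/// As an illustrative sketch (generic pseudo machine code; the opcodes are
/// made up and the actual rewrite is delegated to TII->FoldImmediate):
///   %vreg1 = MOVi 42
///   %vreg2 = ADDrr %vreg0, %vreg1
/// may become
///   %vreg2 = ADDri %vreg0, 42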
bool PeepholeOptimizer::foldImmediate(MachineInstr *MI, MachineBasicBlock *MBB,
                                      SmallSet<unsigned, 4> &ImmDefRegs,
                                 DenseMap<unsigned, MachineInstr*> &ImmDefMIs) {
  for (unsigned i = 0, e = MI->getDesc().getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || MO.isDef())
      continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg))
      continue;
    if (ImmDefRegs.count(Reg) == 0)
      continue;
    DenseMap<unsigned, MachineInstr*>::iterator II = ImmDefMIs.find(Reg);
    assert(II != ImmDefMIs.end());
    if (TII->FoldImmediate(MI, II->second, Reg, MRI)) {
      ++NumImmFold;
      return true;
    }
  }
  return false;
}

bool PeepholeOptimizer::runOnMachineFunction(MachineFunction &MF) {
  if (skipOptnoneFunction(*MF.getFunction()))
    return false;

  DEBUG(dbgs() << "********** PEEPHOLE OPTIMIZER **********\n");
  DEBUG(dbgs() << "********** Function: " << MF.getName() << '\n');

  if (DisablePeephole)
    return false;

  TM  = &MF.getTarget();
  TII = TM->getInstrInfo();
  MRI = &MF.getRegInfo();
  DT  = Aggressive ? &getAnalysis<MachineDominatorTree>() : 0;

  bool Changed = false;

  for (MachineFunction::iterator I = MF.begin(), E = MF.end(); I != E; ++I) {
    MachineBasicBlock *MBB = &*I;

    bool SeenMoveImm = false;
    SmallPtrSet<MachineInstr*, 8> LocalMIs;
    SmallSet<unsigned, 4> ImmDefRegs;
    DenseMap<unsigned, MachineInstr*> ImmDefMIs;
    SmallSet<unsigned, 16> FoldAsLoadDefCandidates;

    for (MachineBasicBlock::iterator
           MII = I->begin(), MIE = I->end(); MII != MIE; ) {
      MachineInstr *MI = &*MII;
      // We may be erasing MI below, so increment MII now.
      ++MII;
      LocalMIs.insert(MI);

      // Skip debug values. They should not affect this peephole optimization.
      if (MI->isDebugValue())
        continue;

      // If there exists an instruction which belongs to the following
      // categories, we will discard the load candidates.
      if (MI->isPosition() || MI->isPHI() || MI->isImplicitDef() ||
          MI->isKill() || MI->isInlineAsm() ||
          MI->hasUnmodeledSideEffects()) {
        FoldAsLoadDefCandidates.clear();
        continue;
      }
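      // Stores and calls may modify memory, so loads recorded earlier can no
      // longer be safely folded into a later instruction past this point;
      // drop them.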
      if (MI->mayStore() || MI->isCall())
        FoldAsLoadDefCandidates.clear();

      if (((MI->isBitcast() || MI->isCopy()) && optimizeCopyOrBitcast(MI)) ||
          (MI->isCompare() && optimizeCmpInstr(MI, MBB)) ||
          (MI->isSelect() && optimizeSelect(MI))) {
        // MI is deleted.
        LocalMIs.erase(MI);
        Changed = true;
        continue;
      }

      if (isMoveImmediate(MI, ImmDefRegs, ImmDefMIs)) {
        SeenMoveImm = true;
      } else {
        Changed |= optimizeExtInstr(MI, MBB, LocalMIs);
        // optimizeExtInstr might have created new instructions after MI
        // and before the already incremented MII. Adjust MII so that the
        // next iteration sees the new instructions.
        MII = MI;
        ++MII;
        if (SeenMoveImm)
          Changed |= foldImmediate(MI, MBB, ImmDefRegs, ImmDefMIs);
      }

      // Check whether MI is a load candidate for folding into a later
      // instruction. If MI is not a candidate, check whether we can fold an
      // earlier load into MI.
      if (!isLoadFoldable(MI, FoldAsLoadDefCandidates) &&
          !FoldAsLoadDefCandidates.empty()) {
        const MCInstrDesc &MIDesc = MI->getDesc();
        for (unsigned i = MIDesc.getNumDefs(); i != MIDesc.getNumOperands();
             ++i) {
          const MachineOperand &MOp = MI->getOperand(i);
          if (!MOp.isReg())
            continue;
          unsigned FoldAsLoadDefReg = MOp.getReg();
          if (FoldAsLoadDefCandidates.count(FoldAsLoadDefReg)) {
            // We need to fold load after optimizeCmpInstr, since
            // optimizeCmpInstr can enable folding by converting SUB to CMP.
            // Save FoldAsLoadDefReg because optimizeLoadInstr() resets it and
            // we need it for markUsesInDebugValueAsUndef().
            unsigned FoldedReg = FoldAsLoadDefReg;
            MachineInstr *DefMI = 0;
            MachineInstr *FoldMI = TII->optimizeLoadInstr(MI, MRI,
                                                          FoldAsLoadDefReg,
                                                          DefMI);
            if (FoldMI) {
              // Update LocalMIs since we replaced MI with FoldMI and deleted
              // DefMI.
              DEBUG(dbgs() << "Replacing: " << *MI);
              DEBUG(dbgs() << "     With: " << *FoldMI);
              LocalMIs.erase(MI);
              LocalMIs.erase(DefMI);
              LocalMIs.insert(FoldMI);
              MI->eraseFromParent();
              DefMI->eraseFromParent();
              MRI->markUsesInDebugValueAsUndef(FoldedReg);
              FoldAsLoadDefCandidates.erase(FoldedReg);
              ++NumLoadFold;
              // MI is replaced with FoldMI.
              Changed = true;
              break;
            }
          }
        }
      }
    }
  }

  return Changed;
}