//===- PeepholeOptimizer.cpp - Peephole Optimizations --------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// Perform peephole optimizations on the machine code:
//
// - Optimize Extensions
//
//     Optimization of sign / zero extension instructions. It may be extended to
//     handle other instructions with similar properties.
//
//     On some targets, some instructions, e.g. X86 sign / zero extension, may
//     leave the source value in the lower part of the result. This optimization
//     will replace some uses of the pre-extension value with uses of the
//     sub-register of the results.
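//
//     For example (hypothetical pseudo-instructions; actual opcodes and
//     sub-register indices are target specific):
//
//       r1 = sext16to32 r0
//       ... = use r0          <-- other use of the pre-extension value
//     =>
//       r1 = sext16to32 r0
//       r2 = copy r1:sub_16
//       ... = use r2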
//
// - Optimize Comparisons
//
//     Optimization of comparison instructions. For instance, in this code:
//
//       sub r1, 1
//       cmp r1, 0
//       bz  L1
//
//     If the "sub" instruction already sets (or could be modified to set) the
//     same flag that the "cmp" instruction sets and that "bz" uses, then we can
//     eliminate the "cmp" instruction.
//
//     Another instance, in this code:
//
//       sub r1, r3 | sub r1, imm
//       cmp r3, r1 or cmp r1, r3 | cmp r1, imm
//       bge L1
//
//     If the branch instruction can use the flags from "sub", then we can
//     replace "sub" with "subs" and eliminate the "cmp" instruction.
//
// - Optimize Loads:
//
//     Loads that can be folded into a later instruction. A load is foldable
//     if it loads into a virtual register and that virtual register has
//     a single use.
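//
//     For example (hypothetical pseudo-instructions; whether a load can be
//     folded into a particular use is decided by the target):
//
//       r1 = load [mem]       <-- r1 has a single use
//       r2 = add r0, r1
//     =>
//       r2 = add r0, [mem]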
//
// - Optimize Copies and Bitcast (more generally, target specific copies):
//
//     Rewrite copies and bitcasts to avoid cross register bank copies
//     when possible.
//     E.g., Consider the following example, where capital and lower
//     letters denote different register files:
//       b = copy A    <-- cross-bank copy
//       C = copy b    <-- cross-bank copy
//     =>
//       b = copy A    <-- cross-bank copy
//       C = copy A    <-- same-bank copy
//
//     E.g., for bitcast:
//       b = bitcast A <-- cross-bank copy
//       C = bitcast b <-- cross-bank copy
//     =>
//       b = bitcast A <-- cross-bank copy
//       C = copy A    <-- same-bank copy
//
//===----------------------------------------------------------------------===//

#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/MC/LaneBitmask.h"
#include "llvm/MC/MCInstrDesc.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include <cassert>
#include <cstdint>
#include <memory>
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "peephole-opt"

// Optimize Extensions
static cl::opt<bool>
Aggressive("aggressive-ext-opt", cl::Hidden,
           cl::desc("Aggressive extension optimization"));

static cl::opt<bool>
DisablePeephole("disable-peephole", cl::Hidden, cl::init(false),
                cl::desc("Disable the peephole optimizer"));

static cl::opt<bool>
DisableAdvCopyOpt("disable-adv-copy-opt", cl::Hidden, cl::init(false),
                  cl::desc("Disable advanced copy optimization"));

static cl::opt<bool> DisableNAPhysCopyOpt(
    "disable-non-allocatable-phys-copy-opt", cl::Hidden, cl::init(false),
    cl::desc("Disable non-allocatable physical register copy optimization"));

// Limit the number of PHI instructions to process
// in PeepholeOptimizer::findNextSource.
static cl::opt<unsigned> RewritePHILimit(
    "rewrite-phi-limit", cl::Hidden, cl::init(10),
    cl::desc("Limit the length of PHI chains to lookup"));

// Limit the length of a recurrence chain when evaluating the benefit of
// commuting operands.
static cl::opt<unsigned> MaxRecurrenceChain(
    "recurrence-chain-limit", cl::Hidden, cl::init(3),
    cl::desc("Maximum length of recurrence chain when evaluating the benefit "
             "of commuting operands"));

STATISTIC(NumReuse, "Number of extension results reused");
STATISTIC(NumCmps, "Number of compares eliminated");
STATISTIC(NumImmFold, "Number of move immediate folded");
STATISTIC(NumLoadFold, "Number of loads folded");
STATISTIC(NumSelects, "Number of selects optimized");
STATISTIC(NumUncoalescableCopies, "Number of uncoalescable copies optimized");
STATISTIC(NumRewrittenCopies, "Number of copies rewritten");
STATISTIC(NumNAPhysCopies, "Number of non-allocatable physical copies removed");

namespace {

class ValueTrackerResult;
class RecurrenceInstr;

class PeepholeOptimizer : public MachineFunctionPass {
  const TargetInstrInfo *TII;
  const TargetRegisterInfo *TRI;
  MachineRegisterInfo *MRI;
  MachineDominatorTree *DT; // Machine dominator tree
  MachineLoopInfo *MLI;

public:
  static char ID; // Pass identification

  PeepholeOptimizer() : MachineFunctionPass(ID) {
    initializePeepholeOptimizerPass(*PassRegistry::getPassRegistry());
  }

  bool runOnMachineFunction(MachineFunction &MF) override;

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    MachineFunctionPass::getAnalysisUsage(AU);
    AU.addRequired<MachineLoopInfo>();
    AU.addPreserved<MachineLoopInfo>();
    if (Aggressive) {
      AU.addRequired<MachineDominatorTree>();
      AU.addPreserved<MachineDominatorTree>();
    }
  }

  /// \brief Track Def -> Use info used for rewriting copies.
  using RewriteMapTy =
      SmallDenseMap<TargetInstrInfo::RegSubRegPair, ValueTrackerResult>;

  /// \brief Sequence of instructions that formulate a recurrence cycle.
  using RecurrenceCycle = SmallVector<RecurrenceInstr, 4>;

private:
  bool optimizeCmpInstr(MachineInstr *MI, MachineBasicBlock *MBB);
  bool optimizeExtInstr(MachineInstr *MI, MachineBasicBlock *MBB,
                        SmallPtrSetImpl<MachineInstr*> &LocalMIs);
  bool optimizeSelect(MachineInstr *MI,
                      SmallPtrSetImpl<MachineInstr *> &LocalMIs);
  bool optimizeCondBranch(MachineInstr *MI);
  bool optimizeCoalescableCopy(MachineInstr *MI);
  bool optimizeUncoalescableCopy(MachineInstr *MI,
                                 SmallPtrSetImpl<MachineInstr *> &LocalMIs);
  bool optimizeRecurrence(MachineInstr &PHI);
  bool findNextSource(unsigned Reg, unsigned SubReg,
                      RewriteMapTy &RewriteMap);
  bool isMoveImmediate(MachineInstr *MI,
                       SmallSet<unsigned, 4> &ImmDefRegs,
                       DenseMap<unsigned, MachineInstr*> &ImmDefMIs);
  bool foldImmediate(MachineInstr *MI, MachineBasicBlock *MBB,
                     SmallSet<unsigned, 4> &ImmDefRegs,
                     DenseMap<unsigned, MachineInstr*> &ImmDefMIs);

  /// \brief Finds recurrence cycles, but only ones that are formulated around
  /// a def operand and a use operand that are tied. If there is a use
  /// operand commutable with the tied use operand, find the recurrence
  /// cycle along that operand as well.
  bool findTargetRecurrence(unsigned Reg,
                            const SmallSet<unsigned, 2> &TargetReg,
                            RecurrenceCycle &RC);

  /// \brief If copy instruction \p MI is a virtual register copy, track it in
  /// the set \p CopySrcRegs and the map \p CopyMIs. If this virtual register
  /// was previously seen as a copy, replace the uses of this copy with the
  /// previously seen copy's destination register.
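  ///
  /// A hypothetical example with virtual registers:
  ///   %b = COPY %a
  ///   ...          (no redefinition of %a or %b)
  ///   %c = COPY %a <-- uses of %c can be rewritten to use %b instead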
  bool foldRedundantCopy(MachineInstr *MI,
                         SmallSet<unsigned, 4> &CopySrcRegs,
                         DenseMap<unsigned, MachineInstr *> &CopyMIs);

  /// \brief Is the register \p Reg a non-allocatable physical register?
  bool isNAPhysCopy(unsigned Reg);

  /// \brief If copy instruction \p MI is a non-allocatable virtual<->physical
  /// register copy, track it in the \p NAPhysToVirtMIs map. If this
  /// non-allocatable physical register was previously copied to a virtual
  /// register and hasn't been clobbered, the virt->phys copy can be
  /// deleted.
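  ///
  /// A hypothetical example, where physreg is a non-allocatable physical
  /// register:
  ///   %v = COPY physreg
  ///   ...               (no clobber of physreg)
  ///   physreg = COPY %v <-- redundant, can be deleted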
  bool foldRedundantNAPhysCopy(
      MachineInstr *MI,
      DenseMap<unsigned, MachineInstr *> &NAPhysToVirtMIs);

  bool isLoadFoldable(MachineInstr *MI,
                      SmallSet<unsigned, 16> &FoldAsLoadDefCandidates);

  /// \brief Check whether \p MI is understood by the register coalescer
  /// but may require some rewriting.
  bool isCoalescableCopy(const MachineInstr &MI) {
    // SubregToRegs are not interesting, because they are already register
    // coalescer friendly.
    return MI.isCopy() || (!DisableAdvCopyOpt &&
                           (MI.isRegSequence() || MI.isInsertSubreg() ||
                            MI.isExtractSubreg()));
  }

  /// \brief Check whether \p MI is a copy like instruction that is
  /// not recognized by the register coalescer.
  bool isUncoalescableCopy(const MachineInstr &MI) {
    return MI.isBitcast() ||
           (!DisableAdvCopyOpt &&
            (MI.isRegSequenceLike() || MI.isInsertSubregLike() ||
             MI.isExtractSubregLike()));
  }
};

/// \brief Helper class to hold instructions that are inside recurrence
/// cycles. The recurrence cycle is formulated around 1) a def operand and its
/// tied use operand, or 2) a def operand and a use operand that is commutable
/// with another use operand which is tied to the def operand. In the latter
/// case, the indices of the tied use operand and the commutable use operand
/// are maintained with CommutePair.
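///
/// For example (hypothetical pseudo code with a tied def/use pair on ADD),
/// the instructions below form a recurrence cycle around %phi:
///   %phi = PHI %init, %sum
///   %sum = ADD %phi(tied), %x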
class RecurrenceInstr {
public:
  using IndexPair = std::pair<unsigned, unsigned>;

  RecurrenceInstr(MachineInstr *MI) : MI(MI) {}
  RecurrenceInstr(MachineInstr *MI, unsigned Idx1, unsigned Idx2)
    : MI(MI), CommutePair(std::make_pair(Idx1, Idx2)) {}

  MachineInstr *getMI() const { return MI; }
  Optional<IndexPair> getCommutePair() const { return CommutePair; }

private:
  MachineInstr *MI;
  Optional<IndexPair> CommutePair;
};

/// \brief Helper class to hold a reply for ValueTracker queries. Contains the
/// returned sources for a given search and the instructions where the sources
/// were tracked from.
class ValueTrackerResult {
private:
  /// Track all sources found by one ValueTracker query.
  SmallVector<TargetInstrInfo::RegSubRegPair, 2> RegSrcs;

  /// Instruction using the sources in 'RegSrcs'.
  const MachineInstr *Inst = nullptr;

public:
  ValueTrackerResult() = default;

  ValueTrackerResult(unsigned Reg, unsigned SubReg) {
    addSource(Reg, SubReg);
  }

  bool isValid() const { return getNumSources() > 0; }

  void setInst(const MachineInstr *I) { Inst = I; }
  const MachineInstr *getInst() const { return Inst; }

  void clear() {
    RegSrcs.clear();
    Inst = nullptr;
  }

  void addSource(unsigned SrcReg, unsigned SrcSubReg) {
    RegSrcs.push_back(TargetInstrInfo::RegSubRegPair(SrcReg, SrcSubReg));
  }

  void setSource(int Idx, unsigned SrcReg, unsigned SrcSubReg) {
    assert(Idx < getNumSources() && "Reg pair source out of index");
    RegSrcs[Idx] = TargetInstrInfo::RegSubRegPair(SrcReg, SrcSubReg);
  }

  int getNumSources() const { return RegSrcs.size(); }

  unsigned getSrcReg(int Idx) const {
    assert(Idx < getNumSources() && "Reg source out of index");
    return RegSrcs[Idx].Reg;
  }

  unsigned getSrcSubReg(int Idx) const {
    assert(Idx < getNumSources() && "SubReg source out of index");
    return RegSrcs[Idx].SubReg;
  }

  bool operator==(const ValueTrackerResult &Other) {
    if (Other.getInst() != getInst())
      return false;

    if (Other.getNumSources() != getNumSources())
      return false;

    for (int i = 0, e = Other.getNumSources(); i != e; ++i)
      if (Other.getSrcReg(i) != getSrcReg(i) ||
          Other.getSrcSubReg(i) != getSrcSubReg(i))
        return false;
    return true;
  }
};

/// \brief Helper class to track the possible sources of a value defined by
/// a (chain of) copy related instructions.
/// Given a definition (instruction and definition index), this class
/// follows the use-def chain to find successive suitable sources.
/// The given source can be used to rewrite the definition into
/// def = COPY src.
///
/// For instance, let us consider the following snippet:
///   v0 =
///   v2 = INSERT_SUBREG v1, v0, sub0
///   def = COPY v2.sub0
///
/// Using a ValueTracker for def = COPY v2.sub0 will give the following
/// suitable sources:
///   v2.sub0 and v0.
/// Then, def can be rewritten into def = COPY v0.
class ValueTracker {
private:
  /// The current point into the use-def chain.
  const MachineInstr *Def = nullptr;

  /// The index of the definition in Def.
  unsigned DefIdx = 0;

  /// The sub register index of the definition.
  unsigned DefSubReg;

  /// The register where the value can be found.
  unsigned Reg;

  /// Specify whether or not the value tracking looks through
  /// complex instructions. When this is false, the value tracker
  /// bails on everything that is not a copy or a bitcast.
  ///
  /// Note: This could have been implemented as a specialized version of
  /// the ValueTracker class but that would have complicated the code of
  /// the users of this class.
  bool UseAdvancedTracking;

  /// MachineRegisterInfo used to perform tracking.
  const MachineRegisterInfo &MRI;

  /// Optional TargetInstrInfo used to perform some complex
  /// tracking.
  const TargetInstrInfo *TII;

  /// \brief Dispatcher to the right underlying implementation of
  /// getNextSource.
  ValueTrackerResult getNextSourceImpl();

  /// \brief Specialized version of getNextSource for Copy instructions.
  ValueTrackerResult getNextSourceFromCopy();

  /// \brief Specialized version of getNextSource for Bitcast instructions.
  ValueTrackerResult getNextSourceFromBitcast();

  /// \brief Specialized version of getNextSource for RegSequence
  /// instructions.
  ValueTrackerResult getNextSourceFromRegSequence();

  /// \brief Specialized version of getNextSource for InsertSubreg
  /// instructions.
  ValueTrackerResult getNextSourceFromInsertSubreg();

  /// \brief Specialized version of getNextSource for ExtractSubreg
  /// instructions.
  ValueTrackerResult getNextSourceFromExtractSubreg();

  /// \brief Specialized version of getNextSource for SubregToReg
  /// instructions.
  ValueTrackerResult getNextSourceFromSubregToReg();

  /// \brief Specialized version of getNextSource for PHI instructions.
  ValueTrackerResult getNextSourceFromPHI();

public:
  /// \brief Create a ValueTracker instance for the value defined by \p Reg.
  /// \p DefSubReg represents the sub register index the value tracker will
  /// track. It does not need to match the sub register index used in the
  /// definition of \p Reg.
  /// \p UseAdvancedTracking specifies whether or not the value tracker looks
  /// through complex instructions. By default (false), it handles only copy
  /// and bitcast instructions.
  /// If \p Reg is a physical register, a value tracker constructed with
  /// this constructor will not find any alternative source.
  /// Indeed, when \p Reg is a physical register, that constructor does not
  /// know which definition of \p Reg it should track.
  /// Use the next constructor to track a physical register.
  ValueTracker(unsigned Reg, unsigned DefSubReg,
               const MachineRegisterInfo &MRI,
               bool UseAdvancedTracking = false,
               const TargetInstrInfo *TII = nullptr)
      : DefSubReg(DefSubReg), Reg(Reg),
        UseAdvancedTracking(UseAdvancedTracking), MRI(MRI), TII(TII) {
    if (!TargetRegisterInfo::isPhysicalRegister(Reg)) {
      Def = MRI.getVRegDef(Reg);
      DefIdx = MRI.def_begin(Reg).getOperandNo();
    }
  }

  /// \brief Create a ValueTracker instance for the value defined by
  /// the pair \p MI, \p DefIdx.
  /// Unlike the other constructor, the value tracker produced by this one
  /// may be able to find a new source when the definition is a physical
  /// register.
  /// This could be useful to rewrite target specific instructions into
  /// generic copy instructions.
  ValueTracker(const MachineInstr &MI, unsigned DefIdx, unsigned DefSubReg,
               const MachineRegisterInfo &MRI,
               bool UseAdvancedTracking = false,
               const TargetInstrInfo *TII = nullptr)
      : Def(&MI), DefIdx(DefIdx), DefSubReg(DefSubReg),
        UseAdvancedTracking(UseAdvancedTracking), MRI(MRI), TII(TII) {
    assert(DefIdx < Def->getDesc().getNumDefs() &&
           Def->getOperand(DefIdx).isReg() && "Invalid definition");
    Reg = Def->getOperand(DefIdx).getReg();
  }

  /// \brief Following the use-def chain, get the next available source
  /// for the tracked value.
  /// \return A ValueTrackerResult containing a set of registers
  /// and sub registers with tracked values. A ValueTrackerResult with
  /// an empty set of registers means no source was found.
  ValueTrackerResult getNextSource();

  /// \brief Get the last register where the initial value can be found.
  /// Initially this is the register of the definition.
  /// Then, after each successful call to getNextSource, this is the
  /// register of the last source.
  unsigned getReg() const { return Reg; }
};

} // end anonymous namespace

char PeepholeOptimizer::ID = 0;

char &llvm::PeepholeOptimizerID = PeepholeOptimizer::ID;

INITIALIZE_PASS_BEGIN(PeepholeOptimizer, DEBUG_TYPE,
                      "Peephole Optimizations", false, false)
INITIALIZE_PASS_DEPENDENCY(MachineDominatorTree)
INITIALIZE_PASS_DEPENDENCY(MachineLoopInfo)
INITIALIZE_PASS_END(PeepholeOptimizer, DEBUG_TYPE,
                    "Peephole Optimizations", false, false)

/// If the instruction is a copy-like instruction, i.e. it reads a single
/// register and writes a single register and it does not modify the source,
/// and if the source value is preserved as a sub-register of the result, then
/// replace all reachable uses of the source with the subreg of the result.
///
/// Do not generate an EXTRACT that is used only in a debug use, as this
/// changes the code. Since this code does not currently share EXTRACTs, just
/// ignore all debug uses.
bool PeepholeOptimizer::
optimizeExtInstr(MachineInstr *MI, MachineBasicBlock *MBB,
                 SmallPtrSetImpl<MachineInstr*> &LocalMIs) {
  unsigned SrcReg, DstReg, SubIdx;
  if (!TII->isCoalescableExtInstr(*MI, SrcReg, DstReg, SubIdx))
    return false;

  if (TargetRegisterInfo::isPhysicalRegister(DstReg) ||
      TargetRegisterInfo::isPhysicalRegister(SrcReg))
    return false;

  if (MRI->hasOneNonDBGUse(SrcReg))
    // No other uses.
    return false;

  // Ensure DstReg can get a register class that actually supports
  // sub-registers. Don't change the class until we commit.
  const TargetRegisterClass *DstRC = MRI->getRegClass(DstReg);
  DstRC = TRI->getSubClassWithSubReg(DstRC, SubIdx);
  if (!DstRC)
    return false;

  // The ext instr may be operating on a sub-register of SrcReg as well.
  // PPC::EXTSW is a 32 -> 64-bit sign extension, but it reads a 64-bit
  // register.
  // If UseSrcSubIdx is Set, SubIdx also applies to SrcReg, and only uses of
  // SrcReg:SubIdx should be replaced.
  bool UseSrcSubIdx =
      TRI->getSubClassWithSubReg(MRI->getRegClass(SrcReg), SubIdx) != nullptr;

  // The source has other uses. See if we can replace the other uses with use of
  // the result of the extension.
  SmallPtrSet<MachineBasicBlock*, 4> ReachedBBs;
  for (MachineInstr &UI : MRI->use_nodbg_instructions(DstReg))
    ReachedBBs.insert(UI.getParent());

  // Uses that are in the same BB of uses of the result of the instruction.
  SmallVector<MachineOperand*, 8> Uses;

  // Uses that the result of the instruction can reach.
  SmallVector<MachineOperand*, 8> ExtendedUses;

  bool ExtendLife = true;
  for (MachineOperand &UseMO : MRI->use_nodbg_operands(SrcReg)) {
    MachineInstr *UseMI = UseMO.getParent();
    if (UseMI == MI)
      continue;

    if (UseMI->isPHI()) {
      ExtendLife = false;
      continue;
    }

    // Only accept uses of SrcReg:SubIdx.
    if (UseSrcSubIdx && UseMO.getSubReg() != SubIdx)
      continue;

    // It's an error to translate this:
    //
    //    %reg1025 = <sext> %reg1024
    //     ...
    //    %reg1026 = SUBREG_TO_REG 0, %reg1024, 4
    //
    // into this:
    //
    //    %reg1025 = <sext> %reg1024
    //     ...
    //    %reg1027 = COPY %reg1025:4
    //    %reg1026 = SUBREG_TO_REG 0, %reg1027, 4
    //
    // The problem here is that SUBREG_TO_REG is there to assert that an
    // implicit zext occurs. It doesn't insert a zext instruction. If we allow
    // the COPY here, it will give us the value after the <sext>, not the
    // original value of %reg1024 before <sext>.
    if (UseMI->getOpcode() == TargetOpcode::SUBREG_TO_REG)
      continue;

    MachineBasicBlock *UseMBB = UseMI->getParent();
    if (UseMBB == MBB) {
      // Local uses that come after the extension.
      if (!LocalMIs.count(UseMI))
        Uses.push_back(&UseMO);
    } else if (ReachedBBs.count(UseMBB)) {
      // Non-local uses where the result of the extension is used. Always
      // replace these unless it's a PHI.
      Uses.push_back(&UseMO);
    } else if (Aggressive && DT->dominates(MBB, UseMBB)) {
      // We may want to extend the live range of the extension result in order
      // to replace these uses.
      ExtendedUses.push_back(&UseMO);
    } else {
      // Both will be live out of the def MBB anyway. Don't extend live range of
      // the extension result.
      ExtendLife = false;
      break;
    }
  }

  if (ExtendLife && !ExtendedUses.empty())
    // Extend the liveness of the extension result.
    Uses.append(ExtendedUses.begin(), ExtendedUses.end());

  // Now replace all uses.
  bool Changed = false;
  if (!Uses.empty()) {
    SmallPtrSet<MachineBasicBlock*, 4> PHIBBs;

    // Look for PHI uses of the extended result; we don't want to extend the
    // liveness of a PHI input. It breaks all kinds of assumptions downstream.
    // A PHI use is expected to be the kill of its source values.
    for (MachineInstr &UI : MRI->use_nodbg_instructions(DstReg))
      if (UI.isPHI())
        PHIBBs.insert(UI.getParent());

    const TargetRegisterClass *RC = MRI->getRegClass(SrcReg);
    for (unsigned i = 0, e = Uses.size(); i != e; ++i) {
      MachineOperand *UseMO = Uses[i];
      MachineInstr *UseMI = UseMO->getParent();
      MachineBasicBlock *UseMBB = UseMI->getParent();
      if (PHIBBs.count(UseMBB))
        continue;

      // About to add uses of DstReg, so clear DstReg's kill flags.
      if (!Changed) {
        MRI->clearKillFlags(DstReg);
        MRI->constrainRegClass(DstReg, DstRC);
      }

      unsigned NewVR = MRI->createVirtualRegister(RC);
      MachineInstr *Copy = BuildMI(*UseMBB, UseMI, UseMI->getDebugLoc(),
                                   TII->get(TargetOpcode::COPY), NewVR)
        .addReg(DstReg, 0, SubIdx);
      // SubIdx applies to both SrcReg and DstReg when UseSrcSubIdx is set.
      if (UseSrcSubIdx) {
        Copy->getOperand(0).setSubReg(SubIdx);
        Copy->getOperand(0).setIsUndef();
      }
      UseMO->setReg(NewVR);
      ++NumReuse;
      Changed = true;
    }
  }

  return Changed;
}

/// If the instruction is a compare and the previous instruction it's comparing
/// against already sets (or could be modified to set) the same flag as the
/// compare, then we can remove the comparison and use the flag from the
/// previous instruction.
bool PeepholeOptimizer::optimizeCmpInstr(MachineInstr *MI,
                                         MachineBasicBlock *MBB) {
  // If this instruction is a comparison against zero and isn't comparing a
  // physical register, we can try to optimize it.
  unsigned SrcReg, SrcReg2;
  int CmpMask, CmpValue;
  if (!TII->analyzeCompare(*MI, SrcReg, SrcReg2, CmpMask, CmpValue) ||
      TargetRegisterInfo::isPhysicalRegister(SrcReg) ||
      (SrcReg2 != 0 && TargetRegisterInfo::isPhysicalRegister(SrcReg2)))
    return false;

  // Attempt to optimize the comparison instruction.
  if (TII->optimizeCompareInstr(*MI, SrcReg, SrcReg2, CmpMask, CmpValue, MRI)) {
    ++NumCmps;
    return true;
  }

  return false;
}

/// Optimize a select instruction.
bool PeepholeOptimizer::optimizeSelect(MachineInstr *MI,
                            SmallPtrSetImpl<MachineInstr *> &LocalMIs) {
  unsigned TrueOp = 0;
  unsigned FalseOp = 0;
  bool Optimizable = false;
  SmallVector<MachineOperand, 4> Cond;
  if (TII->analyzeSelect(*MI, Cond, TrueOp, FalseOp, Optimizable))
    return false;
  if (!Optimizable)
    return false;
  if (!TII->optimizeSelect(*MI, LocalMIs))
    return false;
  MI->eraseFromParent();
  ++NumSelects;
  return true;
}

/// \brief Check if a simpler conditional branch can be generated.
bool PeepholeOptimizer::optimizeCondBranch(MachineInstr *MI) {
  return TII->optimizeCondBranch(*MI);
}

/// \brief Try to find the next source that shares the same register file
/// for the value defined by \p Reg and \p SubReg.
/// When true is returned, the \p RewriteMap can be used by the client to
/// retrieve all Def -> Use along the way up to the next source. Any found
/// Use that is not itself a key for another entry is the next source to
/// use. During the search for the next source, multiple sources can be found
/// given multiple incoming sources of a PHI instruction. In this case, we
/// look in each PHI source for the next source; all found next sources must
/// share the same register file as \p Reg and \p SubReg. The client should
/// then be capable of rewriting all intermediate PHIs to get the next source.
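///
/// For example (a hypothetical rewrite map), for
///   %2 = COPY %1 and %1 = PHI %0, %3
/// the map may hold %2 -> {%1} and %1 -> {%0, %3}; %0 and %3, which are not
/// keys themselves, are the next sources.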
/// \return False if no alternative sources are available. True otherwise.
bool PeepholeOptimizer::findNextSource(unsigned Reg, unsigned SubReg,
                                       RewriteMapTy &RewriteMap) {
  // Do not try to find a new source for a physical register.
  // So far we do not have any motivating example for doing that.
  // Thus, instead of maintaining untested code, we will revisit that if
  // that changes at some point.
  if (TargetRegisterInfo::isPhysicalRegister(Reg))
    return false;
  const TargetRegisterClass *DefRC = MRI->getRegClass(Reg);

  SmallVector<TargetInstrInfo::RegSubRegPair, 4> SrcToLook;
  TargetInstrInfo::RegSubRegPair CurSrcPair(Reg, SubReg);
  SrcToLook.push_back(CurSrcPair);

  unsigned PHICount = 0;
  while (!SrcToLook.empty() && PHICount < RewritePHILimit) {
    TargetInstrInfo::RegSubRegPair Pair = SrcToLook.pop_back_val();
    // As explained above, do not handle physical registers.
    if (TargetRegisterInfo::isPhysicalRegister(Pair.Reg))
      return false;

    CurSrcPair = Pair;
    ValueTracker ValTracker(CurSrcPair.Reg, CurSrcPair.SubReg, *MRI,
                            !DisableAdvCopyOpt, TII);

    // Follow the chain of copies until we find a more suitable source, a phi
    // or have to abort.
    while (true) {
      ValueTrackerResult Res = ValTracker.getNextSource();
      // Abort at the end of a chain (without finding a suitable source).
      if (!Res.isValid())
        return false;

      // Insert the Def -> Use entry for the recently found source.
      ValueTrackerResult CurSrcRes = RewriteMap.lookup(CurSrcPair);
      if (CurSrcRes.isValid()) {
        assert(CurSrcRes == Res && "ValueTrackerResult found must match");
        // An existing entry with multiple sources is a PHI cycle we must avoid.
        // Otherwise it's an entry with a valid next source we already found.
        if (CurSrcRes.getNumSources() > 1) {
          DEBUG(dbgs() << "findNextSource: found PHI cycle, aborting...\n");
          return false;
        }
        break;
      }
      RewriteMap.insert(std::make_pair(CurSrcPair, Res));

      // A ValueTrackerResult usually has one source unless it's the result
      // from a PHI instruction. Add the found PHI edges to be looked up
      // further.
      unsigned NumSrcs = Res.getNumSources();
      if (NumSrcs > 1) {
        PHICount++;
        for (unsigned i = 0; i < NumSrcs; ++i)
          SrcToLook.push_back(TargetInstrInfo::RegSubRegPair(
              Res.getSrcReg(i), Res.getSrcSubReg(i)));
        break;
      }

      CurSrcPair.Reg = Res.getSrcReg(0);
      CurSrcPair.SubReg = Res.getSrcSubReg(0);
      // Do not extend the live-ranges of physical registers as they add
      // constraints to the register allocator. Moreover, if we want to extend
      // the live-range of a physical register, unlike an SSA virtual register,
      // we will have to check that it isn't redefined before the related use.
      if (TargetRegisterInfo::isPhysicalRegister(CurSrcPair.Reg))
        return false;

      // Keep following the chain if the value isn't any better yet.
      const TargetRegisterClass *SrcRC = MRI->getRegClass(CurSrcPair.Reg);
      if (!TRI->shouldRewriteCopySrc(DefRC, SubReg, SrcRC, CurSrcPair.SubReg))
        continue;

      // We currently cannot deal with subreg operands on PHI instructions
      // (see insertPHI()).
      if (PHICount > 0 && CurSrcPair.SubReg != 0)
        continue;

      // We found a suitable source, and are done with this chain.
      break;
    }
  }

  // If we did not find a more suitable source, there is nothing to optimize.
  return CurSrcPair.Reg != Reg;
}

/// \brief Insert a PHI instruction with incoming edges \p SrcRegs that are
/// guaranteed to have the same register class. This is necessary whenever we
/// successfully traverse a PHI instruction and find suitable sources coming
/// from its edges. By inserting a new PHI, we provide a rewritten PHI def
/// suitable to be used in a new COPY instruction.
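///
/// For example (hypothetical virtual registers), when rewriting
///   %dst = COPY %phi, with %phi = PHI %a, %b
/// where %a and %b have better sources %a' and %b', a new
///   %phi' = PHI %a', %b'
/// is inserted so that the copy can be rewritten as %dst = COPY %phi'.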
static MachineInstr *
insertPHI(MachineRegisterInfo *MRI, const TargetInstrInfo *TII,
          const SmallVectorImpl<TargetInstrInfo::RegSubRegPair> &SrcRegs,
          MachineInstr *OrigPHI) {
  assert(!SrcRegs.empty() && "No sources to create a PHI instruction?");

  const TargetRegisterClass *NewRC = MRI->getRegClass(SrcRegs[0].Reg);
  // NewRC is only correct if no subregisters are involved. findNextSource()
  // should have rejected those cases already.
  assert(SrcRegs[0].SubReg == 0 && "should not have subreg operand");
  unsigned NewVR = MRI->createVirtualRegister(NewRC);
  MachineBasicBlock *MBB = OrigPHI->getParent();
  MachineInstrBuilder MIB = BuildMI(*MBB, OrigPHI, OrigPHI->getDebugLoc(),
                                    TII->get(TargetOpcode::PHI), NewVR);

  unsigned MBBOpIdx = 2;
  for (auto RegPair : SrcRegs) {
    MIB.addReg(RegPair.Reg, 0, RegPair.SubReg);
    MIB.addMBB(OrigPHI->getOperand(MBBOpIdx).getMBB());
    // Since we're extending the lifetime of RegPair.Reg, clear the
    // kill flags to account for that and make sure RegPair.Reg reaches
    // the new PHI.
    MRI->clearKillFlags(RegPair.Reg);
    MBBOpIdx += 2;
  }

  return MIB;
}

namespace {

/// \brief Helper class to rewrite the arguments of a copy-like instruction.
class CopyRewriter {
protected:
  /// The copy-like instruction.
  MachineInstr &CopyLike;

  /// The index of the source being rewritten.
  unsigned CurrentSrcIdx = 0;

public:
  CopyRewriter(MachineInstr &MI) : CopyLike(MI) {}
  virtual ~CopyRewriter() = default;

| 833 | /// \brief Get the next rewritable source (SrcReg, SrcSubReg) and |
| 834 | /// the related value that it affects (TrackReg, TrackSubReg). |
| 835 | /// A source is considered rewritable if its register class and the
| 836 | /// register class of the related TrackReg may not be register
| 837 | /// coalescer friendly. In other words, given a copy-like instruction,
| 838 | /// not all the arguments may be returned as rewritable sources, since
| 839 | /// some arguments are known to be register coalescer friendly.
| 840 | /// |
| 841 | /// Each call of this method moves the current source to the next |
| 842 | /// rewritable source. |
| 843 | /// For instance, let CopyLike be the instruction to rewrite. |
| 844 | /// CopyLike has one definition and one source: |
| 845 | /// dst.dstSubIdx = CopyLike src.srcSubIdx. |
| 846 | /// |
| 847 | /// The first call will give the first rewritable source, i.e., |
| 848 | /// the only source this instruction has: |
| 849 | /// (SrcReg, SrcSubReg) = (src, srcSubIdx). |
| 850 | /// This source defines the whole definition, i.e., |
| 851 | /// (TrackReg, TrackSubReg) = (dst, dstSubIdx). |
| 852 | /// |
Matt Arsenault | 3099156 | 2015-09-09 00:38:33 +0000 | [diff] [blame] | 853 | /// The second and subsequent calls will return false, as there is only one |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 854 | /// rewritable source. |
| 855 | /// |
| 856 | /// \return True if a rewritable source has been found, false otherwise. |
| 857 | /// The output arguments are valid if and only if true is returned. |
| 858 | virtual bool getNextRewritableSource(unsigned &SrcReg, unsigned &SrcSubReg, |
| 859 | unsigned &TrackReg, |
| 860 | unsigned &TrackSubReg) { |
Matt Arsenault | 3099156 | 2015-09-09 00:38:33 +0000 | [diff] [blame] | 861 | // If CurrentSrcIdx == 1, this means this function has already been called |
| 862 | // once. CopyLike has one definition and one argument, thus, there is |
| 863 | // nothing else to rewrite. |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 864 | if (!CopyLike.isCopy() || CurrentSrcIdx == 1) |
| 865 | return false; |
| 866 | // This is the first call to getNextRewritableSource. |
| 867 | // Move the CurrentSrcIdx to remember that we made that call. |
| 868 | CurrentSrcIdx = 1; |
| 869 | // The rewritable source is the argument. |
| 870 | const MachineOperand &MOSrc = CopyLike.getOperand(1); |
| 871 | SrcReg = MOSrc.getReg(); |
| 872 | SrcSubReg = MOSrc.getSubReg(); |
| 873 | // What we track are the alternative sources of the definition. |
| 874 | const MachineOperand &MODef = CopyLike.getOperand(0); |
| 875 | TrackReg = MODef.getReg(); |
| 876 | TrackSubReg = MODef.getSubReg(); |
| 877 | return true; |
| 878 | } |
| 879 | |
| 880 | /// \brief Rewrite the current source with \p NewReg and \p NewSubReg |
| 881 | /// if possible. |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 882 | /// \return True if the rewriting was possible, false otherwise. |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 883 | virtual bool RewriteCurrentSource(unsigned NewReg, unsigned NewSubReg) { |
| 884 | if (!CopyLike.isCopy() || CurrentSrcIdx != 1) |
| 885 | return false; |
| 886 | MachineOperand &MOSrc = CopyLike.getOperand(CurrentSrcIdx); |
| 887 | MOSrc.setReg(NewReg); |
| 888 | MOSrc.setSubReg(NewSubReg); |
| 889 | return true; |
| 890 | } |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 891 | |
Bruno Cardoso Lopes | 27fd069 | 2015-08-19 18:53:36 +0000 | [diff] [blame] | 892 | /// \brief Given a \p Def.Reg and Def.SubReg pair, use \p RewriteMap to find |
| 893 | /// the new source to use for the rewrite. If \p HandleMultipleSources is true
| 894 | /// and multiple sources for a given \p Def are found along the way, we have
| 895 | /// found a PHI instruction that needs to be rewritten.
| 896 | /// TODO: HandleMultipleSources should be removed once we test PHI handling |
| 897 | /// with coalescable copies. |
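| | ///
| | /// For instance (illustrative registers), with a \p RewriteMap containing
| | ///   %c -> {%b} and %b -> {%a},
| | /// a query for \p Def = %c walks %c -> %b -> %a and returns %a, because %a
| | /// has no further entry in the map.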
| 898 | TargetInstrInfo::RegSubRegPair |
| 899 | getNewSource(MachineRegisterInfo *MRI, const TargetInstrInfo *TII, |
| 900 | TargetInstrInfo::RegSubRegPair Def, |
| 901 | PeepholeOptimizer::RewriteMapTy &RewriteMap, |
| 902 | bool HandleMultipleSources = true) { |
Bruno Cardoso Lopes | 27fd069 | 2015-08-19 18:53:36 +0000 | [diff] [blame] | 903 | TargetInstrInfo::RegSubRegPair LookupSrc(Def.Reg, Def.SubReg); |
| 904 | do { |
| 905 | ValueTrackerResult Res = RewriteMap.lookup(LookupSrc); |
| 906 | // If there are no entries on the map, LookupSrc is the new source. |
| 907 | if (!Res.isValid()) |
| 908 | return LookupSrc; |
| 909 | |
| 910 | // If there's only one source for this definition, keep searching.
| 911 | unsigned NumSrcs = Res.getNumSources(); |
| 912 | if (NumSrcs == 1) { |
| 913 | LookupSrc.Reg = Res.getSrcReg(0); |
| 914 | LookupSrc.SubReg = Res.getSrcSubReg(0); |
| 915 | continue; |
| 916 | } |
| 917 | |
Matt Arsenault | 3099156 | 2015-09-09 00:38:33 +0000 | [diff] [blame] | 918 | // TODO: Remove once multiple srcs w/ coalescable copies are supported. |
Bruno Cardoso Lopes | 27fd069 | 2015-08-19 18:53:36 +0000 | [diff] [blame] | 919 | if (!HandleMultipleSources) |
| 920 | break; |
| 921 | |
| 922 | // Multiple sources: recurse into each source to find a new source
| 923 | // for it. Then, rewrite the PHI according to its new edges.
| 924 | SmallVector<TargetInstrInfo::RegSubRegPair, 4> NewPHISrcs; |
| 925 | for (unsigned i = 0; i < NumSrcs; ++i) { |
| 926 | TargetInstrInfo::RegSubRegPair PHISrc(Res.getSrcReg(i), |
| 927 | Res.getSrcSubReg(i)); |
| 928 | NewPHISrcs.push_back( |
| 929 | getNewSource(MRI, TII, PHISrc, RewriteMap, HandleMultipleSources)); |
| 930 | } |
| 931 | |
| 932 | // Build the new PHI node and return its def register as the new source. |
| 933 | MachineInstr *OrigPHI = const_cast<MachineInstr *>(Res.getInst()); |
| 934 | MachineInstr *NewPHI = insertPHI(MRI, TII, NewPHISrcs, OrigPHI); |
| 935 | DEBUG(dbgs() << "-- getNewSource\n"); |
| 936 | DEBUG(dbgs() << " Replacing: " << *OrigPHI); |
| 937 | DEBUG(dbgs() << " With: " << *NewPHI); |
| 938 | const MachineOperand &MODef = NewPHI->getOperand(0); |
| 939 | return TargetInstrInfo::RegSubRegPair(MODef.getReg(), MODef.getSubReg()); |
| 940 | |
Eugene Zelenko | 1804a77 | 2016-08-25 00:45:04 +0000 | [diff] [blame] | 941 | } while (true); |
Bruno Cardoso Lopes | 27fd069 | 2015-08-19 18:53:36 +0000 | [diff] [blame] | 942 | |
| 943 | return TargetInstrInfo::RegSubRegPair(0, 0); |
| 944 | } |
| 945 | |
| 946 | /// \brief Rewrite the source found through \p Def by using \p RewriteMap,
| 947 | /// and create a new COPY instruction. More info about RewriteMap in
| 948 | /// PeepholeOptimizer::findNextSource. Right now this is only used to handle
| 949 | /// uncoalescable copies, since they are copy-like instructions that aren't
| 950 | /// recognized by the register allocator.
| 951 | virtual MachineInstr * |
| 952 | RewriteSource(TargetInstrInfo::RegSubRegPair Def, |
| 953 | PeepholeOptimizer::RewriteMapTy &RewriteMap) { |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 954 | return nullptr; |
| 955 | } |
| 956 | }; |
| 957 | |
| 958 | /// \brief Helper class to rewrite uncoalescable copy like instructions |
| 959 | /// into new COPY (coalescable friendly) instructions. |
| 960 | class UncoalescableRewriter : public CopyRewriter { |
| 961 | protected: |
| 962 | const TargetInstrInfo &TII; |
| 963 | MachineRegisterInfo &MRI; |
Eugene Zelenko | 32a4056 | 2017-09-11 23:00:48 +0000 | [diff] [blame] | 964 | |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 965 | /// The number of defs in the copy-like instruction.
| 966 | unsigned NumDefs; |
| 967 | |
| 968 | public: |
| 969 | UncoalescableRewriter(MachineInstr &MI, const TargetInstrInfo &TII, |
| 970 | MachineRegisterInfo &MRI) |
| 971 | : CopyRewriter(MI), TII(TII), MRI(MRI) { |
| 972 | NumDefs = MI.getDesc().getNumDefs(); |
| 973 | } |
| 974 | |
| 975 | /// \brief Get the next rewritable def source (TrackReg, TrackSubReg).
| 976 | /// All such sources need to be considered rewritable in order to
| 977 | /// rewrite an uncoalescable copy-like instruction. This method returns
| 978 | /// each definition that must be checked for rewritability.
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 979 | bool getNextRewritableSource(unsigned &SrcReg, unsigned &SrcSubReg, |
| 980 | unsigned &TrackReg, |
| 981 | unsigned &TrackSubReg) override { |
| 982 | // Find the next non-dead definition and continue from there. |
| 983 | if (CurrentSrcIdx == NumDefs) |
| 984 | return false; |
| 985 | |
| 986 | while (CopyLike.getOperand(CurrentSrcIdx).isDead()) { |
| 987 | ++CurrentSrcIdx; |
| 988 | if (CurrentSrcIdx == NumDefs) |
| 989 | return false; |
| 990 | } |
| 991 | |
| 992 | // What we track are the alternative sources of the definition. |
| 993 | const MachineOperand &MODef = CopyLike.getOperand(CurrentSrcIdx); |
| 994 | TrackReg = MODef.getReg(); |
| 995 | TrackSubReg = MODef.getSubReg(); |
| 996 | |
| 997 | CurrentSrcIdx++; |
| 998 | return true; |
| 999 | } |
| 1000 | |
Bruno Cardoso Lopes | 27fd069 | 2015-08-19 18:53:36 +0000 | [diff] [blame] | 1001 | /// \brief Rewrite the source found through \p Def by using \p RewriteMap,
| 1002 | /// and create a new COPY instruction. More info about RewriteMap in
| 1003 | /// PeepholeOptimizer::findNextSource. Right now this is only used to handle
| 1004 | /// uncoalescable copies, since they are copy-like instructions that aren't
| 1005 | /// recognized by the register allocator.
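| | ///
| | /// Sketch of the rewrite (illustrative registers): for a definition
| | /// (%def, subIdx) whose new source (%src, srcSubIdx) is found through
| | /// \p RewriteMap, this emits
| | ///   %newvr.subIdx = COPY %src.srcSubIdx
| | /// and then replaces every use of %def with %newvr.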
| 1006 | MachineInstr * |
| 1007 | RewriteSource(TargetInstrInfo::RegSubRegPair Def, |
| 1008 | PeepholeOptimizer::RewriteMapTy &RewriteMap) override { |
| 1009 | assert(!TargetRegisterInfo::isPhysicalRegister(Def.Reg) && |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1010 | "We do not rewrite physical registers"); |
| 1011 | |
Bruno Cardoso Lopes | 27fd069 | 2015-08-19 18:53:36 +0000 | [diff] [blame] | 1012 | // Find the new source to use in the COPY rewrite. |
| 1013 | TargetInstrInfo::RegSubRegPair NewSrc = |
| 1014 | getNewSource(&MRI, &TII, Def, RewriteMap); |
| 1015 | |
| 1016 | // Insert the COPY. |
| 1017 | const TargetRegisterClass *DefRC = MRI.getRegClass(Def.Reg); |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1018 | unsigned NewVR = MRI.createVirtualRegister(DefRC); |
| 1019 | |
| 1020 | MachineInstr *NewCopy = |
| 1021 | BuildMI(*CopyLike.getParent(), &CopyLike, CopyLike.getDebugLoc(), |
| 1022 | TII.get(TargetOpcode::COPY), NewVR) |
Bruno Cardoso Lopes | 27fd069 | 2015-08-19 18:53:36 +0000 | [diff] [blame] | 1023 | .addReg(NewSrc.Reg, 0, NewSrc.SubReg); |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1024 | |
Bruno Cardoso Lopes | 27fd069 | 2015-08-19 18:53:36 +0000 | [diff] [blame] | 1025 | NewCopy->getOperand(0).setSubReg(Def.SubReg); |
| 1026 | if (Def.SubReg) |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1027 | NewCopy->getOperand(0).setIsUndef(); |
| 1028 | |
Bruno Cardoso Lopes | 27fd069 | 2015-08-19 18:53:36 +0000 | [diff] [blame] | 1029 | DEBUG(dbgs() << "-- RewriteSource\n"); |
| 1030 | DEBUG(dbgs() << " Replacing: " << CopyLike); |
| 1031 | DEBUG(dbgs() << " With: " << *NewCopy); |
| 1032 | MRI.replaceRegWith(Def.Reg, NewVR); |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1033 | MRI.clearKillFlags(NewVR); |
| 1034 | |
Bruno Cardoso Lopes | 27fd069 | 2015-08-19 18:53:36 +0000 | [diff] [blame] | 1035 | // We extended the lifetime of NewSrc.Reg, clear the kill flags to |
| 1036 | // account for that. |
| 1037 | MRI.clearKillFlags(NewSrc.Reg); |
| 1038 | |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1039 | return NewCopy; |
| 1040 | } |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1041 | }; |
| 1042 | |
| 1043 | /// \brief Specialized rewriter for INSERT_SUBREG instruction. |
| 1044 | class InsertSubregRewriter : public CopyRewriter { |
| 1045 | public: |
| 1046 | InsertSubregRewriter(MachineInstr &MI) : CopyRewriter(MI) { |
| 1047 | assert(MI.isInsertSubreg() && "Invalid instruction"); |
| 1048 | } |
| 1049 | |
| 1050 | /// \brief See CopyRewriter::getNextRewritableSource. |
| 1051 | /// Here CopyLike has the following form: |
| 1052 | /// dst = INSERT_SUBREG Src1, Src2.src2SubIdx, subIdx. |
| 1053 | /// Src1 has the same register class as dst, hence, there is
| 1054 | /// nothing to rewrite.
| 1055 | /// Src2.src2SubIdx, however, may not be register coalescer friendly.
| 1056 | /// Therefore, the first call to this method returns: |
| 1057 | /// (SrcReg, SrcSubReg) = (Src2, src2SubIdx). |
| 1058 | /// (TrackReg, TrackSubReg) = (dst, subIdx). |
| 1059 | /// |
| 1060 | /// Subsequent calls will return false.
| 1061 | bool getNextRewritableSource(unsigned &SrcReg, unsigned &SrcSubReg, |
| 1062 | unsigned &TrackReg, |
| 1063 | unsigned &TrackSubReg) override { |
| 1064 | // If we have already returned the only source we can rewrite, return false.
| 1065 | if (CurrentSrcIdx == 2) |
| 1066 | return false; |
| 1067 | // We are looking at v2 = INSERT_SUBREG v0, v1, sub0. |
| 1068 | CurrentSrcIdx = 2; |
| 1069 | const MachineOperand &MOInsertedReg = CopyLike.getOperand(2); |
| 1070 | SrcReg = MOInsertedReg.getReg(); |
| 1071 | SrcSubReg = MOInsertedReg.getSubReg(); |
| 1072 | const MachineOperand &MODef = CopyLike.getOperand(0); |
| 1073 | |
| 1074 | // We want to track something that is compatible with the |
| 1075 | // partial definition. |
| 1076 | TrackReg = MODef.getReg(); |
| 1077 | if (MODef.getSubReg()) |
Matt Arsenault | 3099156 | 2015-09-09 00:38:33 +0000 | [diff] [blame] | 1078 | // Bail if we have to compose sub-register indices. |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1079 | return false; |
| 1080 | TrackSubReg = (unsigned)CopyLike.getOperand(3).getImm(); |
| 1081 | return true; |
| 1082 | } |
Eugene Zelenko | 1804a77 | 2016-08-25 00:45:04 +0000 | [diff] [blame] | 1083 | |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1084 | bool RewriteCurrentSource(unsigned NewReg, unsigned NewSubReg) override { |
| 1085 | if (CurrentSrcIdx != 2) |
| 1086 | return false; |
| 1087 | // We are rewriting the inserted reg. |
| 1088 | MachineOperand &MO = CopyLike.getOperand(CurrentSrcIdx); |
| 1089 | MO.setReg(NewReg); |
| 1090 | MO.setSubReg(NewSubReg); |
| 1091 | return true; |
| 1092 | } |
| 1093 | }; |
| 1094 | |
| 1095 | /// \brief Specialized rewriter for EXTRACT_SUBREG instruction. |
| 1096 | class ExtractSubregRewriter : public CopyRewriter { |
| 1097 | const TargetInstrInfo &TII; |
| 1098 | |
| 1099 | public: |
| 1100 | ExtractSubregRewriter(MachineInstr &MI, const TargetInstrInfo &TII) |
| 1101 | : CopyRewriter(MI), TII(TII) { |
| 1102 | assert(MI.isExtractSubreg() && "Invalid instruction"); |
| 1103 | } |
| 1104 | |
| 1105 | /// \brief See CopyRewriter::getNextRewritableSource. |
| 1106 | /// Here CopyLike has the following form: |
| 1107 | /// dst.dstSubIdx = EXTRACT_SUBREG Src, subIdx. |
| 1108 | /// There is only one rewritable source: Src.subIdx, |
| 1109 | /// which defines dst.dstSubIdx. |
| 1110 | bool getNextRewritableSource(unsigned &SrcReg, unsigned &SrcSubReg, |
| 1111 | unsigned &TrackReg, |
| 1112 | unsigned &TrackSubReg) override { |
| 1113 | // If we have already returned the only source we can rewrite, return false.
| 1114 | if (CurrentSrcIdx == 1) |
| 1115 | return false; |
| 1116 | // We are looking at v1 = EXTRACT_SUBREG v0, sub0. |
| 1117 | CurrentSrcIdx = 1; |
| 1118 | const MachineOperand &MOExtractedReg = CopyLike.getOperand(1); |
| 1119 | SrcReg = MOExtractedReg.getReg(); |
Matt Arsenault | 3099156 | 2015-09-09 00:38:33 +0000 | [diff] [blame] | 1120 | // If we have to compose sub-register indices, bail out. |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1121 | if (MOExtractedReg.getSubReg()) |
| 1122 | return false; |
| 1123 | |
| 1124 | SrcSubReg = CopyLike.getOperand(2).getImm(); |
| 1125 | |
| 1126 | // We want to track something that is compatible with the definition. |
| 1127 | const MachineOperand &MODef = CopyLike.getOperand(0); |
| 1128 | TrackReg = MODef.getReg(); |
| 1129 | TrackSubReg = MODef.getSubReg(); |
| 1130 | return true; |
| 1131 | } |
| 1132 | |
| 1133 | bool RewriteCurrentSource(unsigned NewReg, unsigned NewSubReg) override { |
| 1134 | // The only source we can rewrite is the input register. |
| 1135 | if (CurrentSrcIdx != 1) |
| 1136 | return false; |
| 1137 | |
| 1138 | CopyLike.getOperand(CurrentSrcIdx).setReg(NewReg); |
| 1139 | |
| 1140 | // If we find a source that does not require extracting anything,
| 1141 | // rewrite the operation with a copy. |
| 1142 | if (!NewSubReg) { |
| 1143 | // Move the current index to an invalid position. |
| 1144 | // We do not want another call to this method to be able |
| 1145 | // to make any changes.
| 1146 | CurrentSrcIdx = -1; |
| 1147 | // Rewrite the operation as a COPY. |
| 1148 | // Get rid of the sub-register index. |
| 1149 | CopyLike.RemoveOperand(2); |
| 1150 | // Morph the operation into a COPY. |
| 1151 | CopyLike.setDesc(TII.get(TargetOpcode::COPY)); |
| 1152 | return true; |
| 1153 | } |
| 1154 | CopyLike.getOperand(CurrentSrcIdx + 1).setImm(NewSubReg); |
| 1155 | return true; |
| 1156 | } |
| 1157 | }; |
| 1158 | |
| 1159 | /// \brief Specialized rewriter for REG_SEQUENCE instruction. |
| 1160 | class RegSequenceRewriter : public CopyRewriter { |
| 1161 | public: |
| 1162 | RegSequenceRewriter(MachineInstr &MI) : CopyRewriter(MI) { |
| 1163 | assert(MI.isRegSequence() && "Invalid instruction"); |
| 1164 | } |
| 1165 | |
| 1166 | /// \brief See CopyRewriter::getNextRewritableSource. |
| 1167 | /// Here CopyLike has the following form: |
| 1168 | /// dst = REG_SEQUENCE Src1.src1SubIdx, subIdx1, Src2.src2SubIdx, subIdx2. |
| 1169 | /// Each call will return a different source, walking all the available
| 1170 | /// sources.
| 1171 | /// |
| 1172 | /// The first call returns: |
| 1173 | /// (SrcReg, SrcSubReg) = (Src1, src1SubIdx). |
| 1174 | /// (TrackReg, TrackSubReg) = (dst, subIdx1). |
| 1175 | /// |
| 1176 | /// The second call returns: |
| 1177 | /// (SrcReg, SrcSubReg) = (Src2, src2SubIdx). |
| 1178 | /// (TrackReg, TrackSubReg) = (dst, subIdx2). |
| 1179 | /// |
| 1180 | /// And so on, until all the sources have been traversed, then |
| 1181 | /// it returns false. |
| 1182 | bool getNextRewritableSource(unsigned &SrcReg, unsigned &SrcSubReg, |
| 1183 | unsigned &TrackReg, |
| 1184 | unsigned &TrackSubReg) override { |
| 1185 | // We are looking at v0 = REG_SEQUENCE v1, sub1, v2, sub2, etc. |
| 1186 | |
| 1187 | // If this is the first call, move to the first argument. |
| 1188 | if (CurrentSrcIdx == 0) { |
| 1189 | CurrentSrcIdx = 1; |
| 1190 | } else { |
| 1191 | // Otherwise, move to the next argument and check that it is valid. |
| 1192 | CurrentSrcIdx += 2; |
| 1193 | if (CurrentSrcIdx >= CopyLike.getNumOperands()) |
| 1194 | return false; |
| 1195 | } |
| 1196 | const MachineOperand &MOInsertedReg = CopyLike.getOperand(CurrentSrcIdx); |
| 1197 | SrcReg = MOInsertedReg.getReg(); |
Matt Arsenault | 3099156 | 2015-09-09 00:38:33 +0000 | [diff] [blame] | 1198 | // If we have to compose sub-register indices, bail out. |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1199 | if ((SrcSubReg = MOInsertedReg.getSubReg())) |
| 1200 | return false; |
| 1201 | |
| 1202 | // We want to track something that is compatible with the related |
| 1203 | // partial definition. |
| 1204 | TrackSubReg = CopyLike.getOperand(CurrentSrcIdx + 1).getImm(); |
| 1205 | |
| 1206 | const MachineOperand &MODef = CopyLike.getOperand(0); |
| 1207 | TrackReg = MODef.getReg(); |
Matt Arsenault | 3099156 | 2015-09-09 00:38:33 +0000 | [diff] [blame] | 1208 | // If we have to compose sub-registers, bail. |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1209 | return MODef.getSubReg() == 0; |
| 1210 | } |
| 1211 | |
| 1212 | bool RewriteCurrentSource(unsigned NewReg, unsigned NewSubReg) override { |
| 1213 | // We cannot rewrite out-of-bounds operands.
| 1214 | // Moreover, rewritable sources are at odd positions. |
| 1215 | if ((CurrentSrcIdx & 1) != 1 || CurrentSrcIdx > CopyLike.getNumOperands()) |
| 1216 | return false; |
| 1217 | |
| 1218 | MachineOperand &MO = CopyLike.getOperand(CurrentSrcIdx); |
| 1219 | MO.setReg(NewReg); |
| 1220 | MO.setSubReg(NewSubReg); |
| 1221 | return true; |
| 1222 | } |
| 1223 | }; |
Eugene Zelenko | 1804a77 | 2016-08-25 00:45:04 +0000 | [diff] [blame] | 1224 | |
Eugene Zelenko | 32a4056 | 2017-09-11 23:00:48 +0000 | [diff] [blame] | 1225 | } // end anonymous namespace |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1226 | |
| 1227 | /// \brief Get the appropriate CopyRewriter for \p MI.
| 1228 | /// \return A pointer to a dynamically allocated CopyRewriter or nullptr |
| 1229 | /// if no rewriter works for \p MI. |
| 1230 | static CopyRewriter *getCopyRewriter(MachineInstr &MI, |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1231 | const TargetInstrInfo &TII, |
| 1232 | MachineRegisterInfo &MRI) { |
| 1233 | // Handle uncoalescable copy-like instructions. |
| 1234 | if (MI.isBitcast() || (MI.isRegSequenceLike() || MI.isInsertSubregLike() || |
| 1235 | MI.isExtractSubregLike())) |
| 1236 | return new UncoalescableRewriter(MI, TII, MRI); |
| 1237 | |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1238 | switch (MI.getOpcode()) { |
| 1239 | default: |
| 1240 | return nullptr; |
| 1241 | case TargetOpcode::COPY: |
| 1242 | return new CopyRewriter(MI); |
| 1243 | case TargetOpcode::INSERT_SUBREG: |
| 1244 | return new InsertSubregRewriter(MI); |
| 1245 | case TargetOpcode::EXTRACT_SUBREG: |
| 1246 | return new ExtractSubregRewriter(MI, TII); |
| 1247 | case TargetOpcode::REG_SEQUENCE: |
| 1248 | return new RegSequenceRewriter(MI); |
| 1249 | } |
| 1250 | llvm_unreachable(nullptr); |
| 1251 | } |
| 1252 | |
| 1253 | /// \brief Optimize generic copy instructions to avoid cross |
| 1254 | /// register bank copy. The optimization looks through a chain of |
| 1255 | /// copies and tries to find a source that has a compatible register |
| 1256 | /// class. |
| 1257 | /// Two register classes are considered to be compatible if they share |
| 1258 | /// the same register bank. |
| 1259 | /// New copies issued by this optimization are register allocator |
| 1260 | /// friendly. This optimization does not remove any copy, as doing so may
Matt Arsenault | 3099156 | 2015-09-09 00:38:33 +0000 | [diff] [blame] | 1261 | /// overconstrain the register allocator; instead it replaces some operands
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1262 | /// when possible.
| 1263 | /// \pre isCoalescableCopy(*MI) is true. |
| 1264 | /// \return True, when \p MI has been rewritten. False otherwise. |
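| | ///
| | /// For instance (illustrative registers), given
| | ///   %1 = COPY %0
| | ///   %2 = REG_SEQUENCE %1, sub0, %3, sub1
| | /// the %1 operand of the REG_SEQUENCE may be rewritten to use %0 directly,
| | /// as long as both registers live in the same register bank.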
| 1265 | bool PeepholeOptimizer::optimizeCoalescableCopy(MachineInstr *MI) { |
| 1266 | assert(MI && isCoalescableCopy(*MI) && "Invalid argument"); |
| 1267 | assert(MI->getDesc().getNumDefs() == 1 && |
| 1268 | "Coalescer can understand multiple defs?!"); |
| 1269 | const MachineOperand &MODef = MI->getOperand(0); |
| 1270 | // Do not rewrite physical definitions. |
| 1271 | if (TargetRegisterInfo::isPhysicalRegister(MODef.getReg())) |
| 1272 | return false; |
| 1273 | |
| 1274 | bool Changed = false; |
| 1275 | // Get the right rewriter for the current copy. |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1276 | std::unique_ptr<CopyRewriter> CpyRewriter(getCopyRewriter(*MI, *TII, *MRI)); |
Matt Arsenault | 3099156 | 2015-09-09 00:38:33 +0000 | [diff] [blame] | 1277 | // If none exists, bail out. |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1278 | if (!CpyRewriter) |
| 1279 | return false; |
| 1280 | // Rewrite each rewritable source. |
| 1281 | unsigned SrcReg, SrcSubReg, TrackReg, TrackSubReg; |
| 1282 | while (CpyRewriter->getNextRewritableSource(SrcReg, SrcSubReg, TrackReg, |
| 1283 | TrackSubReg)) { |
Bruno Cardoso Lopes | 27fd069 | 2015-08-19 18:53:36 +0000 | [diff] [blame] | 1284 | // Keep track of PHI nodes and their incoming edges when looking for sources.
| 1285 | RewriteMapTy RewriteMap; |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1286 | // Try to find a more suitable source. If we failed to do so, or got back
| 1287 | // the original source, move to the next source.
Bruno Cardoso Lopes | 27fd069 | 2015-08-19 18:53:36 +0000 | [diff] [blame] | 1288 | if (!findNextSource(TrackReg, TrackSubReg, RewriteMap)) |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1289 | continue; |
Bruno Cardoso Lopes | 27fd069 | 2015-08-19 18:53:36 +0000 | [diff] [blame] | 1290 | |
| 1291 | // Get the new source to rewrite. TODO: Only enable handling of multiple |
| 1292 | // sources (PHIs) once we have a motivating example and testcases for it. |
| 1293 | TargetInstrInfo::RegSubRegPair TrackPair(TrackReg, TrackSubReg); |
| 1294 | TargetInstrInfo::RegSubRegPair NewSrc = CpyRewriter->getNewSource( |
| 1295 | MRI, TII, TrackPair, RewriteMap, false /* multiple sources */); |
| 1296 | if (SrcReg == NewSrc.Reg || NewSrc.Reg == 0) |
| 1297 | continue; |
| 1298 | |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1299 | // Rewrite source. |
Bruno Cardoso Lopes | 27fd069 | 2015-08-19 18:53:36 +0000 | [diff] [blame] | 1300 | if (CpyRewriter->RewriteCurrentSource(NewSrc.Reg, NewSrc.SubReg)) { |
Quentin Colombet | 6b36337 | 2014-08-21 21:34:06 +0000 | [diff] [blame] | 1301 | // We may have extended the live-range of NewSrc, account for that. |
Bruno Cardoso Lopes | 27fd069 | 2015-08-19 18:53:36 +0000 | [diff] [blame] | 1302 | MRI->clearKillFlags(NewSrc.Reg); |
Quentin Colombet | 6b36337 | 2014-08-21 21:34:06 +0000 | [diff] [blame] | 1303 | Changed = true; |
| 1304 | } |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1305 | } |
| 1306 | // TODO: We could have a clean-up method to tidy the instruction. |
| 1307 | // E.g., v0 = INSERT_SUBREG v1, v1.sub0, sub0 |
| 1308 | // => v0 = COPY v1 |
| 1309 | // Currently we haven't seen a motivating example for that, and we
| 1310 | // want to avoid untested code. |
David Blaikie | dc3f01e | 2015-03-09 01:57:13 +0000 | [diff] [blame] | 1311 | NumRewrittenCopies += Changed; |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1312 | return Changed; |
| 1313 | } |
| 1314 | |
| 1315 | /// \brief Optimize copy-like instructions to create |
| 1316 | /// register coalescer friendly instruction. |
| 1317 | /// The optimization tries to kill-off the \p MI by looking |
| 1318 | /// through a chain of copies to find a source that has a compatible |
| 1319 | /// register class. |
| 1320 | /// If such a source is found, it replaces \p MI with a generic COPY
| 1321 | /// operation.
| 1322 | /// \pre isUncoalescableCopy(*MI) is true. |
| 1323 | /// \return True, when \p MI has been optimized. In that case, \p MI has |
| 1324 | /// been removed from its parent. |
| 1325 | /// All COPY instructions created are inserted into \p LocalMIs.
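| | ///
| | /// E.g. (illustrative, with a hypothetical target-specific opcode), for
| | ///   %d = TARGET_COPY_LIKE %s, ...
| | /// whose source chain leads back to a compatible register %a, this emits
| | ///   %new = COPY %a
| | /// rewrites the uses of %d to %new, and erases the original instruction.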
| 1326 | bool PeepholeOptimizer::optimizeUncoalescableCopy( |
| 1327 | MachineInstr *MI, SmallPtrSetImpl<MachineInstr *> &LocalMIs) { |
| 1328 | assert(MI && isUncoalescableCopy(*MI) && "Invalid argument"); |
| 1329 | |
| 1330 | // Check if we can rewrite all the values defined by this instruction. |
Bruno Cardoso Lopes | 27fd069 | 2015-08-19 18:53:36 +0000 | [diff] [blame] | 1331 | SmallVector<TargetInstrInfo::RegSubRegPair, 4> RewritePairs; |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1332 | // Get the right rewriter for the current copy. |
| 1333 | std::unique_ptr<CopyRewriter> CpyRewriter(getCopyRewriter(*MI, *TII, *MRI)); |
Matt Arsenault | 3099156 | 2015-09-09 00:38:33 +0000 | [diff] [blame] | 1334 | // If none exists, bail out. |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1335 | if (!CpyRewriter) |
| 1336 | return false; |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1337 | |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1338 | // Rewrite each rewritable source by generating new COPYs. This works |
| 1339 | // differently from optimizeCoalescableCopy since it first makes sure that all |
| 1340 | // definitions can be rewritten. |
Bruno Cardoso Lopes | 27fd069 | 2015-08-19 18:53:36 +0000 | [diff] [blame] | 1341 | RewriteMapTy RewriteMap; |
| 1342 | unsigned Reg, SubReg, CopyDefReg, CopyDefSubReg; |
| 1343 | while (CpyRewriter->getNextRewritableSource(Reg, SubReg, CopyDefReg, |
| 1344 | CopyDefSubReg)) { |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1345 | // If a physical register is here, this is probably for a good reason. |
| 1346 | // Do not rewrite that. |
Bruno Cardoso Lopes | 27fd069 | 2015-08-19 18:53:36 +0000 | [diff] [blame] | 1347 | if (TargetRegisterInfo::isPhysicalRegister(CopyDefReg)) |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1348 | return false; |
| 1349 | |
| 1350 | // If we do not know how to rewrite this definition, there is no point |
| 1351 | // in trying to kill this instruction. |
Bruno Cardoso Lopes | 27fd069 | 2015-08-19 18:53:36 +0000 | [diff] [blame] | 1352 | TargetInstrInfo::RegSubRegPair Def(CopyDefReg, CopyDefSubReg); |
| 1353 | if (!findNextSource(Def.Reg, Def.SubReg, RewriteMap)) |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1354 | return false; |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1355 | |
Bruno Cardoso Lopes | 27fd069 | 2015-08-19 18:53:36 +0000 | [diff] [blame] | 1356 | RewritePairs.push_back(Def); |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1357 | } |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1358 | |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1359 | // The change is possible for all defs, do it. |
Bruno Cardoso Lopes | 27fd069 | 2015-08-19 18:53:36 +0000 | [diff] [blame] | 1360 | for (const auto &Def : RewritePairs) { |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1361 | // Rewrite the "copy" in a way the register coalescer understands. |
Bruno Cardoso Lopes | 27fd069 | 2015-08-19 18:53:36 +0000 | [diff] [blame] | 1362 | MachineInstr *NewCopy = CpyRewriter->RewriteSource(Def, RewriteMap); |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1363 | assert(NewCopy && "Should be able to always generate a new copy"); |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1364 | LocalMIs.insert(NewCopy); |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1365 | } |
Bruno Cardoso Lopes | 27fd069 | 2015-08-19 18:53:36 +0000 | [diff] [blame] | 1366 | |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1367 | // MI is now dead. |
Quentin Colombet | cf71c63 | 2013-09-13 18:26:31 +0000 | [diff] [blame] | 1368 | MI->eraseFromParent(); |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1369 | ++NumUncoalescableCopies; |
Quentin Colombet | cf71c63 | 2013-09-13 18:26:31 +0000 | [diff] [blame] | 1370 | return true; |
| 1371 | } |
| 1372 | |
Sanjay Patel | 59309cc | 2015-12-29 18:14:06 +0000 | [diff] [blame] | 1373 | /// Check whether MI is a candidate for folding into a later instruction. |
| 1374 | /// We only fold loads to virtual registers and the virtual register defined |
| 1375 | /// has a single use. |
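| | ///
| | /// E.g. (illustrative, X86-like): a load such as
| | ///   %1 = MOV32rm %stack.0, ...
| | /// is recorded as a candidate when %1 is virtual, has no sub-register
| | /// definition, and has a single non-debug use.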
Lang Hames | 5dc14bd | 2014-04-02 22:59:58 +0000 | [diff] [blame] | 1376 | bool PeepholeOptimizer::isLoadFoldable( |
Sanjay Patel | b120ae9 | 2015-12-29 19:34:53 +0000 | [diff] [blame] | 1377 | MachineInstr *MI, SmallSet<unsigned, 16> &FoldAsLoadDefCandidates) { |
Manman Ren | ba8122c | 2012-08-02 19:37:32 +0000 | [diff] [blame] | 1378 | if (!MI->canFoldAsLoad() || !MI->mayLoad()) |
| 1379 | return false; |
| 1380 | const MCInstrDesc &MCID = MI->getDesc(); |
| 1381 | if (MCID.getNumDefs() != 1) |
| 1382 | return false; |
| 1383 | |
| 1384 | unsigned Reg = MI->getOperand(0).getReg(); |
Ekaterina Romanova | 8d62008 | 2014-03-13 18:47:12 +0000 | [diff] [blame] | 1385 | // To reduce compilation time, we check MRI->hasOneNonDBGUse when inserting |
Manman Ren | ba8122c | 2012-08-02 19:37:32 +0000 | [diff] [blame] | 1386 | // loads. It should be checked when processing uses of the load, since |
| 1387 | // uses can be removed during peephole. |
| 1388 | if (!MI->getOperand(0).getSubReg() && |
| 1389 | TargetRegisterInfo::isVirtualRegister(Reg) && |
Ekaterina Romanova | 8d62008 | 2014-03-13 18:47:12 +0000 | [diff] [blame] | 1390 | MRI->hasOneNonDBGUse(Reg)) { |
Lang Hames | 5dc14bd | 2014-04-02 22:59:58 +0000 | [diff] [blame] | 1391 | FoldAsLoadDefCandidates.insert(Reg); |
Manman Ren | ba8122c | 2012-08-02 19:37:32 +0000 | [diff] [blame] | 1392 | return true; |
Manman Ren | 5759d01 | 2012-08-02 00:56:42 +0000 | [diff] [blame] | 1393 | } |
| 1394 | return false; |
| 1395 | } |
| 1396 | |
Sanjay Patel | b120ae9 | 2015-12-29 19:34:53 +0000 | [diff] [blame] | 1397 | bool PeepholeOptimizer::isMoveImmediate( |
| 1398 | MachineInstr *MI, SmallSet<unsigned, 4> &ImmDefRegs, |
| 1399 | DenseMap<unsigned, MachineInstr *> &ImmDefMIs) { |
Evan Cheng | 6cc775f | 2011-06-28 19:10:37 +0000 | [diff] [blame] | 1400 | const MCInstrDesc &MCID = MI->getDesc(); |
Evan Cheng | 7f8e563 | 2011-12-07 07:15:52 +0000 | [diff] [blame] | 1401 | if (!MI->isMoveImmediate()) |
Evan Cheng | 7f8ab6e | 2010-11-17 20:13:28 +0000 | [diff] [blame] | 1402 | return false; |
Evan Cheng | 6cc775f | 2011-06-28 19:10:37 +0000 | [diff] [blame] | 1403 | if (MCID.getNumDefs() != 1) |
Evan Cheng | 7f8ab6e | 2010-11-17 20:13:28 +0000 | [diff] [blame] | 1404 | return false; |
| 1405 | unsigned Reg = MI->getOperand(0).getReg(); |
| 1406 | if (TargetRegisterInfo::isVirtualRegister(Reg)) { |
| 1407 | ImmDefMIs.insert(std::make_pair(Reg, MI)); |
| 1408 | ImmDefRegs.insert(Reg); |
| 1409 | return true; |
| 1410 | } |
Andrew Trick | 9e76199 | 2012-02-08 21:22:43 +0000 | [diff] [blame] | 1411 | |
Evan Cheng | 7f8ab6e | 2010-11-17 20:13:28 +0000 | [diff] [blame] | 1412 | return false; |
| 1413 | } |
| 1414 | |
Sanjay Patel | 59309cc | 2015-12-29 18:14:06 +0000 | [diff] [blame] | 1415 | /// Try folding register operands that are defined by move immediate |
| 1416 | /// instructions, i.e. a trivial constant folding optimization, if |
Evan Cheng | 7f8ab6e | 2010-11-17 20:13:28 +0000 | [diff] [blame] | 1417 | /// and only if the def and use are in the same BB. |
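| | ///
| | /// E.g. (illustrative pseudo opcodes):
| | ///   %1 = MOVi 42
| | ///   %2 = ADDrr %0, %1
| | /// may become %2 = ADDri %0, 42 when the target's TII->FoldImmediate hook
| | /// supports it.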
Sanjay Patel | b120ae9 | 2015-12-29 19:34:53 +0000 | [diff] [blame] | 1418 | bool PeepholeOptimizer::foldImmediate( |
| 1419 | MachineInstr *MI, MachineBasicBlock *MBB, SmallSet<unsigned, 4> &ImmDefRegs, |
| 1420 | DenseMap<unsigned, MachineInstr *> &ImmDefMIs) { |
Evan Cheng | 7f8ab6e | 2010-11-17 20:13:28 +0000 | [diff] [blame] | 1421 | for (unsigned i = 0, e = MI->getDesc().getNumOperands(); i != e; ++i) { |
| 1422 | MachineOperand &MO = MI->getOperand(i); |
| 1423 | if (!MO.isReg() || MO.isDef()) |
| 1424 | continue; |
Dan Gohman | dab313e | 2015-12-10 00:37:51 +0000 | [diff] [blame] | 1425 | // Ignore dead implicit defs. |
| 1426 | if (MO.isImplicit() && MO.isDead()) |
| 1427 | continue; |
Evan Cheng | 7f8ab6e | 2010-11-17 20:13:28 +0000 | [diff] [blame] | 1428 | unsigned Reg = MO.getReg(); |
Jakob Stoklund Olesen | 2fb5b31 | 2011-01-10 02:58:51 +0000 | [diff] [blame] | 1429 | if (!TargetRegisterInfo::isVirtualRegister(Reg)) |
Evan Cheng | 7f8ab6e | 2010-11-17 20:13:28 +0000 | [diff] [blame] | 1430 | continue; |
| 1431 | if (ImmDefRegs.count(Reg) == 0) |
| 1432 | continue; |
| 1433 | DenseMap<unsigned, MachineInstr*>::iterator II = ImmDefMIs.find(Reg); |
JF Bastien | 1ac6994 | 2015-12-03 23:43:56 +0000 | [diff] [blame] | 1434 | assert(II != ImmDefMIs.end() && "couldn't find immediate definition"); |
Duncan P. N. Exon Smith | 9cfc75c | 2016-06-30 00:01:54 +0000 | [diff] [blame] | 1435 | if (TII->FoldImmediate(*MI, *II->second, Reg, MRI)) { |
Evan Cheng | 7f8ab6e | 2010-11-17 20:13:28 +0000 | [diff] [blame] | 1436 | ++NumImmFold; |
| 1437 | return true; |
| 1438 | } |
| 1439 | } |
| 1440 | return false; |
| 1441 | } |
| 1442 | |
Matt Arsenault | 10aa807 | 2015-09-25 20:22:12 +0000 | [diff] [blame] | 1443 | // FIXME: This is very simple and misses some cases which should be handled when |
| 1444 | // motivating examples are found. |
| 1445 | // |
| 1446 | // The copy rewriting logic should look at uses as well as defs and be able to |
| 1447 | // eliminate copies across blocks. |
| 1448 | // |
| 1449 | // Later copies that are subregister extracts will also not be eliminated since |
| 1450 | // only the first copy is considered. |
| 1451 | // |
| 1452 | // e.g. |
Francis Visoiu Mistrih | 93ef145 | 2017-11-30 12:12:19 +0000 | [diff] [blame] | 1453 | // %1 = COPY %0 |
| 1454 | // %2 = COPY %0:sub1 |
Matt Arsenault | 10aa807 | 2015-09-25 20:22:12 +0000 | [diff] [blame] | 1455 | // |
Francis Visoiu Mistrih | 93ef145 | 2017-11-30 12:12:19 +0000 | [diff] [blame] | 1456 | // Should replace %2 uses with %1:sub1 |
Matt Arsenault | 10aa807 | 2015-09-25 20:22:12 +0000 | [diff] [blame] | 1457 | bool PeepholeOptimizer::foldRedundantCopy( |
Sanjay Patel | b120ae9 | 2015-12-29 19:34:53 +0000 | [diff] [blame] | 1458 | MachineInstr *MI, SmallSet<unsigned, 4> &CopySrcRegs, |
JF Bastien | 1ac6994 | 2015-12-03 23:43:56 +0000 | [diff] [blame] | 1459 | DenseMap<unsigned, MachineInstr *> &CopyMIs) { |
| 1460 | assert(MI->isCopy() && "expected a COPY machine instruction"); |
Matt Arsenault | 10aa807 | 2015-09-25 20:22:12 +0000 | [diff] [blame] | 1461 | |
| 1462 | unsigned SrcReg = MI->getOperand(1).getReg(); |
| 1463 | if (!TargetRegisterInfo::isVirtualRegister(SrcReg)) |
| 1464 | return false; |
| 1465 | |
| 1466 | unsigned DstReg = MI->getOperand(0).getReg(); |
| 1467 | if (!TargetRegisterInfo::isVirtualRegister(DstReg)) |
| 1468 | return false; |
| 1469 | |
| 1470 | if (CopySrcRegs.insert(SrcReg).second) { |
| 1471 | // First copy of this reg seen. |
| 1472 | CopyMIs.insert(std::make_pair(SrcReg, MI)); |
| 1473 | return false; |
| 1474 | } |
| 1475 | |
| 1476 | MachineInstr *PrevCopy = CopyMIs.find(SrcReg)->second; |
| 1477 | |
| 1478 | unsigned SrcSubReg = MI->getOperand(1).getSubReg(); |
| 1479 | unsigned PrevSrcSubReg = PrevCopy->getOperand(1).getSubReg(); |
| 1480 | |
| 1481 | // Can't replace different subregister extracts. |
| 1482 | if (SrcSubReg != PrevSrcSubReg) |
| 1483 | return false; |
| 1484 | |
| 1485 | unsigned PrevDstReg = PrevCopy->getOperand(0).getReg(); |
| 1486 | |
| 1487 | // Only replace if the copy register class is the same. |
| 1488 | // |
| 1489 | // TODO: If we have multiple copies to different register classes, we may want |
| 1490 | // to track multiple copies of the same source register. |
| 1491 | if (MRI->getRegClass(DstReg) != MRI->getRegClass(PrevDstReg)) |
| 1492 | return false; |
| 1493 | |
| 1494 | MRI->replaceRegWith(DstReg, PrevDstReg); |
| 1495 | |
| 1496 | // Lifetime of the previous copy has been extended. |
| 1497 | MRI->clearKillFlags(PrevDstReg); |
| 1498 | return true; |
| 1499 | } |
| 1500 | |
JF Bastien | 1ac6994 | 2015-12-03 23:43:56 +0000 | [diff] [blame] | 1501 | bool PeepholeOptimizer::isNAPhysCopy(unsigned Reg) { |
| 1502 | return TargetRegisterInfo::isPhysicalRegister(Reg) && |
| 1503 | !MRI->isAllocatable(Reg); |
| 1504 | } |
| 1505 | |
| 1506 | bool PeepholeOptimizer::foldRedundantNAPhysCopy( |
| 1507 | MachineInstr *MI, DenseMap<unsigned, MachineInstr *> &NAPhysToVirtMIs) { |
| 1508 | assert(MI->isCopy() && "expected a COPY machine instruction"); |
| 1509 | |
| 1510 | if (DisableNAPhysCopyOpt) |
| 1511 | return false; |
| 1512 | |
| 1513 | unsigned DstReg = MI->getOperand(0).getReg(); |
| 1514 | unsigned SrcReg = MI->getOperand(1).getReg(); |
| 1515 | if (isNAPhysCopy(SrcReg) && TargetRegisterInfo::isVirtualRegister(DstReg)) { |
Francis Visoiu Mistrih | 9d7bb0c | 2017-11-28 17:15:09 +0000 | [diff] [blame] | 1516 | // %vreg = COPY %physreg |
JF Bastien | 1ac6994 | 2015-12-03 23:43:56 +0000 | [diff] [blame] | 1517 | // Avoid using a data structure which can track multiple live non-allocatable
| 1518 | // phys->virt copies since LLVM doesn't seem to do this. |
| 1519 | NAPhysToVirtMIs.insert({SrcReg, MI}); |
| 1520 | return false; |
| 1521 | } |
| 1522 | |
| 1523 | if (!(TargetRegisterInfo::isVirtualRegister(SrcReg) && isNAPhysCopy(DstReg))) |
| 1524 | return false; |
| 1525 | |
Francis Visoiu Mistrih | 9d7bb0c | 2017-11-28 17:15:09 +0000 | [diff] [blame] | 1526 | // %physreg = COPY %vreg |
JF Bastien | 1ac6994 | 2015-12-03 23:43:56 +0000 | [diff] [blame] | 1527 | auto PrevCopy = NAPhysToVirtMIs.find(DstReg); |
| 1528 | if (PrevCopy == NAPhysToVirtMIs.end()) { |
| 1529 | // We can't remove the copy: there was an intervening clobber of the |
| 1530 | // non-allocatable physical register after the copy to virtual. |
| 1531 | DEBUG(dbgs() << "NAPhysCopy: intervening clobber forbids erasing " << *MI |
| 1532 | << '\n'); |
| 1533 | return false; |
| 1534 | } |
| 1535 | |
| 1536 | unsigned PrevDstReg = PrevCopy->second->getOperand(0).getReg(); |
| 1537 | if (PrevDstReg == SrcReg) { |
| 1538 | // Remove the virt->phys copy: we saw the virtual register definition, and |
| 1539 | // the non-allocatable physical register's state hasn't changed since then. |
| 1540 | DEBUG(dbgs() << "NAPhysCopy: erasing " << *MI << '\n'); |
| 1541 | ++NumNAPhysCopies; |
| 1542 | return true; |
| 1543 | } |
| 1544 | |
| 1545 | // Potential missed optimization opportunity: we saw a different virtual |
| 1546 | // register get a copy of the non-allocatable physical register, and we only |
| 1547 | // track one such copy. Avoid getting confused by this new non-allocatable |
| 1548 | // physical register definition, and remove it from the tracked copies. |
| 1549 | DEBUG(dbgs() << "NAPhysCopy: missed opportunity " << *MI << '\n'); |
| 1550 | NAPhysToVirtMIs.erase(PrevCopy); |
| 1551 | return false; |
| 1552 | } |
| 1553 | |
Taewook Oh | 0e35ea3 | 2017-06-29 23:11:24 +0000 | [diff] [blame] | 1554 | /// \brief Returns true if \p MO is a virtual register operand.
| 1555 | static bool isVirtualRegisterOperand(MachineOperand &MO) { |
| 1556 | if (!MO.isReg()) |
| 1557 | return false; |
| 1558 | return TargetRegisterInfo::isVirtualRegister(MO.getReg()); |
| 1559 | } |
| 1560 | |
| 1561 | bool PeepholeOptimizer::findTargetRecurrence( |
| 1562 | unsigned Reg, const SmallSet<unsigned, 2> &TargetRegs, |
| 1563 | RecurrenceCycle &RC) { |
| 1564 | // Recurrence found if Reg is in TargetRegs. |
| 1565 | if (TargetRegs.count(Reg)) |
| 1566 | return true; |
| 1567 | |
| 1568 | // TODO: Currently, we only allow the last instruction of the recurrence
| 1569 | // cycle (the instruction that feeds the PHI instruction) to have more than
| 1570 | // one use, to guarantee that commuting operands does not tie registers
| 1571 | // with overlapping live ranges. Once we have actual live range info for
| 1572 | // each register, this constraint can be relaxed.
| 1573 | if (!MRI->hasOneNonDBGUse(Reg)) |
| 1574 | return false; |
| 1575 | |
| 1576 | // Give up if the recurrence chain is longer than the limit.
| 1577 | if (RC.size() >= MaxRecurrenceChain) |
| 1578 | return false; |
| 1579 | |
| 1580 | MachineInstr &MI = *(MRI->use_instr_nodbg_begin(Reg)); |
| 1581 | unsigned Idx = MI.findRegisterUseOperandIdx(Reg); |
| 1582 | |
| 1583 | // Only interested in recurrences whose instructions have only one def, which |
| 1584 | // is a virtual register. |
| 1585 | if (MI.getDesc().getNumDefs() != 1) |
| 1586 | return false; |
| 1587 | |
| 1588 | MachineOperand &DefOp = MI.getOperand(0); |
| 1589 | if (!isVirtualRegisterOperand(DefOp)) |
| 1590 | return false; |
| 1591 | |
| 1592 | // Check if the def operand of MI is tied to any use operand. We are only
| 1593 | // interested in the case where all the instructions in the recurrence chain
| 1594 | // have their def operand tied to one of their use operands.
| 1595 | unsigned TiedUseIdx; |
| 1596 | if (!MI.isRegTiedToUseOperand(0, &TiedUseIdx)) |
| 1597 | return false; |
| 1598 | |
| 1599 | if (Idx == TiedUseIdx) { |
| 1600 | RC.push_back(RecurrenceInstr(&MI)); |
| 1601 | return findTargetRecurrence(DefOp.getReg(), TargetRegs, RC); |
| 1602 | } else { |
| 1603 | // If Idx is not TiedUseIdx, check if Idx is commutable with TiedUseIdx. |
| 1604 | unsigned CommIdx = TargetInstrInfo::CommuteAnyOperandIndex; |
| 1605 | if (TII->findCommutedOpIndices(MI, Idx, CommIdx) && CommIdx == TiedUseIdx) { |
| 1606 | RC.push_back(RecurrenceInstr(&MI, Idx, CommIdx)); |
| 1607 | return findTargetRecurrence(DefOp.getReg(), TargetRegs, RC); |
| 1608 | } |
| 1609 | } |
| 1610 | |
| 1611 | return false; |
| 1612 | } |
| 1613 | |
| 1614 | /// \brief Phi instructions will eventually be lowered to copy instructions.
| 1615 | /// If the phi is in a loop header, a recurrence may be formed around the source
| 1616 | /// and destination of the phi. In such a case, commuting the operands of the
| 1617 | /// instructions in the recurrence may enable coalescing of the copy instruction
| 1618 | /// generated from the phi. For example, consider the following recurrence:
| 1619 | /// |
| 1620 | /// LoopHeader: |
Francis Visoiu Mistrih | 93ef145 | 2017-11-30 12:12:19 +0000 | [diff] [blame] | 1621 | /// %1 = phi(%0, %100) |
Taewook Oh | 0e35ea3 | 2017-06-29 23:11:24 +0000 | [diff] [blame] | 1622 | /// LoopLatch: |
Francis Visoiu Mistrih | 93ef145 | 2017-11-30 12:12:19 +0000 | [diff] [blame] | 1623 | /// %0<def, tied1> = ADD %2<def, tied0>, %1 |
Taewook Oh | 0e35ea3 | 2017-06-29 23:11:24 +0000 | [diff] [blame] | 1624 | /// |
Francis Visoiu Mistrih | 93ef145 | 2017-11-30 12:12:19 +0000 | [diff] [blame] | 1625 | /// Here, the fact that %0 and %2 are in the same tied operands set makes
| 1626 | /// coalescing the copy instruction generated from the phi in
| 1627 | /// LoopHeader (i.e. %1 = COPY %0) impossible, because %1 and
| 1628 | /// %2 have overlapping live ranges. This introduces an additional move
| 1629 | /// instruction into the final assembly. However, if we commute %2 and
| 1630 | /// %1 of the ADD instruction, the redundant move instruction can be
Taewook Oh | 0e35ea3 | 2017-06-29 23:11:24 +0000 | [diff] [blame] | 1631 | /// avoided.
| 1632 | bool PeepholeOptimizer::optimizeRecurrence(MachineInstr &PHI) { |
| 1633 | SmallSet<unsigned, 2> TargetRegs; |
| 1634 | for (unsigned Idx = 1; Idx < PHI.getNumOperands(); Idx += 2) { |
| 1635 | MachineOperand &MO = PHI.getOperand(Idx); |
| 1636 | assert(isVirtualRegisterOperand(MO) && "Invalid PHI instruction"); |
| 1637 | TargetRegs.insert(MO.getReg()); |
| 1638 | } |
| 1639 | |
| 1640 | bool Changed = false; |
| 1641 | RecurrenceCycle RC; |
| 1642 | if (findTargetRecurrence(PHI.getOperand(0).getReg(), TargetRegs, RC)) { |
| 1643 | // Commutes operands of instructions in RC if necessary so that the copy to |
| 1644 | // be generated from PHI can be coalesced. |
| 1645 | DEBUG(dbgs() << "Optimize recurrence chain from " << PHI); |
| 1646 | for (auto &RI : RC) { |
| 1647 | DEBUG(dbgs() << "\tInst: " << *(RI.getMI())); |
| 1648 | auto CP = RI.getCommutePair(); |
| 1649 | if (CP) { |
| 1650 | Changed = true; |
| 1651 | TII->commuteInstruction(*(RI.getMI()), false, (*CP).first, |
| 1652 | (*CP).second); |
| 1653 | DEBUG(dbgs() << "\t\tCommuted: " << *(RI.getMI())); |
| 1654 | } |
| 1655 | } |
| 1656 | } |
| 1657 | |
| 1658 | return Changed; |
| 1659 | } |
| 1660 | |
Eric Christopher | 2181fb2 | 2014-10-15 21:06:25 +0000 | [diff] [blame] | 1661 | bool PeepholeOptimizer::runOnMachineFunction(MachineFunction &MF) { |
Matthias Braun | f1caa28 | 2017-12-15 22:22:58 +0000 | [diff] [blame] | 1662 | if (skipFunction(MF.getFunction())) |
Paul Robinson | 7c99ec5 | 2014-03-31 17:43:35 +0000 | [diff] [blame] | 1663 | return false; |
| 1664 | |
Craig Topper | 588ceec | 2012-12-17 03:56:00 +0000 | [diff] [blame] | 1665 | DEBUG(dbgs() << "********** PEEPHOLE OPTIMIZER **********\n"); |
Eric Christopher | 2181fb2 | 2014-10-15 21:06:25 +0000 | [diff] [blame] | 1666 | DEBUG(dbgs() << "********** Function: " << MF.getName() << '\n'); |
Craig Topper | 588ceec | 2012-12-17 03:56:00 +0000 | [diff] [blame] | 1667 | |
Evan Cheng | 2ce016c | 2010-11-15 21:20:45 +0000 | [diff] [blame] | 1668 | if (DisablePeephole) |
| 1669 | return false; |
Andrew Trick | 9e76199 | 2012-02-08 21:22:43 +0000 | [diff] [blame] | 1670 | |
Eric Christopher | 2181fb2 | 2014-10-15 21:06:25 +0000 | [diff] [blame] | 1671 | TII = MF.getSubtarget().getInstrInfo(); |
| 1672 | TRI = MF.getSubtarget().getRegisterInfo(); |
| 1673 | MRI = &MF.getRegInfo(); |
Craig Topper | c0196b1 | 2014-04-14 00:51:57 +0000 | [diff] [blame] | 1674 | DT = Aggressive ? &getAnalysis<MachineDominatorTree>() : nullptr; |
Taewook Oh | 0e35ea3 | 2017-06-29 23:11:24 +0000 | [diff] [blame] | 1675 | MLI = &getAnalysis<MachineLoopInfo>(); |
Bill Wendling | ca67835 | 2010-08-09 23:59:04 +0000 | [diff] [blame] | 1676 | |
| 1677 | bool Changed = false; |
| 1678 | |
Sanjay Patel | faeee6f | 2015-12-29 18:30:09 +0000 | [diff] [blame] | 1679 | for (MachineBasicBlock &MBB : MF) { |
Evan Cheng | 7f8ab6e | 2010-11-17 20:13:28 +0000 | [diff] [blame] | 1680 | bool SeenMoveImm = false; |
Mehdi Amini | 22e5974 | 2015-01-13 07:07:13 +0000 | [diff] [blame] | 1681 | |
| 1682 | // During this forward scan, at some point it needs to answer the question
| 1683 | // "given a pointer to an MI in the current BB, is it located before or
| 1684 | // after the current instruction?".
| 1685 | // To answer this, the following set keeps track of the MIs already seen
| 1686 | // during the scan; if an MI is not in the set, it is assumed to be located
| 1687 | // after. Newly created MIs have to be inserted into the set as well.
Hans Wennborg | 941a570 | 2014-08-11 02:50:43 +0000 | [diff] [blame] | 1688 | SmallPtrSet<MachineInstr*, 16> LocalMIs; |
Lang Hames | 5dc14bd | 2014-04-02 22:59:58 +0000 | [diff] [blame] | 1689 | SmallSet<unsigned, 4> ImmDefRegs; |
| 1690 | DenseMap<unsigned, MachineInstr*> ImmDefMIs; |
| 1691 | SmallSet<unsigned, 16> FoldAsLoadDefCandidates; |
Bill Wendling | ca67835 | 2010-08-09 23:59:04 +0000 | [diff] [blame] | 1692 | |
JF Bastien | 1ac6994 | 2015-12-03 23:43:56 +0000 | [diff] [blame] | 1693 | // Track when a non-allocatable physical register is copied to a virtual |
| 1694 | // register so that useless moves can be removed. |
| 1695 | // |
Francis Visoiu Mistrih | 9d7bb0c | 2017-11-28 17:15:09 +0000 | [diff] [blame] | 1696 | // %physreg is the map index; MI is the last valid `%vreg = COPY %physreg` |
| 1697 | // without any intervening re-definition of %physreg. |
JF Bastien | 1ac6994 | 2015-12-03 23:43:56 +0000 | [diff] [blame] | 1698 | DenseMap<unsigned, MachineInstr *> NAPhysToVirtMIs; |
| 1699 | |
Matt Arsenault | 10aa807 | 2015-09-25 20:22:12 +0000 | [diff] [blame] | 1700 | // Set of virtual registers that are copied from. |
| 1701 | SmallSet<unsigned, 4> CopySrcRegs; |
| 1702 | DenseMap<unsigned, MachineInstr *> CopySrcMIs; |
| 1703 | |
Taewook Oh | 0e35ea3 | 2017-06-29 23:11:24 +0000 | [diff] [blame] | 1704 | bool IsLoopHeader = MLI->isLoopHeader(&MBB); |
| 1705 | |
Sanjay Patel | faeee6f | 2015-12-29 18:30:09 +0000 | [diff] [blame] | 1706 | for (MachineBasicBlock::iterator MII = MBB.begin(), MIE = MBB.end(); |
| 1707 | MII != MIE; ) { |
Evan Cheng | 9bf3f8e | 2011-02-14 21:50:37 +0000 | [diff] [blame] | 1708 | MachineInstr *MI = &*MII; |
Jakob Stoklund Olesen | 714f595 | 2012-08-17 14:38:59 +0000 | [diff] [blame] | 1709 | // We may be erasing MI below, increment MII now. |
| 1710 | ++MII; |
Evan Cheng | 2ce016c | 2010-11-15 21:20:45 +0000 | [diff] [blame] | 1711 | LocalMIs.insert(MI); |
Bill Wendling | ca67835 | 2010-08-09 23:59:04 +0000 | [diff] [blame] | 1712 | |
Ekaterina Romanova | 8d62008 | 2014-03-13 18:47:12 +0000 | [diff] [blame] | 1713 | // Skip debug values. They should not affect this peephole optimization. |
| 1714 | if (MI->isDebugValue()) |
| 1715 | continue; |
| 1716 | |
Taewook Oh | 0e35ea3 | 2017-06-29 23:11:24 +0000 | [diff] [blame] | 1717 | if (MI->isPosition()) |
Evan Cheng | 2ce016c | 2010-11-15 21:20:45 +0000 | [diff] [blame] | 1718 | continue; |
| 1719 | |
Taewook Oh | 0e35ea3 | 2017-06-29 23:11:24 +0000 | [diff] [blame] | 1720 | if (IsLoopHeader && MI->isPHI()) { |
| 1721 | if (optimizeRecurrence(*MI)) { |
| 1722 | Changed = true; |
| 1723 | continue; |
| 1724 | } |
| 1725 | } |
| 1726 | |
JF Bastien | 1ac6994 | 2015-12-03 23:43:56 +0000 | [diff] [blame] | 1727 | if (!MI->isCopy()) { |
| 1728 | for (const auto &Op : MI->operands()) { |
| 1729 | // Visit all operands: definitions can be implicit or explicit. |
| 1730 | if (Op.isReg()) { |
| 1731 | unsigned Reg = Op.getReg(); |
| 1732 | if (Op.isDef() && isNAPhysCopy(Reg)) { |
| 1733 | const auto &Def = NAPhysToVirtMIs.find(Reg); |
| 1734 | if (Def != NAPhysToVirtMIs.end()) { |
| 1735 | // A new definition of the non-allocatable physical register |
| 1736 | // invalidates previous copies. |
| 1737 | DEBUG(dbgs() << "NAPhysCopy: invalidating because of " << *MI |
| 1738 | << '\n'); |
| 1739 | NAPhysToVirtMIs.erase(Def); |
| 1740 | } |
| 1741 | } |
| 1742 | } else if (Op.isRegMask()) { |
| 1743 | const uint32_t *RegMask = Op.getRegMask(); |
| 1744 | for (auto &RegMI : NAPhysToVirtMIs) { |
| 1745 | unsigned Def = RegMI.first; |
| 1746 | if (MachineOperand::clobbersPhysReg(RegMask, Def)) { |
| 1747 | DEBUG(dbgs() << "NAPhysCopy: invalidating because of " << *MI |
| 1748 | << '\n'); |
| 1749 | NAPhysToVirtMIs.erase(Def); |
| 1750 | } |
| 1751 | } |
| 1752 | } |
| 1753 | } |
| 1754 | } |
| 1755 | |
| 1756 | if (MI->isImplicitDef() || MI->isKill()) |
| 1757 | continue; |
| 1758 | |
| 1759 | if (MI->isInlineAsm() || MI->hasUnmodeledSideEffects()) { |
| 1760 | // Blow away all non-allocatable physical register knowledge since we
| 1761 | // don't know what's correct anymore. |
| 1762 | // |
| 1763 | // FIXME: handle explicit asm clobbers. |
| 1764 | DEBUG(dbgs() << "NAPhysCopy: blowing away all info due to " << *MI |
| 1765 | << '\n'); |
| 1766 | NAPhysToVirtMIs.clear(); |
JF Bastien | 1ac6994 | 2015-12-03 23:43:56 +0000 | [diff] [blame] | 1767 | } |
| 1768 | |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1769 | if ((isUncoalescableCopy(*MI) && |
| 1770 | optimizeUncoalescableCopy(MI, LocalMIs)) || |
Sanjay Patel | faeee6f | 2015-12-29 18:30:09 +0000 | [diff] [blame] | 1771 | (MI->isCompare() && optimizeCmpInstr(MI, &MBB)) || |
Mehdi Amini | 22e5974 | 2015-01-13 07:07:13 +0000 | [diff] [blame] | 1772 | (MI->isSelect() && optimizeSelect(MI, LocalMIs))) { |
Jakob Stoklund Olesen | 2382d32 | 2012-08-16 23:11:47 +0000 | [diff] [blame] | 1773 | // MI is deleted. |
| 1774 | LocalMIs.erase(MI); |
| 1775 | Changed = true; |
Jakob Stoklund Olesen | 2382d32 | 2012-08-16 23:11:47 +0000 | [diff] [blame] | 1776 | continue; |
Evan Cheng | 9bf3f8e | 2011-02-14 21:50:37 +0000 | [diff] [blame] | 1777 | } |
| 1778 | |
Gerolf Hoflehner | a4c96d0 | 2014-10-14 23:07:53 +0000 | [diff] [blame] | 1779 | if (MI->isConditionalBranch() && optimizeCondBranch(MI)) { |
| 1780 | Changed = true; |
| 1781 | continue; |
| 1782 | } |
| 1783 | |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1784 | if (isCoalescableCopy(*MI) && optimizeCoalescableCopy(MI)) { |
| 1785 | // MI is just rewritten. |
| 1786 | Changed = true; |
| 1787 | continue; |
| 1788 | } |
| 1789 | |
JF Bastien | 1ac6994 | 2015-12-03 23:43:56 +0000 | [diff] [blame] | 1790 | if (MI->isCopy() && |
| 1791 | (foldRedundantCopy(MI, CopySrcRegs, CopySrcMIs) || |
| 1792 | foldRedundantNAPhysCopy(MI, NAPhysToVirtMIs))) { |
Matt Arsenault | 10aa807 | 2015-09-25 20:22:12 +0000 | [diff] [blame] | 1793 | LocalMIs.erase(MI); |
| 1794 | MI->eraseFromParent(); |
| 1795 | Changed = true; |
| 1796 | continue; |
| 1797 | } |
| 1798 | |
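// Illustrative sketch of the immediate folding tried below (the opcodes are
// hypothetical; the target hooks decide the actual rewrites):
//   %r = MOVi 42              <-- recorded by isMoveImmediate
//   %d = ADDrr %x, %r         --> may become %d = ADDri %x, 42 in
//                                 foldImmediate.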
Evan Cheng | 9bf3f8e | 2011-02-14 21:50:37 +0000 | [diff] [blame] | 1799 | if (isMoveImmediate(MI, ImmDefRegs, ImmDefMIs)) { |
Evan Cheng | 7f8ab6e | 2010-11-17 20:13:28 +0000 | [diff] [blame] | 1800 | SeenMoveImm = true; |
Bill Wendling | ca67835 | 2010-08-09 23:59:04 +0000 | [diff] [blame] | 1801 | } else { |
Sanjay Patel | faeee6f | 2015-12-29 18:30:09 +0000 | [diff] [blame] | 1802 | Changed |= optimizeExtInstr(MI, &MBB, LocalMIs); |
Rafael Espindola | 048405f | 2012-10-15 18:21:07 +0000 | [diff] [blame] | 1803 | // optimizeExtInstr might have created new instructions after MI |
| 1804 | // and before the already incremented MII. Adjust MII so that the |
| 1805 | // next iteration sees the new instructions. |
| 1806 | MII = MI; |
| 1807 | ++MII; |
Evan Cheng | 7f8ab6e | 2010-11-17 20:13:28 +0000 | [diff] [blame] | 1808 | if (SeenMoveImm) |
Sanjay Patel | faeee6f | 2015-12-29 18:30:09 +0000 | [diff] [blame] | 1809 | Changed |= foldImmediate(MI, &MBB, ImmDefRegs, ImmDefMIs); |
Bill Wendling | ca67835 | 2010-08-09 23:59:04 +0000 | [diff] [blame] | 1810 | } |
Evan Cheng | 98196b4 | 2011-02-15 05:00:24 +0000 | [diff] [blame] | 1811 | |
Manman Ren | 5759d01 | 2012-08-02 00:56:42 +0000 | [diff] [blame] | 1812 | // Check whether MI is a load candidate for folding into a later |
| 1813 | // instruction. If MI is not a candidate, check whether we can fold an |
| 1814 | // earlier load into MI. |
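// Illustrative sketch of the folding attempted here (X86-flavoured opcodes
// are only for illustration):
//   %v = MOV32rm <mem>        <-- single-use load, kept as a candidate
//   %d = ADD32rr %x, %v       --> %d = ADD32rm %x, <mem>, and the load
//                                 goes away.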
Lang Hames | 5dc14bd | 2014-04-02 22:59:58 +0000 | [diff] [blame] | 1815 | if (!isLoadFoldable(MI, FoldAsLoadDefCandidates) && |
| 1816 | !FoldAsLoadDefCandidates.empty()) { |
Philip Reames | 1f1bbac | 2016-12-13 01:38:41 +0000 | [diff] [blame] | 1817 | |
| 1818 | // We visit each operand even after successfully folding a previous |
| 1819 | // one. This allows us to fold multiple loads into a single |
| 1820 | // instruction. We do assume that optimizeLoadInstr doesn't insert |
| 1821 | // foldable uses earlier in the argument list. Since we don't restart |
| 1822 | // iteration, we'd miss such cases. |
Lang Hames | 5dc14bd | 2014-04-02 22:59:58 +0000 | [diff] [blame] | 1823 | const MCInstrDesc &MIDesc = MI->getDesc(); |
Philip Reames | 1f1bbac | 2016-12-13 01:38:41 +0000 | [diff] [blame] | 1824 | for (unsigned i = MIDesc.getNumDefs(); i != MI->getNumOperands(); |
Lang Hames | 5dc14bd | 2014-04-02 22:59:58 +0000 | [diff] [blame] | 1825 | ++i) { |
| 1826 | const MachineOperand &MOp = MI->getOperand(i); |
| 1827 | if (!MOp.isReg()) |
| 1828 | continue; |
Lang Hames | 3c0dc2a | 2014-04-03 05:03:20 +0000 | [diff] [blame] | 1829 | unsigned FoldAsLoadDefReg = MOp.getReg(); |
| 1830 | if (FoldAsLoadDefCandidates.count(FoldAsLoadDefReg)) { |
| 1831 |             // We need to fold the load after optimizeCmpInstr, since |
| 1832 | // optimizeCmpInstr can enable folding by converting SUB to CMP. |
| 1833 | // Save FoldAsLoadDefReg because optimizeLoadInstr() resets it and |
| 1834 | // we need it for markUsesInDebugValueAsUndef(). |
| 1835 | unsigned FoldedReg = FoldAsLoadDefReg; |
Craig Topper | c0196b1 | 2014-04-14 00:51:57 +0000 | [diff] [blame] | 1836 | MachineInstr *DefMI = nullptr; |
Duncan P. N. Exon Smith | 9cfc75c | 2016-06-30 00:01:54 +0000 | [diff] [blame] | 1837 | if (MachineInstr *FoldMI = |
| 1838 | TII->optimizeLoadInstr(*MI, MRI, FoldAsLoadDefReg, DefMI)) { |
Lang Hames | 5dc14bd | 2014-04-02 22:59:58 +0000 | [diff] [blame] | 1839 | // Update LocalMIs since we replaced MI with FoldMI and deleted |
| 1840 | // DefMI. |
| 1841 | DEBUG(dbgs() << "Replacing: " << *MI); |
| 1842 | DEBUG(dbgs() << " With: " << *FoldMI); |
| 1843 | LocalMIs.erase(MI); |
| 1844 | LocalMIs.erase(DefMI); |
| 1845 | LocalMIs.insert(FoldMI); |
| 1846 | MI->eraseFromParent(); |
| 1847 | DefMI->eraseFromParent(); |
Lang Hames | 3c0dc2a | 2014-04-03 05:03:20 +0000 | [diff] [blame] | 1848 | MRI->markUsesInDebugValueAsUndef(FoldedReg); |
| 1849 | FoldAsLoadDefCandidates.erase(FoldedReg); |
Lang Hames | 5dc14bd | 2014-04-02 22:59:58 +0000 | [diff] [blame] | 1850 | ++NumLoadFold; |
Taewook Oh | 0e35ea3 | 2017-06-29 23:11:24 +0000 | [diff] [blame] | 1851 | |
Philip Reames | 1f1bbac | 2016-12-13 01:38:41 +0000 | [diff] [blame] | 1852 | // MI is replaced with FoldMI, so we can continue trying to fold more loads into it. |
Lang Hames | 5dc14bd | 2014-04-02 22:59:58 +0000 | [diff] [blame] | 1853 | Changed = true; |
Philip Reames | 1f1bbac | 2016-12-13 01:38:41 +0000 | [diff] [blame] | 1854 | MI = FoldMI; |
Lang Hames | 5dc14bd | 2014-04-02 22:59:58 +0000 | [diff] [blame] | 1855 | } |
| 1856 | } |
Manman Ren | 5759d01 | 2012-08-02 00:56:42 +0000 | [diff] [blame] | 1857 | } |
| 1858 | } |
Taewook Oh | 0e35ea3 | 2017-06-29 23:11:24 +0000 | [diff] [blame] | 1859 | |
Philip Reames | 1f1bbac | 2016-12-13 01:38:41 +0000 | [diff] [blame] | 1860 | // If we run into an instruction we can't fold across, discard |
| 1861 | // the load candidates. Note: We might be able to fold *into* this |
| 1862 | // instruction, so this needs to be after the folding logic. |
| 1863 | if (MI->isLoadFoldBarrier()) { |
| 1864 | DEBUG(dbgs() << "Encountered load fold barrier on " << *MI << "\n"); |
| 1865 | FoldAsLoadDefCandidates.clear(); |
| 1866 | } |
Bill Wendling | ca67835 | 2010-08-09 23:59:04 +0000 | [diff] [blame] | 1867 | } |
| 1868 | } |
| 1869 | |
| 1870 | return Changed; |
| 1871 | } |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1872 | |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1873 | ValueTrackerResult ValueTracker::getNextSourceFromCopy() { |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1874 | assert(Def->isCopy() && "Invalid definition"); |
| 1875 | // Copy instructions are supposed to be: Def = Src. |
| 1876 | // If someone breaks this assumption, bad things will happen everywhere. |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1877 | assert(Def->getNumOperands() == 2 && "Invalid number of operands"); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1878 | |
| 1879 | if (Def->getOperand(DefIdx).getSubReg() != DefSubReg) |
| 1880 | // If we look for a different subreg, it means we want a subreg of the src. |
Matt Arsenault | 3099156 | 2015-09-09 00:38:33 +0000 | [diff] [blame] | 1881 | // Bail out, as we do not support composing subregs yet. |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1882 | return ValueTrackerResult(); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1883 | // Otherwise, we want the whole source. |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1884 | const MachineOperand &Src = Def->getOperand(1); |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1885 | return ValueTrackerResult(Src.getReg(), Src.getSubReg()); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1886 | } |
| 1887 | |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1888 | ValueTrackerResult ValueTracker::getNextSourceFromBitcast() { |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1889 | assert(Def->isBitcast() && "Invalid definition"); |
| 1890 | |
| 1891 | // Bail if there are effects that a plain copy will not expose. |
| 1892 | if (Def->hasUnmodeledSideEffects()) |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1893 | return ValueTrackerResult(); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1894 | |
| 1895 | // Bitcasts with more than one def are not supported. |
| 1896 | if (Def->getDesc().getNumDefs() != 1) |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1897 | return ValueTrackerResult(); |
Matthias Braun | ba7d95d | 2017-01-09 21:38:17 +0000 | [diff] [blame] | 1898 | const MachineOperand DefOp = Def->getOperand(DefIdx); |
| 1899 | if (DefOp.getSubReg() != DefSubReg) |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1900 | // If we look for a different subreg, it means we want a subreg of the src. |
Matt Arsenault | 3099156 | 2015-09-09 00:38:33 +0000 | [diff] [blame] | 1901 | // Bail out, as we do not support composing subregs yet. |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1902 | return ValueTrackerResult(); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1903 | |
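// We are looking at something like (illustrative):
//   Def = BITCAST Src [possibly dead implicit defs]
// Scan the remaining operands to find the unique register source; more
// than one register source makes us bail out.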
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1904 | unsigned SrcIdx = Def->getNumOperands(); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1905 | for (unsigned OpIdx = DefIdx + 1, EndOpIdx = SrcIdx; OpIdx != EndOpIdx; |
| 1906 | ++OpIdx) { |
| 1907 | const MachineOperand &MO = Def->getOperand(OpIdx); |
| 1908 | if (!MO.isReg() || !MO.getReg()) |
| 1909 | continue; |
Dan Gohman | dab313e | 2015-12-10 00:37:51 +0000 | [diff] [blame] | 1910 | // Ignore dead implicit defs. |
| 1911 | if (MO.isImplicit() && MO.isDead()) |
| 1912 | continue; |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1913 | assert(!MO.isDef() && "We should have skipped all the definitions by now"); |
| 1914 | if (SrcIdx != EndOpIdx) |
| 1915 | // Multiple sources? |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1916 | return ValueTrackerResult(); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1917 | SrcIdx = OpIdx; |
| 1918 | } |
Matthias Braun | ba7d95d | 2017-01-09 21:38:17 +0000 | [diff] [blame] | 1919 | |
| 1920 | // Bail out if any user of the bitcast is a SUBREG_TO_REG; replacing the bitcast |
| 1921 | // with a COPY would break the upper-bit guarantees SUBREG_TO_REG relies on. |
| 1922 | for (const MachineInstr &UseMI : MRI.use_nodbg_instructions(DefOp.getReg())) { |
| 1923 | if (UseMI.isSubregToReg()) |
| 1924 | return ValueTrackerResult(); |
| 1925 | } |
| 1926 | |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1927 | const MachineOperand &Src = Def->getOperand(SrcIdx); |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1928 | return ValueTrackerResult(Src.getReg(), Src.getSubReg()); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1929 | } |
| 1930 | |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1931 | ValueTrackerResult ValueTracker::getNextSourceFromRegSequence() { |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1932 | assert((Def->isRegSequence() || Def->isRegSequenceLike()) && |
| 1933 | "Invalid definition"); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1934 | |
| 1935 | if (Def->getOperand(DefIdx).getSubReg()) |
Matt Arsenault | 3099156 | 2015-09-09 00:38:33 +0000 | [diff] [blame] | 1936 | // If we are composing subregs, bail out. |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1937 | // The case we are checking is Def.<subreg> = REG_SEQUENCE. |
| 1938 | // This should almost never happen as the SSA property is tracked at |
| 1939 | // the register level (as opposed to the subreg level). |
| 1940 | // I.e., |
| 1941 | // Def.sub0 = |
| 1942 | // Def.sub1 = |
| 1943 | // is a valid SSA representation for Def.sub0 and Def.sub1, but not for |
| 1944 | // Def. Thus, it must not be generated. |
Quentin Colombet | 6d590d5 | 2014-07-01 16:23:44 +0000 | [diff] [blame] | 1945 | // However, some code could theoretically generate a single |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1946 | // Def.sub0 (i.e., not defining the other subregs) and we would |
| 1947 | // have this case. |
| 1948 | // If we can ascertain (or force) that this never happens, we could |
| 1949 | // turn that into an assertion. |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1950 | return ValueTrackerResult(); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1951 | |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1952 | if (!TII) |
| 1953 | // We could handle the REG_SEQUENCE here, but we do not want to |
| 1954 | // duplicate the code from the generic TII. |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1955 | return ValueTrackerResult(); |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1956 | |
| 1957 | SmallVector<TargetInstrInfo::RegSubRegPairAndIdx, 8> RegSeqInputRegs; |
| 1958 | if (!TII->getRegSequenceInputs(*Def, DefIdx, RegSeqInputRegs)) |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1959 | return ValueTrackerResult(); |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1960 | |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1961 | // We are looking at: |
| 1962 | // Def = REG_SEQUENCE v0, sub0, v1, sub1, ... |
| 1963 | // Check if one of the operands defines the subreg we are interested in. |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1964 | for (auto &RegSeqInput : RegSeqInputRegs) { |
| 1965 | if (RegSeqInput.SubIdx == DefSubReg) { |
| 1966 | if (RegSeqInput.SubReg) |
Matt Arsenault | 3099156 | 2015-09-09 00:38:33 +0000 | [diff] [blame] | 1967 | // Bail if we have to compose sub registers. |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1968 | return ValueTrackerResult(); |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1969 | |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1970 | return ValueTrackerResult(RegSeqInput.Reg, RegSeqInput.SubReg); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1971 | } |
| 1972 | } |
| 1973 | |
| 1974 | // If the subreg we are tracking is super-defined by another subreg, |
| 1975 | // we could follow this value. However, this would require composing |
| 1976 | // the subregs, and we do not do that for now. |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1977 | return ValueTrackerResult(); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1978 | } |
| 1979 | |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1980 | ValueTrackerResult ValueTracker::getNextSourceFromInsertSubreg() { |
Quentin Colombet | 6896230 | 2014-08-21 00:19:16 +0000 | [diff] [blame] | 1981 | assert((Def->isInsertSubreg() || Def->isInsertSubregLike()) && |
| 1982 | "Invalid definition"); |
| 1983 | |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1984 | if (Def->getOperand(DefIdx).getSubReg()) |
Matt Arsenault | 3099156 | 2015-09-09 00:38:33 +0000 | [diff] [blame] | 1985 | // If we are composing subregs, bail out. |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1986 | // Same remark as getNextSourceFromRegSequence. |
| 1987 | // I.e., this may be turned into an assert. |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1988 | return ValueTrackerResult(); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 1989 | |
Quentin Colombet | 6896230 | 2014-08-21 00:19:16 +0000 | [diff] [blame] | 1990 | if (!TII) |
| 1991 | // We could handle the INSERT_SUBREG here, but we do not want to |
| 1992 | // duplicate the code from the generic TII. |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1993 | return ValueTrackerResult(); |
Quentin Colombet | 6896230 | 2014-08-21 00:19:16 +0000 | [diff] [blame] | 1994 | |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1995 | TargetInstrInfo::RegSubRegPair BaseReg; |
| 1996 | TargetInstrInfo::RegSubRegPairAndIdx InsertedReg; |
Quentin Colombet | 6896230 | 2014-08-21 00:19:16 +0000 | [diff] [blame] | 1997 | if (!TII->getInsertSubregInputs(*Def, DefIdx, BaseReg, InsertedReg)) |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 1998 | return ValueTrackerResult(); |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 1999 | |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2000 | // We are looking at: |
| 2001 | // Def = INSERT_SUBREG v0, v1, sub1 |
| 2002 | // There are two cases: |
| 2003 | // 1. DefSubReg == sub1, get v1. |
| 2004 | // 2. DefSubReg != sub1, the value may be available through v0. |
| 2005 | |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 2006 | // #1 Check if the inserted register matches the required sub index. |
| 2007 | if (InsertedReg.SubIdx == DefSubReg) { |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2008 | return ValueTrackerResult(InsertedReg.Reg, InsertedReg.SubReg); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2009 | } |
| 2010 | // #2 Otherwise, if the sub-register we are looking for is not partially |
| 2011 | // defined by the inserted element, we can look through the main |
| 2012 | // register (v0). |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2013 | const MachineOperand &MODef = Def->getOperand(DefIdx); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2014 | // If the result register (Def) and the base register (v0) do not |
| 2015 | // have the same register class or if we have to compose |
Matt Arsenault | 3099156 | 2015-09-09 00:38:33 +0000 | [diff] [blame] | 2016 | // subregisters, bail out. |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 2017 | if (MRI.getRegClass(MODef.getReg()) != MRI.getRegClass(BaseReg.Reg) || |
| 2018 | BaseReg.SubReg) |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2019 | return ValueTrackerResult(); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2020 | |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 2021 | // Get the TRI and check if the inserted sub-register overlaps with the |
| 2022 | // sub-register we are tracking. |
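// e.g. (illustrative sub-register indices): when tracking sub0 while the
// inserted element writes a composed index that covers sub0, the lane
// masks overlap and the value cannot be taken from v0, so we bail out.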
| 2023 | const TargetRegisterInfo *TRI = MRI.getTargetRegisterInfo(); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2024 | if (!TRI || |
Krzysztof Parzyszek | 91b5cf8 | 2016-12-15 14:36:06 +0000 | [diff] [blame] | 2025 | !(TRI->getSubRegIndexLaneMask(DefSubReg) & |
| 2026 | TRI->getSubRegIndexLaneMask(InsertedReg.SubIdx)).none()) |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2027 | return ValueTrackerResult(); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2028 | // At this point, the value is available in v0 via the same subreg |
| 2029 | // we used for Def. |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2030 | return ValueTrackerResult(BaseReg.Reg, DefSubReg); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2031 | } |
| 2032 | |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2033 | ValueTrackerResult ValueTracker::getNextSourceFromExtractSubreg() { |
Quentin Colombet | 67639df | 2014-08-20 23:13:02 +0000 | [diff] [blame] | 2034 | assert((Def->isExtractSubreg() || |
| 2035 | Def->isExtractSubregLike()) && "Invalid definition"); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2036 | // We are looking at: |
| 2037 | // Def = EXTRACT_SUBREG v0, sub0 |
| 2038 | |
Matt Arsenault | 3099156 | 2015-09-09 00:38:33 +0000 | [diff] [blame] | 2039 | // Bail if we have to compose sub registers. |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2040 | // Indeed, if DefSubReg != 0, we would have to compose it with sub0. |
| 2041 | if (DefSubReg) |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2042 | return ValueTrackerResult(); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2043 | |
Quentin Colombet | 67639df | 2014-08-20 23:13:02 +0000 | [diff] [blame] | 2044 | if (!TII) |
| 2045 | // We could handle the EXTRACT_SUBREG here, but we do not want to |
| 2046 | // duplicate the code from the generic TII. |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2047 | return ValueTrackerResult(); |
Quentin Colombet | 67639df | 2014-08-20 23:13:02 +0000 | [diff] [blame] | 2048 | |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 2049 | TargetInstrInfo::RegSubRegPairAndIdx ExtractSubregInputReg; |
Quentin Colombet | 67639df | 2014-08-20 23:13:02 +0000 | [diff] [blame] | 2050 | if (!TII->getExtractSubregInputs(*Def, DefIdx, ExtractSubregInputReg)) |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2051 | return ValueTrackerResult(); |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 2052 | |
Matt Arsenault | 3099156 | 2015-09-09 00:38:33 +0000 | [diff] [blame] | 2053 | // Bail if we have to compose sub registers. |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2054 | // Likewise, if v0.subreg != 0, we would have to compose v0.subreg with sub0. |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 2055 | if (ExtractSubregInputReg.SubReg) |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2056 | return ValueTrackerResult(); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2057 | // Otherwise, the value is available in the v0.sub0. |
Sanjay Patel | b120ae9 | 2015-12-29 19:34:53 +0000 | [diff] [blame] | 2058 | return ValueTrackerResult(ExtractSubregInputReg.Reg, |
| 2059 | ExtractSubregInputReg.SubIdx); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2060 | } |
| 2061 | |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2062 | ValueTrackerResult ValueTracker::getNextSourceFromSubregToReg() { |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2063 | assert(Def->isSubregToReg() && "Invalid definition"); |
| 2064 | // We are looking at: |
| 2065 | // Def = SUBREG_TO_REG Imm, v0, sub0 |
| 2066 | |
Matt Arsenault | 3099156 | 2015-09-09 00:38:33 +0000 | [diff] [blame] | 2067 | // Bail if we have to compose sub registers. |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2068 | // If DefSubReg != sub0, we would have to check that all the bits |
| 2069 | // we track are included in sub0 and if yes, we would have to |
| 2070 | // determine the right subreg in v0. |
| 2071 | if (DefSubReg != Def->getOperand(3).getImm()) |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2072 | return ValueTrackerResult(); |
Matt Arsenault | 3099156 | 2015-09-09 00:38:33 +0000 | [diff] [blame] | 2073 | // Bail if we have to compose sub registers. |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2074 | // Likewise, if v0.subreg != 0, we would have to compose it with sub0. |
| 2075 | if (Def->getOperand(2).getSubReg()) |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2076 | return ValueTrackerResult(); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2077 | |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2078 | return ValueTrackerResult(Def->getOperand(2).getReg(), |
| 2079 | Def->getOperand(3).getImm()); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2080 | } |
| 2081 | |
Bruno Cardoso Lopes | 27fd069 | 2015-08-19 18:53:36 +0000 | [diff] [blame] | 2082 | /// \brief Explore each PHI incoming operand and return its sources. |
| 2083 | ValueTrackerResult ValueTracker::getNextSourceFromPHI() { |
| 2084 | assert(Def->isPHI() && "Invalid definition"); |
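// We are looking at (illustrative):
//   Def = PHI v0, <bb0>, v1, <bb1>, ...
// and return every incoming register (v0, v1, ...) as a source.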
| 2085 | ValueTrackerResult Res; |
| 2086 | |
Matt Arsenault | 3099156 | 2015-09-09 00:38:33 +0000 | [diff] [blame] | 2087 | // If we look for a different subreg, bail as we do not support composing |
| 2088 | // subregs yet. |
Bruno Cardoso Lopes | 27fd069 | 2015-08-19 18:53:36 +0000 | [diff] [blame] | 2089 | if (Def->getOperand(0).getSubReg() != DefSubReg) |
| 2090 | return ValueTrackerResult(); |
| 2091 | |
| 2092 | // Return all register sources for PHI instructions. |
| 2093 | for (unsigned i = 1, e = Def->getNumOperands(); i < e; i += 2) { |
| 2094 | auto &MO = Def->getOperand(i); |
| 2095 | assert(MO.isReg() && "Invalid PHI instruction"); |
| 2096 | Res.addSource(MO.getReg(), MO.getSubReg()); |
| 2097 | } |
| 2098 | |
| 2099 | return Res; |
| 2100 | } |
| 2101 | |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2102 | ValueTrackerResult ValueTracker::getNextSourceImpl() { |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2103 | assert(Def && "This method needs a valid definition"); |
| 2104 | |
Eric Liu | e617ade | 2016-07-04 12:10:08 +0000 | [diff] [blame] | 2105 | assert(((Def->getOperand(DefIdx).isDef() && |
| 2106 | (DefIdx < Def->getDesc().getNumDefs() || |
| 2107 | Def->getDesc().isVariadic())) || |
| 2108 | Def->getOperand(DefIdx).isImplicit()) && |
| 2109 | "Invalid DefIdx"); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2110 | if (Def->isCopy()) |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2111 | return getNextSourceFromCopy(); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2112 | if (Def->isBitcast()) |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2113 | return getNextSourceFromBitcast(); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2114 | // All the remaining cases involve "complex" instructions. |
Matt Arsenault | 3099156 | 2015-09-09 00:38:33 +0000 | [diff] [blame] | 2115 | // Bail if we did not ask for the advanced tracking. |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2116 | if (!UseAdvancedTracking) |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2117 | return ValueTrackerResult(); |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 2118 | if (Def->isRegSequence() || Def->isRegSequenceLike()) |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2119 | return getNextSourceFromRegSequence(); |
Quentin Colombet | 6896230 | 2014-08-21 00:19:16 +0000 | [diff] [blame] | 2120 | if (Def->isInsertSubreg() || Def->isInsertSubregLike()) |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2121 | return getNextSourceFromInsertSubreg(); |
Quentin Colombet | 67639df | 2014-08-20 23:13:02 +0000 | [diff] [blame] | 2122 | if (Def->isExtractSubreg() || Def->isExtractSubregLike()) |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2123 | return getNextSourceFromExtractSubreg(); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2124 | if (Def->isSubregToReg()) |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2125 | return getNextSourceFromSubregToReg(); |
Bruno Cardoso Lopes | 27fd069 | 2015-08-19 18:53:36 +0000 | [diff] [blame] | 2126 | if (Def->isPHI()) |
| 2127 | return getNextSourceFromPHI(); |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2128 | return ValueTrackerResult(); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2129 | } |
| 2130 | |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2131 | ValueTrackerResult ValueTracker::getNextSource() { |
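// Illustrative walk (hypothetical virtual registers): given
//   %b = COPY %a
//   %c = COPY %b
// and a tracker seeded on %c, the first call returns %b and the next call
// returns %a; once no further definition can be followed, an invalid
// result is returned.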
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2132 | // If we reach a point where we cannot move up in the use-def chain, |
| 2133 | // there is nothing we can get. |
| 2134 | if (!Def) |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2135 | return ValueTrackerResult(); |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2136 | |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2137 | ValueTrackerResult Res = getNextSourceImpl(); |
| 2138 | if (Res.isValid()) { |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2139 | // Update definition, definition index, and subregister for the |
| 2140 | // next call of getNextSource. |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2141 | // Update the current register. |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2142 | bool OneRegSrc = Res.getNumSources() == 1; |
| 2143 | if (OneRegSrc) |
| 2144 | Reg = Res.getSrcReg(0); |
| 2145 | // Before moving up in the use-def chain, record in the result the |
| 2146 | // instruction that contains the sources we just found. |
| 2147 | Res.setInst(Def); |
| 2148 | |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2149 | // If we can still move up in the use-def chain, move to the next |
Benjamin Kramer | df005cb | 2015-08-08 18:27:36 +0000 | [diff] [blame] | 2150 | // definition. |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2151 | if (!TargetRegisterInfo::isPhysicalRegister(Reg) && OneRegSrc) { |
Quentin Colombet | 03e43f8 | 2014-08-20 17:41:48 +0000 | [diff] [blame] | 2152 | Def = MRI.getVRegDef(Reg); |
| 2153 | DefIdx = MRI.def_begin(Reg).getOperandNo(); |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2154 | DefSubReg = Res.getSrcSubReg(0); |
| 2155 | return Res; |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2156 | } |
| 2157 | } |
| 2158 | // If we end up here, this means we will not be able to find another source |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2159 | // for the next iteration. Make sure any new call to getNextSource bails out |
| 2160 | // early by cutting the use-def chain. |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2161 | Def = nullptr; |
Bruno Cardoso Lopes | f16ec12 | 2015-07-22 21:30:16 +0000 | [diff] [blame] | 2162 | return Res; |
Quentin Colombet | 1111e6f | 2014-07-01 14:33:36 +0000 | [diff] [blame] | 2163 | } |