Tom Stellard | 6596ba7 | 2014-11-21 22:06:37 +0000 | [diff] [blame] | 1 | //===-- SIFoldOperands.cpp - Fold operands --- ----------------------------===// |
| 2 | // |
| 3 | // The LLVM Compiler Infrastructure |
| 4 | // |
| 5 | // This file is distributed under the University of Illinois Open Source |
| 6 | // License. See LICENSE.TXT for details. |
| 7 | // |
| 8 | /// \file |
| 9 | //===----------------------------------------------------------------------===// |
| 10 | // |
| 11 | |
| 12 | #include "AMDGPU.h" |
| 13 | #include "AMDGPUSubtarget.h" |
| 14 | #include "SIInstrInfo.h" |
| 15 | #include "llvm/CodeGen/LiveIntervalAnalysis.h" |
| 16 | #include "llvm/CodeGen/MachineDominators.h" |
| 17 | #include "llvm/CodeGen/MachineFunctionPass.h" |
| 18 | #include "llvm/CodeGen/MachineInstrBuilder.h" |
| 19 | #include "llvm/CodeGen/MachineRegisterInfo.h" |
| 20 | #include "llvm/IR/LLVMContext.h" |
| 21 | #include "llvm/IR/Function.h" |
| 22 | #include "llvm/Support/Debug.h" |
| 23 | #include "llvm/Target/TargetMachine.h" |
| 24 | |
| 25 | #define DEBUG_TYPE "si-fold-operands" |
| 26 | using namespace llvm; |
| 27 | |
| 28 | namespace { |
| 29 | |
| 30 | class SIFoldOperands : public MachineFunctionPass { |
| 31 | public: |
| 32 | static char ID; |
| 33 | |
| 34 | public: |
| 35 | SIFoldOperands() : MachineFunctionPass(ID) { |
| 36 | initializeSIFoldOperandsPass(*PassRegistry::getPassRegistry()); |
| 37 | } |
| 38 | |
| 39 | bool runOnMachineFunction(MachineFunction &MF) override; |
| 40 | |
| 41 | const char *getPassName() const override { |
| 42 | return "SI Fold Operands"; |
| 43 | } |
| 44 | |
| 45 | void getAnalysisUsage(AnalysisUsage &AU) const override { |
| 46 | AU.addRequired<MachineDominatorTree>(); |
| 47 | AU.setPreservesCFG(); |
| 48 | MachineFunctionPass::getAnalysisUsage(AU); |
| 49 | } |
| 50 | }; |
| 51 | |
Tom Stellard | bb763e6 | 2015-01-07 17:42:16 +0000 | [diff] [blame] | 52 | struct FoldCandidate { |
| 53 | MachineInstr *UseMI; |
| 54 | unsigned UseOpNo; |
| 55 | MachineOperand *OpToFold; |
| 56 | uint64_t ImmToFold; |
| 57 | |
| 58 | FoldCandidate(MachineInstr *MI, unsigned OpNo, MachineOperand *FoldOp) : |
Tom Stellard | 0599297 | 2015-01-07 22:44:19 +0000 | [diff] [blame] | 59 | UseMI(MI), UseOpNo(OpNo) { |
Tom Stellard | bb763e6 | 2015-01-07 17:42:16 +0000 | [diff] [blame] | 60 | |
Tom Stellard | 0599297 | 2015-01-07 22:44:19 +0000 | [diff] [blame] | 61 | if (FoldOp->isImm()) { |
| 62 | OpToFold = nullptr; |
| 63 | ImmToFold = FoldOp->getImm(); |
| 64 | } else { |
| 65 | assert(FoldOp->isReg()); |
| 66 | OpToFold = FoldOp; |
| 67 | } |
| 68 | } |
Tom Stellard | bb763e6 | 2015-01-07 17:42:16 +0000 | [diff] [blame] | 69 | |
| 70 | bool isImm() const { |
| 71 | return !OpToFold; |
| 72 | } |
| 73 | }; |
| 74 | |
Tom Stellard | 6596ba7 | 2014-11-21 22:06:37 +0000 | [diff] [blame] | 75 | } // End anonymous namespace. |
| 76 | |
// Register the pass with the LLVM pass registry, declaring its dependency on
// MachineDominatorTree (required by getAnalysisUsage above).
INITIALIZE_PASS_BEGIN(SIFoldOperands, DEBUG_TYPE,
                      "SI Fold Operands", false, false)
INITIALIZE_PASS_DEPENDENCY(MachineDominatorTree)
INITIALIZE_PASS_END(SIFoldOperands, DEBUG_TYPE,
                    "SI Fold Operands", false, false)

char SIFoldOperands::ID = 0;

// Exported handle used by the target's pass setup to schedule this pass.
char &llvm::SIFoldOperandsID = SIFoldOperands::ID;

// Factory used when constructing the AMDGPU codegen pipeline.
FunctionPass *llvm::createSIFoldOperandsPass() {
  return new SIFoldOperands();
}
| 90 | |
| 91 | static bool isSafeToFold(unsigned Opcode) { |
| 92 | switch(Opcode) { |
| 93 | case AMDGPU::V_MOV_B32_e32: |
| 94 | case AMDGPU::V_MOV_B32_e64: |
Tom Stellard | 4842c05 | 2015-01-07 20:27:25 +0000 | [diff] [blame] | 95 | case AMDGPU::V_MOV_B64_PSEUDO: |
Tom Stellard | 6596ba7 | 2014-11-21 22:06:37 +0000 | [diff] [blame] | 96 | case AMDGPU::S_MOV_B32: |
| 97 | case AMDGPU::S_MOV_B64: |
| 98 | case AMDGPU::COPY: |
| 99 | return true; |
| 100 | default: |
| 101 | return false; |
| 102 | } |
| 103 | } |
| 104 | |
Tom Stellard | bb763e6 | 2015-01-07 17:42:16 +0000 | [diff] [blame] | 105 | static bool updateOperand(FoldCandidate &Fold, |
Tom Stellard | 6596ba7 | 2014-11-21 22:06:37 +0000 | [diff] [blame] | 106 | const TargetRegisterInfo &TRI) { |
Tom Stellard | bb763e6 | 2015-01-07 17:42:16 +0000 | [diff] [blame] | 107 | MachineInstr *MI = Fold.UseMI; |
| 108 | MachineOperand &Old = MI->getOperand(Fold.UseOpNo); |
Tom Stellard | 6596ba7 | 2014-11-21 22:06:37 +0000 | [diff] [blame] | 109 | assert(Old.isReg()); |
| 110 | |
Tom Stellard | bb763e6 | 2015-01-07 17:42:16 +0000 | [diff] [blame] | 111 | if (Fold.isImm()) { |
| 112 | Old.ChangeToImmediate(Fold.ImmToFold); |
Tom Stellard | 6596ba7 | 2014-11-21 22:06:37 +0000 | [diff] [blame] | 113 | return true; |
| 114 | } |
| 115 | |
Tom Stellard | bb763e6 | 2015-01-07 17:42:16 +0000 | [diff] [blame] | 116 | MachineOperand *New = Fold.OpToFold; |
| 117 | if (TargetRegisterInfo::isVirtualRegister(Old.getReg()) && |
| 118 | TargetRegisterInfo::isVirtualRegister(New->getReg())) { |
| 119 | Old.substVirtReg(New->getReg(), New->getSubReg(), TRI); |
Tom Stellard | 6596ba7 | 2014-11-21 22:06:37 +0000 | [diff] [blame] | 120 | return true; |
| 121 | } |
| 122 | |
Tom Stellard | 6596ba7 | 2014-11-21 22:06:37 +0000 | [diff] [blame] | 123 | // FIXME: Handle physical registers. |
| 124 | |
| 125 | return false; |
| 126 | } |
| 127 | |
Tom Stellard | 0599297 | 2015-01-07 22:44:19 +0000 | [diff] [blame] | 128 | static bool tryAddToFoldList(std::vector<FoldCandidate> &FoldList, |
| 129 | MachineInstr *MI, unsigned OpNo, |
| 130 | MachineOperand *OpToFold, |
| 131 | const SIInstrInfo *TII) { |
| 132 | if (!TII->isOperandLegal(MI, OpNo, OpToFold)) { |
| 133 | // Operand is not legal, so try to commute the instruction to |
| 134 | // see if this makes it possible to fold. |
| 135 | unsigned CommuteIdx0; |
| 136 | unsigned CommuteIdx1; |
| 137 | bool CanCommute = TII->findCommutedOpIndices(MI, CommuteIdx0, CommuteIdx1); |
| 138 | |
| 139 | if (CanCommute) { |
| 140 | if (CommuteIdx0 == OpNo) |
| 141 | OpNo = CommuteIdx1; |
| 142 | else if (CommuteIdx1 == OpNo) |
| 143 | OpNo = CommuteIdx0; |
| 144 | } |
| 145 | |
| 146 | if (!CanCommute || !TII->commuteInstruction(MI)) |
| 147 | return false; |
| 148 | |
| 149 | if (!TII->isOperandLegal(MI, OpNo, OpToFold)) |
| 150 | return false; |
| 151 | } |
| 152 | |
| 153 | FoldList.push_back(FoldCandidate(MI, OpNo, OpToFold)); |
| 154 | return true; |
| 155 | } |
| 156 | |
// Walk every instruction in the function; for each foldable mov/copy,
// collect the uses of its result into which the source can legally be
// folded, then apply all collected folds in a second phase. The two-phase
// scheme matters because tryAddToFoldList may commute use instructions
// while candidates are still being gathered.
bool SIFoldOperands::runOnMachineFunction(MachineFunction &MF) {
  MachineRegisterInfo &MRI = MF.getRegInfo();
  const SIInstrInfo *TII =
      static_cast<const SIInstrInfo *>(MF.getSubtarget().getInstrInfo());
  const SIRegisterInfo &TRI = TII->getRegisterInfo();

  for (MachineFunction::iterator BI = MF.begin(), BE = MF.end();
       BI != BE; ++BI) {

    MachineBasicBlock &MBB = *BI;
    MachineBasicBlock::iterator I, Next;
    // Capture the successor up front: instructions in this block may be
    // rewritten (commuted, re-descd) while MI's uses are processed.
    for (I = MBB.begin(); I != MBB.end(); I = Next) {
      Next = std::next(I);
      MachineInstr &MI = *I;

      if (!isSafeToFold(MI.getOpcode()))
        continue;

      // Operand 1 is the single source of the mov/copy opcodes accepted by
      // isSafeToFold; operand 0 is the destination register.
      unsigned OpSize = TII->getOpSize(MI, 1);
      MachineOperand &OpToFold = MI.getOperand(1);
      bool FoldingImm = OpToFold.isImm();

      // FIXME: We could also be folding things like FrameIndexes and
      // TargetIndexes.
      if (!FoldingImm && !OpToFold.isReg())
        continue;

      // Folding immediates with more than one use will increase program size.
      // FIXME: This will also reduce register usage, which may be better
      // in some cases. A better heuristic is needed.
      if (FoldingImm && !TII->isInlineConstant(OpToFold, OpSize) &&
          !MRI.hasOneUse(MI.getOperand(0).getReg()))
        continue;

      // FIXME: Fold operands with subregs.
      if (OpToFold.isReg() &&
          (!TargetRegisterInfo::isVirtualRegister(OpToFold.getReg()) ||
           OpToFold.getSubReg()))
        continue;

      // Phase 1: gather fold candidates across all uses of the destination.
      std::vector<FoldCandidate> FoldList;
      for (MachineRegisterInfo::use_iterator
           Use = MRI.use_begin(MI.getOperand(0).getReg()), E = MRI.use_end();
           Use != E; ++Use) {

        MachineInstr *UseMI = Use->getParent();
        const MachineOperand &UseOp = UseMI->getOperand(Use.getOperandNo());

        // FIXME: Fold operands with subregs.
        if (UseOp.isReg() && UseOp.getSubReg() && OpToFold.isReg()) {
          continue;
        }

        APInt Imm;

        if (FoldingImm) {
          const TargetRegisterClass *UseRC = MRI.getRegClass(UseOp.getReg());
          Imm = APInt(64, OpToFold.getImm());

          // Split 64-bit constants into 32-bits for folding: a sub0 use
          // takes the low half, a sub1 use the high half.
          if (UseOp.getSubReg()) {
            if (UseRC->getSize() != 8)
              continue;

            if (UseOp.getSubReg() == AMDGPU::sub0) {
              Imm = Imm.getLoBits(32);
            } else {
              assert(UseOp.getSubReg() == AMDGPU::sub1);
              Imm = Imm.getHiBits(32);
            }
          }

          // In order to fold immediates into copies, we need to change the
          // copy to a MOV. Note this mutates UseMI in place even if no fold
          // is ultimately recorded for it.
          if (UseMI->getOpcode() == AMDGPU::COPY) {
            unsigned MovOp = TII->getMovOpcode(
                MRI.getRegClass(UseMI->getOperand(0).getReg()));
            if (MovOp == AMDGPU::COPY)
              continue;

            UseMI->setDesc(TII->get(MovOp));
          }
        }

        const MCInstrDesc &UseDesc = UseMI->getDesc();

        // Don't fold into target independent nodes. Target independent opcodes
        // don't have defined register classes.
        if (UseDesc.isVariadic() ||
            UseDesc.OpInfo[Use.getOperandNo()].RegClass == -1)
          continue;

        if (FoldingImm) {
          // ImmOp is a stack temporary; that is safe because FoldCandidate
          // copies the immediate value out of it instead of keeping the
          // pointer.
          MachineOperand ImmOp = MachineOperand::CreateImm(Imm.getSExtValue());
          tryAddToFoldList(FoldList, UseMI, Use.getOperandNo(), &ImmOp, TII);
          continue;
        }

        tryAddToFoldList(FoldList, UseMI, Use.getOperandNo(), &OpToFold, TII);

        // FIXME: We could try to change the instruction from 64-bit to 32-bit
        // to enable more folding opportunites. The shrink operands pass
        // already does this.
      }

      // Phase 2: apply the collected folds.
      for (FoldCandidate &Fold : FoldList) {
        if (updateOperand(Fold, TRI)) {
          // Clear kill flags: the folded register now has an additional use
          // beyond any previously-last one.
          if (!Fold.isImm()) {
            assert(Fold.OpToFold && Fold.OpToFold->isReg());
            Fold.OpToFold->setIsKill(false);
          }
          DEBUG(dbgs() << "Folded source from " << MI << " into OpNo " <<
                Fold.UseOpNo << " of " << *Fold.UseMI << '\n');
        }
      }
    }
  }
  // NOTE(review): this returns false unconditionally even when folds were
  // applied — presumably harmless but conservative passes usually report
  // modification; confirm against pass-manager expectations.
  return false;
}