//===-- SIFormMemoryClauses.cpp -------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
/// \file
/// This pass creates bundles of SMEM and VMEM instructions forming memory
/// clauses if XNACK is enabled. Def operands of the clauses are marked as
/// early-clobber to make sure we do not overwrite any source register within
/// a clause.
///
//===----------------------------------------------------------------------===//

#include "AMDGPU.h"
#include "AMDGPUSubtarget.h"
#include "GCNRegPressure.h"
#include "MCTargetDesc/AMDGPUMCTargetDesc.h"
#include "SIInstrInfo.h"
#include "SIMachineFunctionInfo.h"
#include "SIRegisterInfo.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/CodeGen/LiveIntervals.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/InitializePasses.h"

using namespace llvm;

#define DEBUG_TYPE "si-form-memory-clauses"

// Clauses longer than 15 instructions would overflow one of the counters
// and stall. They can stall even earlier if there are outstanding counters.
static cl::opt<unsigned>
MaxClause("amdgpu-max-memory-clause", cl::Hidden, cl::init(15),
          cl::desc("Maximum length of a memory clause, instructions"));

namespace {

class SIFormMemoryClauses : public MachineFunctionPass {
  typedef DenseMap<unsigned, std::pair<unsigned, LaneBitmask>> RegUse;

public:
  static char ID;

public:
  SIFormMemoryClauses() : MachineFunctionPass(ID) {
    initializeSIFormMemoryClausesPass(*PassRegistry::getPassRegistry());
  }

  bool runOnMachineFunction(MachineFunction &MF) override;

  StringRef getPassName() const override {
    return "SI Form memory clauses";
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<LiveIntervals>();
    AU.setPreservesAll();
    MachineFunctionPass::getAnalysisUsage(AU);
  }

private:
  template <typename Callable>
  void forAllLanes(unsigned Reg, LaneBitmask LaneMask, Callable Func) const;

  bool canBundle(const MachineInstr &MI, RegUse &Defs, RegUse &Uses) const;
  bool checkPressure(const MachineInstr &MI, GCNDownwardRPTracker &RPT);
  void collectRegUses(const MachineInstr &MI, RegUse &Defs, RegUse &Uses) const;
  bool processRegUses(const MachineInstr &MI, RegUse &Defs, RegUse &Uses,
                      GCNDownwardRPTracker &RPT);

  const GCNSubtarget *ST;
  const SIRegisterInfo *TRI;
  const MachineRegisterInfo *MRI;
  SIMachineFunctionInfo *MFI;

  unsigned LastRecordedOccupancy;
  unsigned MaxVGPRs;
  unsigned MaxSGPRs;
};

} // End anonymous namespace.

INITIALIZE_PASS_BEGIN(SIFormMemoryClauses, DEBUG_TYPE,
                      "SI Form memory clauses", false, false)
INITIALIZE_PASS_DEPENDENCY(LiveIntervals)
INITIALIZE_PASS_END(SIFormMemoryClauses, DEBUG_TYPE,
                    "SI Form memory clauses", false, false)


char SIFormMemoryClauses::ID = 0;

char &llvm::SIFormMemoryClausesID = SIFormMemoryClauses::ID;

FunctionPass *llvm::createSIFormMemoryClausesPass() {
  return new SIFormMemoryClauses();
}

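// Predicates for the two clause kinds: VMEM clauses are formed from FLAT and
// VMEM instructions, SMEM clauses from scalar memory (SMRD) instructions.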
static bool isVMEMClauseInst(const MachineInstr &MI) {
  return SIInstrInfo::isFLAT(MI) || SIInstrInfo::isVMEM(MI);
}

static bool isSMEMClauseInst(const MachineInstr &MI) {
  return SIInstrInfo::isSMRD(MI);
}

// There is no point in forming store clauses: stores do not define anything,
// so there is nothing to mark early-clobber.
static bool isValidClauseInst(const MachineInstr &MI, bool IsVMEMClause) {
  if (MI.isDebugValue() || MI.isBundled())
    return false;
  if (!MI.mayLoad() || MI.mayStore())
    return false;
  if (AMDGPU::getAtomicNoRetOp(MI.getOpcode()) != -1 ||
      AMDGPU::getAtomicRetOp(MI.getOpcode()) != -1)
    return false;
  if (IsVMEMClause && !isVMEMClauseInst(MI))
    return false;
  if (!IsVMEMClause && !isSMEMClauseInst(MI))
    return false;
  // If this is a load instruction where the result has been coalesced with an
  // operand, then we cannot clause it.
  for (const MachineOperand &ResMO : MI.defs()) {
    Register ResReg = ResMO.getReg();
    for (const MachineOperand &MO : MI.uses()) {
      if (!MO.isReg() || MO.isDef())
        continue;
      if (MO.getReg() == ResReg)
        return false;
    }
    break; // Only check the first def.
  }
  return true;
}

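// Translate the flags of a machine operand into the corresponding RegState
// bits so they can be reapplied when the operand is added to the bundle.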
static unsigned getMopState(const MachineOperand &MO) {
  unsigned S = 0;
  if (MO.isImplicit())
    S |= RegState::Implicit;
  if (MO.isDead())
    S |= RegState::Dead;
  if (MO.isUndef())
    S |= RegState::Undef;
  if (MO.isKill())
    S |= RegState::Kill;
  if (MO.isEarlyClobber())
    S |= RegState::EarlyClobber;
  if (Register::isPhysicalRegister(MO.getReg()) && MO.isRenamable())
    S |= RegState::Renamable;
  return S;
}

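// Call Func with each subregister index from a set covering LaneMask, or with
// 0 if the whole register is covered (physical register or full lane mask).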
template <typename Callable>
void SIFormMemoryClauses::forAllLanes(unsigned Reg, LaneBitmask LaneMask,
                                      Callable Func) const {
  if (LaneMask.all() || Register::isPhysicalRegister(Reg) ||
      LaneMask == MRI->getMaxLaneMaskForVReg(Reg)) {
    Func(0);
    return;
  }

  const TargetRegisterClass *RC = MRI->getRegClass(Reg);
  unsigned E = TRI->getNumSubRegIndices();
  SmallVector<unsigned, AMDGPU::NUM_TARGET_SUBREGS> CoveringSubregs;
  for (unsigned Idx = 1; Idx < E; ++Idx) {
    // Is this index even compatible with the given class?
    if (TRI->getSubClassWithSubReg(RC, Idx) != RC)
      continue;
    LaneBitmask SubRegMask = TRI->getSubRegIndexLaneMask(Idx);
    // Early exit if we found a perfect match.
    if (SubRegMask == LaneMask) {
      Func(Idx);
      return;
    }

    if ((SubRegMask & ~LaneMask).any() || (SubRegMask & LaneMask).none())
      continue;

    CoveringSubregs.push_back(Idx);
  }

  llvm::sort(CoveringSubregs, [this](unsigned A, unsigned B) {
    LaneBitmask MaskA = TRI->getSubRegIndexLaneMask(A);
    LaneBitmask MaskB = TRI->getSubRegIndexLaneMask(B);
    unsigned NA = MaskA.getNumLanes();
    unsigned NB = MaskB.getNumLanes();
    if (NA != NB)
      return NA > NB;
    return MaskA.getHighestLane() > MaskB.getHighestLane();
  });

  for (unsigned Idx : CoveringSubregs) {
    LaneBitmask SubRegMask = TRI->getSubRegIndexLaneMask(Idx);
    if ((SubRegMask & ~LaneMask).any() || (SubRegMask & LaneMask).none())
      continue;

    Func(Idx);
    LaneMask &= ~SubRegMask;
    if (LaneMask.none())
      return;
  }

  llvm_unreachable("Failed to find all subregs to cover lane mask");
}

// Returns false if there is a use of a def already in the map.
// In this case we must break the clause.
bool SIFormMemoryClauses::canBundle(const MachineInstr &MI,
                                    RegUse &Defs, RegUse &Uses) const {
  // Check interference with defs.
  for (const MachineOperand &MO : MI.operands()) {
    // TODO: Prologue/Epilogue Insertion pass does not process bundled
    // instructions.
    if (MO.isFI())
      return false;

    if (!MO.isReg())
      continue;

    Register Reg = MO.getReg();

    // If it is tied we will need to write the same register as we read.
    if (MO.isTied())
      return false;

    RegUse &Map = MO.isDef() ? Uses : Defs;
    auto Conflict = Map.find(Reg);
    if (Conflict == Map.end())
      continue;

    if (Register::isPhysicalRegister(Reg))
      return false;

    LaneBitmask Mask = TRI->getSubRegIndexLaneMask(MO.getSubReg());
    if ((Conflict->second.second & Mask).any())
      return false;
  }

  return true;
}

// Since all defs in the clause are early-clobber we can run out of registers.
// Returns false if the register pressure would hit the limit were the
// instruction bundled into a memory clause.
bool SIFormMemoryClauses::checkPressure(const MachineInstr &MI,
                                        GCNDownwardRPTracker &RPT) {
  // NB: skip advanceBeforeNext() call. Since all defs will be marked
  // early-clobber they will all stay alive at least to the end of the
  // clause. Therefore we should not decrease pressure even if a load
  // pointer becomes dead and could otherwise be reused for the destination.
  RPT.advanceToNext();
  GCNRegPressure MaxPressure = RPT.moveMaxPressure();
  unsigned Occupancy = MaxPressure.getOccupancy(*ST);
  if (Occupancy >= MFI->getMinAllowedOccupancy() &&
      MaxPressure.getVGPRNum() <= MaxVGPRs &&
      MaxPressure.getSGPRNum() <= MaxSGPRs) {
    LastRecordedOccupancy = Occupancy;
    return true;
  }
  return false;
}

// Collect register defs and uses along with their lane masks and states.
void SIFormMemoryClauses::collectRegUses(const MachineInstr &MI,
                                         RegUse &Defs, RegUse &Uses) const {
  for (const MachineOperand &MO : MI.operands()) {
    if (!MO.isReg())
      continue;
    Register Reg = MO.getReg();
    if (!Reg)
      continue;

    LaneBitmask Mask = Register::isVirtualRegister(Reg)
                           ? TRI->getSubRegIndexLaneMask(MO.getSubReg())
                           : LaneBitmask::getAll();
    RegUse &Map = MO.isDef() ? Defs : Uses;

    auto Loc = Map.find(Reg);
    unsigned State = getMopState(MO);
    if (Loc == Map.end()) {
      Map[Reg] = std::make_pair(State, Mask);
    } else {
      Loc->second.first |= State;
      Loc->second.second |= Mask;
    }
  }
}

// Check register def/use conflicts, occupancy limits and collect def/use maps.
// Return true if the instruction can be bundled with the previous ones. If it
// cannot, the def/use maps are left unchanged.
bool SIFormMemoryClauses::processRegUses(const MachineInstr &MI,
                                         RegUse &Defs, RegUse &Uses,
                                         GCNDownwardRPTracker &RPT) {
  if (!canBundle(MI, Defs, Uses))
    return false;

  if (!checkPressure(MI, RPT))
    return false;

  collectRegUses(MI, Defs, Uses);
  return true;
}

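// Greedily grow a clause from each eligible memory instruction: keep adding
// subsequent valid instructions while register pressure and occupancy limits
// allow, then wrap the resulting run of instructions into a BUNDLE whose defs
// are marked early-clobber.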
bool SIFormMemoryClauses::runOnMachineFunction(MachineFunction &MF) {
  if (skipFunction(MF.getFunction()))
    return false;

  ST = &MF.getSubtarget<GCNSubtarget>();
  if (!ST->isXNACKEnabled())
    return false;

  const SIInstrInfo *TII = ST->getInstrInfo();
  TRI = ST->getRegisterInfo();
  MRI = &MF.getRegInfo();
  MFI = MF.getInfo<SIMachineFunctionInfo>();
  LiveIntervals *LIS = &getAnalysis<LiveIntervals>();
  SlotIndexes *Ind = LIS->getSlotIndexes();
  bool Changed = false;

  MaxVGPRs = TRI->getAllocatableSet(MF, &AMDGPU::VGPR_32RegClass).count();
  MaxSGPRs = TRI->getAllocatableSet(MF, &AMDGPU::SGPR_32RegClass).count();
  unsigned FuncMaxClause = AMDGPU::getIntegerAttribute(
      MF.getFunction(), "amdgpu-max-memory-clause", MaxClause);

  for (MachineBasicBlock &MBB : MF) {
    MachineBasicBlock::instr_iterator Next;
    for (auto I = MBB.instr_begin(), E = MBB.instr_end(); I != E; I = Next) {
      MachineInstr &MI = *I;
      Next = std::next(I);

      bool IsVMEM = isVMEMClauseInst(MI);

      if (!isValidClauseInst(MI, IsVMEM))
        continue;

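      // Start collecting defs/uses and tracking register pressure from this
      // potential clause head.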
      RegUse Defs, Uses;
      GCNDownwardRPTracker RPT(*LIS);
      RPT.reset(MI);

      if (!processRegUses(MI, Defs, Uses, RPT))
        continue;

      unsigned Length = 1;
      for ( ; Next != E && Length < FuncMaxClause; ++Next) {
        if (!isValidClauseInst(*Next, IsVMEM))
          break;

        // A load from a pointer which was loaded inside the same bundle makes
        // the clause impossible, since we would need to write and read the
        // same register within it. In this case processRegUses returns false.
        if (!processRegUses(*Next, Defs, Uses, RPT))
          break;

        ++Length;
      }
      if (Length < 2)
        continue;

      Changed = true;
      MFI->limitOccupancy(LastRecordedOccupancy);

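      // Create a BUNDLE header in front of the clause and fold every clause
      // instruction into it. Defs that are read again inside the bundle are
      // marked as internal reads.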
      auto B = BuildMI(MBB, I, DebugLoc(), TII->get(TargetOpcode::BUNDLE));
      Ind->insertMachineInstrInMaps(*B);

      for (auto BI = I; BI != Next; ++BI) {
        BI->bundleWithPred();
        Ind->removeSingleMachineInstrFromMaps(*BI);

        for (MachineOperand &MO : BI->defs())
          if (MO.readsReg())
            MO.setIsInternalRead(true);
      }

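      // Expose the collected defs and uses on the BUNDLE header. Defs are
      // added as early-clobber so the register allocator cannot assign them
      // to any source register of the clause.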
      for (auto &&R : Defs) {
        forAllLanes(R.first, R.second.second, [&R, &B](unsigned SubReg) {
          unsigned S = R.second.first | RegState::EarlyClobber;
          if (!SubReg)
            S &= ~(RegState::Undef | RegState::Dead);
          B.addDef(R.first, S, SubReg);
        });
      }

      for (auto &&R : Uses) {
        forAllLanes(R.first, R.second.second, [&R, &B](unsigned SubReg) {
          B.addUse(R.first, R.second.first & ~RegState::Kill, SubReg);
        });
      }

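      // Bundling changed the def and use points of these registers, so their
      // live intervals have to be recomputed.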
      for (auto &&R : Defs) {
        unsigned Reg = R.first;
        Uses.erase(Reg);
        if (Register::isPhysicalRegister(Reg))
          continue;
        LIS->removeInterval(Reg);
        LIS->createAndComputeVirtRegInterval(Reg);
      }

      for (auto &&R : Uses) {
        unsigned Reg = R.first;
        if (Register::isPhysicalRegister(Reg))
          continue;
        LIS->removeInterval(Reg);
        LIS->createAndComputeVirtRegInterval(Reg);
      }
    }
  }

  return Changed;
}