//===----- AggressiveAntiDepBreaker.cpp - Anti-dep breaker ---------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the AggressiveAntiDepBreaker class, which
// implements register anti-dependence breaking during post-RA
// scheduling. It attempts to break all anti-dependencies within a
// block.
//
//===----------------------------------------------------------------------===//
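
// Illustrative note (not from the original source): an anti-dependence (WAR
// hazard) arises when an instruction writes a register that an earlier
// instruction still needs to read, e.g.
//
//   R1 = R2 + R3
//   R2 = R4 + R5     // anti-dependence: this def of R2 must stay below the use
//
// Renaming the second definition (and its subsequent uses) to a register that
// is otherwise free, say R6, removes the ordering constraint and gives the
// post-RA scheduler more freedom:
//
//   R1 = R2 + R3
//   R6 = R4 + R5
//
// The register names above are purely hypothetical.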

#define DEBUG_TYPE "aggressive-antidep"
#include "AggressiveAntiDepBreaker.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetRegisterInfo.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

AggressiveAntiDepBreaker::
AggressiveAntiDepBreaker(MachineFunction& MFi) :
  AntiDepBreaker(), MF(MFi),
  MRI(MF.getRegInfo()),
  TRI(MF.getTarget().getRegisterInfo()),
  AllocatableSet(TRI->getAllocatableSet(MF)),
  GroupNodes(TargetRegisterInfo::FirstVirtualRegister, 0)
{
}

AggressiveAntiDepBreaker::~AggressiveAntiDepBreaker() {
}

void AggressiveAntiDepBreaker::StartBlock(MachineBasicBlock *BB) {
  // Initialize all registers to be in their own group. Initially we
  // assign the register to the same-indexed GroupNode.
  for (unsigned i = 0; i < TargetRegisterInfo::FirstVirtualRegister; ++i)
    GroupNodeIndices[i] = i;

  // Initialize the indices to indicate that no registers are live.
  std::fill(KillIndices, array_endof(KillIndices), ~0u);
  std::fill(DefIndices, array_endof(DefIndices), BB->size());

  bool IsReturnBlock = (!BB->empty() && BB->back().getDesc().isReturn());

  // Determine the live-out physregs for this block.
  if (IsReturnBlock) {
    // In a return block, examine the function live-out regs.
    for (MachineRegisterInfo::liveout_iterator I = MRI.liveout_begin(),
           E = MRI.liveout_end(); I != E; ++I) {
      unsigned Reg = *I;
      UnionGroups(Reg, 0);
      KillIndices[Reg] = BB->size();
      DefIndices[Reg] = ~0u;
      // Repeat, for all aliases.
      for (const unsigned *Alias = TRI->getAliasSet(Reg); *Alias; ++Alias) {
        unsigned AliasReg = *Alias;
        UnionGroups(AliasReg, 0);
        KillIndices[AliasReg] = BB->size();
        DefIndices[AliasReg] = ~0u;
      }
    }
  } else {
    // In a non-return block, examine the live-in regs of all successors.
    for (MachineBasicBlock::succ_iterator SI = BB->succ_begin(),
           SE = BB->succ_end(); SI != SE; ++SI)
      for (MachineBasicBlock::livein_iterator I = (*SI)->livein_begin(),
             E = (*SI)->livein_end(); I != E; ++I) {
        unsigned Reg = *I;
        UnionGroups(Reg, 0);
        KillIndices[Reg] = BB->size();
        DefIndices[Reg] = ~0u;
        // Repeat, for all aliases.
        for (const unsigned *Alias = TRI->getAliasSet(Reg); *Alias; ++Alias) {
          unsigned AliasReg = *Alias;
          UnionGroups(AliasReg, 0);
          KillIndices[AliasReg] = BB->size();
          DefIndices[AliasReg] = ~0u;
        }
      }
  }

  // Mark live-out callee-saved registers. In a return block this is
  // all callee-saved registers. In non-return this is any
  // callee-saved register that is not saved in the prolog.
  const MachineFrameInfo *MFI = MF.getFrameInfo();
  BitVector Pristine = MFI->getPristineRegs(BB);
  for (const unsigned *I = TRI->getCalleeSavedRegs(); *I; ++I) {
    unsigned Reg = *I;
    if (!IsReturnBlock && !Pristine.test(Reg)) continue;
    UnionGroups(Reg, 0);
    KillIndices[Reg] = BB->size();
    DefIndices[Reg] = ~0u;
    // Repeat, for all aliases.
    for (const unsigned *Alias = TRI->getAliasSet(Reg); *Alias; ++Alias) {
      unsigned AliasReg = *Alias;
      UnionGroups(AliasReg, 0);
      KillIndices[AliasReg] = BB->size();
      DefIndices[AliasReg] = ~0u;
    }
  }
}
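
// Illustrative note (not from the original source): treating pristine
// callee-saved registers as live keeps them from being picked as rename
// registers. For example, on a hypothetical target where R7 is callee-saved
// and the current function never spills it in the prologue, R7 must still
// hold the caller's value on every path, so it is grouped with group 0 and
// marked live for the whole block even though no instruction here mentions it.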

void AggressiveAntiDepBreaker::FinishBlock() {
  RegRefs.clear();
}

void AggressiveAntiDepBreaker::Observe(MachineInstr *MI, unsigned Count,
                                       unsigned InsertPosIndex) {
  assert(Count < InsertPosIndex && "Instruction index out of expected range!");

  DEBUG(errs() << "Observe: ");
  DEBUG(MI->dump());

  for (unsigned Reg = 0; Reg != TargetRegisterInfo::FirstVirtualRegister;
       ++Reg) {
    // If Reg is currently live, then mark that it can't be renamed as
    // we don't know the extent of its live-range anymore (now that it
    // has been scheduled). If it is not live but was defined in the
    // previous schedule region, then set its def index to the most
    // conservative location (i.e. the beginning of the previous
    // schedule region).
    if (IsLive(Reg)) {
      DEBUG(if (GetGroup(Reg) != 0)
              errs() << " " << TRI->getName(Reg) << "=g" <<
                GetGroup(Reg) << "->g0(region live-out)");
      UnionGroups(Reg, 0);
    } else if ((DefIndices[Reg] < InsertPosIndex) &&
               (DefIndices[Reg] >= Count)) {
      DefIndices[Reg] = Count;
    }
  }

  std::set<unsigned> PassthruRegs;
  GetPassthruRegs(MI, PassthruRegs);
  PrescanInstruction(MI, Count, PassthruRegs);
  ScanInstruction(MI, Count);
}

unsigned AggressiveAntiDepBreaker::GetGroup(unsigned Reg)
{
  unsigned Node = GroupNodeIndices[Reg];
  while (GroupNodes[Node] != Node)
    Node = GroupNodes[Node];

  return Node;
}
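
// Illustrative note (not from the original source): GroupNodeIndices and
// GroupNodes form a simple union-find forest. Suppose (hypothetically)
// GroupNodeIndices[5] == 7, GroupNodes[7] == 3 and GroupNodes[3] == 3; then
// GetGroup(5) follows 7 -> 3 and returns 3, the representative of the group
// that register 5 currently belongs to. No path compression is performed.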

void AggressiveAntiDepBreaker::GetGroupRegs(unsigned Group,
                                            std::vector<unsigned> &Regs)
{
  for (unsigned Reg = 0; Reg != TargetRegisterInfo::FirstVirtualRegister;
       ++Reg) {
    if (GetGroup(Reg) == Group)
      Regs.push_back(Reg);
  }
}

unsigned AggressiveAntiDepBreaker::UnionGroups(unsigned Reg1, unsigned Reg2)
{
  assert(GroupNodes[0] == 0 && "GroupNode 0 not parent!");
  assert(GroupNodeIndices[0] == 0 && "Reg 0 not in Group 0!");

  // find group for each register
  unsigned Group1 = GetGroup(Reg1);
  unsigned Group2 = GetGroup(Reg2);

  // if either group is 0, then that must become the parent
  unsigned Parent = (Group1 == 0) ? Group1 : Group2;
  unsigned Other = (Parent == Group1) ? Group2 : Group1;
  GroupNodes.at(Other) = Parent;
  return Parent;
}
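
// Illustrative note (not from the original source): UnionGroups(Reg1, Reg2)
// merges the two groups by pointing the representative of one at the other.
// Group 0 is special ("cannot be renamed"), so if either register is already
// in group 0 the merged group is group 0. E.g. UnionGroups(Reg, 0) is the
// idiom used throughout this file to mark Reg as unrenamable.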

unsigned AggressiveAntiDepBreaker::LeaveGroup(unsigned Reg)
{
  // Create a new GroupNode for Reg. Reg's existing GroupNode must
  // stay as is because there could be other GroupNodes referring to
  // it.
  unsigned idx = GroupNodes.size();
  GroupNodes.push_back(idx);
  GroupNodeIndices[Reg] = idx;
  return idx;
}

bool AggressiveAntiDepBreaker::IsLive(unsigned Reg)
{
  // KillIndex must be defined and DefIndex not defined for a register
  // to be live.
  return((KillIndices[Reg] != ~0u) && (DefIndices[Reg] == ~0u));
}
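
// Illustrative note (not from the original source): because instructions are
// visited bottom-up, "live" here means live at the instruction currently
// being scanned. A register becomes live when a use is seen below this point
// (KillIndices[Reg] is set and DefIndices[Reg] is reset to ~0u) and becomes
// dead again once its def is scanned (DefIndices[Reg] is set to that index).
// Registers never referenced below the current point keep their StartBlock
// values (KillIndices ~0u, DefIndices BB->size()) and so are not live.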

bool AggressiveAntiDepBreaker::IsImplicitDefUse(MachineInstr *MI,
                                                MachineOperand& MO)
{
  if (!MO.isReg() || !MO.isImplicit())
    return false;

  unsigned Reg = MO.getReg();
  if (Reg == 0)
    return false;

  MachineOperand *Op = NULL;
  if (MO.isDef())
    Op = MI->findRegisterUseOperand(Reg, true);
  else
    Op = MI->findRegisterDefOperand(Reg);

  return((Op != NULL) && Op->isImplicit());
}

void AggressiveAntiDepBreaker::GetPassthruRegs(MachineInstr *MI,
                                               std::set<unsigned>& PassthruRegs) {
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    if ((MO.isDef() && MI->isRegTiedToUseOperand(i)) ||
        IsImplicitDefUse(MI, MO)) {
      const unsigned Reg = MO.getReg();
      PassthruRegs.insert(Reg);
      for (const unsigned *Subreg = TRI->getSubRegisters(Reg);
           *Subreg; ++Subreg) {
        PassthruRegs.insert(*Subreg);
      }
    }
  }
}
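
// Illustrative note (not from the original source): a register is "passthru"
// when its value flows through the instruction, so the def and the use must
// keep the same name. The common case is a two-address (tied) operand, e.g. a
// hypothetical "R1 = add R1, R2" where the defined R1 is tied to the used R1;
// renaming only the def would break the instruction, so R1 (and its
// subregisters) are excluded from liveness-based renaming here.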

/// AntiDepPathStep - Collect in Edges the anti-dependence edges from
/// SU to its predecessors, keeping at most one edge per register.
static void AntiDepPathStep(SUnit *SU, std::vector<SDep*>& Edges) {
  SmallSet<unsigned, 8> Dups;
  for (SUnit::pred_iterator P = SU->Preds.begin(), PE = SU->Preds.end();
       P != PE; ++P) {
    if (P->getKind() == SDep::Anti) {
      unsigned Reg = P->getReg();
      if (Dups.count(Reg) == 0) {
        Edges.push_back(&*P);
        Dups.insert(Reg);
      }
    }
  }
}

void AggressiveAntiDepBreaker::PrescanInstruction(MachineInstr *MI,
                                                  unsigned Count,
                                                  std::set<unsigned>& PassthruRegs) {
  // Scan the register defs for this instruction and update
  // live-ranges, groups and RegRefs.
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;
    // Ignore passthru registers for liveness...
    if (PassthruRegs.count(Reg) != 0) continue;

    // Update Def for Reg and subregs.
    DefIndices[Reg] = Count;
    for (const unsigned *Subreg = TRI->getSubRegisters(Reg);
         *Subreg; ++Subreg) {
      unsigned SubregReg = *Subreg;
      DefIndices[SubregReg] = Count;
    }
  }

  DEBUG(errs() << "\tDef Groups:");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    DEBUG(errs() << " " << TRI->getName(Reg) << "=g" << GetGroup(Reg));

    // If MI's defs have special allocation requirement, don't allow
    // any def registers to be changed. Also assume all registers
    // defined in a call must not be changed (ABI).
    if (MI->getDesc().isCall() || MI->getDesc().hasExtraDefRegAllocReq()) {
      DEBUG(if (GetGroup(Reg) != 0) errs() << "->g0(alloc-req)");
      UnionGroups(Reg, 0);
    }

    // Any aliases that are live at this point are completely or
    // partially defined here, so group those aliases with Reg.
    for (const unsigned *Alias = TRI->getAliasSet(Reg); *Alias; ++Alias) {
      unsigned AliasReg = *Alias;
      if (IsLive(AliasReg)) {
        UnionGroups(Reg, AliasReg);
        DEBUG(errs() << "->g" << GetGroup(Reg) << "(via " <<
              TRI->getName(AliasReg) << ")");
      }
    }

    // Note register reference...
    const TargetRegisterClass *RC = NULL;
    if (i < MI->getDesc().getNumOperands())
      RC = MI->getDesc().OpInfo[i].getRegClass(TRI);
    RegisterReference RR = { &MO, RC };
    RegRefs.insert(std::make_pair(Reg, RR));
  }

  DEBUG(errs() << '\n');
}
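
// Illustrative note (not from the original source): grouping a def with its
// live aliases keeps overlapping registers renamed together. For example, on
// x86 a def of AX while EAX is live joins AX's and EAX's groups, so neither
// can later be renamed independently of the other.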

void AggressiveAntiDepBreaker::ScanInstruction(MachineInstr *MI,
                                               unsigned Count) {
  DEBUG(errs() << "\tUse Groups:");

  // Scan the register uses for this instruction and update
  // live-ranges, groups and RegRefs.
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isUse()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    DEBUG(errs() << " " << TRI->getName(Reg) << "=g" << GetGroup(Reg));

    // If the register wasn't previously live, this use is a kill. Forget
    // the previous live-range information and start a new live-range
    // for the register.
    if (!IsLive(Reg)) {
      KillIndices[Reg] = Count;
      DefIndices[Reg] = ~0u;
      RegRefs.erase(Reg);
      LeaveGroup(Reg);
      DEBUG(errs() << "->g" << GetGroup(Reg) << "(last-use)");
    }
    // Repeat, for subregisters.
    for (const unsigned *Subreg = TRI->getSubRegisters(Reg);
         *Subreg; ++Subreg) {
      unsigned SubregReg = *Subreg;
      if (!IsLive(SubregReg)) {
        KillIndices[SubregReg] = Count;
        DefIndices[SubregReg] = ~0u;
        RegRefs.erase(SubregReg);
        LeaveGroup(SubregReg);
        DEBUG(errs() << " " << TRI->getName(SubregReg) << "->g" <<
              GetGroup(SubregReg) << "(last-use)");
      }
    }

    // If MI's uses have special allocation requirement, don't allow
    // any use registers to be changed. Also assume all registers
    // used in a call must not be changed (ABI).
    if (MI->getDesc().isCall() || MI->getDesc().hasExtraSrcRegAllocReq()) {
      DEBUG(if (GetGroup(Reg) != 0) errs() << "->g0(alloc-req)");
      UnionGroups(Reg, 0);
    }

    // Note register reference...
    const TargetRegisterClass *RC = NULL;
    if (i < MI->getDesc().getNumOperands())
      RC = MI->getDesc().OpInfo[i].getRegClass(TRI);
    RegisterReference RR = { &MO, RC };
    RegRefs.insert(std::make_pair(Reg, RR));
  }

  DEBUG(errs() << '\n');

  // Form a group of all defs and uses of a KILL instruction to ensure
  // that all registers are renamed as a group.
  if (MI->getOpcode() == TargetInstrInfo::KILL) {
    DEBUG(errs() << "\tKill Group:");

    unsigned FirstReg = 0;
    for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (Reg == 0) continue;

      if (FirstReg != 0) {
        DEBUG(errs() << "=" << TRI->getName(Reg));
        UnionGroups(FirstReg, Reg);
      } else {
        DEBUG(errs() << " " << TRI->getName(Reg));
        FirstReg = Reg;
      }
    }

    DEBUG(errs() << "->g" << GetGroup(FirstReg) << '\n');
  }
}

BitVector AggressiveAntiDepBreaker::GetRenameRegisters(unsigned Reg) {
  BitVector BV(TRI->getNumRegs(), false);
  bool first = true;

  // Check all references that need rewriting for Reg. For each, use
  // the corresponding register class to narrow the set of registers
  // that are appropriate for renaming.
  std::pair<std::multimap<unsigned, RegisterReference>::iterator,
            std::multimap<unsigned, RegisterReference>::iterator>
    Range = RegRefs.equal_range(Reg);
  for (std::multimap<unsigned, RegisterReference>::iterator
         Q = Range.first, QE = Range.second; Q != QE; ++Q) {
    const TargetRegisterClass *RC = Q->second.RC;
    if (RC == NULL) continue;

    BitVector RCBV = TRI->getAllocatableSet(MF, RC);
    if (first) {
      BV |= RCBV;
      first = false;
    } else {
      BV &= RCBV;
    }

    DEBUG(errs() << " " << RC->getName());
  }

  return BV;
}
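
// Illustrative note (not from the original source): the candidate set is the
// intersection of the allocatable registers of every register class that
// references Reg. For example, if one reference requires a hypothetical GR32
// class and another the smaller GR32_ABCD class, only registers allocatable
// in both (i.e. GR32_ABCD) remain valid rename targets.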

bool AggressiveAntiDepBreaker::FindSuitableFreeRegisters(
                                unsigned AntiDepGroupIndex,
                                std::map<unsigned, unsigned> &RenameMap) {
  // Collect all registers in the same group as AntiDepReg. These all
  // need to be renamed together if we are to break the
  // anti-dependence.
  std::vector<unsigned> Regs;
  GetGroupRegs(AntiDepGroupIndex, Regs);
  assert(Regs.size() > 0 && "Empty register group!");
  if (Regs.size() == 0)
    return false;

  // Find the "superest" register in the group. At the same time,
  // collect the BitVector of registers that can be used to rename
  // each register.
  DEBUG(errs() << "\tRename Candidates for Group g" << AntiDepGroupIndex
        << ":\n");
  std::map<unsigned, BitVector> RenameRegisterMap;
  unsigned SuperReg = 0;
  for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
    unsigned Reg = Regs[i];
    if ((SuperReg == 0) || TRI->isSuperRegister(SuperReg, Reg))
      SuperReg = Reg;

    // If Reg has any references, then collect possible rename regs
    if (RegRefs.count(Reg) > 0) {
      DEBUG(errs() << "\t\t" << TRI->getName(Reg) << ":");

      BitVector BV = GetRenameRegisters(Reg);
      RenameRegisterMap.insert(std::pair<unsigned, BitVector>(Reg, BV));

      DEBUG(errs() << " ::");
      DEBUG(for (int r = BV.find_first(); r != -1; r = BV.find_next(r))
              errs() << " " << TRI->getName(r));
      DEBUG(errs() << "\n");
    }
  }

  // All group registers should be a subreg of SuperReg.
  for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
    unsigned Reg = Regs[i];
    if (Reg == SuperReg) continue;
    bool IsSub = TRI->isSubRegister(SuperReg, Reg);
    assert(IsSub && "Expecting group subregister");
    if (!IsSub)
      return false;
  }

  // FIXME: for now just handle single register in group case...
  if (Regs.size() > 1)
    return false;

  // Check each possible rename register for SuperReg. If that register
  // is available, and the corresponding registers are available for
  // the other group subregisters, then we can use those registers to
  // rename.
  DEBUG(errs() << "\tFind Register:");
  BitVector SuperBV = RenameRegisterMap[SuperReg];
  for (int r = SuperBV.find_first(); r != -1; r = SuperBV.find_next(r)) {
    const unsigned Reg = (unsigned)r;
    // Don't replace a register with itself.
    if (Reg == SuperReg) continue;

    DEBUG(errs() << " " << TRI->getName(Reg));

    // If Reg is dead and Reg's most recent def is not before
    // SuperReg's kill, it's safe to replace SuperReg with Reg. We
    // must also check all subregisters of Reg.
    if (IsLive(Reg) || (KillIndices[SuperReg] > DefIndices[Reg])) {
      DEBUG(errs() << "(live)");
      continue;
    } else {
      bool found = false;
      for (const unsigned *Subreg = TRI->getSubRegisters(Reg);
           *Subreg; ++Subreg) {
        unsigned SubregReg = *Subreg;
        if (IsLive(SubregReg) ||
            (KillIndices[SuperReg] > DefIndices[SubregReg])) {
          DEBUG(errs() << "(subreg " << TRI->getName(SubregReg) << " live)");
          found = true;
          break;
        }
      }
      if (found)
        continue;
    }

    if (Reg != 0) {
      DEBUG(errs() << '\n');
      RenameMap.insert(std::pair<unsigned, unsigned>(SuperReg, Reg));
      return true;
    }
  }

  DEBUG(errs() << '\n');

  // No registers are free and available!
  return false;
}
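
// Illustrative note (not from the original source): the availability checks
// above reject a candidate that is currently live, or whose most recent def
// below this point would fall inside the live-range that the renamed value
// must occupy (i.e. above SuperReg's kill). The same conditions are checked
// for each subregister of the candidate, since a live or conflicting
// subregister would be clobbered by the rename.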

/// BreakAntiDependencies - Identify anti-dependencies within the
/// ScheduleDAG and break them by renaming registers.
///
unsigned AggressiveAntiDepBreaker::BreakAntiDependencies(
                              std::vector<SUnit>& SUnits,
                              MachineBasicBlock::iterator& Begin,
                              MachineBasicBlock::iterator& End,
                              unsigned InsertPosIndex) {
  // The code below assumes that there is at least one instruction,
  // so just duck out immediately if the block is empty.
  if (SUnits.empty()) return 0;

  // ...need a map from MI to SUnit.
  std::map<MachineInstr *, SUnit *> MISUnitMap;

  DEBUG(errs() << "Breaking all anti-dependencies\n");
  for (unsigned i = 0, e = SUnits.size(); i != e; ++i) {
    SUnit *SU = &SUnits[i];
    MISUnitMap.insert(std::pair<MachineInstr *, SUnit *>(SU->getInstr(), SU));
  }

#ifndef NDEBUG
  {
    DEBUG(errs() << "Available regs:");
    for (unsigned Reg = 0; Reg < TRI->getNumRegs(); ++Reg) {
      if (!IsLive(Reg))
        DEBUG(errs() << " " << TRI->getName(Reg));
    }
    DEBUG(errs() << '\n');
  }
#endif

  // Attempt to break anti-dependence edges. Walk the instructions
  // from the bottom up, tracking information about liveness as we go
  // to help determine which registers are available.
  unsigned Broken = 0;
  unsigned Count = InsertPosIndex - 1;
  for (MachineBasicBlock::iterator I = End, E = Begin;
       I != E; --Count) {
    MachineInstr *MI = --I;

    DEBUG(errs() << "Anti: ");
    DEBUG(MI->dump());

    std::set<unsigned> PassthruRegs;
    GetPassthruRegs(MI, PassthruRegs);

    // Process the defs in MI...
    PrescanInstruction(MI, Count, PassthruRegs);

    std::vector<SDep*> Edges;
    SUnit *PathSU = MISUnitMap[MI];
    if (PathSU)
      AntiDepPathStep(PathSU, Edges);

    // Ignore KILL instructions (they form a group in ScanInstruction
    // but don't cause any anti-dependence breaking themselves)
    if (MI->getOpcode() != TargetInstrInfo::KILL) {
      // Attempt to break each anti-dependency...
      for (unsigned i = 0, e = Edges.size(); i != e; ++i) {
        SDep *Edge = Edges[i];
        SUnit *NextSU = Edge->getSUnit();

        if (Edge->getKind() != SDep::Anti) continue;

        unsigned AntiDepReg = Edge->getReg();
        DEBUG(errs() << "\tAntidep reg: " << TRI->getName(AntiDepReg));
        assert(AntiDepReg != 0 && "Anti-dependence on reg0?");

        if (!AllocatableSet.test(AntiDepReg)) {
          // Don't break anti-dependencies on non-allocatable registers.
          DEBUG(errs() << " (non-allocatable)\n");
          continue;
        } else if (PassthruRegs.count(AntiDepReg) != 0) {
          // If the anti-dep register liveness "passes-thru", then
          // don't try to change it. It will be changed along with
          // the use if required to break an earlier antidep.
          DEBUG(errs() << " (passthru)\n");
          continue;
        } else {
          // No anti-dep breaking for implicit deps
          MachineOperand *AntiDepOp = MI->findRegisterDefOperand(AntiDepReg);
          assert(AntiDepOp != NULL &&
                 "Can't find index for defined register operand");
          if ((AntiDepOp == NULL) || AntiDepOp->isImplicit()) {
            DEBUG(errs() << " (implicit)\n");
            continue;
          }

          // If the SUnit has other dependencies on the SUnit that
          // it anti-depends on, don't bother breaking the
          // anti-dependency since those edges would prevent such
          // units from being scheduled past each other
          // regardless.
          for (SUnit::pred_iterator P = PathSU->Preds.begin(),
                 PE = PathSU->Preds.end(); P != PE; ++P) {
            if ((P->getSUnit() == NextSU) && (P->getKind() != SDep::Anti)) {
              DEBUG(errs() << " (real dependency)\n");
              AntiDepReg = 0;
              break;
            }
          }

          if (AntiDepReg == 0) continue;
        }

        assert(AntiDepReg != 0);
        if (AntiDepReg == 0) continue;

        // Determine AntiDepReg's register group.
        const unsigned GroupIndex = GetGroup(AntiDepReg);
        if (GroupIndex == 0) {
          DEBUG(errs() << " (zero group)\n");
          continue;
        }

        DEBUG(errs() << '\n');

        // Look for a suitable register to use to break the anti-dependence.
        std::map<unsigned, unsigned> RenameMap;
        if (FindSuitableFreeRegisters(GroupIndex, RenameMap)) {
          DEBUG(errs() << "\tBreaking anti-dependence edge on "
                << TRI->getName(AntiDepReg) << ":");

          // Handle each group register...
          for (std::map<unsigned, unsigned>::iterator
                 S = RenameMap.begin(), E = RenameMap.end(); S != E; ++S) {
            unsigned CurrReg = S->first;
            unsigned NewReg = S->second;

            DEBUG(errs() << " " << TRI->getName(CurrReg) << "->" <<
                  TRI->getName(NewReg) << "(" <<
                  RegRefs.count(CurrReg) << " refs)");

            // Update the references to the old register CurrReg to
            // refer to the new register NewReg.
            std::pair<std::multimap<unsigned, RegisterReference>::iterator,
                      std::multimap<unsigned, RegisterReference>::iterator>
              Range = RegRefs.equal_range(CurrReg);
            for (std::multimap<unsigned, RegisterReference>::iterator
                   Q = Range.first, QE = Range.second; Q != QE; ++Q) {
              Q->second.Operand->setReg(NewReg);
            }

            // We just went back in time and modified history; the
            // liveness information for CurrReg is now inconsistent. Set
            // the state as if it were dead.
            UnionGroups(NewReg, 0);
            RegRefs.erase(NewReg);
            DefIndices[NewReg] = DefIndices[CurrReg];
            KillIndices[NewReg] = KillIndices[CurrReg];

            UnionGroups(CurrReg, 0);
            RegRefs.erase(CurrReg);
            DefIndices[CurrReg] = KillIndices[CurrReg];
            KillIndices[CurrReg] = ~0u;
            assert(((KillIndices[CurrReg] == ~0u) !=
                    (DefIndices[CurrReg] == ~0u)) &&
                   "Kill and Def maps aren't consistent for AntiDepReg!");
          }

          ++Broken;
          DEBUG(errs() << '\n');
        }
      }
    }

    ScanInstruction(MI, Count);
  }

  return Broken;
}