Dan Gohmanf17a25c2007-07-18 16:29:46 +00001//===-- LiveIntervalAnalysis.cpp - Live Interval Analysis -----------------===//
2//
3// The LLVM Compiler Infrastructure
4//
Chris Lattner081ce942007-12-29 20:36:04 +00005// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
Dan Gohmanf17a25c2007-07-18 16:29:46 +00007//
8//===----------------------------------------------------------------------===//
9//
10// This file implements the LiveInterval analysis pass which is used
11// by the Linear Scan Register allocator. This pass linearizes the
12// basic blocks of the function in DFS order and uses the
13// LiveVariables pass to conservatively compute live intervals for
14// each virtual and physical register.
15//
16//===----------------------------------------------------------------------===//
17
18#define DEBUG_TYPE "liveintervals"
19#include "llvm/CodeGen/LiveIntervalAnalysis.h"
20#include "VirtRegMap.h"
21#include "llvm/Value.h"
Dan Gohmane3427532008-07-25 00:02:30 +000022#include "llvm/Analysis/AliasAnalysis.h"
Dan Gohmanf17a25c2007-07-18 16:29:46 +000023#include "llvm/CodeGen/LiveVariables.h"
24#include "llvm/CodeGen/MachineFrameInfo.h"
25#include "llvm/CodeGen/MachineInstr.h"
Evan Cheng26d17df2007-12-11 02:09:15 +000026#include "llvm/CodeGen/MachineLoopInfo.h"
Chris Lattner1b989192007-12-31 04:13:23 +000027#include "llvm/CodeGen/MachineRegisterInfo.h"
Dan Gohmanf17a25c2007-07-18 16:29:46 +000028#include "llvm/CodeGen/Passes.h"
Dan Gohmane3427532008-07-25 00:02:30 +000029#include "llvm/CodeGen/PseudoSourceValue.h"
Dan Gohman1e57df32008-02-10 18:45:23 +000030#include "llvm/Target/TargetRegisterInfo.h"
Dan Gohmanf17a25c2007-07-18 16:29:46 +000031#include "llvm/Target/TargetInstrInfo.h"
32#include "llvm/Target/TargetMachine.h"
33#include "llvm/Support/CommandLine.h"
34#include "llvm/Support/Debug.h"
Dan Gohmanf17a25c2007-07-18 16:29:46 +000035#include "llvm/ADT/Statistic.h"
36#include "llvm/ADT/STLExtras.h"
37#include <algorithm>
38#include <cmath>
39using namespace llvm;
40
Dan Gohman089efff2008-05-13 00:00:25 +000041// Hidden options to help with debugging.
42static cl::opt<bool> DisableReMat("disable-rematerialization",
43 cl::init(false), cl::Hidden);
Evan Chengcecc8222007-11-17 00:40:40 +000044
Dan Gohman089efff2008-05-13 00:00:25 +000045static cl::opt<bool> SplitAtBB("split-intervals-at-bb",
46 cl::init(true), cl::Hidden);
47static cl::opt<int> SplitLimit("split-limit",
48 cl::init(-1), cl::Hidden);
Evan Chengafc07f82007-08-16 07:24:22 +000049
Dan Gohmanf14c94f2008-07-25 15:08:37 +000050static cl::opt<bool> EnableAggressiveRemat("aggressive-remat", cl::Hidden);
51
Dan Gohmanf17a25c2007-07-18 16:29:46 +000052STATISTIC(numIntervals, "Number of original intervals");
53STATISTIC(numIntervalsAfter, "Number of intervals after coalescing");
Evan Cheng96c61312007-11-29 01:06:25 +000054STATISTIC(numFolds , "Number of loads/stores folded into instructions");
55STATISTIC(numSplits , "Number of intervals split");
Dan Gohmanf17a25c2007-07-18 16:29:46 +000056
57char LiveIntervals::ID = 0;
Dan Gohman089efff2008-05-13 00:00:25 +000058static RegisterPass<LiveIntervals> X("liveintervals", "Live Interval Analysis");
Dan Gohmanf17a25c2007-07-18 16:29:46 +000059
60void LiveIntervals::getAnalysisUsage(AnalysisUsage &AU) const {
Dan Gohmane3427532008-07-25 00:02:30 +000061 AU.addRequired<AliasAnalysis>();
62 AU.addPreserved<AliasAnalysis>();
Dan Gohmanf17a25c2007-07-18 16:29:46 +000063 AU.addPreserved<LiveVariables>();
64 AU.addRequired<LiveVariables>();
Bill Wendling62264362008-01-04 20:54:55 +000065 AU.addPreservedID(MachineLoopInfoID);
66 AU.addPreservedID(MachineDominatorsID);
Owen Andersonee7b97c2008-08-06 20:58:38 +000067 AU.addPreservedID(PHIEliminationID);
68 AU.addRequiredID(PHIEliminationID);
Dan Gohmanf17a25c2007-07-18 16:29:46 +000069 AU.addRequiredID(TwoAddressInstructionPassID);
Dan Gohmanf17a25c2007-07-18 16:29:46 +000070 MachineFunctionPass::getAnalysisUsage(AU);
71}
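// A downstream machine pass would typically pull in this analysis through the
// usual PassManager hooks -- a minimal sketch, with "MyRegAlloc" as a
// hypothetical client pass:
//
//   void MyRegAlloc::getAnalysisUsage(AnalysisUsage &AU) const {
//     AU.addRequired<LiveIntervals>();
//     MachineFunctionPass::getAnalysisUsage(AU);
//   }
//   ...
//   LiveIntervals &LIs = getAnalysis<LiveIntervals>();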
72
73void LiveIntervals::releaseMemory() {
Evan Cheng14f8a502008-06-04 09:18:41 +000074 MBB2IdxMap.clear();
Evan Cheng94262e42007-10-17 02:10:22 +000075 Idx2MBBMap.clear();
Dan Gohmanf17a25c2007-07-18 16:29:46 +000076 mi2iMap_.clear();
77 i2miMap_.clear();
78 r2iMap_.clear();
Evan Cheng27344d42007-09-06 01:07:24 +000079 // Release VNInfo memory regions after all VNInfo objects are dtor'd.
80 VNInfoAllocator.Reset();
Evan Cheng4ce1a522008-07-19 00:37:25 +000081 while (!ClonedMIs.empty()) {
82 MachineInstr *MI = ClonedMIs.back();
83 ClonedMIs.pop_back();
84 mf_->DeleteMachineInstr(MI);
85 }
Dan Gohmanf17a25c2007-07-18 16:29:46 +000086}
87
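/// computeNumbering - Assign indices to every basic block and instruction,
/// leaving an empty slot at the start of each block and after each
/// instruction so that instructions can later be inserted without a full
/// renumbering, then remap the indices stored in existing live ranges and
/// value numbers from the old numbering to the new one.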
Owen Andersonf47fbec2008-05-28 20:54:50 +000088void LiveIntervals::computeNumbering() {
89 Index2MiMap OldI2MI = i2miMap_;
Owen Anderson4065ac92008-07-23 21:37:49 +000090 std::vector<IdxMBBPair> OldI2MBB = Idx2MBBMap;
Owen Andersonf47fbec2008-05-28 20:54:50 +000091
92 Idx2MBBMap.clear();
93 MBB2IdxMap.clear();
94 mi2iMap_.clear();
95 i2miMap_.clear();
96
Owen Andersona0e65132008-07-22 22:46:49 +000097 FunctionSize = 0;
98
Dan Gohmanf17a25c2007-07-18 16:29:46 +000099 // Number MachineInstrs and MachineBasicBlocks.
 100 // Initialize MBB indexes to a sentinel.
Evan Cheng1204d172007-08-13 23:45:17 +0000101 MBB2IdxMap.resize(mf_->getNumBlockIDs(), std::make_pair(~0U,~0U));
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000102
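  // Every instruction owns InstrSlots::NUM consecutive index values (the
  // LOAD, USE, DEF and STORE sub-slots), so idx / InstrSlots::NUM identifies
  // the instruction and idx % InstrSlots::NUM the sub-slot within it.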
103 unsigned MIIndex = 0;
104 for (MachineFunction::iterator MBB = mf_->begin(), E = mf_->end();
105 MBB != E; ++MBB) {
Evan Cheng1204d172007-08-13 23:45:17 +0000106 unsigned StartIdx = MIIndex;
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000107
Owen Anderson4065ac92008-07-23 21:37:49 +0000108 // Insert an empty slot at the beginning of each block.
109 MIIndex += InstrSlots::NUM;
110 i2miMap_.push_back(0);
111
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000112 for (MachineBasicBlock::iterator I = MBB->begin(), E = MBB->end();
113 I != E; ++I) {
114 bool inserted = mi2iMap_.insert(std::make_pair(I, MIIndex)).second;
115 assert(inserted && "multiple MachineInstr -> index mappings");
116 i2miMap_.push_back(I);
117 MIIndex += InstrSlots::NUM;
Owen Andersona0e65132008-07-22 22:46:49 +0000118 FunctionSize++;
Owen Anderson4065ac92008-07-23 21:37:49 +0000119
120 // Insert an empty slot after every instruction.
Owen Anderson7894e5b2008-06-16 16:58:24 +0000121 MIIndex += InstrSlots::NUM;
122 i2miMap_.push_back(0);
Owen Andersond2e874b2008-06-16 07:10:49 +0000123 }
Owen Anderson4065ac92008-07-23 21:37:49 +0000124
Owen Anderson7894e5b2008-06-16 16:58:24 +0000125 // Set the MBB2IdxMap entry for this MBB.
126 MBB2IdxMap[MBB->getNumber()] = std::make_pair(StartIdx, MIIndex - 1);
127 Idx2MBBMap.push_back(std::make_pair(StartIdx, MBB));
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000128 }
Evan Cheng94262e42007-10-17 02:10:22 +0000129 std::sort(Idx2MBBMap.begin(), Idx2MBBMap.end(), Idx2MBBCompare());
Owen Andersonf47fbec2008-05-28 20:54:50 +0000130
131 if (!OldI2MI.empty())
Owen Andersond45a11a2008-08-06 18:35:45 +0000132 for (iterator OI = begin(), OE = end(); OI != OE; ++OI) {
Owen Anderson4065ac92008-07-23 21:37:49 +0000133 for (LiveInterval::iterator LI = OI->second.begin(),
134 LE = OI->second.end(); LI != LE; ++LI) {
Owen Anderson5f7420b2008-05-29 18:15:49 +0000135
Owen Andersond7848742008-05-29 23:01:22 +0000136 // Remap the start index of the live range to the corresponding new
137 // number, or our best guess at what it _should_ correspond to if the
138 // original instruction has been erased. This is either the following
139 // instruction or its predecessor.
Owen Anderson4065ac92008-07-23 21:37:49 +0000140 unsigned index = LI->start / InstrSlots::NUM;
Owen Andersond7848742008-05-29 23:01:22 +0000141 unsigned offset = LI->start % InstrSlots::NUM;
Owen Andersona7d223a2008-07-25 23:06:59 +0000142 if (offset == InstrSlots::LOAD) {
Owen Anderson4065ac92008-07-23 21:37:49 +0000143 std::vector<IdxMBBPair>::const_iterator I =
Owen Andersoncadb9b12008-07-25 19:50:48 +0000144 std::lower_bound(OldI2MBB.begin(), OldI2MBB.end(), LI->start);
Owen Anderson4065ac92008-07-23 21:37:49 +0000145 // Take the pair containing the index
146 std::vector<IdxMBBPair>::const_iterator J =
Owen Anderson3fe23ed2008-07-29 21:15:44 +0000147 (I == OldI2MBB.end() && OldI2MBB.size()>0) ? (I-1): I;
Owen Andersond7848742008-05-29 23:01:22 +0000148
Owen Anderson4065ac92008-07-23 21:37:49 +0000149 LI->start = getMBBStartIdx(J->second);
150 } else {
151 LI->start = mi2iMap_[OldI2MI[index]] + offset;
Owen Andersond7848742008-05-29 23:01:22 +0000152 }
153
154 // Remap the ending index in the same way that we remapped the start,
155 // except for the final step where we always map to the immediately
156 // following instruction.
Owen Andersoncadb9b12008-07-25 19:50:48 +0000157 index = (LI->end - 1) / InstrSlots::NUM;
Owen Anderson4065ac92008-07-23 21:37:49 +0000158 offset = LI->end % InstrSlots::NUM;
Owen Andersonedbeff22008-07-30 00:22:56 +0000159 if (offset == InstrSlots::LOAD) {
160 // VReg dies at end of block.
Owen Anderson4065ac92008-07-23 21:37:49 +0000161 std::vector<IdxMBBPair>::const_iterator I =
Owen Andersoncadb9b12008-07-25 19:50:48 +0000162 std::lower_bound(OldI2MBB.begin(), OldI2MBB.end(), LI->end);
Owen Andersonedbeff22008-07-30 00:22:56 +0000163 --I;
Owen Anderson4065ac92008-07-23 21:37:49 +0000164
Owen Andersonedbeff22008-07-30 00:22:56 +0000165 LI->end = getMBBEndIdx(I->second) + 1;
Owen Anderson5f7420b2008-05-29 18:15:49 +0000166 } else {
Owen Andersoncadb9b12008-07-25 19:50:48 +0000167 unsigned idx = index;
Owen Anderson30d056d2008-07-25 21:07:13 +0000168 while (index < OldI2MI.size() && !OldI2MI[index]) ++index;
169
170 if (index != OldI2MI.size())
171 LI->end = mi2iMap_[OldI2MI[index]] + (idx == index ? offset : 0);
172 else
173 LI->end = InstrSlots::NUM * i2miMap_.size();
Owen Anderson5f7420b2008-05-29 18:15:49 +0000174 }
Owen Andersond45a11a2008-08-06 18:35:45 +0000175 }
176
177 for (LiveInterval::vni_iterator VNI = OI->second.vni_begin(),
178 VNE = OI->second.vni_end(); VNI != VNE; ++VNI) {
179 VNInfo* vni = *VNI;
Owen Andersonfdb3f702008-05-28 22:40:08 +0000180
Owen Andersond7848742008-05-29 23:01:22 +0000181 // Remap the VNInfo def index, which works the same as the
Owen Andersond45a11a2008-08-06 18:35:45 +0000182 // start indices above. VNs with special sentinel defs
183 // don't need to be remapped.
Owen Andersonad457832008-07-30 17:42:47 +0000184 if (vni->def != ~0U && vni->def != ~1U) {
Owen Andersond45a11a2008-08-06 18:35:45 +0000185 unsigned index = vni->def / InstrSlots::NUM;
186 unsigned offset = vni->def % InstrSlots::NUM;
Owen Andersonad457832008-07-30 17:42:47 +0000187 if (offset == InstrSlots::LOAD) {
188 std::vector<IdxMBBPair>::const_iterator I =
Owen Andersona7d223a2008-07-25 23:06:59 +0000189 std::lower_bound(OldI2MBB.begin(), OldI2MBB.end(), vni->def);
Owen Andersonad457832008-07-30 17:42:47 +0000190 // Take the pair containing the index
191 std::vector<IdxMBBPair>::const_iterator J =
Owen Anderson3fe23ed2008-07-29 21:15:44 +0000192 (I == OldI2MBB.end() && OldI2MBB.size()>0) ? (I-1): I;
Owen Andersond7848742008-05-29 23:01:22 +0000193
Owen Andersonad457832008-07-30 17:42:47 +0000194 vni->def = getMBBStartIdx(J->second);
195 } else {
196 vni->def = mi2iMap_[OldI2MI[index]] + offset;
197 }
Owen Andersond7848742008-05-29 23:01:22 +0000198 }
Owen Andersonfdb3f702008-05-28 22:40:08 +0000199
Owen Andersond7848742008-05-29 23:01:22 +0000200 // Remap the VNInfo kill indices, which works the same as
201 // the end indices above.
Owen Anderson5f7420b2008-05-29 18:15:49 +0000202 for (size_t i = 0; i < vni->kills.size(); ++i) {
Owen Andersonedbeff22008-07-30 00:22:56 +0000203 // PHI kills don't need to be remapped.
204 if (!vni->kills[i]) continue;
205
Owen Andersond45a11a2008-08-06 18:35:45 +0000206 unsigned index = (vni->kills[i]-1) / InstrSlots::NUM;
207 unsigned offset = vni->kills[i] % InstrSlots::NUM;
208 if (offset == InstrSlots::STORE) {
Owen Anderson4065ac92008-07-23 21:37:49 +0000209 std::vector<IdxMBBPair>::const_iterator I =
Owen Andersoncadb9b12008-07-25 19:50:48 +0000210 std::lower_bound(OldI2MBB.begin(), OldI2MBB.end(), vni->kills[i]);
Owen Andersonedbeff22008-07-30 00:22:56 +0000211 --I;
Owen Anderson4065ac92008-07-23 21:37:49 +0000212
Owen Andersond45a11a2008-08-06 18:35:45 +0000213 vni->kills[i] = getMBBEndIdx(I->second);
Owen Anderson4065ac92008-07-23 21:37:49 +0000214 } else {
Owen Andersoncadb9b12008-07-25 19:50:48 +0000215 unsigned idx = index;
Owen Anderson30d056d2008-07-25 21:07:13 +0000216 while (index < OldI2MI.size() && !OldI2MI[index]) ++index;
217
218 if (index != OldI2MI.size())
219 vni->kills[i] = mi2iMap_[OldI2MI[index]] +
220 (idx == index ? offset : 0);
221 else
222 vni->kills[i] = InstrSlots::NUM * i2miMap_.size();
Owen Andersond7848742008-05-29 23:01:22 +0000223 }
Owen Anderson5f7420b2008-05-29 18:15:49 +0000224 }
Owen Andersonf47fbec2008-05-28 20:54:50 +0000225 }
Owen Andersond45a11a2008-08-06 18:35:45 +0000226 }
Owen Andersonf47fbec2008-05-28 20:54:50 +0000227}
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000228
Owen Andersonf47fbec2008-05-28 20:54:50 +0000229/// runOnMachineFunction - Compute live intervals for the whole function
230///
231bool LiveIntervals::runOnMachineFunction(MachineFunction &fn) {
232 mf_ = &fn;
233 mri_ = &mf_->getRegInfo();
234 tm_ = &fn.getTarget();
235 tri_ = tm_->getRegisterInfo();
236 tii_ = tm_->getInstrInfo();
Dan Gohmane3427532008-07-25 00:02:30 +0000237 aa_ = &getAnalysis<AliasAnalysis>();
Owen Andersonf47fbec2008-05-28 20:54:50 +0000238 lv_ = &getAnalysis<LiveVariables>();
239 allocatableRegs_ = tri_->getAllocatableSet(fn);
240
241 computeNumbering();
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000242 computeIntervals();
243
244 numIntervals += getNumIntervals();
245
246 DOUT << "********** INTERVALS **********\n";
247 for (iterator I = begin(), E = end(); I != E; ++I) {
Dan Gohman1e57df32008-02-10 18:45:23 +0000248 I->second.print(DOUT, tri_);
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000249 DOUT << "\n";
250 }
251
252 numIntervalsAfter += getNumIntervals();
253 DEBUG(dump());
254 return true;
255}
256
257/// print - Implement the dump method.
258void LiveIntervals::print(std::ostream &O, const Module* ) const {
259 O << "********** INTERVALS **********\n";
260 for (const_iterator I = begin(), E = end(); I != E; ++I) {
Evan Cheng14f8a502008-06-04 09:18:41 +0000261 I->second.print(O, tri_);
262 O << "\n";
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000263 }
264
265 O << "********** MACHINEINSTRS **********\n";
266 for (MachineFunction::iterator mbbi = mf_->begin(), mbbe = mf_->end();
267 mbbi != mbbe; ++mbbi) {
268 O << ((Value*)mbbi->getBasicBlock())->getName() << ":\n";
269 for (MachineBasicBlock::iterator mii = mbbi->begin(),
270 mie = mbbi->end(); mii != mie; ++mii) {
271 O << getInstructionIndex(mii) << '\t' << *mii;
272 }
273 }
274}
275
Evan Chengc4c75f52007-11-03 07:20:12 +0000276/// conflictsWithPhysRegDef - Returns true if the specified register
277/// is defined during the duration of the specified interval.
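/// Virtual register operands are looked up through the VirtRegMap, so vregs
/// that have already been assigned a physical register are also checked
/// against 'reg'.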
278bool LiveIntervals::conflictsWithPhysRegDef(const LiveInterval &li,
279 VirtRegMap &vrm, unsigned reg) {
280 for (LiveInterval::Ranges::const_iterator
281 I = li.ranges.begin(), E = li.ranges.end(); I != E; ++I) {
282 for (unsigned index = getBaseIndex(I->start),
283 end = getBaseIndex(I->end-1) + InstrSlots::NUM; index != end;
284 index += InstrSlots::NUM) {
285 // skip deleted instructions
286 while (index != end && !getInstructionFromIndex(index))
287 index += InstrSlots::NUM;
288 if (index == end) break;
289
290 MachineInstr *MI = getInstructionFromIndex(index);
Evan Cheng58edd782007-11-15 08:13:29 +0000291 unsigned SrcReg, DstReg;
292 if (tii_->isMoveInstr(*MI, SrcReg, DstReg))
293 if (SrcReg == li.reg || DstReg == li.reg)
294 continue;
Evan Chengc4c75f52007-11-03 07:20:12 +0000295 for (unsigned i = 0; i != MI->getNumOperands(); ++i) {
296 MachineOperand& mop = MI->getOperand(i);
Evan Cheng58edd782007-11-15 08:13:29 +0000297 if (!mop.isRegister())
Evan Chengc4c75f52007-11-03 07:20:12 +0000298 continue;
299 unsigned PhysReg = mop.getReg();
Evan Cheng58edd782007-11-15 08:13:29 +0000300 if (PhysReg == 0 || PhysReg == li.reg)
Evan Chengc4c75f52007-11-03 07:20:12 +0000301 continue;
Dan Gohman1e57df32008-02-10 18:45:23 +0000302 if (TargetRegisterInfo::isVirtualRegister(PhysReg)) {
Evan Cheng58edd782007-11-15 08:13:29 +0000303 if (!vrm.hasPhys(PhysReg))
304 continue;
Evan Chengc4c75f52007-11-03 07:20:12 +0000305 PhysReg = vrm.getPhys(PhysReg);
Evan Cheng58edd782007-11-15 08:13:29 +0000306 }
Dan Gohman1e57df32008-02-10 18:45:23 +0000307 if (PhysReg && tri_->regsOverlap(PhysReg, reg))
Evan Chengc4c75f52007-11-03 07:20:12 +0000308 return true;
309 }
310 }
311 }
312
313 return false;
314}
315
Evan Cheng1204d172007-08-13 23:45:17 +0000316void LiveIntervals::printRegName(unsigned reg) const {
Dan Gohman1e57df32008-02-10 18:45:23 +0000317 if (TargetRegisterInfo::isPhysicalRegister(reg))
Bill Wendling9b0baeb2008-02-26 21:47:57 +0000318 cerr << tri_->getName(reg);
Evan Cheng1204d172007-08-13 23:45:17 +0000319 else
320 cerr << "%reg" << reg;
321}
322
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000323void LiveIntervals::handleVirtualRegisterDef(MachineBasicBlock *mbb,
324 MachineBasicBlock::iterator mi,
Owen Anderson5b691fc2008-06-25 23:39:39 +0000325 unsigned MIIdx, MachineOperand& MO,
Evan Chengf1107fd2008-07-10 07:35:43 +0000326 unsigned MOIdx,
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000327 LiveInterval &interval) {
328 DOUT << "\t\tregister: "; DEBUG(printRegName(interval.reg));
329 LiveVariables::VarInfo& vi = lv_->getVarInfo(interval.reg);
330
Evan Cheng70f68e92008-04-03 16:39:43 +0000331 if (mi->getOpcode() == TargetInstrInfo::IMPLICIT_DEF) {
 332 DOUT << "is an implicit_def\n";
333 return;
334 }
335
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000336 // Virtual registers may be defined multiple times (due to phi
337 // elimination and 2-addr elimination). Much of what we do only has to be
338 // done once for the vreg. We use an empty interval to detect the first
339 // time we see a vreg.
340 if (interval.empty()) {
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000341 // Get the Idx of the defining instructions.
342 unsigned defIndex = getDefIndex(MIIdx);
Evan Cheng983b81d2007-08-29 20:45:00 +0000343 VNInfo *ValNo;
Evan Cheng7d2b9082008-02-15 18:24:29 +0000344 MachineInstr *CopyMI = NULL;
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000345 unsigned SrcReg, DstReg;
Evan Cheng7d2b9082008-02-15 18:24:29 +0000346 if (mi->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG ||
Evan Cheng909ab8b42008-04-09 20:57:25 +0000347 mi->getOpcode() == TargetInstrInfo::INSERT_SUBREG ||
Evan Cheng7d2b9082008-02-15 18:24:29 +0000348 tii_->isMoveInstr(*mi, SrcReg, DstReg))
349 CopyMI = mi;
350 ValNo = interval.getNextValue(defIndex, CopyMI, VNInfoAllocator);
Evan Cheng983b81d2007-08-29 20:45:00 +0000351
352 assert(ValNo->id == 0 && "First value in interval is not 0?");
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000353
354 // Loop over all of the blocks that the vreg is defined in. There are
355 // two cases we have to handle here. The most common case is a vreg
356 // whose lifetime is contained within a basic block. In this case there
357 // will be a single kill, in MBB, which comes after the definition.
358 if (vi.Kills.size() == 1 && vi.Kills[0]->getParent() == mbb) {
359 // FIXME: what about dead vars?
360 unsigned killIdx;
361 if (vi.Kills[0] != mi)
362 killIdx = getUseIndex(getInstructionIndex(vi.Kills[0]))+1;
363 else
364 killIdx = defIndex+1;
365
366 // If the kill happens after the definition, we have an intra-block
367 // live range.
368 if (killIdx > defIndex) {
369 assert(vi.AliveBlocks.none() &&
370 "Shouldn't be alive across any blocks!");
Evan Cheng983b81d2007-08-29 20:45:00 +0000371 LiveRange LR(defIndex, killIdx, ValNo);
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000372 interval.addRange(LR);
373 DOUT << " +" << LR << "\n";
Evan Cheng319802c2007-09-05 21:46:51 +0000374 interval.addKill(ValNo, killIdx);
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000375 return;
376 }
377 }
378
379 // The other case we handle is when a virtual register lives to the end
380 // of the defining block, potentially live across some blocks, then is
381 // live into some number of blocks, but gets killed. Start by adding a
382 // range that goes from this definition to the end of the defining block.
Owen Anderson4065ac92008-07-23 21:37:49 +0000383 LiveRange NewLR(defIndex, getMBBEndIdx(mbb)+1, ValNo);
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000384 DOUT << " +" << NewLR;
385 interval.addRange(NewLR);
386
387 // Iterate over all of the blocks that the variable is completely
 388 // live in, adding [instrIndex(begin), instrIndex(end)+4) to the
389 // live interval.
390 for (unsigned i = 0, e = vi.AliveBlocks.size(); i != e; ++i) {
391 if (vi.AliveBlocks[i]) {
Owen Anderson4bebfea2008-06-16 19:32:40 +0000392 LiveRange LR(getMBBStartIdx(i),
Evan Cheng5ac16ae2008-06-17 20:13:36 +0000393 getMBBEndIdx(i)+1, // MBB ends at -1.
Owen Anderson4bebfea2008-06-16 19:32:40 +0000394 ValNo);
395 interval.addRange(LR);
396 DOUT << " +" << LR;
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000397 }
398 }
399
400 // Finally, this virtual register is live from the start of any killing
401 // block to the 'use' slot of the killing instruction.
402 for (unsigned i = 0, e = vi.Kills.size(); i != e; ++i) {
403 MachineInstr *Kill = vi.Kills[i];
Evan Cheng58c2b762007-08-08 03:00:28 +0000404 unsigned killIdx = getUseIndex(getInstructionIndex(Kill))+1;
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000405 LiveRange LR(getMBBStartIdx(Kill->getParent()),
Evan Cheng983b81d2007-08-29 20:45:00 +0000406 killIdx, ValNo);
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000407 interval.addRange(LR);
Evan Cheng319802c2007-09-05 21:46:51 +0000408 interval.addKill(ValNo, killIdx);
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000409 DOUT << " +" << LR;
410 }
411
412 } else {
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000413 // If this is the second time we see a virtual register definition, it
414 // must be due to phi elimination or two addr elimination. If this is
415 // the result of two address elimination, then the vreg is one of the
 416 // def-and-use register operands.
Evan Chengf1107fd2008-07-10 07:35:43 +0000417 if (mi->isRegReDefinedByTwoAddr(interval.reg, MOIdx)) {
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000418 // If this is a two-address definition, then we have already processed
419 // the live range. The only problem is that we didn't realize there
420 // are actually two values in the live interval. Because of this we
421 // need to take the LiveRegion that defines this register and split it
422 // into two values.
Evan Cheng06701fc2008-01-10 08:22:10 +0000423 assert(interval.containsOneValue());
424 unsigned DefIndex = getDefIndex(interval.getValNumInfo(0)->def);
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000425 unsigned RedefIndex = getDefIndex(MIIdx);
426
Evan Cheng816a7f32007-08-11 00:59:19 +0000427 const LiveRange *OldLR = interval.getLiveRangeContaining(RedefIndex-1);
Evan Cheng983b81d2007-08-29 20:45:00 +0000428 VNInfo *OldValNo = OldLR->valno;
Evan Cheng816a7f32007-08-11 00:59:19 +0000429
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000430 // Delete the initial value, which should be short and continuous,
431 // because the 2-addr copy must be in the same MBB as the redef.
432 interval.removeRange(DefIndex, RedefIndex);
433
434 // Two-address vregs should always only be redefined once. This means
435 // that at this point, there should be exactly one value number in it.
436 assert(interval.containsOneValue() && "Unexpected 2-addr liveint!");
437
438 // The new value number (#1) is defined by the instruction we claimed
439 // defined value #0.
Evan Cheng7d2b9082008-02-15 18:24:29 +0000440 VNInfo *ValNo = interval.getNextValue(OldValNo->def, OldValNo->copy,
441 VNInfoAllocator);
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000442
443 // Value#0 is now defined by the 2-addr instruction.
Evan Cheng7d2b9082008-02-15 18:24:29 +0000444 OldValNo->def = RedefIndex;
445 OldValNo->copy = 0;
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000446
447 // Add the new live interval which replaces the range for the input copy.
448 LiveRange LR(DefIndex, RedefIndex, ValNo);
449 DOUT << " replace range with " << LR;
450 interval.addRange(LR);
Evan Cheng319802c2007-09-05 21:46:51 +0000451 interval.addKill(ValNo, RedefIndex);
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000452
453 // If this redefinition is dead, we need to add a dummy unit live
454 // range covering the def slot.
Owen Anderson5b691fc2008-06-25 23:39:39 +0000455 if (MO.isDead())
Evan Cheng983b81d2007-08-29 20:45:00 +0000456 interval.addRange(LiveRange(RedefIndex, RedefIndex+1, OldValNo));
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000457
458 DOUT << " RESULT: ";
Dan Gohman1e57df32008-02-10 18:45:23 +0000459 interval.print(DOUT, tri_);
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000460
461 } else {
462 // Otherwise, this must be because of phi elimination. If this is the
463 // first redefinition of the vreg that we have seen, go back and change
464 // the live range in the PHI block to be a different value number.
465 if (interval.containsOneValue()) {
466 assert(vi.Kills.size() == 1 &&
467 "PHI elimination vreg should have one kill, the PHI itself!");
468
469 // Remove the old range that we now know has an incorrect number.
Evan Cheng319802c2007-09-05 21:46:51 +0000470 VNInfo *VNI = interval.getValNumInfo(0);
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000471 MachineInstr *Killer = vi.Kills[0];
472 unsigned Start = getMBBStartIdx(Killer->getParent());
473 unsigned End = getUseIndex(getInstructionIndex(Killer))+1;
474 DOUT << " Removing [" << Start << "," << End << "] from: ";
Dan Gohman1e57df32008-02-10 18:45:23 +0000475 interval.print(DOUT, tri_); DOUT << "\n";
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000476 interval.removeRange(Start, End);
Evan Cheng8b70e632007-11-29 09:49:23 +0000477 VNI->hasPHIKill = true;
Dan Gohman1e57df32008-02-10 18:45:23 +0000478 DOUT << " RESULT: "; interval.print(DOUT, tri_);
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000479
480 // Replace the interval with one of a NEW value number. Note that this
481 // value number isn't actually defined by an instruction, weird huh? :)
Evan Cheng319802c2007-09-05 21:46:51 +0000482 LiveRange LR(Start, End, interval.getNextValue(~0, 0, VNInfoAllocator));
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000483 DOUT << " replace range with " << LR;
484 interval.addRange(LR);
Evan Cheng319802c2007-09-05 21:46:51 +0000485 interval.addKill(LR.valno, End);
Dan Gohman1e57df32008-02-10 18:45:23 +0000486 DOUT << " RESULT: "; interval.print(DOUT, tri_);
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000487 }
488
489 // In the case of PHI elimination, each variable definition is only
490 // live until the end of the block. We've already taken care of the
491 // rest of the live range.
492 unsigned defIndex = getDefIndex(MIIdx);
493
Evan Cheng983b81d2007-08-29 20:45:00 +0000494 VNInfo *ValNo;
Evan Cheng7d2b9082008-02-15 18:24:29 +0000495 MachineInstr *CopyMI = NULL;
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000496 unsigned SrcReg, DstReg;
Evan Cheng7d2b9082008-02-15 18:24:29 +0000497 if (mi->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG ||
Evan Cheng909ab8b42008-04-09 20:57:25 +0000498 mi->getOpcode() == TargetInstrInfo::INSERT_SUBREG ||
Evan Cheng7d2b9082008-02-15 18:24:29 +0000499 tii_->isMoveInstr(*mi, SrcReg, DstReg))
500 CopyMI = mi;
501 ValNo = interval.getNextValue(defIndex, CopyMI, VNInfoAllocator);
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000502
Owen Anderson4065ac92008-07-23 21:37:49 +0000503 unsigned killIndex = getMBBEndIdx(mbb) + 1;
Evan Cheng983b81d2007-08-29 20:45:00 +0000504 LiveRange LR(defIndex, killIndex, ValNo);
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000505 interval.addRange(LR);
Evan Cheng8b70e632007-11-29 09:49:23 +0000506 interval.addKill(ValNo, killIndex);
507 ValNo->hasPHIKill = true;
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000508 DOUT << " +" << LR;
509 }
510 }
511
512 DOUT << '\n';
513}
514
515void LiveIntervals::handlePhysicalRegisterDef(MachineBasicBlock *MBB,
516 MachineBasicBlock::iterator mi,
517 unsigned MIIdx,
Owen Anderson5b691fc2008-06-25 23:39:39 +0000518 MachineOperand& MO,
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000519 LiveInterval &interval,
Evan Cheng7d2b9082008-02-15 18:24:29 +0000520 MachineInstr *CopyMI) {
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000521 // A physical register cannot be live across basic block, so its
522 // lifetime must end somewhere in its defining basic block.
523 DOUT << "\t\tregister: "; DEBUG(printRegName(interval.reg));
524
525 unsigned baseIndex = MIIdx;
526 unsigned start = getDefIndex(baseIndex);
527 unsigned end = start;
528
529 // If it is not used after definition, it is considered dead at
530 // the instruction defining it. Hence its interval is:
531 // [defSlot(def), defSlot(def)+1)
Owen Anderson5b691fc2008-06-25 23:39:39 +0000532 if (MO.isDead()) {
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000533 DOUT << " dead";
534 end = getDefIndex(start) + 1;
535 goto exit;
536 }
537
538 // If it is not dead on definition, it must be killed by a
539 // subsequent instruction. Hence its interval is:
540 // [defSlot(def), useSlot(kill)+1)
Owen Anderson4065ac92008-07-23 21:37:49 +0000541 baseIndex += InstrSlots::NUM;
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000542 while (++mi != MBB->end()) {
Owen Anderson4065ac92008-07-23 21:37:49 +0000543 while (baseIndex / InstrSlots::NUM < i2miMap_.size() &&
544 getInstructionFromIndex(baseIndex) == 0)
545 baseIndex += InstrSlots::NUM;
Evan Chengc7daf1f2008-03-05 00:59:57 +0000546 if (mi->killsRegister(interval.reg, tri_)) {
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000547 DOUT << " killed";
548 end = getUseIndex(baseIndex) + 1;
549 goto exit;
Evan Chengc7daf1f2008-03-05 00:59:57 +0000550 } else if (mi->modifiesRegister(interval.reg, tri_)) {
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000551 // Another instruction redefines the register before it is ever read.
552 // Then the register is essentially dead at the instruction that defines
553 // it. Hence its interval is:
554 // [defSlot(def), defSlot(def)+1)
555 DOUT << " dead";
556 end = getDefIndex(start) + 1;
557 goto exit;
558 }
Owen Anderson4065ac92008-07-23 21:37:49 +0000559
560 baseIndex += InstrSlots::NUM;
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000561 }
562
 563 // The only case we should have a dead physreg here without a killing
 564 // instruction is if it is live-in to the function
565 // and never used.
Evan Cheng7d2b9082008-02-15 18:24:29 +0000566 assert(!CopyMI && "physreg was not killed in defining block!");
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000567 end = getDefIndex(start) + 1; // It's dead.
568
569exit:
570 assert(start < end && "did not find end of interval?");
571
572 // Already exists? Extend old live interval.
573 LiveInterval::iterator OldLR = interval.FindLiveRangeContaining(start);
Evan Cheng983b81d2007-08-29 20:45:00 +0000574 VNInfo *ValNo = (OldLR != interval.end())
Evan Cheng7d2b9082008-02-15 18:24:29 +0000575 ? OldLR->valno : interval.getNextValue(start, CopyMI, VNInfoAllocator);
Evan Cheng983b81d2007-08-29 20:45:00 +0000576 LiveRange LR(start, end, ValNo);
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000577 interval.addRange(LR);
Evan Cheng319802c2007-09-05 21:46:51 +0000578 interval.addKill(LR.valno, end);
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000579 DOUT << " +" << LR << '\n';
580}
581
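/// handleRegisterDef - Dispatch a register def to the virtual- or
/// physical-register handler; for an allocatable physical register, its
/// sub-registers are processed as well unless MI redefines them explicitly.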
582void LiveIntervals::handleRegisterDef(MachineBasicBlock *MBB,
583 MachineBasicBlock::iterator MI,
584 unsigned MIIdx,
Evan Chengf1107fd2008-07-10 07:35:43 +0000585 MachineOperand& MO,
586 unsigned MOIdx) {
Owen Anderson5b691fc2008-06-25 23:39:39 +0000587 if (TargetRegisterInfo::isVirtualRegister(MO.getReg()))
Evan Chengf1107fd2008-07-10 07:35:43 +0000588 handleVirtualRegisterDef(MBB, MI, MIIdx, MO, MOIdx,
Owen Anderson5b691fc2008-06-25 23:39:39 +0000589 getOrCreateInterval(MO.getReg()));
590 else if (allocatableRegs_[MO.getReg()]) {
Evan Cheng7d2b9082008-02-15 18:24:29 +0000591 MachineInstr *CopyMI = NULL;
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000592 unsigned SrcReg, DstReg;
Evan Cheng7d2b9082008-02-15 18:24:29 +0000593 if (MI->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG ||
Evan Cheng909ab8b42008-04-09 20:57:25 +0000594 MI->getOpcode() == TargetInstrInfo::INSERT_SUBREG ||
Evan Cheng7d2b9082008-02-15 18:24:29 +0000595 tii_->isMoveInstr(*MI, SrcReg, DstReg))
596 CopyMI = MI;
Owen Anderson5b691fc2008-06-25 23:39:39 +0000597 handlePhysicalRegisterDef(MBB, MI, MIIdx, MO,
598 getOrCreateInterval(MO.getReg()), CopyMI);
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000599 // Def of a register also defines its sub-registers.
Owen Anderson5b691fc2008-06-25 23:39:39 +0000600 for (const unsigned* AS = tri_->getSubRegisters(MO.getReg()); *AS; ++AS)
Evan Chengc7daf1f2008-03-05 00:59:57 +0000601 // If MI also modifies the sub-register explicitly, avoid processing it
602 // more than once. Do not pass in TRI here so it checks for exact match.
603 if (!MI->modifiesRegister(*AS))
Owen Anderson5b691fc2008-06-25 23:39:39 +0000604 handlePhysicalRegisterDef(MBB, MI, MIIdx, MO,
605 getOrCreateInterval(*AS), 0);
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000606 }
607}
608
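/// handleLiveInRegister - Create a live range for a register that is live-in
/// to MBB: it extends from the start of the block to its first kill or
/// redefinition, or through the whole block if neither is found.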
609void LiveIntervals::handleLiveInRegister(MachineBasicBlock *MBB,
610 unsigned MIIdx,
611 LiveInterval &interval, bool isAlias) {
612 DOUT << "\t\tlivein register: "; DEBUG(printRegName(interval.reg));
613
 614 // Look for kills; if the register reaches a def before it's killed, then it
 615 // shouldn't be considered a live-in.
616 MachineBasicBlock::iterator mi = MBB->begin();
617 unsigned baseIndex = MIIdx;
618 unsigned start = baseIndex;
619 unsigned end = start;
620 while (mi != MBB->end()) {
Evan Chengc7daf1f2008-03-05 00:59:57 +0000621 if (mi->killsRegister(interval.reg, tri_)) {
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000622 DOUT << " killed";
623 end = getUseIndex(baseIndex) + 1;
624 goto exit;
Evan Chengc7daf1f2008-03-05 00:59:57 +0000625 } else if (mi->modifiesRegister(interval.reg, tri_)) {
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000626 // Another instruction redefines the register before it is ever read.
627 // Then the register is essentially dead at the instruction that defines
628 // it. Hence its interval is:
629 // [defSlot(def), defSlot(def)+1)
630 DOUT << " dead";
631 end = getDefIndex(start) + 1;
632 goto exit;
633 }
634
635 baseIndex += InstrSlots::NUM;
Owen Anderson4065ac92008-07-23 21:37:49 +0000636 while (baseIndex / InstrSlots::NUM < i2miMap_.size() &&
637 getInstructionFromIndex(baseIndex) == 0)
638 baseIndex += InstrSlots::NUM;
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000639 ++mi;
640 }
641
642exit:
643 // Live-in register might not be used at all.
644 if (end == MIIdx) {
645 if (isAlias) {
646 DOUT << " dead";
647 end = getDefIndex(MIIdx) + 1;
648 } else {
649 DOUT << " live through";
650 end = baseIndex;
651 }
652 }
653
Evan Cheng319802c2007-09-05 21:46:51 +0000654 LiveRange LR(start, end, interval.getNextValue(start, 0, VNInfoAllocator));
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000655 interval.addRange(LR);
Evan Cheng319802c2007-09-05 21:46:51 +0000656 interval.addKill(LR.valno, end);
Evan Cheng0f727342007-08-08 07:03:29 +0000657 DOUT << " +" << LR << '\n';
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000658}
659
 660/// computeIntervals - Computes the live intervals for virtual
 661/// registers. For some ordering of the machine instructions [1,N], a
 662/// live interval is an interval [i, j) where 1 <= i <= j < N for
 663/// which a variable is live.
664void LiveIntervals::computeIntervals() {
665 DOUT << "********** COMPUTING LIVE INTERVALS **********\n"
666 << "********** Function: "
667 << ((Value*)mf_->getFunction())->getName() << '\n';
668 // Track the index of the current machine instr.
669 unsigned MIIndex = 0;
Owen Anderson4065ac92008-07-23 21:37:49 +0000670
671 // Skip over empty initial indices.
672 while (MIIndex / InstrSlots::NUM < i2miMap_.size() &&
673 getInstructionFromIndex(MIIndex) == 0)
674 MIIndex += InstrSlots::NUM;
675
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000676 for (MachineFunction::iterator MBBI = mf_->begin(), E = mf_->end();
677 MBBI != E; ++MBBI) {
678 MachineBasicBlock *MBB = MBBI;
679 DOUT << ((Value*)MBB->getBasicBlock())->getName() << ":\n";
680
681 MachineBasicBlock::iterator MI = MBB->begin(), miEnd = MBB->end();
682
Dan Gohman3f7d94b2007-10-03 19:26:29 +0000683 // Create intervals for live-ins to this BB first.
684 for (MachineBasicBlock::const_livein_iterator LI = MBB->livein_begin(),
685 LE = MBB->livein_end(); LI != LE; ++LI) {
686 handleLiveInRegister(MBB, MIIndex, getOrCreateInterval(*LI));
687 // Multiple live-ins can alias the same register.
Dan Gohman1e57df32008-02-10 18:45:23 +0000688 for (const unsigned* AS = tri_->getSubRegisters(*LI); *AS; ++AS)
Dan Gohman3f7d94b2007-10-03 19:26:29 +0000689 if (!hasInterval(*AS))
690 handleLiveInRegister(MBB, MIIndex, getOrCreateInterval(*AS),
691 true);
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000692 }
693
694 for (; MI != miEnd; ++MI) {
695 DOUT << MIIndex << "\t" << *MI;
696
697 // Handle defs.
698 for (int i = MI->getNumOperands() - 1; i >= 0; --i) {
699 MachineOperand &MO = MI->getOperand(i);
700 // handle register defs - build intervals
701 if (MO.isRegister() && MO.getReg() && MO.isDef())
Evan Chengf1107fd2008-07-10 07:35:43 +0000702 handleRegisterDef(MBB, MI, MIIndex, MO, i);
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000703 }
704
705 MIIndex += InstrSlots::NUM;
Owen Anderson4065ac92008-07-23 21:37:49 +0000706
707 // Skip over empty indices.
708 while (MIIndex / InstrSlots::NUM < i2miMap_.size() &&
709 getInstructionFromIndex(MIIndex) == 0)
710 MIIndex += InstrSlots::NUM;
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000711 }
712 }
713}
714
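/// findLiveInMBBs - Collect the basic blocks whose start index falls inside
/// the given live range, i.e. the blocks the range is live into. Returns
/// true if any such block was found.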
Evan Cheng94262e42007-10-17 02:10:22 +0000715bool LiveIntervals::findLiveInMBBs(const LiveRange &LR,
Evan Cheng12d6fcb2007-10-17 06:53:44 +0000716 SmallVectorImpl<MachineBasicBlock*> &MBBs) const {
Evan Cheng94262e42007-10-17 02:10:22 +0000717 std::vector<IdxMBBPair>::const_iterator I =
718 std::lower_bound(Idx2MBBMap.begin(), Idx2MBBMap.end(), LR.start);
719
720 bool ResVal = false;
721 while (I != Idx2MBBMap.end()) {
722 if (LR.end <= I->first)
723 break;
724 MBBs.push_back(I->second);
725 ResVal = true;
726 ++I;
727 }
728 return ResVal;
729}
730
731
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000732LiveInterval LiveIntervals::createInterval(unsigned reg) {
Dan Gohman1e57df32008-02-10 18:45:23 +0000733 float Weight = TargetRegisterInfo::isPhysicalRegister(reg) ?
Dan Gohmanf17a25c2007-07-18 16:29:46 +0000734 HUGE_VALF : 0.0F;
735 return LiveInterval(reg, Weight);
736}
Evan Cheng9b741602007-11-12 06:35:08 +0000737
Evan Cheng7d2b9082008-02-15 18:24:29 +0000738/// getVNInfoSourceReg - Helper function that parses the specified VNInfo
739/// copy field and returns the source register that defines it.
740unsigned LiveIntervals::getVNInfoSourceReg(const VNInfo *VNI) const {
741 if (!VNI->copy)
742 return 0;
743
744 if (VNI->copy->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG)
745 return VNI->copy->getOperand(1).getReg();
Evan Cheng909ab8b42008-04-09 20:57:25 +0000746 if (VNI->copy->getOpcode() == TargetInstrInfo::INSERT_SUBREG)
747 return VNI->copy->getOperand(2).getReg();
Evan Cheng7d2b9082008-02-15 18:24:29 +0000748 unsigned SrcReg, DstReg;
749 if (tii_->isMoveInstr(*VNI->copy, SrcReg, DstReg))
750 return SrcReg;
751 assert(0 && "Unrecognized copy instruction!");
752 return 0;
753}
Evan Cheng9b741602007-11-12 06:35:08 +0000754
755//===----------------------------------------------------------------------===//
756// Register allocator hooks.
757//
758
Evan Chenga37ecfe2008-02-22 09:24:50 +0000759/// getReMatImplicitUse - If the remat definition MI has one (for now, we only
760/// allow one) virtual register operand, then its uses are implicitly using
761/// the register. Returns the virtual register.
762unsigned LiveIntervals::getReMatImplicitUse(const LiveInterval &li,
763 MachineInstr *MI) const {
764 unsigned RegOp = 0;
765 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
766 MachineOperand &MO = MI->getOperand(i);
767 if (!MO.isRegister() || !MO.isUse())
768 continue;
769 unsigned Reg = MO.getReg();
770 if (Reg == 0 || Reg == li.reg)
771 continue;
772 // FIXME: For now, only remat MI with at most one register operand.
773 assert(!RegOp &&
774 "Can't rematerialize instruction with multiple register operand!");
775 RegOp = MO.getReg();
Dan Gohmane3427532008-07-25 00:02:30 +0000776#ifndef NDEBUG
Evan Chenga37ecfe2008-02-22 09:24:50 +0000777 break;
Dan Gohmane3427532008-07-25 00:02:30 +0000778#endif
Evan Chenga37ecfe2008-02-22 09:24:50 +0000779 }
780 return RegOp;
781}
782
783/// isValNoAvailableAt - Return true if the val# of the specified interval
784/// which reaches the given instruction also reaches the specified use index.
785bool LiveIntervals::isValNoAvailableAt(const LiveInterval &li, MachineInstr *MI,
786 unsigned UseIdx) const {
787 unsigned Index = getInstructionIndex(MI);
788 VNInfo *ValNo = li.FindLiveRangeContaining(Index)->valno;
789 LiveInterval::const_iterator UI = li.FindLiveRangeContaining(UseIdx);
790 return UI != li.end() && UI->valno == ValNo;
791}
792
Evan Cheng9b741602007-11-12 06:35:08 +0000793/// isReMaterializable - Returns true if the definition MI of the specified
794/// val# of the specified interval is re-materializable.
795bool LiveIntervals::isReMaterializable(const LiveInterval &li,
Evan Chenge81fdb92007-12-06 00:01:56 +0000796 const VNInfo *ValNo, MachineInstr *MI,
797 bool &isLoad) {
Evan Cheng9b741602007-11-12 06:35:08 +0000798 if (DisableReMat)
799 return false;
800
Evan Chengfdc17062008-03-15 00:19:36 +0000801 if (MI->getOpcode() == TargetInstrInfo::IMPLICIT_DEF)
Evan Chenga37ecfe2008-02-22 09:24:50 +0000802 return true;
Evan Chengc2855322008-02-23 01:44:27 +0000803
804 int FrameIdx = 0;
805 if (tii_->isLoadFromStackSlot(MI, FrameIdx) &&
Evan Chengd0201932008-02-23 03:38:34 +0000806 mf_->getFrameInfo()->isImmutableObjectIndex(FrameIdx))
Evan Cheng70e40de2008-02-25 08:50:41 +0000807 // FIXME: Let target specific isReallyTriviallyReMaterializable determine
808 // this but remember this is not safe to fold into a two-address
809 // instruction.
Evan Chengd0201932008-02-23 03:38:34 +0000810 // This is a load from fixed stack slot. It can be rematerialized.
Evan Chengc2855322008-02-23 01:44:27 +0000811 return true;
Evan Chengc2855322008-02-23 01:44:27 +0000812
Dan Gohmane3427532008-07-25 00:02:30 +0000813 // If the target-specific rules don't identify an instruction as
814 // being trivially rematerializable, use some target-independent
815 // rules.
816 if (!MI->getDesc().isRematerializable() ||
817 !tii_->isTriviallyReMaterializable(MI)) {
Dan Gohmanf14c94f2008-07-25 15:08:37 +0000818 if (!EnableAggressiveRemat)
819 return false;
Evan Chenga37ecfe2008-02-22 09:24:50 +0000820
Dan Gohman4306f132008-07-28 18:43:51 +0000821 // If the instruction accesses memory but the memoperands have been lost,
Dan Gohmane3427532008-07-25 00:02:30 +0000822 // we can't analyze it.
823 const TargetInstrDesc &TID = MI->getDesc();
824 if ((TID.mayLoad() || TID.mayStore()) && MI->memoperands_empty())
825 return false;
826
827 // Avoid instructions obviously unsafe for remat.
828 if (TID.hasUnmodeledSideEffects() || TID.isNotDuplicable())
829 return false;
830
831 // If the instruction accesses memory and the memory could be non-constant,
832 // assume the instruction is not rematerializable.
Dan Gohman2fcbc7e2008-07-28 21:51:04 +0000833 for (std::list<MachineMemOperand>::const_iterator I = MI->memoperands_begin(),
Dan Gohmane3427532008-07-25 00:02:30 +0000834 E = MI->memoperands_end(); I != E; ++I) {
835 const MachineMemOperand &MMO = *I;
836 if (MMO.isVolatile() || MMO.isStore())
837 return false;
838 const Value *V = MMO.getValue();
839 if (!V)
840 return false;
841 if (const PseudoSourceValue *PSV = dyn_cast<PseudoSourceValue>(V)) {
842 if (!PSV->isConstant(mf_->getFrameInfo()))
Evan Chenga37ecfe2008-02-22 09:24:50 +0000843 return false;
Dan Gohmane3427532008-07-25 00:02:30 +0000844 } else if (!aa_->pointsToConstantMemory(V))
845 return false;
846 }
847
848 // If any of the registers accessed are non-constant, conservatively assume
849 // the instruction is not rematerializable.
850 unsigned ImpUse = 0;
851 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
852 const MachineOperand &MO = MI->getOperand(i);
853 if (MO.isReg()) {
854 unsigned Reg = MO.getReg();
855 if (Reg == 0)
856 continue;
857 if (TargetRegisterInfo::isPhysicalRegister(Reg))
858 return false;
859
860 // Only allow one def, and that in the first operand.
861 if (MO.isDef() != (i == 0))
862 return false;
863
864 // Only allow constant-valued registers.
865 bool IsLiveIn = mri_->isLiveIn(Reg);
866 MachineRegisterInfo::def_iterator I = mri_->def_begin(Reg),
867 E = mri_->def_end();
868
869 // For the def, it should be the only def.
870 if (MO.isDef() && (next(I) != E || IsLiveIn))
871 return false;
872
873 if (MO.isUse()) {
 874 // Only allow one other register use, as that's all the
875 // remat mechanisms support currently.
876 if (Reg != li.reg) {
877 if (ImpUse == 0)
878 ImpUse = Reg;
879 else if (Reg != ImpUse)
880 return false;
881 }
 882 // For uses, there should be only one associated def.
883 if (I != E && (next(I) != E || IsLiveIn))
884 return false;
885 }
Evan Chenga37ecfe2008-02-22 09:24:50 +0000886 }
887 }
Evan Chenge81fdb92007-12-06 00:01:56 +0000888 }
Evan Cheng9b741602007-11-12 06:35:08 +0000889
Dan Gohmane3427532008-07-25 00:02:30 +0000890 unsigned ImpUse = getReMatImplicitUse(li, MI);
891 if (ImpUse) {
892 const LiveInterval &ImpLi = getInterval(ImpUse);
893 for (MachineRegisterInfo::use_iterator ri = mri_->use_begin(li.reg),
894 re = mri_->use_end(); ri != re; ++ri) {
895 MachineInstr *UseMI = &*ri;
896 unsigned UseIdx = getInstructionIndex(UseMI);
897 if (li.FindLiveRangeContaining(UseIdx)->valno != ValNo)
898 continue;
899 if (!isValNoAvailableAt(ImpLi, MI, UseIdx))
900 return false;
901 }
902 }
903 return true;
Evan Chenge81fdb92007-12-06 00:01:56 +0000904}
905
906/// isReMaterializable - Returns true if every definition of MI of every
907/// val# of the specified interval is re-materializable.
908bool LiveIntervals::isReMaterializable(const LiveInterval &li, bool &isLoad) {
909 isLoad = false;
910 for (LiveInterval::const_vni_iterator i = li.vni_begin(), e = li.vni_end();
911 i != e; ++i) {
912 const VNInfo *VNI = *i;
913 unsigned DefIdx = VNI->def;
914 if (DefIdx == ~1U)
915 continue; // Dead val#.
916 // Is the def for the val# rematerializable?
917 if (DefIdx == ~0u)
918 return false;
919 MachineInstr *ReMatDefMI = getInstructionFromIndex(DefIdx);
920 bool DefIsLoad = false;
Evan Chenga37ecfe2008-02-22 09:24:50 +0000921 if (!ReMatDefMI ||
922 !isReMaterializable(li, VNI, ReMatDefMI, DefIsLoad))
Evan Chenge81fdb92007-12-06 00:01:56 +0000923 return false;
924 isLoad |= DefIsLoad;
Evan Cheng9b741602007-11-12 06:35:08 +0000925 }
926 return true;
927}
928
Evan Cheng70e40de2008-02-25 08:50:41 +0000929/// FilterFoldedOps - Filter out two-address use operands. Return
930/// true if it finds any issue with the operands that ought to prevent
931/// folding.
932static bool FilterFoldedOps(MachineInstr *MI,
933 SmallVector<unsigned, 2> &Ops,
934 unsigned &MRInfo,
935 SmallVector<unsigned, 2> &FoldOps) {
Chris Lattner5b930372008-01-07 07:27:27 +0000936 const TargetInstrDesc &TID = MI->getDesc();
Evan Chenge399fbb2007-12-12 23:12:09 +0000937
Evan Cheng70e40de2008-02-25 08:50:41 +0000938 MRInfo = 0;
Evan Chengfd0bd3c2007-12-02 08:30:39 +0000939 for (unsigned i = 0, e = Ops.size(); i != e; ++i) {
940 unsigned OpIdx = Ops[i];
Evan Chenga37ecfe2008-02-22 09:24:50 +0000941 MachineOperand &MO = MI->getOperand(OpIdx);
Evan Chengfd0bd3c2007-12-02 08:30:39 +0000942 // FIXME: fold subreg use.
Evan Chenga37ecfe2008-02-22 09:24:50 +0000943 if (MO.getSubReg())
Evan Cheng70e40de2008-02-25 08:50:41 +0000944 return true;
Evan Chenga37ecfe2008-02-22 09:24:50 +0000945 if (MO.isDef())
Evan Chengfd0bd3c2007-12-02 08:30:39 +0000946 MRInfo |= (unsigned)VirtRegMap::isMod;
947 else {
948 // Filter out two-address use operand(s).
Evan Chenga37ecfe2008-02-22 09:24:50 +0000949 if (!MO.isImplicit() &&
950 TID.getOperandConstraint(OpIdx, TOI::TIED_TO) != -1) {
Evan Chengfd0bd3c2007-12-02 08:30:39 +0000951 MRInfo = VirtRegMap::isModRef;
952 continue;
953 }
954 MRInfo |= (unsigned)VirtRegMap::isRef;
955 }
956 FoldOps.push_back(OpIdx);
Evan Chengff52f082007-12-01 02:07:52 +0000957 }
Evan Cheng70e40de2008-02-25 08:50:41 +0000958 return false;
959}
960
961
962/// tryFoldMemoryOperand - Attempts to fold either a spill / restore from
 963/// slot / to reg or any rematerialized load into the ith operand of the
 964/// specified MI. If it is successful, MI is updated with the newly created MI and
965/// returns true.
966bool LiveIntervals::tryFoldMemoryOperand(MachineInstr* &MI,
967 VirtRegMap &vrm, MachineInstr *DefMI,
968 unsigned InstrIdx,
969 SmallVector<unsigned, 2> &Ops,
970 bool isSS, int Slot, unsigned Reg) {
Evan Cheng70e40de2008-02-25 08:50:41 +0000971 // If it is an implicit def instruction, just delete it.
Evan Chengfdc17062008-03-15 00:19:36 +0000972 if (MI->getOpcode() == TargetInstrInfo::IMPLICIT_DEF) {
Evan Cheng70e40de2008-02-25 08:50:41 +0000973 RemoveMachineInstrFromMaps(MI);
974 vrm.RemoveMachineInstrFromMaps(MI);
975 MI->eraseFromParent();
976 ++numFolds;
977 return true;
978 }
979
980 // Filter the list of operand indexes that are to be folded. Abort if
981 // any operand will prevent folding.
982 unsigned MRInfo = 0;
983 SmallVector<unsigned, 2> FoldOps;
984 if (FilterFoldedOps(MI, Ops, MRInfo, FoldOps))
985 return false;
Evan Chengff52f082007-12-01 02:07:52 +0000986
Evan Cheng4c7ab522008-03-31 23:19:51 +0000987 // The only time it's safe to fold into a two address instruction is when
988 // it's folding reload and spill from / into a spill stack slot.
989 if (DefMI && (MRInfo & VirtRegMap::isMod))
Evan Chengd0201932008-02-23 03:38:34 +0000990 return false;
991
Evan Cheng3a15a4e2008-02-08 22:05:27 +0000992 MachineInstr *fmi = isSS ? tii_->foldMemoryOperand(*mf_, MI, FoldOps, Slot)
993 : tii_->foldMemoryOperand(*mf_, MI, FoldOps, DefMI);
Evan Cheng9b741602007-11-12 06:35:08 +0000994 if (fmi) {
Evan Chengda872532008-02-27 03:04:06 +0000995 // Remember this instruction uses the spill slot.
996 if (isSS) vrm.addSpillSlotUse(Slot, fmi);
997
Evan Cheng9b741602007-11-12 06:35:08 +0000998 // Attempt to fold the memory reference into the instruction. If
999 // we can do this, we don't need to insert spill code.
Evan Cheng9b741602007-11-12 06:35:08 +00001000 MachineBasicBlock &MBB = *MI->getParent();
Evan Chengefe93672008-01-10 08:24:38 +00001001 if (isSS && !mf_->getFrameInfo()->isImmutableObjectIndex(Slot))
Evan Chengfd0bd3c2007-12-02 08:30:39 +00001002 vrm.virtFolded(Reg, MI, fmi, (VirtRegMap::ModRef)MRInfo);
Evan Chengcecc8222007-11-17 00:40:40 +00001003 vrm.transferSpillPts(MI, fmi);
Evan Cheng96c61312007-11-29 01:06:25 +00001004 vrm.transferRestorePts(MI, fmi);
Evan Cheng1eeb2ef2008-03-11 21:34:46 +00001005 vrm.transferEmergencySpills(MI, fmi);
Evan Cheng9b741602007-11-12 06:35:08 +00001006 mi2iMap_.erase(MI);
Evan Cheng16ebb8c2007-11-30 21:23:43 +00001007 i2miMap_[InstrIdx /InstrSlots::NUM] = fmi;
1008 mi2iMap_[fmi] = InstrIdx;
Evan Cheng9b741602007-11-12 06:35:08 +00001009 MI = MBB.insert(MBB.erase(MI), fmi);
Evan Cheng96c61312007-11-29 01:06:25 +00001010 ++numFolds;
Evan Cheng9b741602007-11-12 06:35:08 +00001011 return true;
1012 }
1013 return false;
1014}
1015
Evan Chengebcba1e2007-12-05 03:22:34 +00001016/// canFoldMemoryOperand - Returns true if the specified load / store
1017/// folding is possible.
1018bool LiveIntervals::canFoldMemoryOperand(MachineInstr *MI,
Evan Cheng70e40de2008-02-25 08:50:41 +00001019 SmallVector<unsigned, 2> &Ops,
Evan Cheng71f75b42008-04-01 21:37:32 +00001020 bool ReMat) const {
Evan Cheng70e40de2008-02-25 08:50:41 +00001021 // Filter the list of operand indexes that are to be folded. Abort if
1022 // any operand will prevent folding.
1023 unsigned MRInfo = 0;
Evan Chengebcba1e2007-12-05 03:22:34 +00001024 SmallVector<unsigned, 2> FoldOps;
Evan Cheng70e40de2008-02-25 08:50:41 +00001025 if (FilterFoldedOps(MI, Ops, MRInfo, FoldOps))
1026 return false;
Evan Chengebcba1e2007-12-05 03:22:34 +00001027
Evan Cheng71f75b42008-04-01 21:37:32 +00001028 // It's only legal to remat for a use, not a def.
1029 if (ReMat && (MRInfo & VirtRegMap::isMod))
Evan Cheng70e40de2008-02-25 08:50:41 +00001030 return false;
Evan Chengebcba1e2007-12-05 03:22:34 +00001031
Evan Chenga37ecfe2008-02-22 09:24:50 +00001032 return tii_->canFoldMemoryOperand(MI, FoldOps);
1033}
1034
Evan Chengcecc8222007-11-17 00:40:40 +00001035bool LiveIntervals::intervalIsInOneMBB(const LiveInterval &li) const {
1036 SmallPtrSet<MachineBasicBlock*, 4> MBBs;
1037 for (LiveInterval::Ranges::const_iterator
1038 I = li.ranges.begin(), E = li.ranges.end(); I != E; ++I) {
1039 std::vector<IdxMBBPair>::const_iterator II =
1040 std::lower_bound(Idx2MBBMap.begin(), Idx2MBBMap.end(), I->start);
1041 if (II == Idx2MBBMap.end())
1042 continue;
1043 if (I->end > II->first) // crossing a MBB.
1044 return false;
1045 MBBs.insert(II->second);
1046 if (MBBs.size() > 1)
1047 return false;
1048 }
1049 return true;
1050}
1051
Evan Chenga37ecfe2008-02-22 09:24:50 +00001052/// rewriteImplicitOps - Rewrite implicit use operands of MI (i.e. uses of
1053/// interval on to-be re-materialized operands of MI) with new register.
1054void LiveIntervals::rewriteImplicitOps(const LiveInterval &li,
1055 MachineInstr *MI, unsigned NewVReg,
1056 VirtRegMap &vrm) {
1057 // There is an implicit use. That means one of the other operand is
1058 // being remat'ed and the remat'ed instruction has li.reg as an
1059 // use operand. Make sure we rewrite that as well.
1060 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
1061 MachineOperand &MO = MI->getOperand(i);
1062 if (!MO.isRegister())
1063 continue;
1064 unsigned Reg = MO.getReg();
1065 if (Reg == 0 || TargetRegisterInfo::isPhysicalRegister(Reg))
1066 continue;
1067 if (!vrm.isReMaterialized(Reg))
1068 continue;
1069 MachineInstr *ReMatMI = vrm.getReMaterializedMI(Reg);
Evan Chengc7daf1f2008-03-05 00:59:57 +00001070 MachineOperand *UseMO = ReMatMI->findRegisterUseOperand(li.reg);
1071 if (UseMO)
1072 UseMO->setReg(NewVReg);
Evan Chenga37ecfe2008-02-22 09:24:50 +00001073 }
1074}
1075
Evan Cheng9b741602007-11-12 06:35:08 +00001076/// rewriteInstructionForSpills, rewriteInstructionsForSpills - Helper functions
1077/// for addIntervalsForSpills to rewrite uses / defs for the given live range.
Evan Chengebcba1e2007-12-05 03:22:34 +00001078bool LiveIntervals::
Evan Chenga37ecfe2008-02-22 09:24:50 +00001079rewriteInstructionForSpills(const LiveInterval &li, const VNInfo *VNI,
1080 bool TrySplit, unsigned index, unsigned end, MachineInstr *MI,
Evan Chengcecc8222007-11-17 00:40:40 +00001081 MachineInstr *ReMatOrigDefMI, MachineInstr *ReMatDefMI,
Evan Cheng9b741602007-11-12 06:35:08 +00001082 unsigned Slot, int LdSlot,
1083 bool isLoad, bool isLoadSS, bool DefIsReMat, bool CanDelete,
Evan Chenga37ecfe2008-02-22 09:24:50 +00001084 VirtRegMap &vrm,
Evan Cheng9b741602007-11-12 06:35:08 +00001085 const TargetRegisterClass* rc,
1086 SmallVector<int, 4> &ReMatIds,
Evan Cheng26d17df2007-12-11 02:09:15 +00001087 const MachineLoopInfo *loopInfo,
Evan Chengc7666af2008-02-23 00:33:04 +00001088 unsigned &NewVReg, unsigned ImpUse, bool &HasDef, bool &HasUse,
Evan Cheng7b632362007-11-29 10:12:14 +00001089 std::map<unsigned,unsigned> &MBBVRegsMap,
Evan Chengba221ca2008-06-06 07:54:39 +00001090 std::vector<LiveInterval*> &NewLIs, float &SSWeight) {
1091 MachineBasicBlock *MBB = MI->getParent();
1092 unsigned loopDepth = loopInfo->getLoopDepth(MBB);
Evan Chengebcba1e2007-12-05 03:22:34 +00001093 bool CanFold = false;
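  // Scan MI's operands for references to li.reg. The RestartInstruction label
  // lets the operand scan start over when MI is replaced (e.g. after a
  // successful fold), since the operand list may have changed.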
Evan Cheng9b741602007-11-12 06:35:08 +00001094 RestartInstruction:
1095 for (unsigned i = 0; i != MI->getNumOperands(); ++i) {
1096 MachineOperand& mop = MI->getOperand(i);
1097 if (!mop.isRegister())
1098 continue;
1099 unsigned Reg = mop.getReg();
1100 unsigned RegI = Reg;
Dan Gohman1e57df32008-02-10 18:45:23 +00001101 if (Reg == 0 || TargetRegisterInfo::isPhysicalRegister(Reg))
Evan Cheng9b741602007-11-12 06:35:08 +00001102 continue;
Evan Cheng9b741602007-11-12 06:35:08 +00001103 if (Reg != li.reg)
1104 continue;
1105
1106 bool TryFold = !DefIsReMat;
Evan Cheng35d47762007-11-29 23:02:50 +00001107 bool FoldSS = true; // Default behavior unless it's a remat.
Evan Cheng9b741602007-11-12 06:35:08 +00001108 int FoldSlot = Slot;
1109 if (DefIsReMat) {
1110 // If this is the rematerializable definition MI itself and
1111 // all of its uses are rematerialized, simply delete it.
Evan Chengcecc8222007-11-17 00:40:40 +00001112 if (MI == ReMatOrigDefMI && CanDelete) {
Evan Cheng16ebb8c2007-11-30 21:23:43 +00001113 DOUT << "\t\t\t\tErasing re-materializable def: ";
1114 DOUT << MI << '\n';
Evan Cheng9b741602007-11-12 06:35:08 +00001115 RemoveMachineInstrFromMaps(MI);
Evan Cheng91e32d02007-11-28 01:28:46 +00001116 vrm.RemoveMachineInstrFromMaps(MI);
Evan Cheng9b741602007-11-12 06:35:08 +00001117 MI->eraseFromParent();
1118 break;
1119 }
1120
1121 // If def for this use can't be rematerialized, then try folding.
Evan Cheng96c61312007-11-29 01:06:25 +00001122 // If def is rematerializable and it's a load, also try folding.
Evan Cheng35d47762007-11-29 23:02:50 +00001123 TryFold = !ReMatDefMI || (ReMatDefMI && (MI == ReMatOrigDefMI || isLoad));
Evan Cheng9b741602007-11-12 06:35:08 +00001124 if (isLoad) {
1125 // Try fold loads (from stack slot, constant pool, etc.) into uses.
1126 FoldSS = isLoadSS;
1127 FoldSlot = LdSlot;
1128 }
1129 }
1130
Evan Cheng9b741602007-11-12 06:35:08 +00001131 // Scan all of the operands of this instruction rewriting operands
1132 // to use NewVReg instead of li.reg as appropriate. We do this for
1133 // two reasons:
1134 //
1135 // 1. If the instr reads the same spilled vreg multiple times, we
1136 // want to reuse the NewVReg.
1137 // 2. If the instr is a two-addr instruction, we are required to
1138 // keep the src/dst regs pinned.
1139 //
1140 // Keep track of whether we replace a use and/or def so that we can
1141 // create the spill interval with the appropriate range.
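// e.g. (hypothetical vreg) a two-address instruction such as
//   %reg1024<def> = ADD %reg1024<use>, 8
// has the spilled vreg as both a def and a use; every such operand gets the
// same NewVReg, and the new interval must cover both the use and the def
// slots at this index.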
Evan Cheng16ebb8c2007-11-30 21:23:43 +00001142
Evan Chengcecc8222007-11-17 00:40:40 +00001143 HasUse = mop.isUse();
1144 HasDef = mop.isDef();
Evan Chengfd0bd3c2007-12-02 08:30:39 +00001145 SmallVector<unsigned, 2> Ops;
1146 Ops.push_back(i);
Evan Cheng9b741602007-11-12 06:35:08 +00001147 for (unsigned j = i+1, e = MI->getNumOperands(); j != e; ++j) {
Evan Chengfd0bd3c2007-12-02 08:30:39 +00001148 const MachineOperand &MOj = MI->getOperand(j);
1149 if (!MOj.isRegister())
Evan Cheng9b741602007-11-12 06:35:08 +00001150 continue;
Evan Chengfd0bd3c2007-12-02 08:30:39 +00001151 unsigned RegJ = MOj.getReg();
Dan Gohman1e57df32008-02-10 18:45:23 +00001152 if (RegJ == 0 || TargetRegisterInfo::isPhysicalRegister(RegJ))
Evan Cheng9b741602007-11-12 06:35:08 +00001153 continue;
1154 if (RegJ == RegI) {
Evan Chengfd0bd3c2007-12-02 08:30:39 +00001155 Ops.push_back(j);
1156 HasUse |= MOj.isUse();
1157 HasDef |= MOj.isDef();
Evan Cheng9b741602007-11-12 06:35:08 +00001158 }
1159 }
1160
Evan Chengc33577b2008-07-12 01:56:02 +00001161 if (HasUse && !li.liveAt(getUseIndex(index)))
1162 // Must be defined by an implicit def. It should not be spilled. Note,
1163 // this is for correctness reasons. e.g.
1164 // 8 %reg1024<def> = IMPLICIT_DEF
1165 // 12 %reg1024<def> = INSERT_SUBREG %reg1024<kill>, %reg1025, 2
1166 // The live range [12, 14) is not part of the r1024 live interval since
1167 // it's defined by an implicit def. It will not conflict with the live
1168 // interval of r1025. Now suppose both registers are spilled; you can
Evan Chengc3c41c92008-07-12 02:22:07 +00001169 // easily see a situation where both registers are reloaded before
Evan Chengc33577b2008-07-12 01:56:02 +00001170 // the INSERT_SUBREG and both reloads target registers that would overlap.
1171 HasUse = false;
1172
Evan Chengba221ca2008-06-06 07:54:39 +00001173 // Update the stack slot spill weight now if we are not splitting.
Evan Chengf7b45f42008-06-21 06:45:54 +00001174 float Weight = getSpillWeight(HasDef, HasUse, loopDepth);
Evan Chengba221ca2008-06-06 07:54:39 +00001175 if (!TrySplit)
1176 SSWeight += Weight;
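// Note: getSpillWeight presumably scales the weight with the loop depth of
// the block, so accesses inside deep loops count more toward the stack
// slot's weight. If the access is later folded away, this weight is
// subtracted back out below.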
1177
1178 if (!TryFold)
1179 CanFold = false;
1180 else {
Evan Chengebcba1e2007-12-05 03:22:34 +00001181 // Do not fold load / store here if we are splitting. We'll find an
1182 // optimal point to insert a load / store later.
1183 if (!TrySplit) {
1184 if (tryFoldMemoryOperand(MI, vrm, ReMatDefMI, index,
1185 Ops, FoldSS, FoldSlot, Reg)) {
1186 // Folding the load/store can completely change the instruction in
1187 // unpredictable ways, rescan it from the beginning.
1188 HasUse = false;
1189 HasDef = false;
1190 CanFold = false;
Evan Chengba221ca2008-06-06 07:54:39 +00001191 if (isRemoved(MI)) {
1192 SSWeight -= Weight;
Evan Cheng909ab8b42008-04-09 20:57:25 +00001193 break;
Evan Chengba221ca2008-06-06 07:54:39 +00001194 }
Evan Chengebcba1e2007-12-05 03:22:34 +00001195 goto RestartInstruction;
1196 }
1197 } else {
Evan Chengba221ca2008-06-06 07:54:39 +00001198 // We'll try to fold it later if it's profitable.
Evan Cheng71f75b42008-04-01 21:37:32 +00001199 CanFold = canFoldMemoryOperand(MI, Ops, DefIsReMat);
Evan Chengebcba1e2007-12-05 03:22:34 +00001200 }
Evan Chengba221ca2008-06-06 07:54:39 +00001201 }
Evan Cheng16ebb8c2007-11-30 21:23:43 +00001202
1203 // Create a new virtual register for the spill interval.
1204 bool CreatedNewVReg = false;
1205 if (NewVReg == 0) {
Evan Chenga37ecfe2008-02-22 09:24:50 +00001206 NewVReg = mri_->createVirtualRegister(rc);
Evan Cheng16ebb8c2007-11-30 21:23:43 +00001207 vrm.grow();
1208 CreatedNewVReg = true;
1209 }
1210 mop.setReg(NewVReg);
Evan Chenga37ecfe2008-02-22 09:24:50 +00001211 if (mop.isImplicit())
1212 rewriteImplicitOps(li, MI, NewVReg, vrm);
Evan Cheng16ebb8c2007-11-30 21:23:43 +00001213
1214 // Reuse NewVReg for other reads.
Evan Chenga37ecfe2008-02-22 09:24:50 +00001215 for (unsigned j = 0, e = Ops.size(); j != e; ++j) {
1216 MachineOperand &mopj = MI->getOperand(Ops[j]);
1217 mopj.setReg(NewVReg);
1218 if (mopj.isImplicit())
1219 rewriteImplicitOps(li, MI, NewVReg, vrm);
1220 }
Evan Cheng16ebb8c2007-11-30 21:23:43 +00001221
Evan Chengcecc8222007-11-17 00:40:40 +00001222 if (CreatedNewVReg) {
1223 if (DefIsReMat) {
1224 vrm.setVirtIsReMaterialized(NewVReg, ReMatDefMI/*, CanDelete*/);
Evan Chenga37ecfe2008-02-22 09:24:50 +00001225 if (ReMatIds[VNI->id] == VirtRegMap::MAX_STACK_SLOT) {
Evan Chengcecc8222007-11-17 00:40:40 +00001226 // Each valnum may have its own remat id.
Evan Chenga37ecfe2008-02-22 09:24:50 +00001227 ReMatIds[VNI->id] = vrm.assignVirtReMatId(NewVReg);
Evan Chengcecc8222007-11-17 00:40:40 +00001228 } else {
Evan Chenga37ecfe2008-02-22 09:24:50 +00001229 vrm.assignVirtReMatId(NewVReg, ReMatIds[VNI->id]);
Evan Chengcecc8222007-11-17 00:40:40 +00001230 }
1231 if (!CanDelete || (HasUse && HasDef)) {
1232 // If this is a two-addr instruction then its use operands are
1233 // rematerializable but its def is not. It should be assigned a
1234 // stack slot.
1235 vrm.assignVirt2StackSlot(NewVReg, Slot);
1236 }
Evan Cheng9b741602007-11-12 06:35:08 +00001237 } else {
Evan Cheng9b741602007-11-12 06:35:08 +00001238 vrm.assignVirt2StackSlot(NewVReg, Slot);
1239 }
Evan Cheng35d47762007-11-29 23:02:50 +00001240 } else if (HasUse && HasDef &&
1241 vrm.getStackSlot(NewVReg) == VirtRegMap::NO_STACK_SLOT) {
1242 // If this interval hasn't been assigned a stack slot (because earlier
1243 // def is a deleted remat def), do it now.
1244 assert(Slot != VirtRegMap::NO_STACK_SLOT);
1245 vrm.assignVirt2StackSlot(NewVReg, Slot);
Evan Cheng9b741602007-11-12 06:35:08 +00001246 }
1247
Evan Chengc7666af2008-02-23 00:33:04 +00001248 // Re-matting an instruction that uses a virtual register. Add that
1249 // register as an implicit use operand on the use MI.
1250 if (DefIsReMat && ImpUse)
1251 MI->addOperand(MachineOperand::CreateReg(ImpUse, false, true));
1252
Evan Cheng9b741602007-11-12 06:35:08 +00001253 // create a new register interval for this spill / remat.
1254 LiveInterval &nI = getOrCreateInterval(NewVReg);
Evan Chengcecc8222007-11-17 00:40:40 +00001255 if (CreatedNewVReg) {
1256 NewLIs.push_back(&nI);
Evan Cheng7b632362007-11-29 10:12:14 +00001257 MBBVRegsMap.insert(std::make_pair(MI->getParent()->getNumber(), NewVReg));
Evan Chengcecc8222007-11-17 00:40:40 +00001258 if (TrySplit)
1259 vrm.setIsSplitFromReg(NewVReg, li.reg);
1260 }
Evan Cheng9b741602007-11-12 06:35:08 +00001261
1262 if (HasUse) {
Evan Chengcecc8222007-11-17 00:40:40 +00001263 if (CreatedNewVReg) {
1264 LiveRange LR(getLoadIndex(index), getUseIndex(index)+1,
1265 nI.getNextValue(~0U, 0, VNInfoAllocator));
1266 DOUT << " +" << LR;
1267 nI.addRange(LR);
1268 } else {
1269 // Extend the split live interval to this def / use.
1270 unsigned End = getUseIndex(index)+1;
1271 LiveRange LR(nI.ranges[nI.ranges.size()-1].end, End,
1272 nI.getValNumInfo(nI.getNumValNums()-1));
1273 DOUT << " +" << LR;
1274 nI.addRange(LR);
1275 }
Evan Cheng9b741602007-11-12 06:35:08 +00001276 }
1277 if (HasDef) {
1278 LiveRange LR(getDefIndex(index), getStoreIndex(index),
1279 nI.getNextValue(~0U, 0, VNInfoAllocator));
1280 DOUT << " +" << LR;
1281 nI.addRange(LR);
1282 }
Evan Chengcecc8222007-11-17 00:40:40 +00001283
Evan Cheng9b741602007-11-12 06:35:08 +00001284 DOUT << "\t\t\t\tAdded new interval: ";
Dan Gohman1e57df32008-02-10 18:45:23 +00001285 nI.print(DOUT, tri_);
Evan Cheng9b741602007-11-12 06:35:08 +00001286 DOUT << '\n';
1287 }
Evan Chengebcba1e2007-12-05 03:22:34 +00001288 return CanFold;
Evan Cheng9b741602007-11-12 06:35:08 +00001289}
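
/// anyKillInMBBAfterIdx - Return true if the given val# has a kill inside MBB
/// that comes after the specified instruction index.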
Evan Chengcecc8222007-11-17 00:40:40 +00001290bool LiveIntervals::anyKillInMBBAfterIdx(const LiveInterval &li,
Evan Cheng96c61312007-11-29 01:06:25 +00001291 const VNInfo *VNI,
1292 MachineBasicBlock *MBB, unsigned Idx) const {
Evan Chengcecc8222007-11-17 00:40:40 +00001293 unsigned End = getMBBEndIdx(MBB);
Evan Cheng96c61312007-11-29 01:06:25 +00001294 for (unsigned j = 0, ee = VNI->kills.size(); j != ee; ++j) {
1295 unsigned KillIdx = VNI->kills[j];
1296 if (KillIdx > Idx && KillIdx < End)
1297 return true;
Evan Chengcecc8222007-11-17 00:40:40 +00001298 }
1299 return false;
1300}
1301
Evan Cheng44fccf22008-02-21 00:34:19 +00001302/// RewriteInfo - Keep track of machine instrs that will be rewritten
1303/// during spilling.
Dan Gohman089efff2008-05-13 00:00:25 +00001304namespace {
1305 struct RewriteInfo {
1306 unsigned Index;
1307 MachineInstr *MI;
1308 bool HasUse;
1309 bool HasDef;
1310 RewriteInfo(unsigned i, MachineInstr *mi, bool u, bool d)
1311 : Index(i), MI(mi), HasUse(u), HasDef(d) {}
1312 };
Evan Cheng44fccf22008-02-21 00:34:19 +00001313
Dan Gohman089efff2008-05-13 00:00:25 +00001314 struct RewriteInfoCompare {
1315 bool operator()(const RewriteInfo &LHS, const RewriteInfo &RHS) const {
1316 return LHS.Index < RHS.Index;
1317 }
1318 };
1319}
Evan Cheng44fccf22008-02-21 00:34:19 +00001320
Evan Cheng9b741602007-11-12 06:35:08 +00001321void LiveIntervals::
Evan Chengcecc8222007-11-17 00:40:40 +00001322rewriteInstructionsForSpills(const LiveInterval &li, bool TrySplit,
Evan Cheng9b741602007-11-12 06:35:08 +00001323 LiveInterval::Ranges::const_iterator &I,
Evan Chengcecc8222007-11-17 00:40:40 +00001324 MachineInstr *ReMatOrigDefMI, MachineInstr *ReMatDefMI,
Evan Cheng9b741602007-11-12 06:35:08 +00001325 unsigned Slot, int LdSlot,
1326 bool isLoad, bool isLoadSS, bool DefIsReMat, bool CanDelete,
Evan Chenga37ecfe2008-02-22 09:24:50 +00001327 VirtRegMap &vrm,
Evan Cheng9b741602007-11-12 06:35:08 +00001328 const TargetRegisterClass* rc,
1329 SmallVector<int, 4> &ReMatIds,
Evan Cheng26d17df2007-12-11 02:09:15 +00001330 const MachineLoopInfo *loopInfo,
Evan Chengcecc8222007-11-17 00:40:40 +00001331 BitVector &SpillMBBs,
Evan Cheng7b632362007-11-29 10:12:14 +00001332 std::map<unsigned, std::vector<SRInfo> > &SpillIdxes,
Evan Cheng96c61312007-11-29 01:06:25 +00001333 BitVector &RestoreMBBs,
Evan Cheng7b632362007-11-29 10:12:14 +00001334 std::map<unsigned, std::vector<SRInfo> > &RestoreIdxes,
1335 std::map<unsigned,unsigned> &MBBVRegsMap,
Evan Chengba221ca2008-06-06 07:54:39 +00001336 std::vector<LiveInterval*> &NewLIs, float &SSWeight) {
Evan Chengebcba1e2007-12-05 03:22:34 +00001337 bool AllCanFold = true;
Evan Chengcecc8222007-11-17 00:40:40 +00001338 unsigned NewVReg = 0;
Evan Cheng44fccf22008-02-21 00:34:19 +00001339 unsigned start = getBaseIndex(I->start);
Evan Cheng9b741602007-11-12 06:35:08 +00001340 unsigned end = getBaseIndex(I->end-1) + InstrSlots::NUM;
Evan Cheng9b741602007-11-12 06:35:08 +00001341
Evan Cheng44fccf22008-02-21 00:34:19 +00001342 // First collect all the defs / uses in this live range that will be rewritten.
Evan Cheng909ab8b42008-04-09 20:57:25 +00001343 // Make sure they are sorted according to instruction index.
Evan Cheng44fccf22008-02-21 00:34:19 +00001344 std::vector<RewriteInfo> RewriteMIs;
Evan Chenga37ecfe2008-02-22 09:24:50 +00001345 for (MachineRegisterInfo::reg_iterator ri = mri_->reg_begin(li.reg),
1346 re = mri_->reg_end(); ri != re; ) {
Evan Cheng70f68e92008-04-03 16:39:43 +00001347 MachineInstr *MI = &*ri;
Evan Cheng44fccf22008-02-21 00:34:19 +00001348 MachineOperand &O = ri.getOperand();
1349 ++ri;
Evan Cheng8adc74e2008-03-31 07:53:30 +00001350 assert(!O.isImplicit() && "Spilling register that's used as implicit use?");
Evan Cheng44fccf22008-02-21 00:34:19 +00001351 unsigned index = getInstructionIndex(MI);
1352 if (index < start || index >= end)
1353 continue;
Evan Chengc33577b2008-07-12 01:56:02 +00001354 if (O.isUse() && !li.liveAt(getUseIndex(index)))
1355 // Must be defined by an implicit def. It should not be spilled. Note,
1356 // this is for correctness reasons. e.g.
1357 // 8 %reg1024<def> = IMPLICIT_DEF
1358 // 12 %reg1024<def> = INSERT_SUBREG %reg1024<kill>, %reg1025, 2
1359 // The live range [12, 14) is not part of the r1024 live interval since
1360 // it's defined by an implicit def. It will not conflict with the live
1361 // interval of r1025. Now suppose both registers are spilled; you can
Evan Chengc3c41c92008-07-12 02:22:07 +00001362 // easily see a situation where both registers are reloaded before
Evan Chengc33577b2008-07-12 01:56:02 +00001363 // the INSERT_SUBREG and both reloads target registers that would overlap.
1364 continue;
Evan Cheng44fccf22008-02-21 00:34:19 +00001365 RewriteMIs.push_back(RewriteInfo(index, MI, O.isUse(), O.isDef()));
1366 }
1367 std::sort(RewriteMIs.begin(), RewriteMIs.end(), RewriteInfoCompare());
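// Sorting by instruction index keeps all entries for the same MI adjacent,
// so the loop below can merge them into one rewrite per instruction, and the
// per-block spill / restore bookkeeping sees defs and uses in program order.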
1368
Evan Chengc7666af2008-02-23 00:33:04 +00001369 unsigned ImpUse = DefIsReMat ? getReMatImplicitUse(li, ReMatDefMI) : 0;
Evan Cheng44fccf22008-02-21 00:34:19 +00001370 // Now rewrite the defs and uses.
1371 for (unsigned i = 0, e = RewriteMIs.size(); i != e; ) {
1372 RewriteInfo &rwi = RewriteMIs[i];
1373 ++i;
1374 unsigned index = rwi.Index;
1375 bool MIHasUse = rwi.HasUse;
1376 bool MIHasDef = rwi.HasDef;
1377 MachineInstr *MI = rwi.MI;
1378 // If MI defs and/or uses the same register multiple times, then there
1379 // are multiple entries.
Evan Chengc7666af2008-02-23 00:33:04 +00001380 unsigned NumUses = MIHasUse;
Evan Cheng44fccf22008-02-21 00:34:19 +00001381 while (i != e && RewriteMIs[i].MI == MI) {
1382 assert(RewriteMIs[i].Index == index);
Evan Chengc7666af2008-02-23 00:33:04 +00001383 bool isUse = RewriteMIs[i].HasUse;
1384 if (isUse) ++NumUses;
1385 MIHasUse |= isUse;
Evan Cheng44fccf22008-02-21 00:34:19 +00001386 MIHasDef |= RewriteMIs[i].HasDef;
1387 ++i;
1388 }
Evan Chengcecc8222007-11-17 00:40:40 +00001389 MachineBasicBlock *MBB = MI->getParent();
Evan Chengc7666af2008-02-23 00:33:04 +00001390
Evan Cheng9c48ca72008-05-23 23:00:04 +00001391 if (ImpUse && MI != ReMatDefMI) {
Evan Chengc7666af2008-02-23 00:33:04 +00001392 // Re-matting an instruction that uses a virtual register. Update the
Evan Cheng8adc74e2008-03-31 07:53:30 +00001393 // register interval's spill weight to HUGE_VALF to prevent it from
1394 // being spilled.
Evan Chengc7666af2008-02-23 00:33:04 +00001395 LiveInterval &ImpLi = getInterval(ImpUse);
Evan Cheng8adc74e2008-03-31 07:53:30 +00001396 ImpLi.weight = HUGE_VALF;
Evan Chengc7666af2008-02-23 00:33:04 +00001397 }
1398
Evan Cheng44fccf22008-02-21 00:34:19 +00001399 unsigned MBBId = MBB->getNumber();
Evan Chengebcba1e2007-12-05 03:22:34 +00001400 unsigned ThisVReg = 0;
Evan Cheng1083a2f2007-12-03 09:58:48 +00001401 if (TrySplit) {
Evan Cheng44fccf22008-02-21 00:34:19 +00001402 std::map<unsigned,unsigned>::const_iterator NVI = MBBVRegsMap.find(MBBId);
Evan Cheng7b632362007-11-29 10:12:14 +00001403 if (NVI != MBBVRegsMap.end()) {
Evan Chengebcba1e2007-12-05 03:22:34 +00001404 ThisVReg = NVI->second;
Evan Cheng7b632362007-11-29 10:12:14 +00001405 // One common case:
1406 // x = use
1407 // ...
1408 // ...
1409 // def = ...
1410 // = use
1411 // It's better to start a new interval to avoid artificially
1412 // extending the new interval.
Evan Cheng7b632362007-11-29 10:12:14 +00001413 if (MIHasDef && !MIHasUse) {
1414 MBBVRegsMap.erase(MBB->getNumber());
Evan Chengebcba1e2007-12-05 03:22:34 +00001415 ThisVReg = 0;
Evan Cheng7b632362007-11-29 10:12:14 +00001416 }
1417 }
Evan Cheng91e32d02007-11-28 01:28:46 +00001418 }
Evan Chengebcba1e2007-12-05 03:22:34 +00001419
1420 bool IsNew = ThisVReg == 0;
1421 if (IsNew) {
1422 // This ends the previous live interval. If all of its defs / uses
1423 // can be folded, give it a low spill weight.
1424 if (NewVReg && TrySplit && AllCanFold) {
1425 LiveInterval &nI = getOrCreateInterval(NewVReg);
1426 nI.weight /= 10.0F;
1427 }
1428 AllCanFold = true;
1429 }
1430 NewVReg = ThisVReg;
1431
Evan Chengcecc8222007-11-17 00:40:40 +00001432 bool HasDef = false;
1433 bool HasUse = false;
Evan Chenga37ecfe2008-02-22 09:24:50 +00001434 bool CanFold = rewriteInstructionForSpills(li, I->valno, TrySplit,
Evan Chengba221ca2008-06-06 07:54:39 +00001435 index, end, MI, ReMatOrigDefMI, ReMatDefMI,
1436 Slot, LdSlot, isLoad, isLoadSS, DefIsReMat,
1437 CanDelete, vrm, rc, ReMatIds, loopInfo, NewVReg,
1438 ImpUse, HasDef, HasUse, MBBVRegsMap, NewLIs, SSWeight);
Evan Chengcecc8222007-11-17 00:40:40 +00001439 if (!HasDef && !HasUse)
1440 continue;
1441
Evan Chengebcba1e2007-12-05 03:22:34 +00001442 AllCanFold &= CanFold;
1443
Evan Chengcecc8222007-11-17 00:40:40 +00001444 // Update weight of spill interval.
1445 LiveInterval &nI = getOrCreateInterval(NewVReg);
Evan Cheng1083a2f2007-12-03 09:58:48 +00001446 if (!TrySplit) {
Evan Chengcecc8222007-11-17 00:40:40 +00001447 // The spill weight is now infinity as it cannot be spilled again.
1448 nI.weight = HUGE_VALF;
Evan Cheng96c61312007-11-29 01:06:25 +00001449 continue;
Evan Chengcecc8222007-11-17 00:40:40 +00001450 }
Evan Cheng96c61312007-11-29 01:06:25 +00001451
1452 // Keep track of the last def and first use in each MBB.
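// SpillIdxes / RestoreIdxes map a basic block number to the candidate spill /
// restore points found so far; each SRInfo records the instruction index, the
// new vreg, and whether the store / load can still be folded into that
// instruction.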
Evan Cheng96c61312007-11-29 01:06:25 +00001453 if (HasDef) {
1454 if (MI != ReMatOrigDefMI || !CanDelete) {
Evan Cheng96c61312007-11-29 01:06:25 +00001455 bool HasKill = false;
1456 if (!HasUse)
1457 HasKill = anyKillInMBBAfterIdx(li, I->valno, MBB, getDefIndex(index));
1458 else {
Evan Cheng7b632362007-11-29 10:12:14 +00001459 // If this is two-address code, then this index starts a new VNInfo.
Evan Cheng14f8a502008-06-04 09:18:41 +00001460 const VNInfo *VNI = li.findDefinedVNInfo(getDefIndex(index));
Evan Cheng96c61312007-11-29 01:06:25 +00001461 if (VNI)
1462 HasKill = anyKillInMBBAfterIdx(li, VNI, MBB, getDefIndex(index));
1463 }
Evan Cheng999f9472007-12-01 04:42:39 +00001464 std::map<unsigned, std::vector<SRInfo> >::iterator SII =
1465 SpillIdxes.find(MBBId);
Evan Cheng96c61312007-11-29 01:06:25 +00001466 if (!HasKill) {
Evan Cheng7b632362007-11-29 10:12:14 +00001467 if (SII == SpillIdxes.end()) {
1468 std::vector<SRInfo> S;
1469 S.push_back(SRInfo(index, NewVReg, true));
1470 SpillIdxes.insert(std::make_pair(MBBId, S));
1471 } else if (SII->second.back().vreg != NewVReg) {
1472 SII->second.push_back(SRInfo(index, NewVReg, true));
1473 } else if ((int)index > SII->second.back().index) {
Evan Cheng96c61312007-11-29 01:06:25 +00001474 // If there is an earlier def and this is a two-address
1475 // instruction, then it's not possible to fold the store (which
1476 // would also fold the load).
Evan Cheng7b632362007-11-29 10:12:14 +00001477 SRInfo &Info = SII->second.back();
1478 Info.index = index;
1479 Info.canFold = !HasUse;
Evan Cheng96c61312007-11-29 01:06:25 +00001480 }
1481 SpillMBBs.set(MBBId);
Evan Cheng999f9472007-12-01 04:42:39 +00001482 } else if (SII != SpillIdxes.end() &&
1483 SII->second.back().vreg == NewVReg &&
1484 (int)index > SII->second.back().index) {
1485 // There is an earlier def that's not killed (must be two-address).
1486 // The spill is no longer needed.
1487 SII->second.pop_back();
1488 if (SII->second.empty()) {
1489 SpillIdxes.erase(MBBId);
1490 SpillMBBs.reset(MBBId);
1491 }
Evan Cheng96c61312007-11-29 01:06:25 +00001492 }
1493 }
Evan Cheng96c61312007-11-29 01:06:25 +00001494 }
1495
1496 if (HasUse) {
Evan Cheng7b632362007-11-29 10:12:14 +00001497 std::map<unsigned, std::vector<SRInfo> >::iterator SII =
Evan Cheng96c61312007-11-29 01:06:25 +00001498 SpillIdxes.find(MBBId);
Evan Cheng7b632362007-11-29 10:12:14 +00001499 if (SII != SpillIdxes.end() &&
1500 SII->second.back().vreg == NewVReg &&
1501 (int)index > SII->second.back().index)
Evan Cheng96c61312007-11-29 01:06:25 +00001502 // There are use(s) following the last def; it's not safe to fold the spill.
Evan Cheng7b632362007-11-29 10:12:14 +00001503 SII->second.back().canFold = false;
1504 std::map<unsigned, std::vector<SRInfo> >::iterator RII =
Evan Cheng96c61312007-11-29 01:06:25 +00001505 RestoreIdxes.find(MBBId);
Evan Cheng7b632362007-11-29 10:12:14 +00001506 if (RII != RestoreIdxes.end() && RII->second.back().vreg == NewVReg)
Evan Cheng96c61312007-11-29 01:06:25 +00001507 // If we are splitting live intervals, only fold if it's the first
1508 // use and there isn't another use later in the MBB.
Evan Cheng7b632362007-11-29 10:12:14 +00001509 RII->second.back().canFold = false;
Evan Cheng96c61312007-11-29 01:06:25 +00001510 else if (IsNew) {
1511 // Only need a reload if there isn't an earlier def / use.
Evan Cheng7b632362007-11-29 10:12:14 +00001512 if (RII == RestoreIdxes.end()) {
1513 std::vector<SRInfo> Infos;
1514 Infos.push_back(SRInfo(index, NewVReg, true));
1515 RestoreIdxes.insert(std::make_pair(MBBId, Infos));
1516 } else {
1517 RII->second.push_back(SRInfo(index, NewVReg, true));
1518 }
Evan Cheng96c61312007-11-29 01:06:25 +00001519 RestoreMBBs.set(MBBId);
1520 }
1521 }
1522
1523 // Update spill weight.
Evan Cheng26d17df2007-12-11 02:09:15 +00001524 unsigned loopDepth = loopInfo->getLoopDepth(MBB);
Evan Chengf7b45f42008-06-21 06:45:54 +00001525 nI.weight += getSpillWeight(HasDef, HasUse, loopDepth);
Evan Cheng9b741602007-11-12 06:35:08 +00001526 }
Evan Chengebcba1e2007-12-05 03:22:34 +00001527
1528 if (NewVReg && TrySplit && AllCanFold) {
1529 // If all of its defs / uses can be folded, give it a low spill weight.
1530 LiveInterval &nI = getOrCreateInterval(NewVReg);
1531 nI.weight /= 10.0F;
1532 }
Evan Cheng9b741602007-11-12 06:35:08 +00001533}
1534
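/// alsoFoldARestore - Return true if the restore of register vr at the given
/// instruction index in block Id is recorded as foldable.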
Evan Cheng7b632362007-11-29 10:12:14 +00001535bool LiveIntervals::alsoFoldARestore(int Id, int index, unsigned vr,
1536 BitVector &RestoreMBBs,
1537 std::map<unsigned,std::vector<SRInfo> > &RestoreIdxes) {
1538 if (!RestoreMBBs[Id])
1539 return false;
1540 std::vector<SRInfo> &Restores = RestoreIdxes[Id];
1541 for (unsigned i = 0, e = Restores.size(); i != e; ++i)
1542 if (Restores[i].index == index &&
1543 Restores[i].vreg == vr &&
1544 Restores[i].canFold)
1545 return true;
1546 return false;
1547}
1548
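/// eraseRestoreInfo - Mark the recorded restore of vr at the given instruction
/// index in block Id as no longer needed, e.g. because the reload was folded
/// into its user.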
1549void LiveIntervals::eraseRestoreInfo(int Id, int index, unsigned vr,
1550 BitVector &RestoreMBBs,
1551 std::map<unsigned,std::vector<SRInfo> > &RestoreIdxes) {
1552 if (!RestoreMBBs[Id])
1553 return;
1554 std::vector<SRInfo> &Restores = RestoreIdxes[Id];
1555 for (unsigned i = 0, e = Restores.size(); i != e; ++i)
1556 if (Restores[i].index == index && Restores[i].vreg == vr)
1557 Restores[i].index = -1;
1558}
Evan Chengcecc8222007-11-17 00:40:40 +00001559
Evan Cheng7b88cbc2008-04-11 17:53:36 +00001560/// handleSpilledImpDefs - Remove IMPLICIT_DEF instructions which are being
1561/// spilled and create empty intervals for their uses.
1562void
1563LiveIntervals::handleSpilledImpDefs(const LiveInterval &li, VirtRegMap &vrm,
1564 const TargetRegisterClass* rc,
1565 std::vector<LiveInterval*> &NewLIs) {
Evan Cheng70f68e92008-04-03 16:39:43 +00001566 for (MachineRegisterInfo::reg_iterator ri = mri_->reg_begin(li.reg),
1567 re = mri_->reg_end(); ri != re; ) {
Evan Cheng7b88cbc2008-04-11 17:53:36 +00001568 MachineOperand &O = ri.getOperand();
Evan Cheng70f68e92008-04-03 16:39:43 +00001569 MachineInstr *MI = &*ri;
1570 ++ri;
Evan Cheng7b88cbc2008-04-11 17:53:36 +00001571 if (O.isDef()) {
1572 assert(MI->getOpcode() == TargetInstrInfo::IMPLICIT_DEF &&
1573 "Register def was not rewritten?");
1574 RemoveMachineInstrFromMaps(MI);
1575 vrm.RemoveMachineInstrFromMaps(MI);
1576 MI->eraseFromParent();
1577 } else {
1578 // This must be a use of an implicit_def so it's not part of the live
1579 // interval. Create a new empty live interval for it.
1580 // FIXME: Can we simply erase some of the instructions? e.g. Stores?
1581 unsigned NewVReg = mri_->createVirtualRegister(rc);
1582 vrm.grow();
1583 vrm.setIsImplicitlyDefined(NewVReg);
1584 NewLIs.push_back(&getOrCreateInterval(NewVReg));
1585 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
1586 MachineOperand &MO = MI->getOperand(i);
1587 if (MO.isReg() && MO.getReg() == li.reg)
1588 MO.setReg(NewVReg);
1589 }
1590 }
Evan Cheng70f68e92008-04-03 16:39:43 +00001591 }
1592}
1593
Evan Chengcecc8222007-11-17 00:40:40 +00001594
Evan Cheng9b741602007-11-12 06:35:08 +00001595std::vector<LiveInterval*> LiveIntervals::
Evan Chengcecc8222007-11-17 00:40:40 +00001596addIntervalsForSpills(const LiveInterval &li,
Evan Chengba221ca2008-06-06 07:54:39 +00001597 const MachineLoopInfo *loopInfo, VirtRegMap &vrm,
1598 float &SSWeight) {
Evan Cheng9b741602007-11-12 06:35:08 +00001599 assert(li.weight != HUGE_VALF &&
1600 "attempt to spill already spilled interval!");
1601
1602 DOUT << "\t\t\t\tadding intervals for spills for interval: ";
Dan Gohman1e57df32008-02-10 18:45:23 +00001603 li.print(DOUT, tri_);
Evan Cheng9b741602007-11-12 06:35:08 +00001604 DOUT << '\n';
1605
Evan Chengba221ca2008-06-06 07:54:39 +00001606 // Spill slot weight.
1607 SSWeight = 0.0f;
1608
Evan Chengcecc8222007-11-17 00:40:40 +00001609 // Each bit specifies whether a spill is required in the MBB.
1610 BitVector SpillMBBs(mf_->getNumBlockIDs());
Evan Cheng7b632362007-11-29 10:12:14 +00001611 std::map<unsigned, std::vector<SRInfo> > SpillIdxes;
Evan Cheng96c61312007-11-29 01:06:25 +00001612 BitVector RestoreMBBs(mf_->getNumBlockIDs());
Evan Cheng7b632362007-11-29 10:12:14 +00001613 std::map<unsigned, std::vector<SRInfo> > RestoreIdxes;
1614 std::map<unsigned,unsigned> MBBVRegsMap;
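// MBBVRegsMap remembers, per basic block, the last new vreg created there so
// later defs / uses in the same block can reuse it when splitting.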
Evan Cheng9b741602007-11-12 06:35:08 +00001615 std::vector<LiveInterval*> NewLIs;
Evan Chenga37ecfe2008-02-22 09:24:50 +00001616 const TargetRegisterClass* rc = mri_->getRegClass(li.reg);
Evan Cheng9b741602007-11-12 06:35:08 +00001617
1618 unsigned NumValNums = li.getNumValNums();
1619 SmallVector<MachineInstr*, 4> ReMatDefs;
1620 ReMatDefs.resize(NumValNums, NULL);
1621 SmallVector<MachineInstr*, 4> ReMatOrigDefs;
1622 ReMatOrigDefs.resize(NumValNums, NULL);
1623 SmallVector<int, 4> ReMatIds;
1624 ReMatIds.resize(NumValNums, VirtRegMap::MAX_STACK_SLOT);
1625 BitVector ReMatDelete(NumValNums);
1626 unsigned Slot = VirtRegMap::MAX_STACK_SLOT;
1627
Evan Chengcecc8222007-11-17 00:40:40 +00001628 // Spilling a split live interval. It cannot be split any further. Also,
1629 // it's guaranteed to be a single val# / range interval.
1630 if (vrm.getPreSplitReg(li.reg)) {
1631 vrm.setIsSplitFromReg(li.reg, 0);
Evan Chengd9731042007-12-05 10:24:35 +00001632 // Unset the split kill marker on the last use.
1633 unsigned KillIdx = vrm.getKillPoint(li.reg);
1634 if (KillIdx) {
1635 MachineInstr *KillMI = getInstructionFromIndex(KillIdx);
1636 assert(KillMI && "Last use disappeared?");
1637 int KillOp = KillMI->findRegisterUseOperandIdx(li.reg, true);
1638 assert(KillOp != -1 && "Last use disappeared?");
Chris Lattner7f2d3b82007-12-30 21:56:09 +00001639 KillMI->getOperand(KillOp).setIsKill(false);
Evan Chengd9731042007-12-05 10:24:35 +00001640 }
Evan Cheng6f522672007-12-05 09:51:10 +00001641 vrm.removeKillPoint(li.reg);
Evan Chengcecc8222007-11-17 00:40:40 +00001642 bool DefIsReMat = vrm.isReMaterialized(li.reg);
1643 Slot = vrm.getStackSlot(li.reg);
1644 assert(Slot != VirtRegMap::MAX_STACK_SLOT);
1645 MachineInstr *ReMatDefMI = DefIsReMat ?
1646 vrm.getReMaterializedMI(li.reg) : NULL;
1647 int LdSlot = 0;
1648 bool isLoadSS = DefIsReMat && tii_->isLoadFromStackSlot(ReMatDefMI, LdSlot);
1649 bool isLoad = isLoadSS ||
Chris Lattner5b930372008-01-07 07:27:27 +00001650 (DefIsReMat && (ReMatDefMI->getDesc().isSimpleLoad()));
Evan Chengcecc8222007-11-17 00:40:40 +00001651 bool IsFirstRange = true;
1652 for (LiveInterval::Ranges::const_iterator
1653 I = li.ranges.begin(), E = li.ranges.end(); I != E; ++I) {
1654 // If this is a split live interval with multiple ranges, it means there
1655 // are two-address instructions that re-defined the value. Only the
1656 // first def can be rematerialized!
1657 if (IsFirstRange) {
Evan Cheng35d47762007-11-29 23:02:50 +00001658 // Note ReMatOrigDefMI has already been deleted.
Evan Chengcecc8222007-11-17 00:40:40 +00001659 rewriteInstructionsForSpills(li, false, I, NULL, ReMatDefMI,
1660 Slot, LdSlot, isLoad, isLoadSS, DefIsReMat,
Evan Chenga37ecfe2008-02-22 09:24:50 +00001661 false, vrm, rc, ReMatIds, loopInfo,
Evan Cheng96c61312007-11-29 01:06:25 +00001662 SpillMBBs, SpillIdxes, RestoreMBBs, RestoreIdxes,
Evan Chengba221ca2008-06-06 07:54:39 +00001663 MBBVRegsMap, NewLIs, SSWeight);
Evan Chengcecc8222007-11-17 00:40:40 +00001664 } else {
1665 rewriteInstructionsForSpills(li, false, I, NULL, 0,
1666 Slot, 0, false, false, false,
Evan Chenga37ecfe2008-02-22 09:24:50 +00001667 false, vrm, rc, ReMatIds, loopInfo,
Evan Cheng96c61312007-11-29 01:06:25 +00001668 SpillMBBs, SpillIdxes, RestoreMBBs, RestoreIdxes,
Evan Chengba221ca2008-06-06 07:54:39 +00001669 MBBVRegsMap, NewLIs, SSWeight);
Evan Chengcecc8222007-11-17 00:40:40 +00001670 }
1671 IsFirstRange = false;
1672 }
Evan Cheng70f68e92008-04-03 16:39:43 +00001673
Evan Chengba221ca2008-06-06 07:54:39 +00001674 SSWeight = 0.0f; // Already accounted for when split.
Evan Cheng7b88cbc2008-04-11 17:53:36 +00001675 handleSpilledImpDefs(li, vrm, rc, NewLIs);
Evan Chengcecc8222007-11-17 00:40:40 +00001676 return NewLIs;
1677 }
1678
1679 bool TrySplit = SplitAtBB && !intervalIsInOneMBB(li);
Evan Cheng96c61312007-11-29 01:06:25 +00001680 if (SplitLimit != -1 && (int)numSplits >= SplitLimit)
1681 TrySplit = false;
1682 if (TrySplit)
1683 ++numSplits;
Evan Cheng9b741602007-11-12 06:35:08 +00001684 bool NeedStackSlot = false;
1685 for (LiveInterval::const_vni_iterator i = li.vni_begin(), e = li.vni_end();
1686 i != e; ++i) {
1687 const VNInfo *VNI = *i;
1688 unsigned VN = VNI->id;
1689 unsigned DefIdx = VNI->def;
1690 if (DefIdx == ~1U)
1691 continue; // Dead val#.
1692 // Is the def for the val# rematerializable?
Evan Chengcecc8222007-11-17 00:40:40 +00001693 MachineInstr *ReMatDefMI = (DefIdx == ~0u)
1694 ? 0 : getInstructionFromIndex(DefIdx);
Evan Chenge81fdb92007-12-06 00:01:56 +00001695 bool dummy;
1696 if (ReMatDefMI && isReMaterializable(li, VNI, ReMatDefMI, dummy)) {
Evan Cheng9b741602007-11-12 06:35:08 +00001697 // Remember how to remat the def of this val#.
Evan Chengcecc8222007-11-17 00:40:40 +00001698 ReMatOrigDefs[VN] = ReMatDefMI;
Dan Gohman8b3b5172008-07-17 23:49:46 +00001699 // Original def may be modified so we have to make a copy here.
Evan Cheng4ce1a522008-07-19 00:37:25 +00001700 MachineInstr *Clone = mf_->CloneMachineInstr(ReMatDefMI);
1701 ClonedMIs.push_back(Clone);
1702 ReMatDefs[VN] = Clone;
Evan Cheng9b741602007-11-12 06:35:08 +00001703
1704 bool CanDelete = true;
Evan Cheng8b70e632007-11-29 09:49:23 +00001705 if (VNI->hasPHIKill) {
1706 // A kill is a phi node, so not all of its uses can be rematerialized.
Evan Cheng9b741602007-11-12 06:35:08 +00001707 // It must not be deleted.
Evan Cheng8b70e632007-11-29 09:49:23 +00001708 CanDelete = false;
1709 // Need a stack slot if there is any live range where uses cannot be
1710 // rematerialized.
1711 NeedStackSlot = true;
Evan Cheng9b741602007-11-12 06:35:08 +00001712 }
Evan Cheng9b741602007-11-12 06:35:08 +00001713 if (CanDelete)
1714 ReMatDelete.set(VN);
1715 } else {
1716 // Need a stack slot if there is any live range where uses cannot be
1717 // rematerialized.
1718 NeedStackSlot = true;
1719 }
1720 }
1721
1722 // One stack slot per live interval.
Evan Chengcecc8222007-11-17 00:40:40 +00001723 if (NeedStackSlot && vrm.getPreSplitReg(li.reg) == 0)
Evan Cheng9b741602007-11-12 06:35:08 +00001724 Slot = vrm.assignVirt2StackSlot(li.reg);
1725
1726 // Create new intervals and rewrite defs and uses.
1727 for (LiveInterval::Ranges::const_iterator
1728 I = li.ranges.begin(), E = li.ranges.end(); I != E; ++I) {
Evan Chengcecc8222007-11-17 00:40:40 +00001729 MachineInstr *ReMatDefMI = ReMatDefs[I->valno->id];
1730 MachineInstr *ReMatOrigDefMI = ReMatOrigDefs[I->valno->id];
1731 bool DefIsReMat = ReMatDefMI != NULL;
Evan Cheng9b741602007-11-12 06:35:08 +00001732 bool CanDelete = ReMatDelete[I->valno->id];
1733 int LdSlot = 0;
Evan Chengcecc8222007-11-17 00:40:40 +00001734 bool isLoadSS = DefIsReMat && tii_->isLoadFromStackSlot(ReMatDefMI, LdSlot);
Evan Cheng9b741602007-11-12 06:35:08 +00001735 bool isLoad = isLoadSS ||
Chris Lattner5b930372008-01-07 07:27:27 +00001736 (DefIsReMat && ReMatDefMI->getDesc().isSimpleLoad());
Evan Chengcecc8222007-11-17 00:40:40 +00001737 rewriteInstructionsForSpills(li, TrySplit, I, ReMatOrigDefMI, ReMatDefMI,
Evan Cheng96c61312007-11-29 01:06:25 +00001738 Slot, LdSlot, isLoad, isLoadSS, DefIsReMat,
Evan Chenga37ecfe2008-02-22 09:24:50 +00001739 CanDelete, vrm, rc, ReMatIds, loopInfo,
Evan Cheng96c61312007-11-29 01:06:25 +00001740 SpillMBBs, SpillIdxes, RestoreMBBs, RestoreIdxes,
Evan Chengba221ca2008-06-06 07:54:39 +00001741 MBBVRegsMap, NewLIs, SSWeight);
Evan Cheng9b741602007-11-12 06:35:08 +00001742 }
1743
Evan Cheng96c61312007-11-29 01:06:25 +00001744 // Insert spills / restores if we are splitting.
Evan Cheng70f68e92008-04-03 16:39:43 +00001745 if (!TrySplit) {
Evan Cheng7b88cbc2008-04-11 17:53:36 +00001746 handleSpilledImpDefs(li, vrm, rc, NewLIs);
Evan Cheng7b632362007-11-29 10:12:14 +00001747 return NewLIs;
Evan Cheng70f68e92008-04-03 16:39:43 +00001748 }
Evan Cheng7b632362007-11-29 10:12:14 +00001749
Evan Chenged17a892007-12-05 08:16:32 +00001750 SmallPtrSet<LiveInterval*, 4> AddedKill;
Evan Chengfd0bd3c2007-12-02 08:30:39 +00001751 SmallVector<unsigned, 2> Ops;
Evan Cheng7b632362007-11-29 10:12:14 +00001752 if (NeedStackSlot) {
1753 int Id = SpillMBBs.find_first();
1754 while (Id != -1) {
Evan Chengba221ca2008-06-06 07:54:39 +00001755 MachineBasicBlock *MBB = mf_->getBlockNumbered(Id);
1756 unsigned loopDepth = loopInfo->getLoopDepth(MBB);
Evan Cheng7b632362007-11-29 10:12:14 +00001757 std::vector<SRInfo> &spills = SpillIdxes[Id];
1758 for (unsigned i = 0, e = spills.size(); i != e; ++i) {
1759 int index = spills[i].index;
1760 unsigned VReg = spills[i].vreg;
Evan Cheng3a46f222007-12-04 00:32:23 +00001761 LiveInterval &nI = getOrCreateInterval(VReg);
Evan Cheng96c61312007-11-29 01:06:25 +00001762 bool isReMat = vrm.isReMaterialized(VReg);
1763 MachineInstr *MI = getInstructionFromIndex(index);
Evan Chengfd0bd3c2007-12-02 08:30:39 +00001764 bool CanFold = false;
1765 bool FoundUse = false;
1766 Ops.clear();
Evan Cheng16ebb8c2007-11-30 21:23:43 +00001767 if (spills[i].canFold) {
Evan Chengfd0bd3c2007-12-02 08:30:39 +00001768 CanFold = true;
Evan Cheng96c61312007-11-29 01:06:25 +00001769 for (unsigned j = 0, ee = MI->getNumOperands(); j != ee; ++j) {
1770 MachineOperand &MO = MI->getOperand(j);
1771 if (!MO.isRegister() || MO.getReg() != VReg)
1772 continue;
Evan Chengfd0bd3c2007-12-02 08:30:39 +00001773
1774 Ops.push_back(j);
1775 if (MO.isDef())
Evan Cheng16ebb8c2007-11-30 21:23:43 +00001776 continue;
Evan Chengfd0bd3c2007-12-02 08:30:39 +00001777 if (isReMat ||
1778 (!FoundUse && !alsoFoldARestore(Id, index, VReg,
1779 RestoreMBBs, RestoreIdxes))) {
1780 // MI has two-address uses of the same register. If the use
1781 // isn't the first and only use in the BB, then we can't fold
1782 // it. FIXME: Move this to rewriteInstructionsForSpills.
1783 CanFold = false;
Evan Cheng16ebb8c2007-11-30 21:23:43 +00001784 break;
1785 }
Evan Chengfd0bd3c2007-12-02 08:30:39 +00001786 FoundUse = true;
Evan Cheng96c61312007-11-29 01:06:25 +00001787 }
1788 }
1789 // Fold the store into the def if possible.
Evan Cheng16ebb8c2007-11-30 21:23:43 +00001790 bool Folded = false;
Evan Chengfd0bd3c2007-12-02 08:30:39 +00001791 if (CanFold && !Ops.empty()) {
1792 if (tryFoldMemoryOperand(MI, vrm, NULL, index, Ops, true, Slot,VReg)){
Evan Cheng16ebb8c2007-11-30 21:23:43 +00001793 Folded = true;
Evan Cheng550092f2007-12-05 09:05:34 +00001794 if (FoundUse) {
Evan Chengfd0bd3c2007-12-02 08:30:39 +00001795 // Also folded uses, do not issue a load.
1796 eraseRestoreInfo(Id, index, VReg, RestoreMBBs, RestoreIdxes);
Evan Cheng550092f2007-12-05 09:05:34 +00001797 nI.removeRange(getLoadIndex(index), getUseIndex(index)+1);
1798 }
Evan Cheng3a46f222007-12-04 00:32:23 +00001799 nI.removeRange(getDefIndex(index), getStoreIndex(index));
Evan Cheng16ebb8c2007-11-30 21:23:43 +00001800 }
Evan Cheng96c61312007-11-29 01:06:25 +00001801 }
1802
Evan Cheng909ab8b42008-04-09 20:57:25 +00001803 // Otherwise tell the spiller to issue a spill.
Evan Chenged17a892007-12-05 08:16:32 +00001804 if (!Folded) {
1805 LiveRange *LR = &nI.ranges[nI.ranges.size()-1];
1806 bool isKill = LR->end == getStoreIndex(index);
Evan Cheng87dc7692008-05-20 08:10:37 +00001807 if (!MI->registerDefIsDead(nI.reg))
1808 // No need to spill a dead def.
1809 vrm.addSpillPoint(VReg, isKill, MI);
Evan Chenged17a892007-12-05 08:16:32 +00001810 if (isKill)
1811 AddedKill.insert(&nI);
1812 }
Evan Chengba221ca2008-06-06 07:54:39 +00001813
1814 // Update spill slot weight.
1815 if (!isReMat)
Evan Chengf7b45f42008-06-21 06:45:54 +00001816 SSWeight += getSpillWeight(true, false, loopDepth);
Evan Cheng96c61312007-11-29 01:06:25 +00001817 }
Evan Cheng7b632362007-11-29 10:12:14 +00001818 Id = SpillMBBs.find_next(Id);
Evan Cheng96c61312007-11-29 01:06:25 +00001819 }
Evan Cheng7b632362007-11-29 10:12:14 +00001820 }
Evan Cheng96c61312007-11-29 01:06:25 +00001821
Evan Cheng7b632362007-11-29 10:12:14 +00001822 int Id = RestoreMBBs.find_first();
1823 while (Id != -1) {
Evan Chengba221ca2008-06-06 07:54:39 +00001824 MachineBasicBlock *MBB = mf_->getBlockNumbered(Id);
1825 unsigned loopDepth = loopInfo->getLoopDepth(MBB);
1826
Evan Cheng7b632362007-11-29 10:12:14 +00001827 std::vector<SRInfo> &restores = RestoreIdxes[Id];
1828 for (unsigned i = 0, e = restores.size(); i != e; ++i) {
1829 int index = restores[i].index;
1830 if (index == -1)
1831 continue;
1832 unsigned VReg = restores[i].vreg;
Evan Cheng3a46f222007-12-04 00:32:23 +00001833 LiveInterval &nI = getOrCreateInterval(VReg);
Evan Chengba221ca2008-06-06 07:54:39 +00001834 bool isReMat = vrm.isReMaterialized(VReg);
Evan Chengcecc8222007-11-17 00:40:40 +00001835 MachineInstr *MI = getInstructionFromIndex(index);
Evan Chengfd0bd3c2007-12-02 08:30:39 +00001836 bool CanFold = false;
1837 Ops.clear();
Evan Cheng16ebb8c2007-11-30 21:23:43 +00001838 if (restores[i].canFold) {
Evan Chengfd0bd3c2007-12-02 08:30:39 +00001839 CanFold = true;
Evan Chengcecc8222007-11-17 00:40:40 +00001840 for (unsigned j = 0, ee = MI->getNumOperands(); j != ee; ++j) {
1841 MachineOperand &MO = MI->getOperand(j);
1842 if (!MO.isRegister() || MO.getReg() != VReg)
1843 continue;
Evan Chengfd0bd3c2007-12-02 08:30:39 +00001844
Evan Cheng96c61312007-11-29 01:06:25 +00001845 if (MO.isDef()) {
Evan Chengfd0bd3c2007-12-02 08:30:39 +00001846 // If this restore were to be folded, it would have been folded
1847 // already.
1848 CanFold = false;
Evan Chengcecc8222007-11-17 00:40:40 +00001849 break;
1850 }
Evan Chengfd0bd3c2007-12-02 08:30:39 +00001851 Ops.push_back(j);
Evan Chengcecc8222007-11-17 00:40:40 +00001852 }
1853 }
Evan Cheng96c61312007-11-29 01:06:25 +00001854
1855 // Fold the load into the use if possible.
Evan Cheng16ebb8c2007-11-30 21:23:43 +00001856 bool Folded = false;
Evan Chengfd0bd3c2007-12-02 08:30:39 +00001857 if (CanFold && !Ops.empty()) {
Evan Chengba221ca2008-06-06 07:54:39 +00001858 if (!isReMat)
Evan Chengfd0bd3c2007-12-02 08:30:39 +00001859 Folded = tryFoldMemoryOperand(MI, vrm, NULL,index,Ops,true,Slot,VReg);
1860 else {
Evan Cheng96c61312007-11-29 01:06:25 +00001861 MachineInstr *ReMatDefMI = vrm.getReMaterializedMI(VReg);
1862 int LdSlot = 0;
1863 bool isLoadSS = tii_->isLoadFromStackSlot(ReMatDefMI, LdSlot);
1864 // If the rematerializable def is a load, also try to fold it.
Chris Lattner5b930372008-01-07 07:27:27 +00001865 if (isLoadSS || ReMatDefMI->getDesc().isSimpleLoad())
Evan Chengfd0bd3c2007-12-02 08:30:39 +00001866 Folded = tryFoldMemoryOperand(MI, vrm, ReMatDefMI, index,
1867 Ops, isLoadSS, LdSlot, VReg);
Evan Chenga37ecfe2008-02-22 09:24:50 +00001868 unsigned ImpUse = getReMatImplicitUse(li, ReMatDefMI);
1869 if (ImpUse) {
1870 // Re-matting an instruction with virtual register use. Add the
1871 // register as an implicit use on the use MI and update the register
Evan Cheng8adc74e2008-03-31 07:53:30 +00001872 // interval's spill weight to HUGE_VALF to prevent it from being
1873 // spilled.
Evan Chenga37ecfe2008-02-22 09:24:50 +00001874 LiveInterval &ImpLi = getInterval(ImpUse);
Evan Cheng8adc74e2008-03-31 07:53:30 +00001875 ImpLi.weight = HUGE_VALF;
Evan Chenga37ecfe2008-02-22 09:24:50 +00001876 MI->addOperand(MachineOperand::CreateReg(ImpUse, false, true));
1877 }
Evan Chengfd0bd3c2007-12-02 08:30:39 +00001878 }
Evan Cheng96c61312007-11-29 01:06:25 +00001879 }
1880 // If folding is not possible / failed, then tell the spiller to issue a
1881 // load / rematerialization for us.
Evan Cheng3a46f222007-12-04 00:32:23 +00001882 if (Folded)
1883 nI.removeRange(getLoadIndex(index), getUseIndex(index)+1);
Evan Chenged17a892007-12-05 08:16:32 +00001884 else
Evan Cheng96c61312007-11-29 01:06:25 +00001885 vrm.addRestorePoint(VReg, MI);
Evan Chengba221ca2008-06-06 07:54:39 +00001886
1887 // Update spill slot weight.
1888 if (!isReMat)
Evan Chengf7b45f42008-06-21 06:45:54 +00001889 SSWeight += getSpillWeight(false, true, loopDepth);
Evan Chengcecc8222007-11-17 00:40:40 +00001890 }
Evan Cheng7b632362007-11-29 10:12:14 +00001891 Id = RestoreMBBs.find_next(Id);
Evan Chengcecc8222007-11-17 00:40:40 +00001892 }
1893
Evan Chenged17a892007-12-05 08:16:32 +00001894 // Finalize intervals: add kills, finalize spill weights, and filter out
1895 // dead intervals.
Evan Cheng3a46f222007-12-04 00:32:23 +00001896 std::vector<LiveInterval*> RetNewLIs;
1897 for (unsigned i = 0, e = NewLIs.size(); i != e; ++i) {
1898 LiveInterval *LI = NewLIs[i];
1899 if (!LI->empty()) {
Owen Anderson50ca1a22008-07-23 19:47:27 +00001900 LI->weight /= InstrSlots::NUM * getApproximateInstructionCount(*LI);
Evan Chenged17a892007-12-05 08:16:32 +00001901 if (!AddedKill.count(LI)) {
1902 LiveRange *LR = &LI->ranges[LI->ranges.size()-1];
Evan Chengd9731042007-12-05 10:24:35 +00001903 unsigned LastUseIdx = getBaseIndex(LR->end);
1904 MachineInstr *LastUse = getInstructionFromIndex(LastUseIdx);
Evan Chengc7daf1f2008-03-05 00:59:57 +00001905 int UseIdx = LastUse->findRegisterUseOperandIdx(LI->reg, false);
Evan Chenged17a892007-12-05 08:16:32 +00001906 assert(UseIdx != -1);
Evan Chenga37ecfe2008-02-22 09:24:50 +00001907 if (LastUse->getOperand(UseIdx).isImplicit() ||
1908 LastUse->getDesc().getOperandConstraint(UseIdx,TOI::TIED_TO) == -1){
Evan Chenged17a892007-12-05 08:16:32 +00001909 LastUse->getOperand(UseIdx).setIsKill();
Evan Chengd9731042007-12-05 10:24:35 +00001910 vrm.addKillPoint(LI->reg, LastUseIdx);
Evan Cheng6f522672007-12-05 09:51:10 +00001911 }
Evan Chenged17a892007-12-05 08:16:32 +00001912 }
Evan Cheng3a46f222007-12-04 00:32:23 +00001913 RetNewLIs.push_back(LI);
1914 }
1915 }
Evan Chengcecc8222007-11-17 00:40:40 +00001916
Evan Cheng7b88cbc2008-04-11 17:53:36 +00001917 handleSpilledImpDefs(li, vrm, rc, RetNewLIs);
Evan Cheng3a46f222007-12-04 00:32:23 +00001918 return RetNewLIs;
Evan Cheng9b741602007-11-12 06:35:08 +00001919}
Evan Cheng14cc83f2008-03-11 07:19:34 +00001920
1921/// hasAllocatableSuperReg - Return true if the specified physical register has
1922/// any super register that's allocatable.
1923bool LiveIntervals::hasAllocatableSuperReg(unsigned Reg) const {
1924 for (const unsigned* AS = tri_->getSuperRegisters(Reg); *AS; ++AS)
1925 if (allocatableRegs_[*AS] && hasInterval(*AS))
1926 return true;
1927 return false;
1928}
1929
1930/// getRepresentativeReg - Find the largest super register of the specified
1931/// physical register.
1932unsigned LiveIntervals::getRepresentativeReg(unsigned Reg) const {
1933 // Find the largest super-register that is allocatable.
1934 unsigned BestReg = Reg;
1935 for (const unsigned* AS = tri_->getSuperRegisters(Reg); *AS; ++AS) {
1936 unsigned SuperReg = *AS;
1937 if (!hasAllocatableSuperReg(SuperReg) && hasInterval(SuperReg)) {
1938 BestReg = SuperReg;
1939 break;
1940 }
1941 }
1942 return BestReg;
1943}
1944
1945/// getNumConflictsWithPhysReg - Return the number of uses and defs of the
1946/// specified interval that conflict with the specified physical register.
1947unsigned LiveIntervals::getNumConflictsWithPhysReg(const LiveInterval &li,
1948 unsigned PhysReg) const {
1949 unsigned NumConflicts = 0;
1950 const LiveInterval &pli = getInterval(getRepresentativeReg(PhysReg));
1951 for (MachineRegisterInfo::reg_iterator I = mri_->reg_begin(li.reg),
1952 E = mri_->reg_end(); I != E; ++I) {
1953 MachineOperand &O = I.getOperand();
1954 MachineInstr *MI = O.getParent();
1955 unsigned Index = getInstructionIndex(MI);
1956 if (pli.liveAt(Index))
1957 ++NumConflicts;
1958 }
1959 return NumConflicts;
1960}
1961
1962/// spillPhysRegAroundRegDefsUses - Spill the specified physical register
1963/// around all defs and uses of the specified interval.
1964void LiveIntervals::spillPhysRegAroundRegDefsUses(const LiveInterval &li,
1965 unsigned PhysReg, VirtRegMap &vrm) {
1966 unsigned SpillReg = getRepresentativeReg(PhysReg);
1967
1968 for (const unsigned *AS = tri_->getAliasSet(PhysReg); *AS; ++AS)
1969 // If there is a register which aliases PhysReg but is not a
1970 // sub-register of the chosen representative super register, assert,
1971 // since we can't handle that case yet.
1972 assert(*AS == SpillReg || !allocatableRegs_[*AS] ||
1973 tri_->isSuperRegister(*AS, SpillReg));
1974
1975 LiveInterval &pli = getInterval(SpillReg);
1976 SmallPtrSet<MachineInstr*, 8> SeenMIs;
1977 for (MachineRegisterInfo::reg_iterator I = mri_->reg_begin(li.reg),
1978 E = mri_->reg_end(); I != E; ++I) {
1979 MachineOperand &O = I.getOperand();
1980 MachineInstr *MI = O.getParent();
1981 if (SeenMIs.count(MI))
1982 continue;
1983 SeenMIs.insert(MI);
1984 unsigned Index = getInstructionIndex(MI);
1985 if (pli.liveAt(Index)) {
1986 vrm.addEmergencySpill(SpillReg, MI);
1987 pli.removeRange(getLoadIndex(Index), getStoreIndex(Index)+1);
1988 for (const unsigned* AS = tri_->getSubRegisters(SpillReg); *AS; ++AS) {
1989 if (!hasInterval(*AS))
1990 continue;
1991 LiveInterval &spli = getInterval(*AS);
1992 if (spli.liveAt(Index))
1993 spli.removeRange(getLoadIndex(Index), getStoreIndex(Index)+1);
1994 }
1995 }
1996 }
1997}
Owen Anderson7399f222008-06-05 17:15:43 +00001998
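/// addLiveRangeToEndOfBlock - Create (or extend) the live interval for reg
/// with a range from the def by startInst to the end of startInst's basic
/// block, marking the value as having a PHI kill at the block boundary.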
1999LiveRange LiveIntervals::addLiveRangeToEndOfBlock(unsigned reg,
2000 MachineInstr* startInst) {
2001 LiveInterval& Interval = getOrCreateInterval(reg);
2002 VNInfo* VN = Interval.getNextValue(
2003 getInstructionIndex(startInst) + InstrSlots::DEF,
2004 startInst, getVNInfoAllocator());
2005 VN->hasPHIKill = true;
2006 VN->kills.push_back(getMBBEndIdx(startInst->getParent()));
2007 LiveRange LR(getInstructionIndex(startInst) + InstrSlots::DEF,
2008 getMBBEndIdx(startInst->getParent()) + 1, VN);
2009 Interval.addRange(LR);
2010
2011 return LR;
2012}