Chris Lattnera3b8b5c2004-07-23 17:56:30 +00001//===-- LiveIntervalAnalysis.cpp - Live Interval Analysis -----------------===//
Alkis Evlogimenosff0cbe12003-11-20 03:32:25 +00002//
3// The LLVM Compiler Infrastructure
4//
Chris Lattner4ee451d2007-12-29 20:36:04 +00005// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
Alkis Evlogimenosff0cbe12003-11-20 03:32:25 +00007//
8//===----------------------------------------------------------------------===//
9//
10// This file implements the LiveInterval analysis pass which is used
11// by the Linear Scan Register allocator. This pass linearizes the
12// basic blocks of the function in DFS order and uses the
13// LiveVariables pass to conservatively compute live intervals for
14// each virtual and physical register.
15//
16//===----------------------------------------------------------------------===//
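//
// A note on instruction numbering (illustrative; the authoritative slot
// definitions live in LiveIntervalAnalysis.h): each MachineInstr is assigned
// InstrSlots::NUM consecutive index values, roughly
//
//   enum { LOAD = 0, USE = 1, DEF = 2, STORE = 3, NUM = 4 };
//
// so that for any index i, i / InstrSlots::NUM identifies the instruction and
// i % InstrSlots::NUM identifies which slot (load, use, def or store) of that
// instruction the index refers to.
//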
17
18#define DEBUG_TYPE "liveintervals"
Chris Lattner3c3fe462005-09-21 04:19:09 +000019#include "llvm/CodeGen/LiveIntervalAnalysis.h"
Misha Brukman08a6c762004-09-03 18:25:53 +000020#include "VirtRegMap.h"
Chris Lattner015959e2004-05-01 21:24:39 +000021#include "llvm/Value.h"
Dan Gohman6d69ba82008-07-25 00:02:30 +000022#include "llvm/Analysis/AliasAnalysis.h"
Alkis Evlogimenosff0cbe12003-11-20 03:32:25 +000023#include "llvm/CodeGen/LiveVariables.h"
24#include "llvm/CodeGen/MachineFrameInfo.h"
Alkis Evlogimenosff0cbe12003-11-20 03:32:25 +000025#include "llvm/CodeGen/MachineInstr.h"
Evan Cheng22f07ff2007-12-11 02:09:15 +000026#include "llvm/CodeGen/MachineLoopInfo.h"
Chris Lattner84bc5422007-12-31 04:13:23 +000027#include "llvm/CodeGen/MachineRegisterInfo.h"
Alkis Evlogimenosff0cbe12003-11-20 03:32:25 +000028#include "llvm/CodeGen/Passes.h"
Dan Gohman6d69ba82008-07-25 00:02:30 +000029#include "llvm/CodeGen/PseudoSourceValue.h"
Dan Gohman6f0d0242008-02-10 18:45:23 +000030#include "llvm/Target/TargetRegisterInfo.h"
Alkis Evlogimenosff0cbe12003-11-20 03:32:25 +000031#include "llvm/Target/TargetInstrInfo.h"
32#include "llvm/Target/TargetMachine.h"
Reid Spencer551ccae2004-09-01 22:55:40 +000033#include "llvm/Support/CommandLine.h"
34#include "llvm/Support/Debug.h"
35#include "llvm/ADT/Statistic.h"
36#include "llvm/ADT/STLExtras.h"
Alkis Evlogimenos20aa4742004-09-03 18:19:51 +000037#include <algorithm>
Jeff Cohen97af7512006-12-02 02:22:01 +000038#include <cmath>
Alkis Evlogimenosff0cbe12003-11-20 03:32:25 +000039using namespace llvm;
40
Dan Gohman844731a2008-05-13 00:00:25 +000041// Hidden options to help with debugging.
42static cl::opt<bool> DisableReMat("disable-rematerialization",
43 cl::init(false), cl::Hidden);
Evan Cheng81a03822007-11-17 00:40:40 +000044
Dan Gohman844731a2008-05-13 00:00:25 +000045static cl::opt<bool> SplitAtBB("split-intervals-at-bb",
46 cl::init(true), cl::Hidden);
47static cl::opt<int> SplitLimit("split-limit",
48 cl::init(-1), cl::Hidden);
Evan Chengbc165e42007-08-16 07:24:22 +000049
Dan Gohman4c8f8702008-07-25 15:08:37 +000050static cl::opt<bool> EnableAggressiveRemat("aggressive-remat", cl::Hidden);
51
Chris Lattnercd3245a2006-12-19 22:41:21 +000052STATISTIC(numIntervals, "Number of original intervals");
53STATISTIC(numIntervalsAfter, "Number of intervals after coalescing");
Evan Cheng0cbb1162007-11-29 01:06:25 +000054STATISTIC(numFolds , "Number of loads/stores folded into instructions");
55STATISTIC(numSplits , "Number of intervals split");
Chris Lattnercd3245a2006-12-19 22:41:21 +000056
Devang Patel19974732007-05-03 01:11:54 +000057char LiveIntervals::ID = 0;
Dan Gohman844731a2008-05-13 00:00:25 +000058static RegisterPass<LiveIntervals> X("liveintervals", "Live Interval Analysis");
Alkis Evlogimenosff0cbe12003-11-20 03:32:25 +000059
Chris Lattnerf7da2c72006-08-24 22:43:55 +000060void LiveIntervals::getAnalysisUsage(AnalysisUsage &AU) const {
Dan Gohman6d69ba82008-07-25 00:02:30 +000061 AU.addRequired<AliasAnalysis>();
62 AU.addPreserved<AliasAnalysis>();
David Greene25133302007-06-08 17:18:56 +000063 AU.addPreserved<LiveVariables>();
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +000064 AU.addRequired<LiveVariables>();
Bill Wendling67d65bb2008-01-04 20:54:55 +000065 AU.addPreservedID(MachineLoopInfoID);
66 AU.addPreservedID(MachineDominatorsID);
Owen Andersonaa111082008-08-06 20:58:38 +000067 AU.addPreservedID(PHIEliminationID);
68 AU.addRequiredID(PHIEliminationID);
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +000069 AU.addRequiredID(TwoAddressInstructionPassID);
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +000070 MachineFunctionPass::getAnalysisUsage(AU);
Alkis Evlogimenosff0cbe12003-11-20 03:32:25 +000071}
72
Chris Lattnerf7da2c72006-08-24 22:43:55 +000073void LiveIntervals::releaseMemory() {
Owen Anderson03857b22008-08-13 21:49:13 +000074 // Free the live intervals themselves.
Owen Anderson20e28392008-08-13 22:08:30 +000075 for (DenseMap<unsigned, LiveInterval*>::iterator I = r2iMap_.begin(),
Owen Anderson03857b22008-08-13 21:49:13 +000076 E = r2iMap_.end(); I != E; ++I)
77 delete I->second;
78
Evan Cheng3f32d652008-06-04 09:18:41 +000079 MBB2IdxMap.clear();
Evan Cheng4ca980e2007-10-17 02:10:22 +000080 Idx2MBBMap.clear();
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +000081 mi2iMap_.clear();
82 i2miMap_.clear();
83 r2iMap_.clear();
Evan Chengdd199d22007-09-06 01:07:24 +000084 // Release VNInfo memory regions after all VNInfo objects are dtor'd.
85 VNInfoAllocator.Reset();
Evan Cheng1ed99222008-07-19 00:37:25 +000086 while (!ClonedMIs.empty()) {
87 MachineInstr *MI = ClonedMIs.back();
88 ClonedMIs.pop_back();
89 mf_->DeleteMachineInstr(MI);
90 }
Alkis Evlogimenos08cec002004-01-31 19:59:32 +000091}
92
Owen Anderson80b3ce62008-05-28 20:54:50 +000093void LiveIntervals::computeNumbering() {
94 Index2MiMap OldI2MI = i2miMap_;
Owen Anderson7fbad272008-07-23 21:37:49 +000095 std::vector<IdxMBBPair> OldI2MBB = Idx2MBBMap;
Owen Anderson80b3ce62008-05-28 20:54:50 +000096
97 Idx2MBBMap.clear();
98 MBB2IdxMap.clear();
99 mi2iMap_.clear();
100 i2miMap_.clear();
101
Owen Andersona1566f22008-07-22 22:46:49 +0000102 FunctionSize = 0;
103
Chris Lattner428b92e2006-09-15 03:57:23 +0000104 // Number MachineInstrs and MachineBasicBlocks.
105 // Initialize MBB indexes to a sentinel.
Evan Cheng549f27d32007-08-13 23:45:17 +0000106 MBB2IdxMap.resize(mf_->getNumBlockIDs(), std::make_pair(~0U,~0U));
Chris Lattner428b92e2006-09-15 03:57:23 +0000107
108 unsigned MIIndex = 0;
109 for (MachineFunction::iterator MBB = mf_->begin(), E = mf_->end();
110 MBB != E; ++MBB) {
Evan Cheng549f27d32007-08-13 23:45:17 +0000111 unsigned StartIdx = MIIndex;
Evan Cheng0c9f92e2007-02-13 01:30:55 +0000112
Owen Anderson7fbad272008-07-23 21:37:49 +0000113 // Insert an empty slot at the beginning of each block.
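    // (The extra, unmapped slots inserted here and after each instruction
    // leave gaps in the numbering; presumably this is so that instructions
    // added later, e.g. spill and reload code, can be given indices without
    // renumbering the entire function.)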
114 MIIndex += InstrSlots::NUM;
115 i2miMap_.push_back(0);
116
Chris Lattner428b92e2006-09-15 03:57:23 +0000117 for (MachineBasicBlock::iterator I = MBB->begin(), E = MBB->end();
118 I != E; ++I) {
119 bool inserted = mi2iMap_.insert(std::make_pair(I, MIIndex)).second;
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000120 assert(inserted && "multiple MachineInstr -> index mappings");
Chris Lattner428b92e2006-09-15 03:57:23 +0000121 i2miMap_.push_back(I);
122 MIIndex += InstrSlots::NUM;
Owen Andersona1566f22008-07-22 22:46:49 +0000123 FunctionSize++;
Owen Anderson7fbad272008-07-23 21:37:49 +0000124
125 // Insert an empty slot after every instruction.
Owen Anderson1fbb4542008-06-16 16:58:24 +0000126 MIIndex += InstrSlots::NUM;
127 i2miMap_.push_back(0);
Owen Anderson35578012008-06-16 07:10:49 +0000128 }
Owen Anderson7fbad272008-07-23 21:37:49 +0000129
Owen Anderson1fbb4542008-06-16 16:58:24 +0000130 // Set the MBB2IdxMap entry for this MBB.
131 MBB2IdxMap[MBB->getNumber()] = std::make_pair(StartIdx, MIIndex - 1);
132 Idx2MBBMap.push_back(std::make_pair(StartIdx, MBB));
Chris Lattner428b92e2006-09-15 03:57:23 +0000133 }
Evan Cheng4ca980e2007-10-17 02:10:22 +0000134 std::sort(Idx2MBBMap.begin(), Idx2MBBMap.end(), Idx2MBBCompare());
Owen Anderson80b3ce62008-05-28 20:54:50 +0000135
136 if (!OldI2MI.empty())
Owen Anderson788d0412008-08-06 18:35:45 +0000137 for (iterator OI = begin(), OE = end(); OI != OE; ++OI) {
Owen Anderson03857b22008-08-13 21:49:13 +0000138 for (LiveInterval::iterator LI = OI->second->begin(),
139 LE = OI->second->end(); LI != LE; ++LI) {
Owen Anderson4b5b2092008-05-29 18:15:49 +0000140
Owen Anderson7eec0c22008-05-29 23:01:22 +0000141 // Remap the start index of the live range to the corresponding new
142 // number, or our best guess at what it _should_ correspond to if the
143 // original instruction has been erased. This is either the following
144 // instruction or its predecessor.
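        // Illustrative example (assuming InstrSlots::NUM == 4): an old start
        // index of 42 yields index = 42 / 4 = 10 and offset = 42 % 4 = 2, so
        // the new start becomes mi2iMap_[OldI2MI[10]] + 2 (assuming the
        // instruction at old slot 10 still exists). An offset equal to
        // InstrSlots::LOAD instead means the range began at a block boundary
        // (e.g. a live-in register), in which case the new start index of
        // that block is used.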
Owen Anderson7fbad272008-07-23 21:37:49 +0000145 unsigned index = LI->start / InstrSlots::NUM;
Owen Anderson7eec0c22008-05-29 23:01:22 +0000146 unsigned offset = LI->start % InstrSlots::NUM;
Owen Anderson0a7615a2008-07-25 23:06:59 +0000147 if (offset == InstrSlots::LOAD) {
Owen Anderson7fbad272008-07-23 21:37:49 +0000148 std::vector<IdxMBBPair>::const_iterator I =
Owen Andersond7dcbec2008-07-25 19:50:48 +0000149 std::lower_bound(OldI2MBB.begin(), OldI2MBB.end(), LI->start);
Owen Anderson7fbad272008-07-23 21:37:49 +0000150 // Take the pair containing the index
151 std::vector<IdxMBBPair>::const_iterator J =
Owen Andersona0c032f2008-07-29 21:15:44 +0000152 (I == OldI2MBB.end() && OldI2MBB.size()>0) ? (I-1): I;
Owen Anderson7eec0c22008-05-29 23:01:22 +0000153
Owen Anderson7fbad272008-07-23 21:37:49 +0000154 LI->start = getMBBStartIdx(J->second);
155 } else {
156 LI->start = mi2iMap_[OldI2MI[index]] + offset;
Owen Anderson7eec0c22008-05-29 23:01:22 +0000157 }
158
159 // Remap the ending index in the same way that we remapped the start,
160 // except for the final step where we always map to the immediately
161 // following instruction.
Owen Andersond7dcbec2008-07-25 19:50:48 +0000162 index = (LI->end - 1) / InstrSlots::NUM;
Owen Anderson7fbad272008-07-23 21:37:49 +0000163 offset = LI->end % InstrSlots::NUM;
Owen Anderson9382b932008-07-30 00:22:56 +0000164 if (offset == InstrSlots::LOAD) {
165 // VReg dies at end of block.
Owen Anderson7fbad272008-07-23 21:37:49 +0000166 std::vector<IdxMBBPair>::const_iterator I =
Owen Andersond7dcbec2008-07-25 19:50:48 +0000167 std::lower_bound(OldI2MBB.begin(), OldI2MBB.end(), LI->end);
Owen Anderson9382b932008-07-30 00:22:56 +0000168 --I;
Owen Anderson7fbad272008-07-23 21:37:49 +0000169
Owen Anderson9382b932008-07-30 00:22:56 +0000170 LI->end = getMBBEndIdx(I->second) + 1;
Owen Anderson4b5b2092008-05-29 18:15:49 +0000171 } else {
Owen Andersond7dcbec2008-07-25 19:50:48 +0000172 unsigned idx = index;
Owen Anderson8d0cc0a2008-07-25 21:07:13 +0000173 while (index < OldI2MI.size() && !OldI2MI[index]) ++index;
174
175 if (index != OldI2MI.size())
176 LI->end = mi2iMap_[OldI2MI[index]] + (idx == index ? offset : 0);
177 else
178 LI->end = InstrSlots::NUM * i2miMap_.size();
Owen Anderson4b5b2092008-05-29 18:15:49 +0000179 }
Owen Anderson788d0412008-08-06 18:35:45 +0000180 }
181
Owen Anderson03857b22008-08-13 21:49:13 +0000182 for (LiveInterval::vni_iterator VNI = OI->second->vni_begin(),
183 VNE = OI->second->vni_end(); VNI != VNE; ++VNI) {
Owen Anderson788d0412008-08-06 18:35:45 +0000184 VNInfo* vni = *VNI;
Owen Anderson745825f42008-05-28 22:40:08 +0000185
Owen Anderson7eec0c22008-05-29 23:01:22 +0000186 // Remap the VNInfo def index, which works the same as the
Owen Anderson788d0412008-08-06 18:35:45 +0000187 // start indices above. VNs with special sentinel defs
188 // don't need to be remapped.
Owen Anderson91292392008-07-30 17:42:47 +0000189 if (vni->def != ~0U && vni->def != ~1U) {
Owen Anderson788d0412008-08-06 18:35:45 +0000190 unsigned index = vni->def / InstrSlots::NUM;
191 unsigned offset = vni->def % InstrSlots::NUM;
Owen Anderson91292392008-07-30 17:42:47 +0000192 if (offset == InstrSlots::LOAD) {
193 std::vector<IdxMBBPair>::const_iterator I =
Owen Anderson0a7615a2008-07-25 23:06:59 +0000194 std::lower_bound(OldI2MBB.begin(), OldI2MBB.end(), vni->def);
Owen Anderson91292392008-07-30 17:42:47 +0000195 // Take the pair containing the index
196 std::vector<IdxMBBPair>::const_iterator J =
Owen Andersona0c032f2008-07-29 21:15:44 +0000197 (I == OldI2MBB.end() && OldI2MBB.size()>0) ? (I-1): I;
Owen Anderson7eec0c22008-05-29 23:01:22 +0000198
Owen Anderson91292392008-07-30 17:42:47 +0000199 vni->def = getMBBStartIdx(J->second);
200 } else {
201 vni->def = mi2iMap_[OldI2MI[index]] + offset;
202 }
Owen Anderson7eec0c22008-05-29 23:01:22 +0000203 }
Owen Anderson745825f42008-05-28 22:40:08 +0000204
Owen Anderson7eec0c22008-05-29 23:01:22 +0000205 // Remap the VNInfo kill indices, which works the same as
206 // the end indices above.
Owen Anderson4b5b2092008-05-29 18:15:49 +0000207 for (size_t i = 0; i < vni->kills.size(); ++i) {
Owen Anderson9382b932008-07-30 00:22:56 +0000208 // PHI kills don't need to be remapped.
209 if (!vni->kills[i]) continue;
210
Owen Anderson788d0412008-08-06 18:35:45 +0000211 unsigned index = (vni->kills[i]-1) / InstrSlots::NUM;
212 unsigned offset = vni->kills[i] % InstrSlots::NUM;
213 if (offset == InstrSlots::STORE) {
Owen Anderson7fbad272008-07-23 21:37:49 +0000214 std::vector<IdxMBBPair>::const_iterator I =
Owen Andersond7dcbec2008-07-25 19:50:48 +0000215 std::lower_bound(OldI2MBB.begin(), OldI2MBB.end(), vni->kills[i]);
Owen Anderson9382b932008-07-30 00:22:56 +0000216 --I;
Owen Anderson7fbad272008-07-23 21:37:49 +0000217
Owen Anderson788d0412008-08-06 18:35:45 +0000218 vni->kills[i] = getMBBEndIdx(I->second);
Owen Anderson7fbad272008-07-23 21:37:49 +0000219 } else {
Owen Andersond7dcbec2008-07-25 19:50:48 +0000220 unsigned idx = index;
Owen Anderson8d0cc0a2008-07-25 21:07:13 +0000221 while (index < OldI2MI.size() && !OldI2MI[index]) ++index;
222
223 if (index != OldI2MI.size())
224 vni->kills[i] = mi2iMap_[OldI2MI[index]] +
225 (idx == index ? offset : 0);
226 else
227 vni->kills[i] = InstrSlots::NUM * i2miMap_.size();
Owen Anderson7eec0c22008-05-29 23:01:22 +0000228 }
Owen Anderson4b5b2092008-05-29 18:15:49 +0000229 }
Owen Anderson80b3ce62008-05-28 20:54:50 +0000230 }
Owen Anderson788d0412008-08-06 18:35:45 +0000231 }
Owen Anderson80b3ce62008-05-28 20:54:50 +0000232}
Alkis Evlogimenosd6e40a62004-01-14 10:44:29 +0000233
Owen Anderson80b3ce62008-05-28 20:54:50 +0000234/// runOnMachineFunction - Compute live intervals for the whole function
235///
236bool LiveIntervals::runOnMachineFunction(MachineFunction &fn) {
237 mf_ = &fn;
238 mri_ = &mf_->getRegInfo();
239 tm_ = &fn.getTarget();
240 tri_ = tm_->getRegisterInfo();
241 tii_ = tm_->getInstrInfo();
Dan Gohman6d69ba82008-07-25 00:02:30 +0000242 aa_ = &getAnalysis<AliasAnalysis>();
Owen Anderson80b3ce62008-05-28 20:54:50 +0000243 lv_ = &getAnalysis<LiveVariables>();
244 allocatableRegs_ = tri_->getAllocatableSet(fn);
245
246 computeNumbering();
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000247 computeIntervals();
Alkis Evlogimenos843b1602004-02-15 10:24:21 +0000248
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000249 numIntervals += getNumIntervals();
250
Bill Wendlingbdc679d2006-11-29 00:39:47 +0000251 DOUT << "********** INTERVALS **********\n";
252 for (iterator I = begin(), E = end(); I != E; ++I) {
Owen Anderson03857b22008-08-13 21:49:13 +0000253 I->second->print(DOUT, tri_);
Bill Wendlingbdc679d2006-11-29 00:39:47 +0000254 DOUT << "\n";
255 }
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000256
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000257 numIntervalsAfter += getNumIntervals();
Chris Lattner70ca3582004-09-30 15:59:17 +0000258 DEBUG(dump());
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000259 return true;
Alkis Evlogimenosff0cbe12003-11-20 03:32:25 +0000260}
261
Chris Lattner70ca3582004-09-30 15:59:17 +0000262/// print - Implement the dump method.
Reid Spencerce9653c2004-12-07 04:03:45 +0000263void LiveIntervals::print(std::ostream &O, const Module* ) const {
Chris Lattner70ca3582004-09-30 15:59:17 +0000264 O << "********** INTERVALS **********\n";
Chris Lattner8e7a7092005-07-27 23:03:38 +0000265 for (const_iterator I = begin(), E = end(); I != E; ++I) {
Owen Anderson03857b22008-08-13 21:49:13 +0000266 I->second->print(O, tri_);
Evan Cheng3f32d652008-06-04 09:18:41 +0000267 O << "\n";
Chris Lattner8e7a7092005-07-27 23:03:38 +0000268 }
Chris Lattner70ca3582004-09-30 15:59:17 +0000269
270 O << "********** MACHINEINSTRS **********\n";
271 for (MachineFunction::iterator mbbi = mf_->begin(), mbbe = mf_->end();
272 mbbi != mbbe; ++mbbi) {
273 O << ((Value*)mbbi->getBasicBlock())->getName() << ":\n";
274 for (MachineBasicBlock::iterator mii = mbbi->begin(),
275 mie = mbbi->end(); mii != mie; ++mii) {
Chris Lattner477e4552004-09-30 16:10:45 +0000276 O << getInstructionIndex(mii) << '\t' << *mii;
Chris Lattner70ca3582004-09-30 15:59:17 +0000277 }
278 }
279}
280
Evan Chengc92da382007-11-03 07:20:12 +0000281/// conflictsWithPhysRegDef - Returns true if the specified register
282/// is defined during the duration of the specified interval.
283bool LiveIntervals::conflictsWithPhysRegDef(const LiveInterval &li,
284 VirtRegMap &vrm, unsigned reg) {
285 for (LiveInterval::Ranges::const_iterator
286 I = li.ranges.begin(), E = li.ranges.end(); I != E; ++I) {
287 for (unsigned index = getBaseIndex(I->start),
288 end = getBaseIndex(I->end-1) + InstrSlots::NUM; index != end;
289 index += InstrSlots::NUM) {
290 // skip deleted instructions
291 while (index != end && !getInstructionFromIndex(index))
292 index += InstrSlots::NUM;
293 if (index == end) break;
294
295 MachineInstr *MI = getInstructionFromIndex(index);
Evan Cheng5d446262007-11-15 08:13:29 +0000296 unsigned SrcReg, DstReg;
297 if (tii_->isMoveInstr(*MI, SrcReg, DstReg))
298 if (SrcReg == li.reg || DstReg == li.reg)
299 continue;
Evan Chengc92da382007-11-03 07:20:12 +0000300 for (unsigned i = 0; i != MI->getNumOperands(); ++i) {
301 MachineOperand& mop = MI->getOperand(i);
Evan Cheng5d446262007-11-15 08:13:29 +0000302 if (!mop.isRegister())
Evan Chengc92da382007-11-03 07:20:12 +0000303 continue;
304 unsigned PhysReg = mop.getReg();
Evan Cheng5d446262007-11-15 08:13:29 +0000305 if (PhysReg == 0 || PhysReg == li.reg)
Evan Chengc92da382007-11-03 07:20:12 +0000306 continue;
Dan Gohman6f0d0242008-02-10 18:45:23 +0000307 if (TargetRegisterInfo::isVirtualRegister(PhysReg)) {
Evan Cheng5d446262007-11-15 08:13:29 +0000308 if (!vrm.hasPhys(PhysReg))
309 continue;
Evan Chengc92da382007-11-03 07:20:12 +0000310 PhysReg = vrm.getPhys(PhysReg);
Evan Cheng5d446262007-11-15 08:13:29 +0000311 }
Dan Gohman6f0d0242008-02-10 18:45:23 +0000312 if (PhysReg && tri_->regsOverlap(PhysReg, reg))
Evan Chengc92da382007-11-03 07:20:12 +0000313 return true;
314 }
315 }
316 }
317
318 return false;
319}
320
Evan Cheng549f27d32007-08-13 23:45:17 +0000321void LiveIntervals::printRegName(unsigned reg) const {
Dan Gohman6f0d0242008-02-10 18:45:23 +0000322 if (TargetRegisterInfo::isPhysicalRegister(reg))
Bill Wendlinge6d088a2008-02-26 21:47:57 +0000323 cerr << tri_->getName(reg);
Evan Cheng549f27d32007-08-13 23:45:17 +0000324 else
325 cerr << "%reg" << reg;
326}
327
Chris Lattnerbe4f88a2006-08-22 18:19:46 +0000328void LiveIntervals::handleVirtualRegisterDef(MachineBasicBlock *mbb,
Alkis Evlogimenosff0cbe12003-11-20 03:32:25 +0000329 MachineBasicBlock::iterator mi,
Owen Anderson6b098de2008-06-25 23:39:39 +0000330 unsigned MIIdx, MachineOperand& MO,
Evan Chengef0732d2008-07-10 07:35:43 +0000331 unsigned MOIdx,
Chris Lattnerbe4f88a2006-08-22 18:19:46 +0000332 LiveInterval &interval) {
Bill Wendlingbdc679d2006-11-29 00:39:47 +0000333 DOUT << "\t\tregister: "; DEBUG(printRegName(interval.reg));
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000334 LiveVariables::VarInfo& vi = lv_->getVarInfo(interval.reg);
Alkis Evlogimenosff0cbe12003-11-20 03:32:25 +0000335
Evan Cheng419852c2008-04-03 16:39:43 +0000336 if (mi->getOpcode() == TargetInstrInfo::IMPLICIT_DEF) {
337 DOUT << "is an implicit_def\n";
338 return;
339 }
340
Alkis Evlogimenos70651572004-08-04 09:46:56 +0000341 // Virtual registers may be defined multiple times (due to phi
342 // elimination and 2-addr elimination). Much of what we do only has to be
343 // done once for the vreg. We use an empty interval to detect the first
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000344 // time we see a vreg.
345 if (interval.empty()) {
346 // Get the Idx of the defining instructions.
Chris Lattner6b128bd2006-09-03 08:07:11 +0000347 unsigned defIndex = getDefIndex(MIIdx);
Evan Cheng7ecb38b2007-08-29 20:45:00 +0000348 VNInfo *ValNo;
Evan Chengc8d044e2008-02-15 18:24:29 +0000349 MachineInstr *CopyMI = NULL;
Chris Lattner91725b72006-08-31 05:54:43 +0000350 unsigned SrcReg, DstReg;
Evan Chengc8d044e2008-02-15 18:24:29 +0000351 if (mi->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG ||
Evan Cheng7e073ba2008-04-09 20:57:25 +0000352 mi->getOpcode() == TargetInstrInfo::INSERT_SUBREG ||
Evan Chengc8d044e2008-02-15 18:24:29 +0000353 tii_->isMoveInstr(*mi, SrcReg, DstReg))
354 CopyMI = mi;
355 ValNo = interval.getNextValue(defIndex, CopyMI, VNInfoAllocator);
Evan Cheng7ecb38b2007-08-29 20:45:00 +0000356
357 assert(ValNo->id == 0 && "First value in interval is not 0?");
Chris Lattner7ac2d312004-07-24 02:59:07 +0000358
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000359 // Loop over all of the blocks that the vreg is defined in. There are
360 // two cases we have to handle here. The most common case is a vreg
361 // whose lifetime is contained within a basic block. In this case there
362 // will be a single kill, in MBB, which comes after the definition.
363 if (vi.Kills.size() == 1 && vi.Kills[0]->getParent() == mbb) {
364 // FIXME: what about dead vars?
365 unsigned killIdx;
366 if (vi.Kills[0] != mi)
367 killIdx = getUseIndex(getInstructionIndex(vi.Kills[0]))+1;
368 else
369 killIdx = defIndex+1;
Chris Lattner6097d132004-07-19 02:15:56 +0000370
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000371 // If the kill happens after the definition, we have an intra-block
372 // live range.
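      // Illustrative example (assuming the usual slot layout with
      // InstrSlots::NUM == 4): a vreg defined by the instruction at base
      // index 8 and killed by the instruction at base index 20 gets the
      // intra-block range [getDefIndex(8), getUseIndex(20)+1), i.e. roughly
      // [10, 22).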
373 if (killIdx > defIndex) {
Evan Cheng61de82d2007-02-15 05:59:24 +0000374 assert(vi.AliveBlocks.none() &&
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000375 "Shouldn't be alive across any blocks!");
Evan Cheng7ecb38b2007-08-29 20:45:00 +0000376 LiveRange LR(defIndex, killIdx, ValNo);
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000377 interval.addRange(LR);
Bill Wendlingbdc679d2006-11-29 00:39:47 +0000378 DOUT << " +" << LR << "\n";
Evan Chengf3bb2e62007-09-05 21:46:51 +0000379 interval.addKill(ValNo, killIdx);
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000380 return;
381 }
Alkis Evlogimenosdd2cc652003-12-18 08:48:48 +0000382 }
Alkis Evlogimenosff0cbe12003-11-20 03:32:25 +0000383
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000384 // The other case we handle is when a virtual register lives to the end
385 // of the defining block, potentially live across some blocks, then is
386 // live into some number of blocks, but gets killed. Start by adding a
387 // range that goes from this definition to the end of the defining block.
Owen Anderson7fbad272008-07-23 21:37:49 +0000388 LiveRange NewLR(defIndex, getMBBEndIdx(mbb)+1, ValNo);
Bill Wendlingbdc679d2006-11-29 00:39:47 +0000389 DOUT << " +" << NewLR;
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000390 interval.addRange(NewLR);
391
392 // Iterate over all of the blocks that the variable is completely
393 // live in, adding [instrIndex(begin), instrIndex(end)+4) to the
394 // live interval.
395 for (unsigned i = 0, e = vi.AliveBlocks.size(); i != e; ++i) {
396 if (vi.AliveBlocks[i]) {
Owen Anderson31ec8412008-06-16 19:32:40 +0000397 LiveRange LR(getMBBStartIdx(i),
Evan Chengf26e8552008-06-17 20:13:36 +0000398 getMBBEndIdx(i)+1, // MBB ends at -1.
Owen Anderson31ec8412008-06-16 19:32:40 +0000399 ValNo);
400 interval.addRange(LR);
401 DOUT << " +" << LR;
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000402 }
403 }
404
405 // Finally, this virtual register is live from the start of any killing
406 // block to the 'use' slot of the killing instruction.
407 for (unsigned i = 0, e = vi.Kills.size(); i != e; ++i) {
408 MachineInstr *Kill = vi.Kills[i];
Evan Cheng8df78602007-08-08 03:00:28 +0000409 unsigned killIdx = getUseIndex(getInstructionIndex(Kill))+1;
Chris Lattner428b92e2006-09-15 03:57:23 +0000410 LiveRange LR(getMBBStartIdx(Kill->getParent()),
Evan Cheng7ecb38b2007-08-29 20:45:00 +0000411 killIdx, ValNo);
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000412 interval.addRange(LR);
Evan Chengf3bb2e62007-09-05 21:46:51 +0000413 interval.addKill(ValNo, killIdx);
Bill Wendlingbdc679d2006-11-29 00:39:47 +0000414 DOUT << " +" << LR;
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000415 }
416
417 } else {
418 // If this is the second time we see a virtual register definition, it
419 // must be due to phi elimination or two addr elimination. If this is
Evan Chengbf105c82006-11-03 03:04:46 +0000420 // the result of two address elimination, then the vreg is one of the
421 // def-and-use register operands.
Evan Chengef0732d2008-07-10 07:35:43 +0000422 if (mi->isRegReDefinedByTwoAddr(interval.reg, MOIdx)) {
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000423 // If this is a two-address definition, then we have already processed
424 // the live range. The only problem is that we didn't realize there
425 // are actually two values in the live interval. Because of this we
426 // need to take the LiveRegion that defines this register and split it
427 // into two values.
Evan Chenga07cec92008-01-10 08:22:10 +0000428 assert(interval.containsOneValue());
429 unsigned DefIndex = getDefIndex(interval.getValNumInfo(0)->def);
Chris Lattner6b128bd2006-09-03 08:07:11 +0000430 unsigned RedefIndex = getDefIndex(MIIdx);
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000431
Evan Cheng4f8ff162007-08-11 00:59:19 +0000432 const LiveRange *OldLR = interval.getLiveRangeContaining(RedefIndex-1);
Evan Cheng7ecb38b2007-08-29 20:45:00 +0000433 VNInfo *OldValNo = OldLR->valno;
Evan Cheng4f8ff162007-08-11 00:59:19 +0000434
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000435 // Delete the initial value, which should be short and continuous,
Chris Lattnerbe4f88a2006-08-22 18:19:46 +0000436 // because the 2-addr copy must be in the same MBB as the redef.
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000437 interval.removeRange(DefIndex, RedefIndex);
Alkis Evlogimenos70651572004-08-04 09:46:56 +0000438
Chris Lattnerbe4f88a2006-08-22 18:19:46 +0000439 // Two-address vregs should always only be redefined once. This means
440 // that at this point, there should be exactly one value number in it.
441 assert(interval.containsOneValue() && "Unexpected 2-addr liveint!");
442
Chris Lattner91725b72006-08-31 05:54:43 +0000443 // The new value number (#1) is defined by the instruction we claimed
444 // defined value #0.
Evan Chengc8d044e2008-02-15 18:24:29 +0000445 VNInfo *ValNo = interval.getNextValue(OldValNo->def, OldValNo->copy,
446 VNInfoAllocator);
Chris Lattnerbe4f88a2006-08-22 18:19:46 +0000447
Chris Lattner91725b72006-08-31 05:54:43 +0000448 // Value#0 is now defined by the 2-addr instruction.
Evan Chengc8d044e2008-02-15 18:24:29 +0000449 OldValNo->def = RedefIndex;
450 OldValNo->copy = 0;
Chris Lattnerbe4f88a2006-08-22 18:19:46 +0000451
452 // Add the new live interval which replaces the range for the input copy.
453 LiveRange LR(DefIndex, RedefIndex, ValNo);
Bill Wendlingbdc679d2006-11-29 00:39:47 +0000454 DOUT << " replace range with " << LR;
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000455 interval.addRange(LR);
Evan Chengf3bb2e62007-09-05 21:46:51 +0000456 interval.addKill(ValNo, RedefIndex);
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000457
458 // If this redefinition is dead, we need to add a dummy unit live
459 // range covering the def slot.
Owen Anderson6b098de2008-06-25 23:39:39 +0000460 if (MO.isDead())
Evan Cheng7ecb38b2007-08-29 20:45:00 +0000461 interval.addRange(LiveRange(RedefIndex, RedefIndex+1, OldValNo));
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000462
Evan Cheng56fdd7a2007-03-15 21:19:28 +0000463 DOUT << " RESULT: ";
Dan Gohman6f0d0242008-02-10 18:45:23 +0000464 interval.print(DOUT, tri_);
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000465
466 } else {
467 // Otherwise, this must be because of phi elimination. If this is the
468 // first redefinition of the vreg that we have seen, go back and change
469 // the live range in the PHI block to be a different value number.
470 if (interval.containsOneValue()) {
471 assert(vi.Kills.size() == 1 &&
472 "PHI elimination vreg should have one kill, the PHI itself!");
473
474 // Remove the old range that we now know has an incorrect number.
Evan Chengf3bb2e62007-09-05 21:46:51 +0000475 VNInfo *VNI = interval.getValNumInfo(0);
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000476 MachineInstr *Killer = vi.Kills[0];
Chris Lattner428b92e2006-09-15 03:57:23 +0000477 unsigned Start = getMBBStartIdx(Killer->getParent());
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000478 unsigned End = getUseIndex(getInstructionIndex(Killer))+1;
Evan Cheng56fdd7a2007-03-15 21:19:28 +0000479 DOUT << " Removing [" << Start << "," << End << "] from: ";
Dan Gohman6f0d0242008-02-10 18:45:23 +0000480 interval.print(DOUT, tri_); DOUT << "\n";
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000481 interval.removeRange(Start, End);
Evan Chengc3fc7d92007-11-29 09:49:23 +0000482 VNI->hasPHIKill = true;
Dan Gohman6f0d0242008-02-10 18:45:23 +0000483 DOUT << " RESULT: "; interval.print(DOUT, tri_);
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000484
Chris Lattnerbe4f88a2006-08-22 18:19:46 +0000485 // Replace the interval with one of a NEW value number. Note that this
486 // value number isn't actually defined by an instruction, weird huh? :)
Evan Chengf3bb2e62007-09-05 21:46:51 +0000487 LiveRange LR(Start, End, interval.getNextValue(~0, 0, VNInfoAllocator));
Bill Wendlingbdc679d2006-11-29 00:39:47 +0000488 DOUT << " replace range with " << LR;
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000489 interval.addRange(LR);
Evan Chengf3bb2e62007-09-05 21:46:51 +0000490 interval.addKill(LR.valno, End);
Dan Gohman6f0d0242008-02-10 18:45:23 +0000491 DOUT << " RESULT: "; interval.print(DOUT, tri_);
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000492 }
493
494 // In the case of PHI elimination, each variable definition is only
495 // live until the end of the block. We've already taken care of the
496 // rest of the live range.
Chris Lattner6b128bd2006-09-03 08:07:11 +0000497 unsigned defIndex = getDefIndex(MIIdx);
Chris Lattner91725b72006-08-31 05:54:43 +0000498
Evan Cheng7ecb38b2007-08-29 20:45:00 +0000499 VNInfo *ValNo;
Evan Chengc8d044e2008-02-15 18:24:29 +0000500 MachineInstr *CopyMI = NULL;
Chris Lattner91725b72006-08-31 05:54:43 +0000501 unsigned SrcReg, DstReg;
Evan Chengc8d044e2008-02-15 18:24:29 +0000502 if (mi->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG ||
Evan Cheng7e073ba2008-04-09 20:57:25 +0000503 mi->getOpcode() == TargetInstrInfo::INSERT_SUBREG ||
Evan Chengc8d044e2008-02-15 18:24:29 +0000504 tii_->isMoveInstr(*mi, SrcReg, DstReg))
505 CopyMI = mi;
506 ValNo = interval.getNextValue(defIndex, CopyMI, VNInfoAllocator);
Chris Lattner91725b72006-08-31 05:54:43 +0000507
Owen Anderson7fbad272008-07-23 21:37:49 +0000508 unsigned killIndex = getMBBEndIdx(mbb) + 1;
Evan Cheng7ecb38b2007-08-29 20:45:00 +0000509 LiveRange LR(defIndex, killIndex, ValNo);
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000510 interval.addRange(LR);
Evan Chengc3fc7d92007-11-29 09:49:23 +0000511 interval.addKill(ValNo, killIndex);
512 ValNo->hasPHIKill = true;
Bill Wendlingbdc679d2006-11-29 00:39:47 +0000513 DOUT << " +" << LR;
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000514 }
515 }
516
Bill Wendlingbdc679d2006-11-29 00:39:47 +0000517 DOUT << '\n';
Alkis Evlogimenosff0cbe12003-11-20 03:32:25 +0000518}
519
Chris Lattnerf35fef72004-07-23 21:24:19 +0000520void LiveIntervals::handlePhysicalRegisterDef(MachineBasicBlock *MBB,
Alkis Evlogimenosff0cbe12003-11-20 03:32:25 +0000521 MachineBasicBlock::iterator mi,
Chris Lattner6b128bd2006-09-03 08:07:11 +0000522 unsigned MIIdx,
Owen Anderson6b098de2008-06-25 23:39:39 +0000523 MachineOperand& MO,
Chris Lattner91725b72006-08-31 05:54:43 +0000524 LiveInterval &interval,
Evan Chengc8d044e2008-02-15 18:24:29 +0000525 MachineInstr *CopyMI) {
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000526 // A physical register cannot be live across basic blocks, so its
527 // lifetime must end somewhere in its defining basic block.
Bill Wendlingbdc679d2006-11-29 00:39:47 +0000528 DOUT << "\t\tregister: "; DEBUG(printRegName(interval.reg));
Alkis Evlogimenos02ba13c2004-01-31 23:13:30 +0000529
Chris Lattner6b128bd2006-09-03 08:07:11 +0000530 unsigned baseIndex = MIIdx;
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000531 unsigned start = getDefIndex(baseIndex);
532 unsigned end = start;
Alkis Evlogimenosff0cbe12003-11-20 03:32:25 +0000533
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000534 // If it is not used after definition, it is considered dead at
535 // the instruction defining it. Hence its interval is:
536 // [defSlot(def), defSlot(def)+1)
Owen Anderson6b098de2008-06-25 23:39:39 +0000537 if (MO.isDead()) {
Bill Wendlingbdc679d2006-11-29 00:39:47 +0000538 DOUT << " dead";
Chris Lattnerab4b66d2005-08-23 22:51:41 +0000539 end = getDefIndex(start) + 1;
540 goto exit;
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000541 }
542
543 // If it is not dead on definition, it must be killed by a
544 // subsequent instruction. Hence its interval is:
545 // [defSlot(def), useSlot(kill)+1)
Owen Anderson7fbad272008-07-23 21:37:49 +0000546 baseIndex += InstrSlots::NUM;
Chris Lattner5ab6f5f2005-09-02 00:20:32 +0000547 while (++mi != MBB->end()) {
Owen Anderson7fbad272008-07-23 21:37:49 +0000548 while (baseIndex / InstrSlots::NUM < i2miMap_.size() &&
549 getInstructionFromIndex(baseIndex) == 0)
550 baseIndex += InstrSlots::NUM;
Evan Cheng6130f662008-03-05 00:59:57 +0000551 if (mi->killsRegister(interval.reg, tri_)) {
Bill Wendlingbdc679d2006-11-29 00:39:47 +0000552 DOUT << " killed";
Chris Lattnerab4b66d2005-08-23 22:51:41 +0000553 end = getUseIndex(baseIndex) + 1;
554 goto exit;
Evan Cheng6130f662008-03-05 00:59:57 +0000555 } else if (mi->modifiesRegister(interval.reg, tri_)) {
Evan Cheng9a1956a2006-11-15 20:54:11 +0000556 // Another instruction redefines the register before it is ever read.
557 // Then the register is essentially dead at the instruction that defines
558 // it. Hence its interval is:
559 // [defSlot(def), defSlot(def)+1)
Bill Wendlingbdc679d2006-11-29 00:39:47 +0000560 DOUT << " dead";
Evan Cheng9a1956a2006-11-15 20:54:11 +0000561 end = getDefIndex(start) + 1;
562 goto exit;
Alkis Evlogimenosaf254732004-01-13 22:26:14 +0000563 }
Owen Anderson7fbad272008-07-23 21:37:49 +0000564
565 baseIndex += InstrSlots::NUM;
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000566 }
Chris Lattner5ab6f5f2005-09-02 00:20:32 +0000567
568 // The only case we should have a dead physreg here without a killing
569 // instruction (i.e. where we know it's dead) is if it is live-in to the
570 // function and never used.
Evan Chengc8d044e2008-02-15 18:24:29 +0000571 assert(!CopyMI && "physreg was not killed in defining block!");
Chris Lattner5ab6f5f2005-09-02 00:20:32 +0000572 end = getDefIndex(start) + 1; // It's dead.
Alkis Evlogimenos02ba13c2004-01-31 23:13:30 +0000573
Alkis Evlogimenosff0cbe12003-11-20 03:32:25 +0000574exit:
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000575 assert(start < end && "did not find end of interval?");
Chris Lattnerf768bba2005-03-09 23:05:19 +0000576
Evan Cheng24a3cc42007-04-25 07:30:23 +0000577 // Already exists? Extend old live interval.
578 LiveInterval::iterator OldLR = interval.FindLiveRangeContaining(start);
Evan Cheng7ecb38b2007-08-29 20:45:00 +0000579 VNInfo *ValNo = (OldLR != interval.end())
Evan Chengc8d044e2008-02-15 18:24:29 +0000580 ? OldLR->valno : interval.getNextValue(start, CopyMI, VNInfoAllocator);
Evan Cheng7ecb38b2007-08-29 20:45:00 +0000581 LiveRange LR(start, end, ValNo);
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000582 interval.addRange(LR);
Evan Chengf3bb2e62007-09-05 21:46:51 +0000583 interval.addKill(LR.valno, end);
Bill Wendlingbdc679d2006-11-29 00:39:47 +0000584 DOUT << " +" << LR << '\n';
Alkis Evlogimenosff0cbe12003-11-20 03:32:25 +0000585}
586
Chris Lattnerf35fef72004-07-23 21:24:19 +0000587void LiveIntervals::handleRegisterDef(MachineBasicBlock *MBB,
588 MachineBasicBlock::iterator MI,
Chris Lattner6b128bd2006-09-03 08:07:11 +0000589 unsigned MIIdx,
Evan Chengef0732d2008-07-10 07:35:43 +0000590 MachineOperand& MO,
591 unsigned MOIdx) {
Owen Anderson6b098de2008-06-25 23:39:39 +0000592 if (TargetRegisterInfo::isVirtualRegister(MO.getReg()))
Evan Chengef0732d2008-07-10 07:35:43 +0000593 handleVirtualRegisterDef(MBB, MI, MIIdx, MO, MOIdx,
Owen Anderson6b098de2008-06-25 23:39:39 +0000594 getOrCreateInterval(MO.getReg()));
595 else if (allocatableRegs_[MO.getReg()]) {
Evan Chengc8d044e2008-02-15 18:24:29 +0000596 MachineInstr *CopyMI = NULL;
Chris Lattner91725b72006-08-31 05:54:43 +0000597 unsigned SrcReg, DstReg;
Evan Chengc8d044e2008-02-15 18:24:29 +0000598 if (MI->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG ||
Evan Cheng7e073ba2008-04-09 20:57:25 +0000599 MI->getOpcode() == TargetInstrInfo::INSERT_SUBREG ||
Evan Chengc8d044e2008-02-15 18:24:29 +0000600 tii_->isMoveInstr(*MI, SrcReg, DstReg))
601 CopyMI = MI;
Owen Anderson6b098de2008-06-25 23:39:39 +0000602 handlePhysicalRegisterDef(MBB, MI, MIIdx, MO,
603 getOrCreateInterval(MO.getReg()), CopyMI);
Evan Cheng24a3cc42007-04-25 07:30:23 +0000604 // Def of a register also defines its sub-registers.
Owen Anderson6b098de2008-06-25 23:39:39 +0000605 for (const unsigned* AS = tri_->getSubRegisters(MO.getReg()); *AS; ++AS)
Evan Cheng6130f662008-03-05 00:59:57 +0000606 // If MI also modifies the sub-register explicitly, avoid processing it
607 // more than once. Do not pass in TRI here so it checks for exact match.
608 if (!MI->modifiesRegister(*AS))
Owen Anderson6b098de2008-06-25 23:39:39 +0000609 handlePhysicalRegisterDef(MBB, MI, MIIdx, MO,
610 getOrCreateInterval(*AS), 0);
Chris Lattnerf35fef72004-07-23 21:24:19 +0000611 }
Alkis Evlogimenos4d46e1e2004-01-31 14:37:41 +0000612}
613
Evan Chengb371f452007-02-19 21:49:54 +0000614void LiveIntervals::handleLiveInRegister(MachineBasicBlock *MBB,
Jim Laskey9b25b8c2007-02-21 22:41:17 +0000615 unsigned MIIdx,
Evan Cheng24a3cc42007-04-25 07:30:23 +0000616 LiveInterval &interval, bool isAlias) {
Evan Chengb371f452007-02-19 21:49:54 +0000617 DOUT << "\t\tlivein register: "; DEBUG(printRegName(interval.reg));
618
619 // Look for kills, if it reaches a def before it's killed, then it shouldn't
620 // be considered a livein.
621 MachineBasicBlock::iterator mi = MBB->begin();
Jim Laskey9b25b8c2007-02-21 22:41:17 +0000622 unsigned baseIndex = MIIdx;
623 unsigned start = baseIndex;
Evan Chengb371f452007-02-19 21:49:54 +0000624 unsigned end = start;
625 while (mi != MBB->end()) {
Evan Cheng6130f662008-03-05 00:59:57 +0000626 if (mi->killsRegister(interval.reg, tri_)) {
Evan Chengb371f452007-02-19 21:49:54 +0000627 DOUT << " killed";
628 end = getUseIndex(baseIndex) + 1;
629 goto exit;
Evan Cheng6130f662008-03-05 00:59:57 +0000630 } else if (mi->modifiesRegister(interval.reg, tri_)) {
Evan Chengb371f452007-02-19 21:49:54 +0000631 // Another instruction redefines the register before it is ever read.
632 // Then the register is essentially dead at the instruction that defines
633 // it. Hence its interval is:
634 // [defSlot(def), defSlot(def)+1)
635 DOUT << " dead";
636 end = getDefIndex(start) + 1;
637 goto exit;
638 }
639
640 baseIndex += InstrSlots::NUM;
Owen Anderson7fbad272008-07-23 21:37:49 +0000641 while (baseIndex / InstrSlots::NUM < i2miMap_.size() &&
642 getInstructionFromIndex(baseIndex) == 0)
643 baseIndex += InstrSlots::NUM;
Evan Chengb371f452007-02-19 21:49:54 +0000644 ++mi;
645 }
646
647exit:
Evan Cheng75611fb2007-06-27 01:16:36 +0000648 // Live-in register might not be used at all.
649 if (end == MIIdx) {
Evan Cheng292da942007-06-27 18:47:28 +0000650 if (isAlias) {
651 DOUT << " dead";
Evan Cheng75611fb2007-06-27 01:16:36 +0000652 end = getDefIndex(MIIdx) + 1;
Evan Cheng292da942007-06-27 18:47:28 +0000653 } else {
654 DOUT << " live through";
655 end = baseIndex;
656 }
Evan Cheng24a3cc42007-04-25 07:30:23 +0000657 }
658
Evan Chengf3bb2e62007-09-05 21:46:51 +0000659 LiveRange LR(start, end, interval.getNextValue(start, 0, VNInfoAllocator));
Jim Laskey9b25b8c2007-02-21 22:41:17 +0000660 interval.addRange(LR);
Evan Chengf3bb2e62007-09-05 21:46:51 +0000661 interval.addKill(LR.valno, end);
Evan Cheng24c2e5c2007-08-08 07:03:29 +0000662 DOUT << " +" << LR << '\n';
Evan Chengb371f452007-02-19 21:49:54 +0000663}
664
Alkis Evlogimenosff0cbe12003-11-20 03:32:25 +0000665/// computeIntervals - Compute the live intervals for virtual and physical
Alkis Evlogimenos4d46e1e2004-01-31 14:37:41 +0000666/// registers. For some ordering of the machine instructions [1,N], a
Alkis Evlogimenos08cec002004-01-31 19:59:32 +0000667/// live interval is an interval [i, j) where 1 <= i <= j < N for
Alkis Evlogimenosff0cbe12003-11-20 03:32:25 +0000668/// which a variable is live.
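/// For example (illustrative): a virtual register defined at instruction
/// index i and last used at instruction index j is live over the half-open
/// range [getDefIndex(i), getUseIndex(j)+1); a LiveInterval may consist of
/// several such disjoint ranges.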
Chris Lattnerf7da2c72006-08-24 22:43:55 +0000669void LiveIntervals::computeIntervals() {
Bill Wendlingbdc679d2006-11-29 00:39:47 +0000670 DOUT << "********** COMPUTING LIVE INTERVALS **********\n"
671 << "********** Function: "
672 << ((Value*)mf_->getFunction())->getName() << '\n';
Chris Lattner6b128bd2006-09-03 08:07:11 +0000673 // Track the index of the current machine instr.
674 unsigned MIIndex = 0;
Owen Anderson7fbad272008-07-23 21:37:49 +0000675
676 // Skip over empty initial indices.
677 while (MIIndex / InstrSlots::NUM < i2miMap_.size() &&
678 getInstructionFromIndex(MIIndex) == 0)
679 MIIndex += InstrSlots::NUM;
680
Chris Lattner428b92e2006-09-15 03:57:23 +0000681 for (MachineFunction::iterator MBBI = mf_->begin(), E = mf_->end();
682 MBBI != E; ++MBBI) {
683 MachineBasicBlock *MBB = MBBI;
Bill Wendlingbdc679d2006-11-29 00:39:47 +0000684 DOUT << ((Value*)MBB->getBasicBlock())->getName() << ":\n";
Alkis Evlogimenos6b4edba2003-12-21 20:19:10 +0000685
Chris Lattner428b92e2006-09-15 03:57:23 +0000686 MachineBasicBlock::iterator MI = MBB->begin(), miEnd = MBB->end();
Evan Cheng0c9f92e2007-02-13 01:30:55 +0000687
Dan Gohmancb406c22007-10-03 19:26:29 +0000688 // Create intervals for live-ins to this BB first.
689 for (MachineBasicBlock::const_livein_iterator LI = MBB->livein_begin(),
690 LE = MBB->livein_end(); LI != LE; ++LI) {
691 handleLiveInRegister(MBB, MIIndex, getOrCreateInterval(*LI));
692 // Multiple live-ins can alias the same register.
Dan Gohman6f0d0242008-02-10 18:45:23 +0000693 for (const unsigned* AS = tri_->getSubRegisters(*LI); *AS; ++AS)
Dan Gohmancb406c22007-10-03 19:26:29 +0000694 if (!hasInterval(*AS))
695 handleLiveInRegister(MBB, MIIndex, getOrCreateInterval(*AS),
696 true);
Chris Lattnerdffb2e82006-09-04 18:27:40 +0000697 }
698
Chris Lattner428b92e2006-09-15 03:57:23 +0000699 for (; MI != miEnd; ++MI) {
Bill Wendlingbdc679d2006-11-29 00:39:47 +0000700 DOUT << MIIndex << "\t" << *MI;
Alkis Evlogimenosff0cbe12003-11-20 03:32:25 +0000701
Evan Cheng438f7bc2006-11-10 08:43:01 +0000702 // Handle defs.
Chris Lattner428b92e2006-09-15 03:57:23 +0000703 for (int i = MI->getNumOperands() - 1; i >= 0; --i) {
704 MachineOperand &MO = MI->getOperand(i);
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000705 // handle register defs - build intervals
Chris Lattner428b92e2006-09-15 03:57:23 +0000706 if (MO.isRegister() && MO.getReg() && MO.isDef())
Evan Chengef0732d2008-07-10 07:35:43 +0000707 handleRegisterDef(MBB, MI, MIIndex, MO, i);
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000708 }
Chris Lattner6b128bd2006-09-03 08:07:11 +0000709
710 MIIndex += InstrSlots::NUM;
Owen Anderson7fbad272008-07-23 21:37:49 +0000711
712 // Skip over empty indices.
713 while (MIIndex / InstrSlots::NUM < i2miMap_.size() &&
714 getInstructionFromIndex(MIIndex) == 0)
715 MIIndex += InstrSlots::NUM;
Alkis Evlogimenosff0cbe12003-11-20 03:32:25 +0000716 }
Alkis Evlogimenos1a8ea012004-08-04 09:46:26 +0000717 }
Alkis Evlogimenosff0cbe12003-11-20 03:32:25 +0000718}
Alkis Evlogimenosb27ef242003-12-05 10:38:28 +0000719
Evan Cheng4ca980e2007-10-17 02:10:22 +0000720bool LiveIntervals::findLiveInMBBs(const LiveRange &LR,
Evan Chenga5bfc972007-10-17 06:53:44 +0000721 SmallVectorImpl<MachineBasicBlock*> &MBBs) const {
Evan Cheng4ca980e2007-10-17 02:10:22 +0000722 std::vector<IdxMBBPair>::const_iterator I =
723 std::lower_bound(Idx2MBBMap.begin(), Idx2MBBMap.end(), LR.start);
724
725 bool ResVal = false;
726 while (I != Idx2MBBMap.end()) {
727 if (LR.end <= I->first)
728 break;
729 MBBs.push_back(I->second);
730 ResVal = true;
731 ++I;
732 }
733 return ResVal;
734}
735
736
Owen Anderson03857b22008-08-13 21:49:13 +0000737LiveInterval* LiveIntervals::createInterval(unsigned reg) {
Dan Gohman6f0d0242008-02-10 18:45:23 +0000738 float Weight = TargetRegisterInfo::isPhysicalRegister(reg) ?
Jim Laskey7902c752006-11-07 12:25:45 +0000739 HUGE_VALF : 0.0F;
Owen Anderson03857b22008-08-13 21:49:13 +0000740 return new LiveInterval(reg, Weight);
Alkis Evlogimenos9a8b4902004-04-09 18:07:57 +0000741}
Evan Chengf2fbca62007-11-12 06:35:08 +0000742
Evan Chengc8d044e2008-02-15 18:24:29 +0000743/// getVNInfoSourceReg - Helper function that parses the specified VNInfo
744/// copy field and returns the source register that defines it.
745unsigned LiveIntervals::getVNInfoSourceReg(const VNInfo *VNI) const {
746 if (!VNI->copy)
747 return 0;
748
749 if (VNI->copy->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG)
750 return VNI->copy->getOperand(1).getReg();
Evan Cheng7e073ba2008-04-09 20:57:25 +0000751 if (VNI->copy->getOpcode() == TargetInstrInfo::INSERT_SUBREG)
752 return VNI->copy->getOperand(2).getReg();
Evan Chengc8d044e2008-02-15 18:24:29 +0000753 unsigned SrcReg, DstReg;
754 if (tii_->isMoveInstr(*VNI->copy, SrcReg, DstReg))
755 return SrcReg;
756 assert(0 && "Unrecognized copy instruction!");
757 return 0;
758}
Evan Chengf2fbca62007-11-12 06:35:08 +0000759
760//===----------------------------------------------------------------------===//
761// Register allocator hooks.
762//
763
Evan Chengd70dbb52008-02-22 09:24:50 +0000764/// getReMatImplicitUse - If the remat definition MI has one virtual register
765/// use operand (for now, we allow only one), then any use of the remat'ed
766/// value implicitly uses that register as well. Returns the virtual register.
767unsigned LiveIntervals::getReMatImplicitUse(const LiveInterval &li,
768 MachineInstr *MI) const {
769 unsigned RegOp = 0;
770 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
771 MachineOperand &MO = MI->getOperand(i);
772 if (!MO.isRegister() || !MO.isUse())
773 continue;
774 unsigned Reg = MO.getReg();
775 if (Reg == 0 || Reg == li.reg)
776 continue;
777 // FIXME: For now, only remat MI with at most one register operand.
778 assert(!RegOp &&
779 "Can't rematerialize instruction with multiple register operand!");
780 RegOp = MO.getReg();
Dan Gohman6d69ba82008-07-25 00:02:30 +0000781#ifndef NDEBUG
Evan Chengd70dbb52008-02-22 09:24:50 +0000782 break;
Dan Gohman6d69ba82008-07-25 00:02:30 +0000783#endif
Evan Chengd70dbb52008-02-22 09:24:50 +0000784 }
785 return RegOp;
786}
787
788/// isValNoAvailableAt - Return true if the val# of the specified interval
789/// which reaches the given instruction also reaches the specified use index.
790bool LiveIntervals::isValNoAvailableAt(const LiveInterval &li, MachineInstr *MI,
791 unsigned UseIdx) const {
792 unsigned Index = getInstructionIndex(MI);
793 VNInfo *ValNo = li.FindLiveRangeContaining(Index)->valno;
794 LiveInterval::const_iterator UI = li.FindLiveRangeContaining(UseIdx);
795 return UI != li.end() && UI->valno == ValNo;
796}
797
Evan Chengf2fbca62007-11-12 06:35:08 +0000798/// isReMaterializable - Returns true if the definition MI of the specified
799/// val# of the specified interval is re-materializable.
800bool LiveIntervals::isReMaterializable(const LiveInterval &li,
Evan Cheng5ef3a042007-12-06 00:01:56 +0000801 const VNInfo *ValNo, MachineInstr *MI,
802 bool &isLoad) {
Evan Chengf2fbca62007-11-12 06:35:08 +0000803 if (DisableReMat)
804 return false;
805
Evan Cheng20ccded2008-03-15 00:19:36 +0000806 if (MI->getOpcode() == TargetInstrInfo::IMPLICIT_DEF)
Evan Chengd70dbb52008-02-22 09:24:50 +0000807 return true;
Evan Chengdd3465e2008-02-23 01:44:27 +0000808
809 int FrameIdx = 0;
810 if (tii_->isLoadFromStackSlot(MI, FrameIdx) &&
Evan Cheng249ded32008-02-23 03:38:34 +0000811 mf_->getFrameInfo()->isImmutableObjectIndex(FrameIdx))
Evan Cheng79a0c1e2008-02-25 08:50:41 +0000812 // FIXME: Let the target-specific isReallyTriviallyReMaterializable determine
813 // this, but remember this is not safe to fold into a two-address
814 // instruction.
Evan Cheng249ded32008-02-23 03:38:34 +0000815 // This is a load from fixed stack slot. It can be rematerialized.
Evan Chengdd3465e2008-02-23 01:44:27 +0000816 return true;
Evan Chengdd3465e2008-02-23 01:44:27 +0000817
Dan Gohman6d69ba82008-07-25 00:02:30 +0000818 // If the target-specific rules don't identify an instruction as
819 // being trivially rematerializable, use some target-independent
820 // rules.
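  // (Illustrative example of what the checks below accept: a non-volatile
  // load whose only memory operand refers to provably constant memory, such
  // as a constant pool entry or an immutable stack slot PseudoSourceValue,
  // with no unmodeled side effects and at most one other virtual register
  // use. The exact set of accepted instructions is target-dependent.)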
821 if (!MI->getDesc().isRematerializable() ||
822 !tii_->isTriviallyReMaterializable(MI)) {
Dan Gohman4c8f8702008-07-25 15:08:37 +0000823 if (!EnableAggressiveRemat)
824 return false;
Evan Chengd70dbb52008-02-22 09:24:50 +0000825
Dan Gohman0471a792008-07-28 18:43:51 +0000826 // If the instruction accesses memory but the memoperands have been lost,
Dan Gohman6d69ba82008-07-25 00:02:30 +0000827 // we can't analyze it.
828 const TargetInstrDesc &TID = MI->getDesc();
829 if ((TID.mayLoad() || TID.mayStore()) && MI->memoperands_empty())
830 return false;
831
832 // Avoid instructions obviously unsafe for remat.
833 if (TID.hasUnmodeledSideEffects() || TID.isNotDuplicable())
834 return false;
835
836 // If the instruction accesses memory and the memory could be non-constant,
837 // assume the instruction is not rematerializable.
Dan Gohmanfed90b62008-07-28 21:51:04 +0000838 for (std::list<MachineMemOperand>::const_iterator I = MI->memoperands_begin(),
Dan Gohman6d69ba82008-07-25 00:02:30 +0000839 E = MI->memoperands_end(); I != E; ++I) {
840 const MachineMemOperand &MMO = *I;
841 if (MMO.isVolatile() || MMO.isStore())
842 return false;
843 const Value *V = MMO.getValue();
844 if (!V)
845 return false;
846 if (const PseudoSourceValue *PSV = dyn_cast<PseudoSourceValue>(V)) {
847 if (!PSV->isConstant(mf_->getFrameInfo()))
Evan Chengd70dbb52008-02-22 09:24:50 +0000848 return false;
Dan Gohman6d69ba82008-07-25 00:02:30 +0000849 } else if (!aa_->pointsToConstantMemory(V))
850 return false;
851 }
852
853 // If any of the registers accessed are non-constant, conservatively assume
854 // the instruction is not rematerializable.
855 unsigned ImpUse = 0;
856 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
857 const MachineOperand &MO = MI->getOperand(i);
858 if (MO.isReg()) {
859 unsigned Reg = MO.getReg();
860 if (Reg == 0)
861 continue;
862 if (TargetRegisterInfo::isPhysicalRegister(Reg))
863 return false;
864
865 // Only allow one def, and that in the first operand.
866 if (MO.isDef() != (i == 0))
867 return false;
868
869 // Only allow constant-valued registers.
870 bool IsLiveIn = mri_->isLiveIn(Reg);
871 MachineRegisterInfo::def_iterator I = mri_->def_begin(Reg),
872 E = mri_->def_end();
873
874 // For the def, it should be the only def.
875 if (MO.isDef() && (next(I) != E || IsLiveIn))
876 return false;
877
878 if (MO.isUse()) {
879 // Only allow one other register use, as that's all the
880 // remat mechanisms support currently.
881 if (Reg != li.reg) {
882 if (ImpUse == 0)
883 ImpUse = Reg;
884 else if (Reg != ImpUse)
885 return false;
886 }
887 // For uses, there should be only one associated def.
888 if (I != E && (next(I) != E || IsLiveIn))
889 return false;
890 }
Evan Chengd70dbb52008-02-22 09:24:50 +0000891 }
892 }
Evan Cheng5ef3a042007-12-06 00:01:56 +0000893 }
Evan Chengf2fbca62007-11-12 06:35:08 +0000894
Dan Gohman6d69ba82008-07-25 00:02:30 +0000895 unsigned ImpUse = getReMatImplicitUse(li, MI);
896 if (ImpUse) {
897 const LiveInterval &ImpLi = getInterval(ImpUse);
898 for (MachineRegisterInfo::use_iterator ri = mri_->use_begin(li.reg),
899 re = mri_->use_end(); ri != re; ++ri) {
900 MachineInstr *UseMI = &*ri;
901 unsigned UseIdx = getInstructionIndex(UseMI);
902 if (li.FindLiveRangeContaining(UseIdx)->valno != ValNo)
903 continue;
904 if (!isValNoAvailableAt(ImpLi, MI, UseIdx))
905 return false;
906 }
907 }
908 return true;
Evan Cheng5ef3a042007-12-06 00:01:56 +0000909}
910
911/// isReMaterializable - Returns true if the definition MI of every val# of
912/// the specified interval is re-materializable.
913bool LiveIntervals::isReMaterializable(const LiveInterval &li, bool &isLoad) {
914 isLoad = false;
915 for (LiveInterval::const_vni_iterator i = li.vni_begin(), e = li.vni_end();
916 i != e; ++i) {
917 const VNInfo *VNI = *i;
918 unsigned DefIdx = VNI->def;
919 if (DefIdx == ~1U)
920 continue; // Dead val#.
921 // Is the def for the val# rematerializable?
922 if (DefIdx == ~0u)
923 return false;
924 MachineInstr *ReMatDefMI = getInstructionFromIndex(DefIdx);
925 bool DefIsLoad = false;
Evan Chengd70dbb52008-02-22 09:24:50 +0000926 if (!ReMatDefMI ||
927 !isReMaterializable(li, VNI, ReMatDefMI, DefIsLoad))
Evan Cheng5ef3a042007-12-06 00:01:56 +0000928 return false;
929 isLoad |= DefIsLoad;
Evan Chengf2fbca62007-11-12 06:35:08 +0000930 }
931 return true;
932}
933
Evan Cheng79a0c1e2008-02-25 08:50:41 +0000934/// FilterFoldedOps - Filter out two-address use operands. Return
935/// true if it finds any issue with the operands that ought to prevent
936/// folding.
937static bool FilterFoldedOps(MachineInstr *MI,
938 SmallVector<unsigned, 2> &Ops,
939 unsigned &MRInfo,
940 SmallVector<unsigned, 2> &FoldOps) {
Chris Lattner749c6f62008-01-07 07:27:27 +0000941 const TargetInstrDesc &TID = MI->getDesc();
Evan Cheng6e141fd2007-12-12 23:12:09 +0000942
Evan Cheng79a0c1e2008-02-25 08:50:41 +0000943 MRInfo = 0;
Evan Chengaee4af62007-12-02 08:30:39 +0000944 for (unsigned i = 0, e = Ops.size(); i != e; ++i) {
945 unsigned OpIdx = Ops[i];
Evan Chengd70dbb52008-02-22 09:24:50 +0000946 MachineOperand &MO = MI->getOperand(OpIdx);
Evan Chengaee4af62007-12-02 08:30:39 +0000947 // FIXME: fold subreg use.
Evan Chengd70dbb52008-02-22 09:24:50 +0000948 if (MO.getSubReg())
Evan Cheng79a0c1e2008-02-25 08:50:41 +0000949 return true;
Evan Chengd70dbb52008-02-22 09:24:50 +0000950 if (MO.isDef())
Evan Chengaee4af62007-12-02 08:30:39 +0000951 MRInfo |= (unsigned)VirtRegMap::isMod;
952 else {
953 // Filter out two-address use operand(s).
Evan Chengd70dbb52008-02-22 09:24:50 +0000954 if (!MO.isImplicit() &&
955 TID.getOperandConstraint(OpIdx, TOI::TIED_TO) != -1) {
Evan Chengaee4af62007-12-02 08:30:39 +0000956 MRInfo = VirtRegMap::isModRef;
957 continue;
958 }
959 MRInfo |= (unsigned)VirtRegMap::isRef;
960 }
961 FoldOps.push_back(OpIdx);
Evan Chenge62f97c2007-12-01 02:07:52 +0000962 }
Evan Cheng79a0c1e2008-02-25 08:50:41 +0000963 return false;
964}
965
966
967/// tryFoldMemoryOperand - Attempts to fold either a spill / restore from a
968/// stack slot / to a register, or any rematerialized load, into the ith
969/// operand of the specified MI. If successful, MI is updated with the newly
970/// created MI and true is returned.
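/// For example (illustrative): when a register has been spilled to stack
/// slot Slot, a use of that register in MI may be folded via
/// TargetInstrInfo::foldMemoryOperand into an equivalent instruction that
/// reads the value directly from the stack slot, avoiding an explicit
/// reload.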
971bool LiveIntervals::tryFoldMemoryOperand(MachineInstr* &MI,
972 VirtRegMap &vrm, MachineInstr *DefMI,
973 unsigned InstrIdx,
974 SmallVector<unsigned, 2> &Ops,
975 bool isSS, int Slot, unsigned Reg) {
Evan Cheng79a0c1e2008-02-25 08:50:41 +0000976 // If it is an implicit def instruction, just delete it.
Evan Cheng20ccded2008-03-15 00:19:36 +0000977 if (MI->getOpcode() == TargetInstrInfo::IMPLICIT_DEF) {
Evan Cheng79a0c1e2008-02-25 08:50:41 +0000978 RemoveMachineInstrFromMaps(MI);
979 vrm.RemoveMachineInstrFromMaps(MI);
980 MI->eraseFromParent();
981 ++numFolds;
982 return true;
983 }
984
985 // Filter the list of operand indexes that are to be folded. Abort if
986 // any operand will prevent folding.
987 unsigned MRInfo = 0;
988 SmallVector<unsigned, 2> FoldOps;
989 if (FilterFoldedOps(MI, Ops, MRInfo, FoldOps))
990 return false;
Evan Chenge62f97c2007-12-01 02:07:52 +0000991
Evan Cheng427f4c12008-03-31 23:19:51 +0000992 // The only time it's safe to fold into a two address instruction is when
993 // it's folding reload and spill from / into a spill stack slot.
994 if (DefMI && (MRInfo & VirtRegMap::isMod))
Evan Cheng249ded32008-02-23 03:38:34 +0000995 return false;
996
Evan Chengf2f8c2a2008-02-08 22:05:27 +0000997 MachineInstr *fmi = isSS ? tii_->foldMemoryOperand(*mf_, MI, FoldOps, Slot)
998 : tii_->foldMemoryOperand(*mf_, MI, FoldOps, DefMI);
Evan Chengf2fbca62007-11-12 06:35:08 +0000999 if (fmi) {
Evan Chengd3653122008-02-27 03:04:06 +00001000 // Remember this instruction uses the spill slot.
1001 if (isSS) vrm.addSpillSlotUse(Slot, fmi);
1002
Evan Chengf2fbca62007-11-12 06:35:08 +00001003 // Attempt to fold the memory reference into the instruction. If
1004 // we can do this, we don't need to insert spill code.
Evan Chengf2fbca62007-11-12 06:35:08 +00001005 MachineBasicBlock &MBB = *MI->getParent();
Evan Cheng84802932008-01-10 08:24:38 +00001006 if (isSS && !mf_->getFrameInfo()->isImmutableObjectIndex(Slot))
Evan Chengaee4af62007-12-02 08:30:39 +00001007 vrm.virtFolded(Reg, MI, fmi, (VirtRegMap::ModRef)MRInfo);
Evan Cheng81a03822007-11-17 00:40:40 +00001008 vrm.transferSpillPts(MI, fmi);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001009 vrm.transferRestorePts(MI, fmi);
Evan Chengc1f53c72008-03-11 21:34:46 +00001010 vrm.transferEmergencySpills(MI, fmi);
Evan Chengf2fbca62007-11-12 06:35:08 +00001011 mi2iMap_.erase(MI);
Evan Chengcddbb832007-11-30 21:23:43 +00001012 i2miMap_[InstrIdx /InstrSlots::NUM] = fmi;
1013 mi2iMap_[fmi] = InstrIdx;
Evan Chengf2fbca62007-11-12 06:35:08 +00001014 MI = MBB.insert(MBB.erase(MI), fmi);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001015 ++numFolds;
Evan Chengf2fbca62007-11-12 06:35:08 +00001016 return true;
1017 }
1018 return false;
1019}
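
// A minimal usage sketch, mirroring the calls made later in this file when
// spills / restores are inserted (MI, vrm, index, Ops, Slot and VReg are
// assumed to have been set up by the caller):
//   if (tryFoldMemoryOperand(MI, vrm, NULL, index, Ops, true, Slot, VReg)) {
//     // MI now points at the folded instruction; no separate store / load
//     // needs to be issued for this def / use.
//   }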
1020
Evan Cheng018f9b02007-12-05 03:22:34 +00001021/// canFoldMemoryOperand - Returns true if the specified load / store
1022/// folding is possible.
1023bool LiveIntervals::canFoldMemoryOperand(MachineInstr *MI,
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001024 SmallVector<unsigned, 2> &Ops,
Evan Cheng3c75ba82008-04-01 21:37:32 +00001025 bool ReMat) const {
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001026 // Filter the list of operand indexes that are to be folded. Abort if
1027 // any operand will prevent folding.
1028 unsigned MRInfo = 0;
Evan Cheng018f9b02007-12-05 03:22:34 +00001029 SmallVector<unsigned, 2> FoldOps;
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001030 if (FilterFoldedOps(MI, Ops, MRInfo, FoldOps))
1031 return false;
Evan Cheng018f9b02007-12-05 03:22:34 +00001032
Evan Cheng3c75ba82008-04-01 21:37:32 +00001033 // It's only legal to remat for a use, not a def.
1034 if (ReMat && (MRInfo & VirtRegMap::isMod))
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001035 return false;
Evan Cheng018f9b02007-12-05 03:22:34 +00001036
Evan Chengd70dbb52008-02-22 09:24:50 +00001037 return tii_->canFoldMemoryOperand(MI, FoldOps);
1038}
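
// Note that, unlike tryFoldMemoryOperand above, this is only a query and does
// not modify MI; it is used while splitting (see rewriteInstructionForSpills)
// to decide whether a load / store could instead be folded at a better point
// later on.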
1039
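/// intervalIsInOneMBB - Return true if all of the ranges of the given live
/// interval are contained within a single basic block.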
Evan Cheng81a03822007-11-17 00:40:40 +00001040bool LiveIntervals::intervalIsInOneMBB(const LiveInterval &li) const {
1041 SmallPtrSet<MachineBasicBlock*, 4> MBBs;
1042 for (LiveInterval::Ranges::const_iterator
1043 I = li.ranges.begin(), E = li.ranges.end(); I != E; ++I) {
1044 std::vector<IdxMBBPair>::const_iterator II =
1045 std::lower_bound(Idx2MBBMap.begin(), Idx2MBBMap.end(), I->start);
1046 if (II == Idx2MBBMap.end())
1047 continue;
1048 if (I->end > II->first) // crossing a MBB.
1049 return false;
1050 MBBs.insert(II->second);
1051 if (MBBs.size() > 1)
1052 return false;
1053 }
1054 return true;
1055}
1056
Evan Chengd70dbb52008-02-22 09:24:50 +00001057/// rewriteImplicitOps - Rewrite implicit use operands of MI (i.e. uses of the
 1058/// interval's register by to-be re-materialized operands of MI) with the new register.
1059void LiveIntervals::rewriteImplicitOps(const LiveInterval &li,
1060 MachineInstr *MI, unsigned NewVReg,
1061 VirtRegMap &vrm) {
 1062 // There is an implicit use. That means one of the other operands is
 1063 // being remat'ed and the remat'ed instruction has li.reg as a
 1064 // use operand. Make sure we rewrite that as well.
1065 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
1066 MachineOperand &MO = MI->getOperand(i);
1067 if (!MO.isRegister())
1068 continue;
1069 unsigned Reg = MO.getReg();
1070 if (Reg == 0 || TargetRegisterInfo::isPhysicalRegister(Reg))
1071 continue;
1072 if (!vrm.isReMaterialized(Reg))
1073 continue;
1074 MachineInstr *ReMatMI = vrm.getReMaterializedMI(Reg);
Evan Cheng6130f662008-03-05 00:59:57 +00001075 MachineOperand *UseMO = ReMatMI->findRegisterUseOperand(li.reg);
1076 if (UseMO)
1077 UseMO->setReg(NewVReg);
Evan Chengd70dbb52008-02-22 09:24:50 +00001078 }
1079}
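
// A hypothetical example of the case handled above (register numbers are
// invented): li.reg is %reg1024 and MI carries %reg1024 as an implicit use
// because another operand, say %reg1026, is being re-materialized from a def
// that reads %reg1024.  Since %reg1024 is being rewritten to NewVReg, the
// cloned def returned by vrm.getReMaterializedMI(%reg1026) must have its
// %reg1024 use operand retargeted to NewVReg as well.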
1080
Evan Chengf2fbca62007-11-12 06:35:08 +00001081/// rewriteInstructionForSpills, rewriteInstructionsForSpills - Helper functions
1082/// for addIntervalsForSpills to rewrite uses / defs for the given live range.
Evan Cheng018f9b02007-12-05 03:22:34 +00001083bool LiveIntervals::
Evan Chengd70dbb52008-02-22 09:24:50 +00001084rewriteInstructionForSpills(const LiveInterval &li, const VNInfo *VNI,
1085 bool TrySplit, unsigned index, unsigned end, MachineInstr *MI,
Evan Cheng81a03822007-11-17 00:40:40 +00001086 MachineInstr *ReMatOrigDefMI, MachineInstr *ReMatDefMI,
Evan Chengf2fbca62007-11-12 06:35:08 +00001087 unsigned Slot, int LdSlot,
1088 bool isLoad, bool isLoadSS, bool DefIsReMat, bool CanDelete,
Evan Chengd70dbb52008-02-22 09:24:50 +00001089 VirtRegMap &vrm,
Evan Chengf2fbca62007-11-12 06:35:08 +00001090 const TargetRegisterClass* rc,
1091 SmallVector<int, 4> &ReMatIds,
Evan Cheng22f07ff2007-12-11 02:09:15 +00001092 const MachineLoopInfo *loopInfo,
Evan Cheng313d4b82008-02-23 00:33:04 +00001093 unsigned &NewVReg, unsigned ImpUse, bool &HasDef, bool &HasUse,
Owen Anderson28998312008-08-13 22:28:50 +00001094 DenseMap<unsigned,unsigned> &MBBVRegsMap,
Evan Cheng9c3c2212008-06-06 07:54:39 +00001095 std::vector<LiveInterval*> &NewLIs, float &SSWeight) {
1096 MachineBasicBlock *MBB = MI->getParent();
1097 unsigned loopDepth = loopInfo->getLoopDepth(MBB);
Evan Cheng018f9b02007-12-05 03:22:34 +00001098 bool CanFold = false;
Evan Chengf2fbca62007-11-12 06:35:08 +00001099 RestartInstruction:
1100 for (unsigned i = 0; i != MI->getNumOperands(); ++i) {
1101 MachineOperand& mop = MI->getOperand(i);
1102 if (!mop.isRegister())
1103 continue;
1104 unsigned Reg = mop.getReg();
1105 unsigned RegI = Reg;
Dan Gohman6f0d0242008-02-10 18:45:23 +00001106 if (Reg == 0 || TargetRegisterInfo::isPhysicalRegister(Reg))
Evan Chengf2fbca62007-11-12 06:35:08 +00001107 continue;
Evan Chengf2fbca62007-11-12 06:35:08 +00001108 if (Reg != li.reg)
1109 continue;
1110
1111 bool TryFold = !DefIsReMat;
Evan Chengcb3c3302007-11-29 23:02:50 +00001112 bool FoldSS = true; // Default behavior unless it's a remat.
Evan Chengf2fbca62007-11-12 06:35:08 +00001113 int FoldSlot = Slot;
1114 if (DefIsReMat) {
1115 // If this is the rematerializable definition MI itself and
1116 // all of its uses are rematerialized, simply delete it.
Evan Cheng81a03822007-11-17 00:40:40 +00001117 if (MI == ReMatOrigDefMI && CanDelete) {
Evan Chengcddbb832007-11-30 21:23:43 +00001118 DOUT << "\t\t\t\tErasing re-materializable def: ";
1119 DOUT << MI << '\n';
Evan Chengf2fbca62007-11-12 06:35:08 +00001120 RemoveMachineInstrFromMaps(MI);
Evan Chengcada2452007-11-28 01:28:46 +00001121 vrm.RemoveMachineInstrFromMaps(MI);
Evan Chengf2fbca62007-11-12 06:35:08 +00001122 MI->eraseFromParent();
1123 break;
1124 }
1125
1126 // If def for this use can't be rematerialized, then try folding.
Evan Cheng0cbb1162007-11-29 01:06:25 +00001127 // If def is rematerializable and it's a load, also try folding.
Evan Chengcb3c3302007-11-29 23:02:50 +00001128 TryFold = !ReMatDefMI || (ReMatDefMI && (MI == ReMatOrigDefMI || isLoad));
Evan Chengf2fbca62007-11-12 06:35:08 +00001129 if (isLoad) {
 1130 // Try to fold loads (from stack slot, constant pool, etc.) into uses.
1131 FoldSS = isLoadSS;
1132 FoldSlot = LdSlot;
1133 }
1134 }
1135
Evan Chengf2fbca62007-11-12 06:35:08 +00001136 // Scan all of the operands of this instruction rewriting operands
1137 // to use NewVReg instead of li.reg as appropriate. We do this for
1138 // two reasons:
1139 //
1140 // 1. If the instr reads the same spilled vreg multiple times, we
1141 // want to reuse the NewVReg.
1142 // 2. If the instr is a two-addr instruction, we are required to
1143 // keep the src/dst regs pinned.
1144 //
1145 // Keep track of whether we replace a use and/or def so that we can
1146 // create the spill interval with the appropriate range.
Evan Chengcddbb832007-11-30 21:23:43 +00001147
Evan Cheng81a03822007-11-17 00:40:40 +00001148 HasUse = mop.isUse();
1149 HasDef = mop.isDef();
Evan Chengaee4af62007-12-02 08:30:39 +00001150 SmallVector<unsigned, 2> Ops;
1151 Ops.push_back(i);
Evan Chengf2fbca62007-11-12 06:35:08 +00001152 for (unsigned j = i+1, e = MI->getNumOperands(); j != e; ++j) {
Evan Chengaee4af62007-12-02 08:30:39 +00001153 const MachineOperand &MOj = MI->getOperand(j);
1154 if (!MOj.isRegister())
Evan Chengf2fbca62007-11-12 06:35:08 +00001155 continue;
Evan Chengaee4af62007-12-02 08:30:39 +00001156 unsigned RegJ = MOj.getReg();
Dan Gohman6f0d0242008-02-10 18:45:23 +00001157 if (RegJ == 0 || TargetRegisterInfo::isPhysicalRegister(RegJ))
Evan Chengf2fbca62007-11-12 06:35:08 +00001158 continue;
1159 if (RegJ == RegI) {
Evan Chengaee4af62007-12-02 08:30:39 +00001160 Ops.push_back(j);
1161 HasUse |= MOj.isUse();
1162 HasDef |= MOj.isDef();
Evan Chengf2fbca62007-11-12 06:35:08 +00001163 }
1164 }
1165
Evan Cheng79a796c2008-07-12 01:56:02 +00001166 if (HasUse && !li.liveAt(getUseIndex(index)))
1167 // Must be defined by an implicit def. It should not be spilled. Note,
 1168 // this is for correctness reasons. e.g.
1169 // 8 %reg1024<def> = IMPLICIT_DEF
1170 // 12 %reg1024<def> = INSERT_SUBREG %reg1024<kill>, %reg1025, 2
 1171 // The live range [12, 14) is not part of the r1024 live interval since
 1172 // it's defined by an implicit def. It will not conflict with the live
 1173 // interval of r1025. Now suppose both registers are spilled; you can
Evan Chengb9890ae2008-07-12 02:22:07 +0001174 // easily see a situation where both registers are reloaded before
Evan Cheng79a796c2008-07-12 01:56:02 +0001175 // the INSERT_SUBREG and the two target registers would overlap.
1176 HasUse = false;
1177
Evan Cheng9c3c2212008-06-06 07:54:39 +00001178 // Update stack slot spill weight if we are splitting.
Evan Chengc3417602008-06-21 06:45:54 +00001179 float Weight = getSpillWeight(HasDef, HasUse, loopDepth);
Evan Cheng9c3c2212008-06-06 07:54:39 +00001180 if (!TrySplit)
1181 SSWeight += Weight;
1182
1183 if (!TryFold)
1184 CanFold = false;
1185 else {
Evan Cheng018f9b02007-12-05 03:22:34 +00001186 // Do not fold load / store here if we are splitting. We'll find an
1187 // optimal point to insert a load / store later.
1188 if (!TrySplit) {
1189 if (tryFoldMemoryOperand(MI, vrm, ReMatDefMI, index,
1190 Ops, FoldSS, FoldSlot, Reg)) {
1191 // Folding the load/store can completely change the instruction in
1192 // unpredictable ways, rescan it from the beginning.
1193 HasUse = false;
1194 HasDef = false;
1195 CanFold = false;
Evan Cheng9c3c2212008-06-06 07:54:39 +00001196 if (isRemoved(MI)) {
1197 SSWeight -= Weight;
Evan Cheng7e073ba2008-04-09 20:57:25 +00001198 break;
Evan Cheng9c3c2212008-06-06 07:54:39 +00001199 }
Evan Cheng018f9b02007-12-05 03:22:34 +00001200 goto RestartInstruction;
1201 }
1202 } else {
Evan Cheng9c3c2212008-06-06 07:54:39 +00001203 // We'll try to fold it later if it's profitable.
Evan Cheng3c75ba82008-04-01 21:37:32 +00001204 CanFold = canFoldMemoryOperand(MI, Ops, DefIsReMat);
Evan Cheng018f9b02007-12-05 03:22:34 +00001205 }
Evan Cheng9c3c2212008-06-06 07:54:39 +00001206 }
Evan Chengcddbb832007-11-30 21:23:43 +00001207
1208 // Create a new virtual register for the spill interval.
1209 bool CreatedNewVReg = false;
1210 if (NewVReg == 0) {
Evan Chengd70dbb52008-02-22 09:24:50 +00001211 NewVReg = mri_->createVirtualRegister(rc);
Evan Chengcddbb832007-11-30 21:23:43 +00001212 vrm.grow();
1213 CreatedNewVReg = true;
1214 }
1215 mop.setReg(NewVReg);
Evan Chengd70dbb52008-02-22 09:24:50 +00001216 if (mop.isImplicit())
1217 rewriteImplicitOps(li, MI, NewVReg, vrm);
Evan Chengcddbb832007-11-30 21:23:43 +00001218
1219 // Reuse NewVReg for other reads.
Evan Chengd70dbb52008-02-22 09:24:50 +00001220 for (unsigned j = 0, e = Ops.size(); j != e; ++j) {
1221 MachineOperand &mopj = MI->getOperand(Ops[j]);
1222 mopj.setReg(NewVReg);
1223 if (mopj.isImplicit())
1224 rewriteImplicitOps(li, MI, NewVReg, vrm);
1225 }
Evan Chengcddbb832007-11-30 21:23:43 +00001226
Evan Cheng81a03822007-11-17 00:40:40 +00001227 if (CreatedNewVReg) {
1228 if (DefIsReMat) {
1229 vrm.setVirtIsReMaterialized(NewVReg, ReMatDefMI/*, CanDelete*/);
Evan Chengd70dbb52008-02-22 09:24:50 +00001230 if (ReMatIds[VNI->id] == VirtRegMap::MAX_STACK_SLOT) {
Evan Cheng81a03822007-11-17 00:40:40 +00001231 // Each valnum may have its own remat id.
Evan Chengd70dbb52008-02-22 09:24:50 +00001232 ReMatIds[VNI->id] = vrm.assignVirtReMatId(NewVReg);
Evan Cheng81a03822007-11-17 00:40:40 +00001233 } else {
Evan Chengd70dbb52008-02-22 09:24:50 +00001234 vrm.assignVirtReMatId(NewVReg, ReMatIds[VNI->id]);
Evan Cheng81a03822007-11-17 00:40:40 +00001235 }
1236 if (!CanDelete || (HasUse && HasDef)) {
1237 // If this is a two-addr instruction then its use operands are
1238 // rematerializable but its def is not. It should be assigned a
1239 // stack slot.
1240 vrm.assignVirt2StackSlot(NewVReg, Slot);
1241 }
Evan Chengf2fbca62007-11-12 06:35:08 +00001242 } else {
Evan Chengf2fbca62007-11-12 06:35:08 +00001243 vrm.assignVirt2StackSlot(NewVReg, Slot);
1244 }
Evan Chengcb3c3302007-11-29 23:02:50 +00001245 } else if (HasUse && HasDef &&
1246 vrm.getStackSlot(NewVReg) == VirtRegMap::NO_STACK_SLOT) {
1247 // If this interval hasn't been assigned a stack slot (because earlier
1248 // def is a deleted remat def), do it now.
1249 assert(Slot != VirtRegMap::NO_STACK_SLOT);
1250 vrm.assignVirt2StackSlot(NewVReg, Slot);
Evan Chengf2fbca62007-11-12 06:35:08 +00001251 }
1252
Evan Cheng313d4b82008-02-23 00:33:04 +00001253 // Re-matting an instruction with virtual register use. Add the
1254 // register as an implicit use on the use MI.
1255 if (DefIsReMat && ImpUse)
1256 MI->addOperand(MachineOperand::CreateReg(ImpUse, false, true));
1257
Evan Chengf2fbca62007-11-12 06:35:08 +00001258 // create a new register interval for this spill / remat.
1259 LiveInterval &nI = getOrCreateInterval(NewVReg);
Evan Cheng81a03822007-11-17 00:40:40 +00001260 if (CreatedNewVReg) {
1261 NewLIs.push_back(&nI);
Evan Cheng1953d0c2007-11-29 10:12:14 +00001262 MBBVRegsMap.insert(std::make_pair(MI->getParent()->getNumber(), NewVReg));
Evan Cheng81a03822007-11-17 00:40:40 +00001263 if (TrySplit)
1264 vrm.setIsSplitFromReg(NewVReg, li.reg);
1265 }
Evan Chengf2fbca62007-11-12 06:35:08 +00001266
1267 if (HasUse) {
Evan Cheng81a03822007-11-17 00:40:40 +00001268 if (CreatedNewVReg) {
1269 LiveRange LR(getLoadIndex(index), getUseIndex(index)+1,
1270 nI.getNextValue(~0U, 0, VNInfoAllocator));
1271 DOUT << " +" << LR;
1272 nI.addRange(LR);
1273 } else {
1274 // Extend the split live interval to this def / use.
1275 unsigned End = getUseIndex(index)+1;
1276 LiveRange LR(nI.ranges[nI.ranges.size()-1].end, End,
1277 nI.getValNumInfo(nI.getNumValNums()-1));
1278 DOUT << " +" << LR;
1279 nI.addRange(LR);
1280 }
Evan Chengf2fbca62007-11-12 06:35:08 +00001281 }
1282 if (HasDef) {
1283 LiveRange LR(getDefIndex(index), getStoreIndex(index),
1284 nI.getNextValue(~0U, 0, VNInfoAllocator));
1285 DOUT << " +" << LR;
1286 nI.addRange(LR);
1287 }
Evan Cheng81a03822007-11-17 00:40:40 +00001288
Evan Chengf2fbca62007-11-12 06:35:08 +00001289 DOUT << "\t\t\t\tAdded new interval: ";
Dan Gohman6f0d0242008-02-10 18:45:23 +00001290 nI.print(DOUT, tri_);
Evan Chengf2fbca62007-11-12 06:35:08 +00001291 DOUT << '\n';
1292 }
Evan Cheng018f9b02007-12-05 03:22:34 +00001293 return CanFold;
Evan Chengf2fbca62007-11-12 06:35:08 +00001294}
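
/// anyKillInMBBAfterIdx - Return true if the given value number has a kill
/// that falls after Idx and before the end index of MBB.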
Evan Cheng81a03822007-11-17 00:40:40 +00001295bool LiveIntervals::anyKillInMBBAfterIdx(const LiveInterval &li,
Evan Cheng0cbb1162007-11-29 01:06:25 +00001296 const VNInfo *VNI,
1297 MachineBasicBlock *MBB, unsigned Idx) const {
Evan Cheng81a03822007-11-17 00:40:40 +00001298 unsigned End = getMBBEndIdx(MBB);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001299 for (unsigned j = 0, ee = VNI->kills.size(); j != ee; ++j) {
1300 unsigned KillIdx = VNI->kills[j];
1301 if (KillIdx > Idx && KillIdx < End)
1302 return true;
Evan Cheng81a03822007-11-17 00:40:40 +00001303 }
1304 return false;
1305}
1306
Evan Cheng063284c2008-02-21 00:34:19 +00001307/// RewriteInfo - Keep track of machine instrs that will be rewritten
1308/// during spilling.
Dan Gohman844731a2008-05-13 00:00:25 +00001309namespace {
1310 struct RewriteInfo {
1311 unsigned Index;
1312 MachineInstr *MI;
1313 bool HasUse;
1314 bool HasDef;
1315 RewriteInfo(unsigned i, MachineInstr *mi, bool u, bool d)
1316 : Index(i), MI(mi), HasUse(u), HasDef(d) {}
1317 };
Evan Cheng063284c2008-02-21 00:34:19 +00001318
Dan Gohman844731a2008-05-13 00:00:25 +00001319 struct RewriteInfoCompare {
1320 bool operator()(const RewriteInfo &LHS, const RewriteInfo &RHS) const {
1321 return LHS.Index < RHS.Index;
1322 }
1323 };
1324}
Evan Cheng063284c2008-02-21 00:34:19 +00001325
Evan Chengf2fbca62007-11-12 06:35:08 +00001326void LiveIntervals::
Evan Cheng81a03822007-11-17 00:40:40 +00001327rewriteInstructionsForSpills(const LiveInterval &li, bool TrySplit,
Evan Chengf2fbca62007-11-12 06:35:08 +00001328 LiveInterval::Ranges::const_iterator &I,
Evan Cheng81a03822007-11-17 00:40:40 +00001329 MachineInstr *ReMatOrigDefMI, MachineInstr *ReMatDefMI,
Evan Chengf2fbca62007-11-12 06:35:08 +00001330 unsigned Slot, int LdSlot,
1331 bool isLoad, bool isLoadSS, bool DefIsReMat, bool CanDelete,
Evan Chengd70dbb52008-02-22 09:24:50 +00001332 VirtRegMap &vrm,
Evan Chengf2fbca62007-11-12 06:35:08 +00001333 const TargetRegisterClass* rc,
1334 SmallVector<int, 4> &ReMatIds,
Evan Cheng22f07ff2007-12-11 02:09:15 +00001335 const MachineLoopInfo *loopInfo,
Evan Cheng81a03822007-11-17 00:40:40 +00001336 BitVector &SpillMBBs,
Owen Anderson28998312008-08-13 22:28:50 +00001337 DenseMap<unsigned, std::vector<SRInfo> > &SpillIdxes,
Evan Cheng0cbb1162007-11-29 01:06:25 +00001338 BitVector &RestoreMBBs,
Owen Anderson28998312008-08-13 22:28:50 +00001339 DenseMap<unsigned, std::vector<SRInfo> > &RestoreIdxes,
1340 DenseMap<unsigned,unsigned> &MBBVRegsMap,
Evan Cheng9c3c2212008-06-06 07:54:39 +00001341 std::vector<LiveInterval*> &NewLIs, float &SSWeight) {
Evan Cheng018f9b02007-12-05 03:22:34 +00001342 bool AllCanFold = true;
Evan Cheng81a03822007-11-17 00:40:40 +00001343 unsigned NewVReg = 0;
Evan Cheng063284c2008-02-21 00:34:19 +00001344 unsigned start = getBaseIndex(I->start);
Evan Chengf2fbca62007-11-12 06:35:08 +00001345 unsigned end = getBaseIndex(I->end-1) + InstrSlots::NUM;
Evan Chengf2fbca62007-11-12 06:35:08 +00001346
Evan Cheng063284c2008-02-21 00:34:19 +00001347 // First collect all of the defs / uses in this live range that will be rewritten.
Evan Cheng7e073ba2008-04-09 20:57:25 +00001348 // Make sure they are sorted according to instruction index.
Evan Cheng063284c2008-02-21 00:34:19 +00001349 std::vector<RewriteInfo> RewriteMIs;
Evan Chengd70dbb52008-02-22 09:24:50 +00001350 for (MachineRegisterInfo::reg_iterator ri = mri_->reg_begin(li.reg),
1351 re = mri_->reg_end(); ri != re; ) {
Evan Cheng419852c2008-04-03 16:39:43 +00001352 MachineInstr *MI = &*ri;
Evan Cheng063284c2008-02-21 00:34:19 +00001353 MachineOperand &O = ri.getOperand();
1354 ++ri;
Evan Cheng24d2f8a2008-03-31 07:53:30 +00001355 assert(!O.isImplicit() && "Spilling register that's used as implicit use?");
Evan Cheng063284c2008-02-21 00:34:19 +00001356 unsigned index = getInstructionIndex(MI);
1357 if (index < start || index >= end)
1358 continue;
Evan Cheng79a796c2008-07-12 01:56:02 +00001359 if (O.isUse() && !li.liveAt(getUseIndex(index)))
1360 // Must be defined by an implicit def. It should not be spilled. Note,
 1361 // this is for correctness reasons. e.g.
1362 // 8 %reg1024<def> = IMPLICIT_DEF
1363 // 12 %reg1024<def> = INSERT_SUBREG %reg1024<kill>, %reg1025, 2
 1364 // The live range [12, 14) is not part of the r1024 live interval since
 1365 // it's defined by an implicit def. It will not conflict with the live
 1366 // interval of r1025. Now suppose both registers are spilled; you can
Evan Chengb9890ae2008-07-12 02:22:07 +00001367 // easily see a situation where both registers are reloaded before
Evan Cheng79a796c2008-07-12 01:56:02 +00001368 // the INSERT_SUBREG and the two target registers would overlap.
1369 continue;
Evan Cheng063284c2008-02-21 00:34:19 +00001370 RewriteMIs.push_back(RewriteInfo(index, MI, O.isUse(), O.isDef()));
1371 }
1372 std::sort(RewriteMIs.begin(), RewriteMIs.end(), RewriteInfoCompare());
1373
Evan Cheng313d4b82008-02-23 00:33:04 +00001374 unsigned ImpUse = DefIsReMat ? getReMatImplicitUse(li, ReMatDefMI) : 0;
Evan Cheng063284c2008-02-21 00:34:19 +00001375 // Now rewrite the defs and uses.
1376 for (unsigned i = 0, e = RewriteMIs.size(); i != e; ) {
1377 RewriteInfo &rwi = RewriteMIs[i];
1378 ++i;
1379 unsigned index = rwi.Index;
1380 bool MIHasUse = rwi.HasUse;
1381 bool MIHasDef = rwi.HasDef;
1382 MachineInstr *MI = rwi.MI;
 1383 // If MI defs and/or uses the same register multiple times, then there
1384 // are multiple entries.
Evan Cheng313d4b82008-02-23 00:33:04 +00001385 unsigned NumUses = MIHasUse;
Evan Cheng063284c2008-02-21 00:34:19 +00001386 while (i != e && RewriteMIs[i].MI == MI) {
1387 assert(RewriteMIs[i].Index == index);
Evan Cheng313d4b82008-02-23 00:33:04 +00001388 bool isUse = RewriteMIs[i].HasUse;
1389 if (isUse) ++NumUses;
1390 MIHasUse |= isUse;
Evan Cheng063284c2008-02-21 00:34:19 +00001391 MIHasDef |= RewriteMIs[i].HasDef;
1392 ++i;
1393 }
Evan Cheng81a03822007-11-17 00:40:40 +00001394 MachineBasicBlock *MBB = MI->getParent();
Evan Cheng313d4b82008-02-23 00:33:04 +00001395
Evan Cheng0a891ed2008-05-23 23:00:04 +00001396 if (ImpUse && MI != ReMatDefMI) {
Evan Cheng313d4b82008-02-23 00:33:04 +00001397 // Re-matting an instruction with virtual register use. Update the
Evan Cheng24d2f8a2008-03-31 07:53:30 +00001398 // register interval's spill weight to HUGE_VALF to prevent it from
1399 // being spilled.
Evan Cheng313d4b82008-02-23 00:33:04 +00001400 LiveInterval &ImpLi = getInterval(ImpUse);
Evan Cheng24d2f8a2008-03-31 07:53:30 +00001401 ImpLi.weight = HUGE_VALF;
Evan Cheng313d4b82008-02-23 00:33:04 +00001402 }
1403
Evan Cheng063284c2008-02-21 00:34:19 +00001404 unsigned MBBId = MBB->getNumber();
Evan Cheng018f9b02007-12-05 03:22:34 +00001405 unsigned ThisVReg = 0;
Evan Cheng70306f82007-12-03 09:58:48 +00001406 if (TrySplit) {
Owen Anderson28998312008-08-13 22:28:50 +00001407 DenseMap<unsigned,unsigned>::iterator NVI = MBBVRegsMap.find(MBBId);
Evan Cheng1953d0c2007-11-29 10:12:14 +00001408 if (NVI != MBBVRegsMap.end()) {
Evan Cheng018f9b02007-12-05 03:22:34 +00001409 ThisVReg = NVI->second;
Evan Cheng1953d0c2007-11-29 10:12:14 +00001410 // One common case:
1411 // x = use
1412 // ...
1413 // ...
1414 // def = ...
1415 // = use
 1416 // It's better to start a new interval to avoid artificially
 1417 // extending the new interval.
Evan Cheng1953d0c2007-11-29 10:12:14 +00001418 if (MIHasDef && !MIHasUse) {
1419 MBBVRegsMap.erase(MBB->getNumber());
Evan Cheng018f9b02007-12-05 03:22:34 +00001420 ThisVReg = 0;
Evan Cheng1953d0c2007-11-29 10:12:14 +00001421 }
1422 }
Evan Chengcada2452007-11-28 01:28:46 +00001423 }
Evan Cheng018f9b02007-12-05 03:22:34 +00001424
1425 bool IsNew = ThisVReg == 0;
1426 if (IsNew) {
1427 // This ends the previous live interval. If all of its def / use
1428 // can be folded, give it a low spill weight.
1429 if (NewVReg && TrySplit && AllCanFold) {
1430 LiveInterval &nI = getOrCreateInterval(NewVReg);
1431 nI.weight /= 10.0F;
1432 }
1433 AllCanFold = true;
1434 }
1435 NewVReg = ThisVReg;
1436
Evan Cheng81a03822007-11-17 00:40:40 +00001437 bool HasDef = false;
1438 bool HasUse = false;
Evan Chengd70dbb52008-02-22 09:24:50 +00001439 bool CanFold = rewriteInstructionForSpills(li, I->valno, TrySplit,
Evan Cheng9c3c2212008-06-06 07:54:39 +00001440 index, end, MI, ReMatOrigDefMI, ReMatDefMI,
1441 Slot, LdSlot, isLoad, isLoadSS, DefIsReMat,
1442 CanDelete, vrm, rc, ReMatIds, loopInfo, NewVReg,
1443 ImpUse, HasDef, HasUse, MBBVRegsMap, NewLIs, SSWeight);
Evan Cheng81a03822007-11-17 00:40:40 +00001444 if (!HasDef && !HasUse)
1445 continue;
1446
Evan Cheng018f9b02007-12-05 03:22:34 +00001447 AllCanFold &= CanFold;
1448
Evan Cheng81a03822007-11-17 00:40:40 +00001449 // Update weight of spill interval.
1450 LiveInterval &nI = getOrCreateInterval(NewVReg);
Evan Cheng70306f82007-12-03 09:58:48 +00001451 if (!TrySplit) {
Evan Cheng81a03822007-11-17 00:40:40 +00001452 // The spill weight is now infinity as it cannot be spilled again.
1453 nI.weight = HUGE_VALF;
Evan Cheng0cbb1162007-11-29 01:06:25 +00001454 continue;
Evan Cheng81a03822007-11-17 00:40:40 +00001455 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00001456
1457 // Keep track of the last def and first use in each MBB.
Evan Cheng0cbb1162007-11-29 01:06:25 +00001458 if (HasDef) {
1459 if (MI != ReMatOrigDefMI || !CanDelete) {
Evan Cheng0cbb1162007-11-29 01:06:25 +00001460 bool HasKill = false;
1461 if (!HasUse)
1462 HasKill = anyKillInMBBAfterIdx(li, I->valno, MBB, getDefIndex(index));
1463 else {
Evan Cheng1953d0c2007-11-29 10:12:14 +00001464 // If this is two-address code, then this index starts a new VNInfo.
Evan Cheng3f32d652008-06-04 09:18:41 +00001465 const VNInfo *VNI = li.findDefinedVNInfo(getDefIndex(index));
Evan Cheng0cbb1162007-11-29 01:06:25 +00001466 if (VNI)
1467 HasKill = anyKillInMBBAfterIdx(li, VNI, MBB, getDefIndex(index));
1468 }
Owen Anderson28998312008-08-13 22:28:50 +00001469 DenseMap<unsigned, std::vector<SRInfo> >::iterator SII =
Evan Chenge3110d02007-12-01 04:42:39 +00001470 SpillIdxes.find(MBBId);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001471 if (!HasKill) {
Evan Cheng1953d0c2007-11-29 10:12:14 +00001472 if (SII == SpillIdxes.end()) {
1473 std::vector<SRInfo> S;
1474 S.push_back(SRInfo(index, NewVReg, true));
1475 SpillIdxes.insert(std::make_pair(MBBId, S));
1476 } else if (SII->second.back().vreg != NewVReg) {
1477 SII->second.push_back(SRInfo(index, NewVReg, true));
1478 } else if ((int)index > SII->second.back().index) {
Evan Cheng0cbb1162007-11-29 01:06:25 +00001479 // If there is an earlier def and this is a two-address
1480 // instruction, then it's not possible to fold the store (which
1481 // would also fold the load).
Evan Cheng1953d0c2007-11-29 10:12:14 +00001482 SRInfo &Info = SII->second.back();
1483 Info.index = index;
1484 Info.canFold = !HasUse;
Evan Cheng0cbb1162007-11-29 01:06:25 +00001485 }
1486 SpillMBBs.set(MBBId);
Evan Chenge3110d02007-12-01 04:42:39 +00001487 } else if (SII != SpillIdxes.end() &&
1488 SII->second.back().vreg == NewVReg &&
1489 (int)index > SII->second.back().index) {
1490 // There is an earlier def that's not killed (must be two-address).
1491 // The spill is no longer needed.
1492 SII->second.pop_back();
1493 if (SII->second.empty()) {
1494 SpillIdxes.erase(MBBId);
1495 SpillMBBs.reset(MBBId);
1496 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00001497 }
1498 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00001499 }
1500
1501 if (HasUse) {
Owen Anderson28998312008-08-13 22:28:50 +00001502 DenseMap<unsigned, std::vector<SRInfo> >::iterator SII =
Evan Cheng0cbb1162007-11-29 01:06:25 +00001503 SpillIdxes.find(MBBId);
Evan Cheng1953d0c2007-11-29 10:12:14 +00001504 if (SII != SpillIdxes.end() &&
1505 SII->second.back().vreg == NewVReg &&
1506 (int)index > SII->second.back().index)
Evan Cheng0cbb1162007-11-29 01:06:25 +00001507 // There are use(s) following the last def; it's not safe to fold the spill.
Evan Cheng1953d0c2007-11-29 10:12:14 +00001508 SII->second.back().canFold = false;
Owen Anderson28998312008-08-13 22:28:50 +00001509 DenseMap<unsigned, std::vector<SRInfo> >::iterator RII =
Evan Cheng0cbb1162007-11-29 01:06:25 +00001510 RestoreIdxes.find(MBBId);
Evan Cheng1953d0c2007-11-29 10:12:14 +00001511 if (RII != RestoreIdxes.end() && RII->second.back().vreg == NewVReg)
Evan Cheng0cbb1162007-11-29 01:06:25 +00001512 // If we are splitting live intervals, only fold if it's the first
1513 // use and there isn't another use later in the MBB.
Evan Cheng1953d0c2007-11-29 10:12:14 +00001514 RII->second.back().canFold = false;
Evan Cheng0cbb1162007-11-29 01:06:25 +00001515 else if (IsNew) {
1516 // Only need a reload if there isn't an earlier def / use.
Evan Cheng1953d0c2007-11-29 10:12:14 +00001517 if (RII == RestoreIdxes.end()) {
1518 std::vector<SRInfo> Infos;
1519 Infos.push_back(SRInfo(index, NewVReg, true));
1520 RestoreIdxes.insert(std::make_pair(MBBId, Infos));
1521 } else {
1522 RII->second.push_back(SRInfo(index, NewVReg, true));
1523 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00001524 RestoreMBBs.set(MBBId);
1525 }
1526 }
1527
1528 // Update spill weight.
Evan Cheng22f07ff2007-12-11 02:09:15 +00001529 unsigned loopDepth = loopInfo->getLoopDepth(MBB);
Evan Chengc3417602008-06-21 06:45:54 +00001530 nI.weight += getSpillWeight(HasDef, HasUse, loopDepth);
Evan Chengf2fbca62007-11-12 06:35:08 +00001531 }
Evan Cheng018f9b02007-12-05 03:22:34 +00001532
1533 if (NewVReg && TrySplit && AllCanFold) {
1534 // If all of its def / use can be folded, give it a low spill weight.
1535 LiveInterval &nI = getOrCreateInterval(NewVReg);
1536 nI.weight /= 10.0F;
1537 }
Evan Chengf2fbca62007-11-12 06:35:08 +00001538}
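
// After the function above runs, SpillMBBs / SpillIdxes record, per basic
// block, the last def of each new vreg that still needs a store back to its
// stack slot, and RestoreMBBs / RestoreIdxes record the first use that needs
// a reload; the canFold flag on each SRInfo says whether that store / load
// may instead be folded into the instruction.  addIntervalsForSpills below
// consumes these tables when it issues the actual spills and restores.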
1539
Evan Cheng1953d0c2007-11-29 10:12:14 +00001540bool LiveIntervals::alsoFoldARestore(int Id, int index, unsigned vr,
1541 BitVector &RestoreMBBs,
Owen Anderson28998312008-08-13 22:28:50 +00001542 DenseMap<unsigned,std::vector<SRInfo> > &RestoreIdxes) {
Evan Cheng1953d0c2007-11-29 10:12:14 +00001543 if (!RestoreMBBs[Id])
1544 return false;
1545 std::vector<SRInfo> &Restores = RestoreIdxes[Id];
1546 for (unsigned i = 0, e = Restores.size(); i != e; ++i)
1547 if (Restores[i].index == index &&
1548 Restores[i].vreg == vr &&
1549 Restores[i].canFold)
1550 return true;
1551 return false;
1552}
1553
1554void LiveIntervals::eraseRestoreInfo(int Id, int index, unsigned vr,
1555 BitVector &RestoreMBBs,
Owen Anderson28998312008-08-13 22:28:50 +00001556 DenseMap<unsigned,std::vector<SRInfo> > &RestoreIdxes) {
Evan Cheng1953d0c2007-11-29 10:12:14 +00001557 if (!RestoreMBBs[Id])
1558 return;
1559 std::vector<SRInfo> &Restores = RestoreIdxes[Id];
1560 for (unsigned i = 0, e = Restores.size(); i != e; ++i)
1561 if (Restores[i].index == index && Restores[i].vreg)
1562 Restores[i].index = -1;
1563}
Evan Cheng81a03822007-11-17 00:40:40 +00001564
Evan Cheng4cce6b42008-04-11 17:53:36 +00001565/// handleSpilledImpDefs - Remove IMPLICIT_DEF instructions which are being
1566/// spilled and create empty intervals for their uses.
1567void
1568LiveIntervals::handleSpilledImpDefs(const LiveInterval &li, VirtRegMap &vrm,
1569 const TargetRegisterClass* rc,
1570 std::vector<LiveInterval*> &NewLIs) {
Evan Cheng419852c2008-04-03 16:39:43 +00001571 for (MachineRegisterInfo::reg_iterator ri = mri_->reg_begin(li.reg),
1572 re = mri_->reg_end(); ri != re; ) {
Evan Cheng4cce6b42008-04-11 17:53:36 +00001573 MachineOperand &O = ri.getOperand();
Evan Cheng419852c2008-04-03 16:39:43 +00001574 MachineInstr *MI = &*ri;
1575 ++ri;
Evan Cheng4cce6b42008-04-11 17:53:36 +00001576 if (O.isDef()) {
1577 assert(MI->getOpcode() == TargetInstrInfo::IMPLICIT_DEF &&
1578 "Register def was not rewritten?");
1579 RemoveMachineInstrFromMaps(MI);
1580 vrm.RemoveMachineInstrFromMaps(MI);
1581 MI->eraseFromParent();
1582 } else {
 1583 // This must be a use of an implicit_def so it's not part of the live
1584 // interval. Create a new empty live interval for it.
1585 // FIXME: Can we simply erase some of the instructions? e.g. Stores?
1586 unsigned NewVReg = mri_->createVirtualRegister(rc);
1587 vrm.grow();
1588 vrm.setIsImplicitlyDefined(NewVReg);
1589 NewLIs.push_back(&getOrCreateInterval(NewVReg));
1590 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
1591 MachineOperand &MO = MI->getOperand(i);
1592 if (MO.isReg() && MO.getReg() == li.reg)
1593 MO.setReg(NewVReg);
1594 }
1595 }
Evan Cheng419852c2008-04-03 16:39:43 +00001596 }
1597}
1598
Evan Cheng81a03822007-11-17 00:40:40 +00001599
Evan Chengf2fbca62007-11-12 06:35:08 +00001600std::vector<LiveInterval*> LiveIntervals::
Evan Cheng81a03822007-11-17 00:40:40 +00001601addIntervalsForSpills(const LiveInterval &li,
Evan Cheng9c3c2212008-06-06 07:54:39 +00001602 const MachineLoopInfo *loopInfo, VirtRegMap &vrm,
1603 float &SSWeight) {
Evan Chengf2fbca62007-11-12 06:35:08 +00001604 assert(li.weight != HUGE_VALF &&
1605 "attempt to spill already spilled interval!");
1606
1607 DOUT << "\t\t\t\tadding intervals for spills for interval: ";
Dan Gohman6f0d0242008-02-10 18:45:23 +00001608 li.print(DOUT, tri_);
Evan Chengf2fbca62007-11-12 06:35:08 +00001609 DOUT << '\n';
1610
Evan Cheng9c3c2212008-06-06 07:54:39 +00001611 // Spill slot weight.
1612 SSWeight = 0.0f;
1613
Evan Cheng81a03822007-11-17 00:40:40 +00001614 // Each bit specifies whether a spill is required in the MBB.
1615 BitVector SpillMBBs(mf_->getNumBlockIDs());
Owen Anderson28998312008-08-13 22:28:50 +00001616 DenseMap<unsigned, std::vector<SRInfo> > SpillIdxes;
Evan Cheng0cbb1162007-11-29 01:06:25 +00001617 BitVector RestoreMBBs(mf_->getNumBlockIDs());
Owen Anderson28998312008-08-13 22:28:50 +00001618 DenseMap<unsigned, std::vector<SRInfo> > RestoreIdxes;
1619 DenseMap<unsigned,unsigned> MBBVRegsMap;
Evan Chengf2fbca62007-11-12 06:35:08 +00001620 std::vector<LiveInterval*> NewLIs;
Evan Chengd70dbb52008-02-22 09:24:50 +00001621 const TargetRegisterClass* rc = mri_->getRegClass(li.reg);
Evan Chengf2fbca62007-11-12 06:35:08 +00001622
1623 unsigned NumValNums = li.getNumValNums();
1624 SmallVector<MachineInstr*, 4> ReMatDefs;
1625 ReMatDefs.resize(NumValNums, NULL);
1626 SmallVector<MachineInstr*, 4> ReMatOrigDefs;
1627 ReMatOrigDefs.resize(NumValNums, NULL);
1628 SmallVector<int, 4> ReMatIds;
1629 ReMatIds.resize(NumValNums, VirtRegMap::MAX_STACK_SLOT);
1630 BitVector ReMatDelete(NumValNums);
1631 unsigned Slot = VirtRegMap::MAX_STACK_SLOT;
1632
Evan Cheng81a03822007-11-17 00:40:40 +00001633 // Spilling a split live interval. It cannot be split any further. Also,
 1634 // it's guaranteed to be a single val# / range interval.
1635 if (vrm.getPreSplitReg(li.reg)) {
1636 vrm.setIsSplitFromReg(li.reg, 0);
Evan Chengd120ffd2007-12-05 10:24:35 +00001637 // Unset the split kill marker on the last use.
1638 unsigned KillIdx = vrm.getKillPoint(li.reg);
1639 if (KillIdx) {
1640 MachineInstr *KillMI = getInstructionFromIndex(KillIdx);
1641 assert(KillMI && "Last use disappeared?");
1642 int KillOp = KillMI->findRegisterUseOperandIdx(li.reg, true);
1643 assert(KillOp != -1 && "Last use disappeared?");
Chris Lattnerf7382302007-12-30 21:56:09 +00001644 KillMI->getOperand(KillOp).setIsKill(false);
Evan Chengd120ffd2007-12-05 10:24:35 +00001645 }
Evan Chengadf85902007-12-05 09:51:10 +00001646 vrm.removeKillPoint(li.reg);
Evan Cheng81a03822007-11-17 00:40:40 +00001647 bool DefIsReMat = vrm.isReMaterialized(li.reg);
1648 Slot = vrm.getStackSlot(li.reg);
1649 assert(Slot != VirtRegMap::MAX_STACK_SLOT);
1650 MachineInstr *ReMatDefMI = DefIsReMat ?
1651 vrm.getReMaterializedMI(li.reg) : NULL;
1652 int LdSlot = 0;
1653 bool isLoadSS = DefIsReMat && tii_->isLoadFromStackSlot(ReMatDefMI, LdSlot);
1654 bool isLoad = isLoadSS ||
Chris Lattner749c6f62008-01-07 07:27:27 +00001655 (DefIsReMat && (ReMatDefMI->getDesc().isSimpleLoad()));
Evan Cheng81a03822007-11-17 00:40:40 +00001656 bool IsFirstRange = true;
1657 for (LiveInterval::Ranges::const_iterator
1658 I = li.ranges.begin(), E = li.ranges.end(); I != E; ++I) {
1659 // If this is a split live interval with multiple ranges, it means there
1660 // are two-address instructions that re-defined the value. Only the
1661 // first def can be rematerialized!
1662 if (IsFirstRange) {
Evan Chengcb3c3302007-11-29 23:02:50 +00001663 // Note ReMatOrigDefMI has already been deleted.
Evan Cheng81a03822007-11-17 00:40:40 +00001664 rewriteInstructionsForSpills(li, false, I, NULL, ReMatDefMI,
1665 Slot, LdSlot, isLoad, isLoadSS, DefIsReMat,
Evan Chengd70dbb52008-02-22 09:24:50 +00001666 false, vrm, rc, ReMatIds, loopInfo,
Evan Cheng0cbb1162007-11-29 01:06:25 +00001667 SpillMBBs, SpillIdxes, RestoreMBBs, RestoreIdxes,
Evan Cheng9c3c2212008-06-06 07:54:39 +00001668 MBBVRegsMap, NewLIs, SSWeight);
Evan Cheng81a03822007-11-17 00:40:40 +00001669 } else {
1670 rewriteInstructionsForSpills(li, false, I, NULL, 0,
1671 Slot, 0, false, false, false,
Evan Chengd70dbb52008-02-22 09:24:50 +00001672 false, vrm, rc, ReMatIds, loopInfo,
Evan Cheng0cbb1162007-11-29 01:06:25 +00001673 SpillMBBs, SpillIdxes, RestoreMBBs, RestoreIdxes,
Evan Cheng9c3c2212008-06-06 07:54:39 +00001674 MBBVRegsMap, NewLIs, SSWeight);
Evan Cheng81a03822007-11-17 00:40:40 +00001675 }
1676 IsFirstRange = false;
1677 }
Evan Cheng419852c2008-04-03 16:39:43 +00001678
Evan Cheng9c3c2212008-06-06 07:54:39 +00001679 SSWeight = 0.0f; // Already accounted for when split.
Evan Cheng4cce6b42008-04-11 17:53:36 +00001680 handleSpilledImpDefs(li, vrm, rc, NewLIs);
Evan Cheng81a03822007-11-17 00:40:40 +00001681 return NewLIs;
1682 }
1683
1684 bool TrySplit = SplitAtBB && !intervalIsInOneMBB(li);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001685 if (SplitLimit != -1 && (int)numSplits >= SplitLimit)
1686 TrySplit = false;
1687 if (TrySplit)
1688 ++numSplits;
Evan Chengf2fbca62007-11-12 06:35:08 +00001689 bool NeedStackSlot = false;
1690 for (LiveInterval::const_vni_iterator i = li.vni_begin(), e = li.vni_end();
1691 i != e; ++i) {
1692 const VNInfo *VNI = *i;
1693 unsigned VN = VNI->id;
1694 unsigned DefIdx = VNI->def;
1695 if (DefIdx == ~1U)
1696 continue; // Dead val#.
1697 // Is the def for the val# rematerializable?
Evan Cheng81a03822007-11-17 00:40:40 +00001698 MachineInstr *ReMatDefMI = (DefIdx == ~0u)
1699 ? 0 : getInstructionFromIndex(DefIdx);
Evan Cheng5ef3a042007-12-06 00:01:56 +00001700 bool dummy;
1701 if (ReMatDefMI && isReMaterializable(li, VNI, ReMatDefMI, dummy)) {
Evan Chengf2fbca62007-11-12 06:35:08 +00001702 // Remember how to remat the def of this val#.
Evan Cheng81a03822007-11-17 00:40:40 +00001703 ReMatOrigDefs[VN] = ReMatDefMI;
Dan Gohman2c3f7ae2008-07-17 23:49:46 +00001704 // Original def may be modified so we have to make a copy here.
Evan Cheng1ed99222008-07-19 00:37:25 +00001705 MachineInstr *Clone = mf_->CloneMachineInstr(ReMatDefMI);
1706 ClonedMIs.push_back(Clone);
1707 ReMatDefs[VN] = Clone;
Evan Chengf2fbca62007-11-12 06:35:08 +00001708
1709 bool CanDelete = true;
Evan Chengc3fc7d92007-11-29 09:49:23 +00001710 if (VNI->hasPHIKill) {
 1711 // A kill is a phi node, so not all of its uses can be rematerialized.
Evan Chengf2fbca62007-11-12 06:35:08 +00001712 // It must not be deleted.
Evan Chengc3fc7d92007-11-29 09:49:23 +00001713 CanDelete = false;
1714 // Need a stack slot if there is any live range where uses cannot be
1715 // rematerialized.
1716 NeedStackSlot = true;
Evan Chengf2fbca62007-11-12 06:35:08 +00001717 }
Evan Chengf2fbca62007-11-12 06:35:08 +00001718 if (CanDelete)
1719 ReMatDelete.set(VN);
1720 } else {
1721 // Need a stack slot if there is any live range where uses cannot be
1722 // rematerialized.
1723 NeedStackSlot = true;
1724 }
1725 }
1726
1727 // One stack slot per live interval.
Evan Cheng81a03822007-11-17 00:40:40 +00001728 if (NeedStackSlot && vrm.getPreSplitReg(li.reg) == 0)
Evan Chengf2fbca62007-11-12 06:35:08 +00001729 Slot = vrm.assignVirt2StackSlot(li.reg);
1730
1731 // Create new intervals and rewrite defs and uses.
1732 for (LiveInterval::Ranges::const_iterator
1733 I = li.ranges.begin(), E = li.ranges.end(); I != E; ++I) {
Evan Cheng81a03822007-11-17 00:40:40 +00001734 MachineInstr *ReMatDefMI = ReMatDefs[I->valno->id];
1735 MachineInstr *ReMatOrigDefMI = ReMatOrigDefs[I->valno->id];
1736 bool DefIsReMat = ReMatDefMI != NULL;
Evan Chengf2fbca62007-11-12 06:35:08 +00001737 bool CanDelete = ReMatDelete[I->valno->id];
1738 int LdSlot = 0;
Evan Cheng81a03822007-11-17 00:40:40 +00001739 bool isLoadSS = DefIsReMat && tii_->isLoadFromStackSlot(ReMatDefMI, LdSlot);
Evan Chengf2fbca62007-11-12 06:35:08 +00001740 bool isLoad = isLoadSS ||
Chris Lattner749c6f62008-01-07 07:27:27 +00001741 (DefIsReMat && ReMatDefMI->getDesc().isSimpleLoad());
Evan Cheng81a03822007-11-17 00:40:40 +00001742 rewriteInstructionsForSpills(li, TrySplit, I, ReMatOrigDefMI, ReMatDefMI,
Evan Cheng0cbb1162007-11-29 01:06:25 +00001743 Slot, LdSlot, isLoad, isLoadSS, DefIsReMat,
Evan Chengd70dbb52008-02-22 09:24:50 +00001744 CanDelete, vrm, rc, ReMatIds, loopInfo,
Evan Cheng0cbb1162007-11-29 01:06:25 +00001745 SpillMBBs, SpillIdxes, RestoreMBBs, RestoreIdxes,
Evan Cheng9c3c2212008-06-06 07:54:39 +00001746 MBBVRegsMap, NewLIs, SSWeight);
Evan Chengf2fbca62007-11-12 06:35:08 +00001747 }
1748
Evan Cheng0cbb1162007-11-29 01:06:25 +00001749 // Insert spills / restores if we are splitting.
Evan Cheng419852c2008-04-03 16:39:43 +00001750 if (!TrySplit) {
Evan Cheng4cce6b42008-04-11 17:53:36 +00001751 handleSpilledImpDefs(li, vrm, rc, NewLIs);
Evan Cheng1953d0c2007-11-29 10:12:14 +00001752 return NewLIs;
Evan Cheng419852c2008-04-03 16:39:43 +00001753 }
Evan Cheng1953d0c2007-11-29 10:12:14 +00001754
Evan Chengb50bb8c2007-12-05 08:16:32 +00001755 SmallPtrSet<LiveInterval*, 4> AddedKill;
Evan Chengaee4af62007-12-02 08:30:39 +00001756 SmallVector<unsigned, 2> Ops;
Evan Cheng1953d0c2007-11-29 10:12:14 +00001757 if (NeedStackSlot) {
1758 int Id = SpillMBBs.find_first();
1759 while (Id != -1) {
Evan Cheng9c3c2212008-06-06 07:54:39 +00001760 MachineBasicBlock *MBB = mf_->getBlockNumbered(Id);
1761 unsigned loopDepth = loopInfo->getLoopDepth(MBB);
Evan Cheng1953d0c2007-11-29 10:12:14 +00001762 std::vector<SRInfo> &spills = SpillIdxes[Id];
1763 for (unsigned i = 0, e = spills.size(); i != e; ++i) {
1764 int index = spills[i].index;
1765 unsigned VReg = spills[i].vreg;
Evan Cheng597d10d2007-12-04 00:32:23 +00001766 LiveInterval &nI = getOrCreateInterval(VReg);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001767 bool isReMat = vrm.isReMaterialized(VReg);
1768 MachineInstr *MI = getInstructionFromIndex(index);
Evan Chengaee4af62007-12-02 08:30:39 +00001769 bool CanFold = false;
1770 bool FoundUse = false;
1771 Ops.clear();
Evan Chengcddbb832007-11-30 21:23:43 +00001772 if (spills[i].canFold) {
Evan Chengaee4af62007-12-02 08:30:39 +00001773 CanFold = true;
Evan Cheng0cbb1162007-11-29 01:06:25 +00001774 for (unsigned j = 0, ee = MI->getNumOperands(); j != ee; ++j) {
1775 MachineOperand &MO = MI->getOperand(j);
1776 if (!MO.isRegister() || MO.getReg() != VReg)
1777 continue;
Evan Chengaee4af62007-12-02 08:30:39 +00001778
1779 Ops.push_back(j);
1780 if (MO.isDef())
Evan Chengcddbb832007-11-30 21:23:43 +00001781 continue;
Evan Chengaee4af62007-12-02 08:30:39 +00001782 if (isReMat ||
1783 (!FoundUse && !alsoFoldARestore(Id, index, VReg,
1784 RestoreMBBs, RestoreIdxes))) {
1785 // MI has two-address uses of the same register. If the use
1786 // isn't the first and only use in the BB, then we can't fold
1787 // it. FIXME: Move this to rewriteInstructionsForSpills.
1788 CanFold = false;
Evan Chengcddbb832007-11-30 21:23:43 +00001789 break;
1790 }
Evan Chengaee4af62007-12-02 08:30:39 +00001791 FoundUse = true;
Evan Cheng0cbb1162007-11-29 01:06:25 +00001792 }
1793 }
1794 // Fold the store into the def if possible.
Evan Chengcddbb832007-11-30 21:23:43 +00001795 bool Folded = false;
Evan Chengaee4af62007-12-02 08:30:39 +00001796 if (CanFold && !Ops.empty()) {
1797 if (tryFoldMemoryOperand(MI, vrm, NULL, index, Ops, true, Slot,VReg)){
Evan Chengcddbb832007-11-30 21:23:43 +00001798 Folded = true;
Evan Chengf38d14f2007-12-05 09:05:34 +00001799 if (FoundUse > 0) {
Evan Chengaee4af62007-12-02 08:30:39 +00001800 // Also folded uses; do not issue a load.
1801 eraseRestoreInfo(Id, index, VReg, RestoreMBBs, RestoreIdxes);
Evan Chengf38d14f2007-12-05 09:05:34 +00001802 nI.removeRange(getLoadIndex(index), getUseIndex(index)+1);
1803 }
Evan Cheng597d10d2007-12-04 00:32:23 +00001804 nI.removeRange(getDefIndex(index), getStoreIndex(index));
Evan Chengcddbb832007-11-30 21:23:43 +00001805 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00001806 }
1807
Evan Cheng7e073ba2008-04-09 20:57:25 +00001808 // Otherwise tell the spiller to issue a spill.
Evan Chengb50bb8c2007-12-05 08:16:32 +00001809 if (!Folded) {
1810 LiveRange *LR = &nI.ranges[nI.ranges.size()-1];
1811 bool isKill = LR->end == getStoreIndex(index);
Evan Chengb0a6f622008-05-20 08:10:37 +00001812 if (!MI->registerDefIsDead(nI.reg))
1813 // No need to spill a dead def.
1814 vrm.addSpillPoint(VReg, isKill, MI);
Evan Chengb50bb8c2007-12-05 08:16:32 +00001815 if (isKill)
1816 AddedKill.insert(&nI);
1817 }
Evan Cheng9c3c2212008-06-06 07:54:39 +00001818
1819 // Update spill slot weight.
1820 if (!isReMat)
Evan Chengc3417602008-06-21 06:45:54 +00001821 SSWeight += getSpillWeight(true, false, loopDepth);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001822 }
Evan Cheng1953d0c2007-11-29 10:12:14 +00001823 Id = SpillMBBs.find_next(Id);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001824 }
Evan Cheng1953d0c2007-11-29 10:12:14 +00001825 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00001826
Evan Cheng1953d0c2007-11-29 10:12:14 +00001827 int Id = RestoreMBBs.find_first();
1828 while (Id != -1) {
Evan Cheng9c3c2212008-06-06 07:54:39 +00001829 MachineBasicBlock *MBB = mf_->getBlockNumbered(Id);
1830 unsigned loopDepth = loopInfo->getLoopDepth(MBB);
1831
Evan Cheng1953d0c2007-11-29 10:12:14 +00001832 std::vector<SRInfo> &restores = RestoreIdxes[Id];
1833 for (unsigned i = 0, e = restores.size(); i != e; ++i) {
1834 int index = restores[i].index;
1835 if (index == -1)
1836 continue;
1837 unsigned VReg = restores[i].vreg;
Evan Cheng597d10d2007-12-04 00:32:23 +00001838 LiveInterval &nI = getOrCreateInterval(VReg);
Evan Cheng9c3c2212008-06-06 07:54:39 +00001839 bool isReMat = vrm.isReMaterialized(VReg);
Evan Cheng81a03822007-11-17 00:40:40 +00001840 MachineInstr *MI = getInstructionFromIndex(index);
Evan Chengaee4af62007-12-02 08:30:39 +00001841 bool CanFold = false;
1842 Ops.clear();
Evan Chengcddbb832007-11-30 21:23:43 +00001843 if (restores[i].canFold) {
Evan Chengaee4af62007-12-02 08:30:39 +00001844 CanFold = true;
Evan Cheng81a03822007-11-17 00:40:40 +00001845 for (unsigned j = 0, ee = MI->getNumOperands(); j != ee; ++j) {
1846 MachineOperand &MO = MI->getOperand(j);
1847 if (!MO.isRegister() || MO.getReg() != VReg)
1848 continue;
Evan Chengaee4af62007-12-02 08:30:39 +00001849
Evan Cheng0cbb1162007-11-29 01:06:25 +00001850 if (MO.isDef()) {
Evan Chengaee4af62007-12-02 08:30:39 +00001851 // If this restore were to be folded, it would have been folded
1852 // already.
1853 CanFold = false;
Evan Cheng81a03822007-11-17 00:40:40 +00001854 break;
1855 }
Evan Chengaee4af62007-12-02 08:30:39 +00001856 Ops.push_back(j);
Evan Cheng81a03822007-11-17 00:40:40 +00001857 }
1858 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00001859
1860 // Fold the load into the use if possible.
Evan Chengcddbb832007-11-30 21:23:43 +00001861 bool Folded = false;
Evan Chengaee4af62007-12-02 08:30:39 +00001862 if (CanFold && !Ops.empty()) {
Evan Cheng9c3c2212008-06-06 07:54:39 +00001863 if (!isReMat)
Evan Chengaee4af62007-12-02 08:30:39 +00001864 Folded = tryFoldMemoryOperand(MI, vrm, NULL,index,Ops,true,Slot,VReg);
1865 else {
Evan Cheng0cbb1162007-11-29 01:06:25 +00001866 MachineInstr *ReMatDefMI = vrm.getReMaterializedMI(VReg);
1867 int LdSlot = 0;
1868 bool isLoadSS = tii_->isLoadFromStackSlot(ReMatDefMI, LdSlot);
1869 // If the rematerializable def is a load, also try to fold it.
Chris Lattner749c6f62008-01-07 07:27:27 +00001870 if (isLoadSS || ReMatDefMI->getDesc().isSimpleLoad())
Evan Chengaee4af62007-12-02 08:30:39 +00001871 Folded = tryFoldMemoryOperand(MI, vrm, ReMatDefMI, index,
1872 Ops, isLoadSS, LdSlot, VReg);
Evan Chengd70dbb52008-02-22 09:24:50 +00001873 unsigned ImpUse = getReMatImplicitUse(li, ReMatDefMI);
1874 if (ImpUse) {
1875 // Re-matting an instruction with virtual register use. Add the
1876 // register as an implicit use on the use MI and update the register
Evan Cheng24d2f8a2008-03-31 07:53:30 +00001877 // interval's spill weight to HUGE_VALF to prevent it from being
1878 // spilled.
Evan Chengd70dbb52008-02-22 09:24:50 +00001879 LiveInterval &ImpLi = getInterval(ImpUse);
Evan Cheng24d2f8a2008-03-31 07:53:30 +00001880 ImpLi.weight = HUGE_VALF;
Evan Chengd70dbb52008-02-22 09:24:50 +00001881 MI->addOperand(MachineOperand::CreateReg(ImpUse, false, true));
1882 }
Evan Chengaee4af62007-12-02 08:30:39 +00001883 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00001884 }
1885 // If folding is not possible / failed, then tell the spiller to issue a
1886 // load / rematerialization for us.
Evan Cheng597d10d2007-12-04 00:32:23 +00001887 if (Folded)
1888 nI.removeRange(getLoadIndex(index), getUseIndex(index)+1);
Evan Chengb50bb8c2007-12-05 08:16:32 +00001889 else
Evan Cheng0cbb1162007-11-29 01:06:25 +00001890 vrm.addRestorePoint(VReg, MI);
Evan Cheng9c3c2212008-06-06 07:54:39 +00001891
1892 // Update spill slot weight.
1893 if (!isReMat)
Evan Chengc3417602008-06-21 06:45:54 +00001894 SSWeight += getSpillWeight(false, true, loopDepth);
Evan Cheng81a03822007-11-17 00:40:40 +00001895 }
Evan Cheng1953d0c2007-11-29 10:12:14 +00001896 Id = RestoreMBBs.find_next(Id);
Evan Cheng81a03822007-11-17 00:40:40 +00001897 }
1898
Evan Chengb50bb8c2007-12-05 08:16:32 +00001899 // Finalize intervals: add kills, finalize spill weights, and filter out
1900 // dead intervals.
Evan Cheng597d10d2007-12-04 00:32:23 +00001901 std::vector<LiveInterval*> RetNewLIs;
1902 for (unsigned i = 0, e = NewLIs.size(); i != e; ++i) {
1903 LiveInterval *LI = NewLIs[i];
1904 if (!LI->empty()) {
Owen Anderson496bac52008-07-23 19:47:27 +00001905 LI->weight /= InstrSlots::NUM * getApproximateInstructionCount(*LI);
Evan Chengb50bb8c2007-12-05 08:16:32 +00001906 if (!AddedKill.count(LI)) {
1907 LiveRange *LR = &LI->ranges[LI->ranges.size()-1];
Evan Chengd120ffd2007-12-05 10:24:35 +00001908 unsigned LastUseIdx = getBaseIndex(LR->end);
1909 MachineInstr *LastUse = getInstructionFromIndex(LastUseIdx);
Evan Cheng6130f662008-03-05 00:59:57 +00001910 int UseIdx = LastUse->findRegisterUseOperandIdx(LI->reg, false);
Evan Chengb50bb8c2007-12-05 08:16:32 +00001911 assert(UseIdx != -1);
Evan Chengd70dbb52008-02-22 09:24:50 +00001912 if (LastUse->getOperand(UseIdx).isImplicit() ||
1913 LastUse->getDesc().getOperandConstraint(UseIdx,TOI::TIED_TO) == -1){
Evan Chengb50bb8c2007-12-05 08:16:32 +00001914 LastUse->getOperand(UseIdx).setIsKill();
Evan Chengd120ffd2007-12-05 10:24:35 +00001915 vrm.addKillPoint(LI->reg, LastUseIdx);
Evan Chengadf85902007-12-05 09:51:10 +00001916 }
Evan Chengb50bb8c2007-12-05 08:16:32 +00001917 }
Evan Cheng597d10d2007-12-04 00:32:23 +00001918 RetNewLIs.push_back(LI);
1919 }
1920 }
Evan Cheng81a03822007-11-17 00:40:40 +00001921
Evan Cheng4cce6b42008-04-11 17:53:36 +00001922 handleSpilledImpDefs(li, vrm, rc, RetNewLIs);
Evan Cheng597d10d2007-12-04 00:32:23 +00001923 return RetNewLIs;
Evan Chengf2fbca62007-11-12 06:35:08 +00001924}
Evan Cheng676dd7c2008-03-11 07:19:34 +00001925
1926/// hasAllocatableSuperReg - Return true if the specified physical register has
1927/// any super register that's allocatable.
1928bool LiveIntervals::hasAllocatableSuperReg(unsigned Reg) const {
1929 for (const unsigned* AS = tri_->getSuperRegisters(Reg); *AS; ++AS)
1930 if (allocatableRegs_[*AS] && hasInterval(*AS))
1931 return true;
1932 return false;
1933}
1934
1935/// getRepresentativeReg - Find the largest super register of the specified
1936/// physical register.
1937unsigned LiveIntervals::getRepresentativeReg(unsigned Reg) const {
1938 // Find the largest super-register that is allocatable.
1939 unsigned BestReg = Reg;
1940 for (const unsigned* AS = tri_->getSuperRegisters(Reg); *AS; ++AS) {
1941 unsigned SuperReg = *AS;
1942 if (!hasAllocatableSuperReg(SuperReg) && hasInterval(SuperReg)) {
1943 BestReg = SuperReg;
1944 break;
1945 }
1946 }
1947 return BestReg;
1948}
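
// Purely illustrative (target dependent): if Reg were the 16-bit x86 register
// AX, the walk above would return its 32-bit super-register EAX as the
// representative, provided EAX has a live interval and no super-register of
// EAX is both allocatable and tracked by an interval.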
1949
1950/// getNumConflictsWithPhysReg - Return the number of uses and defs of the
1951/// specified interval that conflicts with the specified physical register.
1952unsigned LiveIntervals::getNumConflictsWithPhysReg(const LiveInterval &li,
1953 unsigned PhysReg) const {
1954 unsigned NumConflicts = 0;
1955 const LiveInterval &pli = getInterval(getRepresentativeReg(PhysReg));
1956 for (MachineRegisterInfo::reg_iterator I = mri_->reg_begin(li.reg),
1957 E = mri_->reg_end(); I != E; ++I) {
1958 MachineOperand &O = I.getOperand();
1959 MachineInstr *MI = O.getParent();
1960 unsigned Index = getInstructionIndex(MI);
1961 if (pli.liveAt(Index))
1962 ++NumConflicts;
1963 }
1964 return NumConflicts;
1965}
1966
1967/// spillPhysRegAroundRegDefsUses - Spill the specified physical register
1968/// around all defs and uses of the specified interval.
1969void LiveIntervals::spillPhysRegAroundRegDefsUses(const LiveInterval &li,
1970 unsigned PhysReg, VirtRegMap &vrm) {
1971 unsigned SpillReg = getRepresentativeReg(PhysReg);
1972
1973 for (const unsigned *AS = tri_->getAliasSet(PhysReg); *AS; ++AS)
 1974 // If there are registers which alias PhysReg but are not a
 1975 // sub-register of the chosen representative super register, assert,
 1976 // since we can't handle that case yet.
1977 assert(*AS == SpillReg || !allocatableRegs_[*AS] ||
1978 tri_->isSuperRegister(*AS, SpillReg));
1979
1980 LiveInterval &pli = getInterval(SpillReg);
1981 SmallPtrSet<MachineInstr*, 8> SeenMIs;
1982 for (MachineRegisterInfo::reg_iterator I = mri_->reg_begin(li.reg),
1983 E = mri_->reg_end(); I != E; ++I) {
1984 MachineOperand &O = I.getOperand();
1985 MachineInstr *MI = O.getParent();
1986 if (SeenMIs.count(MI))
1987 continue;
1988 SeenMIs.insert(MI);
1989 unsigned Index = getInstructionIndex(MI);
1990 if (pli.liveAt(Index)) {
1991 vrm.addEmergencySpill(SpillReg, MI);
1992 pli.removeRange(getLoadIndex(Index), getStoreIndex(Index)+1);
1993 for (const unsigned* AS = tri_->getSubRegisters(SpillReg); *AS; ++AS) {
1994 if (!hasInterval(*AS))
1995 continue;
1996 LiveInterval &spli = getInterval(*AS);
1997 if (spli.liveAt(Index))
1998 spli.removeRange(getLoadIndex(Index), getStoreIndex(Index)+1);
1999 }
2000 }
2001 }
2002}
Owen Andersonc4dc1322008-06-05 17:15:43 +00002003
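/// addLiveRangeToEndOfBlock - Create a new value for the given register,
/// defined by startInst, and extend its live range to the end of startInst's
/// basic block. The value is marked as having a PHI kill at the block end,
/// and the newly added range is returned.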
2004LiveRange LiveIntervals::addLiveRangeToEndOfBlock(unsigned reg,
2005 MachineInstr* startInst) {
2006 LiveInterval& Interval = getOrCreateInterval(reg);
2007 VNInfo* VN = Interval.getNextValue(
2008 getInstructionIndex(startInst) + InstrSlots::DEF,
2009 startInst, getVNInfoAllocator());
2010 VN->hasPHIKill = true;
2011 VN->kills.push_back(getMBBEndIdx(startInst->getParent()));
2012 LiveRange LR(getInstructionIndex(startInst) + InstrSlots::DEF,
2013 getMBBEndIdx(startInst->getParent()) + 1, VN);
2014 Interval.addRange(LR);
2015
2016 return LR;
2017}