//===-- LiveIntervalAnalysis.cpp - Live Interval Analysis -----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License.  See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the LiveInterval analysis pass which is used
// by the Linear Scan Register allocator. This pass linearizes the
// basic blocks of the function in DFS order and uses the
// LiveVariables pass to conservatively compute live intervals for
// each virtual and physical register.
//
//===----------------------------------------------------------------------===//
#define DEBUG_TYPE "liveintervals"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "VirtRegMap.h"
#include "llvm/Value.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/CodeGen/LiveVariables.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/PseudoSourceValue.h"
#include "llvm/Target/TargetRegisterInfo.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetOptions.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/STLExtras.h"
#include <algorithm>
#include <cmath>
using namespace llvm;

// Hidden options for help debugging.
static cl::opt<bool> DisableReMat("disable-rematerialization",
                                  cl::init(false), cl::Hidden);

static cl::opt<bool> SplitAtBB("split-intervals-at-bb",
                               cl::init(true), cl::Hidden);
static cl::opt<int> SplitLimit("split-limit",
                               cl::init(-1), cl::Hidden);

static cl::opt<bool> EnableAggressiveRemat("aggressive-remat", cl::Hidden);

static cl::opt<bool> EnableFastSpilling("fast-spill",
                                        cl::init(false), cl::Hidden);

STATISTIC(numIntervals, "Number of original intervals");
STATISTIC(numFolds    , "Number of loads/stores folded into instructions");
STATISTIC(numSplits   , "Number of intervals split");

char LiveIntervals::ID = 0;
static RegisterPass<LiveIntervals> X("liveintervals", "Live Interval Analysis");
void LiveIntervals::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<AliasAnalysis>();
  AU.addPreserved<AliasAnalysis>();
  AU.addPreserved<LiveVariables>();
  AU.addRequired<LiveVariables>();
  AU.addPreservedID(MachineLoopInfoID);
  AU.addPreservedID(MachineDominatorsID);

  if (!StrongPHIElim) {
    AU.addPreservedID(PHIEliminationID);
    AU.addRequiredID(PHIEliminationID);
  }

  AU.addRequiredID(TwoAddressInstructionPassID);
  MachineFunctionPass::getAnalysisUsage(AU);
}
void LiveIntervals::releaseMemory() {
  // Free the live intervals themselves.
  for (DenseMap<unsigned, LiveInterval*>::iterator I = r2iMap_.begin(),
       E = r2iMap_.end(); I != E; ++I)
    delete I->second;

  MBB2IdxMap.clear();
  Idx2MBBMap.clear();
  mi2iMap_.clear();
  i2miMap_.clear();
  r2iMap_.clear();
  // Release VNInfo memory regions after all VNInfo objects are dtor'd.
  VNInfoAllocator.Reset();
  while (!ClonedMIs.empty()) {
    MachineInstr *MI = ClonedMIs.back();
    ClonedMIs.pop_back();
    mf_->DeleteMachineInstr(MI);
  }
}

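/// computeNumbering - Number all MachineInstrs and MachineBasicBlocks and
/// rebuild the index maps (mi2iMap_, i2miMap_, MBB2IdxMap, Idx2MBBMap).
/// Each block gets an empty leading slot and each instruction is followed by
/// max(1, numdefs) empty slots; live ranges, value numbers, and kill indices
/// recorded against the old numbering are then remapped to the new indices.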
void LiveIntervals::computeNumbering() {
  Index2MiMap OldI2MI = i2miMap_;
  std::vector<IdxMBBPair> OldI2MBB = Idx2MBBMap;

  Idx2MBBMap.clear();
  MBB2IdxMap.clear();
  mi2iMap_.clear();
  i2miMap_.clear();

  FunctionSize = 0;

  // Number MachineInstrs and MachineBasicBlocks.
  // Initialize MBB indexes to a sentinel.
  MBB2IdxMap.resize(mf_->getNumBlockIDs(), std::make_pair(~0U,~0U));

  unsigned MIIndex = 0;
  for (MachineFunction::iterator MBB = mf_->begin(), E = mf_->end();
       MBB != E; ++MBB) {
    unsigned StartIdx = MIIndex;

    // Insert an empty slot at the beginning of each block.
    MIIndex += InstrSlots::NUM;
    i2miMap_.push_back(0);

    for (MachineBasicBlock::iterator I = MBB->begin(), E = MBB->end();
         I != E; ++I) {
      bool inserted = mi2iMap_.insert(std::make_pair(I, MIIndex)).second;
      assert(inserted && "multiple MachineInstr -> index mappings");
      i2miMap_.push_back(I);
      MIIndex += InstrSlots::NUM;
      FunctionSize++;

      // Insert max(1, numdefs) empty slots after every instruction.
      unsigned Slots = I->getDesc().getNumDefs();
      if (Slots == 0)
        Slots = 1;
      MIIndex += InstrSlots::NUM * Slots;
      while (Slots--)
        i2miMap_.push_back(0);
    }

    // Set the MBB2IdxMap entry for this MBB.
    MBB2IdxMap[MBB->getNumber()] = std::make_pair(StartIdx, MIIndex - 1);
    Idx2MBBMap.push_back(std::make_pair(StartIdx, MBB));
  }
  std::sort(Idx2MBBMap.begin(), Idx2MBBMap.end(), Idx2MBBCompare());

  if (!OldI2MI.empty())
    for (iterator OI = begin(), OE = end(); OI != OE; ++OI) {
      for (LiveInterval::iterator LI = OI->second->begin(),
           LE = OI->second->end(); LI != LE; ++LI) {

        // Remap the start index of the live range to the corresponding new
        // number, or our best guess at what it _should_ correspond to if the
        // original instruction has been erased.  This is either the following
        // instruction or its predecessor.
        unsigned index = LI->start / InstrSlots::NUM;
        unsigned offset = LI->start % InstrSlots::NUM;
        if (offset == InstrSlots::LOAD) {
          std::vector<IdxMBBPair>::const_iterator I =
            std::lower_bound(OldI2MBB.begin(), OldI2MBB.end(), LI->start);
          // Take the pair containing the index
          std::vector<IdxMBBPair>::const_iterator J =
            (I == OldI2MBB.end() && OldI2MBB.size()>0) ? (I-1): I;

          LI->start = getMBBStartIdx(J->second);
        } else {
          LI->start = mi2iMap_[OldI2MI[index]] + offset;
        }

        // Remap the ending index in the same way that we remapped the start,
        // except for the final step where we always map to the immediately
        // following instruction.
        index = (LI->end - 1) / InstrSlots::NUM;
        offset = LI->end % InstrSlots::NUM;
        if (offset == InstrSlots::LOAD) {
          // VReg dies at end of block.
          std::vector<IdxMBBPair>::const_iterator I =
            std::lower_bound(OldI2MBB.begin(), OldI2MBB.end(), LI->end);
          --I;

          LI->end = getMBBEndIdx(I->second) + 1;
        } else {
          unsigned idx = index;
          while (index < OldI2MI.size() && !OldI2MI[index]) ++index;

          if (index != OldI2MI.size())
            LI->end = mi2iMap_[OldI2MI[index]] + (idx == index ? offset : 0);
          else
            LI->end = InstrSlots::NUM * i2miMap_.size();
        }
      }

      for (LiveInterval::vni_iterator VNI = OI->second->vni_begin(),
           VNE = OI->second->vni_end(); VNI != VNE; ++VNI) {
        VNInfo* vni = *VNI;

        // Remap the VNInfo def index, which works the same as the
        // start indices above. VN's with special sentinel defs
        // don't need to be remapped.
        if (vni->def != ~0U && vni->def != ~1U) {
          unsigned index = vni->def / InstrSlots::NUM;
          unsigned offset = vni->def % InstrSlots::NUM;
          if (offset == InstrSlots::LOAD) {
            std::vector<IdxMBBPair>::const_iterator I =
              std::lower_bound(OldI2MBB.begin(), OldI2MBB.end(), vni->def);
            // Take the pair containing the index
            std::vector<IdxMBBPair>::const_iterator J =
              (I == OldI2MBB.end() && OldI2MBB.size()>0) ? (I-1): I;

            vni->def = getMBBStartIdx(J->second);
          } else {
            vni->def = mi2iMap_[OldI2MI[index]] + offset;
          }
        }

        // Remap the VNInfo kill indices, which works the same as
        // the end indices above.
        for (size_t i = 0; i < vni->kills.size(); ++i) {
          // PHI kills don't need to be remapped.
          if (!vni->kills[i]) continue;

          unsigned index = (vni->kills[i]-1) / InstrSlots::NUM;
          unsigned offset = vni->kills[i] % InstrSlots::NUM;
          if (offset == InstrSlots::LOAD) {
            std::vector<IdxMBBPair>::const_iterator I =
              std::lower_bound(OldI2MBB.begin(), OldI2MBB.end(), vni->kills[i]);
            --I;

            vni->kills[i] = getMBBEndIdx(I->second);
          } else {
            unsigned idx = index;
            while (index < OldI2MI.size() && !OldI2MI[index]) ++index;

            if (index != OldI2MI.size())
              vni->kills[i] = mi2iMap_[OldI2MI[index]] +
                              (idx == index ? offset : 0);
            else
              vni->kills[i] = InstrSlots::NUM * i2miMap_.size();
          }
        }
      }
    }
}

/// runOnMachineFunction - Compute live intervals for the whole function.
///
bool LiveIntervals::runOnMachineFunction(MachineFunction &fn) {
  mf_ = &fn;
  mri_ = &mf_->getRegInfo();
  tm_ = &fn.getTarget();
  tri_ = tm_->getRegisterInfo();
  tii_ = tm_->getInstrInfo();
  aa_ = &getAnalysis<AliasAnalysis>();
  lv_ = &getAnalysis<LiveVariables>();
  allocatableRegs_ = tri_->getAllocatableSet(fn);

  computeNumbering();
  computeIntervals();

  numIntervals += getNumIntervals();

  DEBUG(dump());
  return true;
}

/// print - Implement the dump method.
void LiveIntervals::print(std::ostream &O, const Module* ) const {
  O << "********** INTERVALS **********\n";
  for (const_iterator I = begin(), E = end(); I != E; ++I) {
    I->second->print(O, tri_);
    O << "\n";
  }

  O << "********** MACHINEINSTRS **********\n";
  for (MachineFunction::iterator mbbi = mf_->begin(), mbbe = mf_->end();
       mbbi != mbbe; ++mbbi) {
    O << ((Value*)mbbi->getBasicBlock())->getName() << ":\n";
    for (MachineBasicBlock::iterator mii = mbbi->begin(),
           mie = mbbi->end(); mii != mie; ++mii) {
      O << getInstructionIndex(mii) << '\t' << *mii;
    }
  }
}

/// conflictsWithPhysRegDef - Returns true if the specified register
/// is defined during the duration of the specified interval.
bool LiveIntervals::conflictsWithPhysRegDef(const LiveInterval &li,
                                            VirtRegMap &vrm, unsigned reg) {
  for (LiveInterval::Ranges::const_iterator
         I = li.ranges.begin(), E = li.ranges.end(); I != E; ++I) {
    for (unsigned index = getBaseIndex(I->start),
           end = getBaseIndex(I->end-1) + InstrSlots::NUM; index != end;
         index += InstrSlots::NUM) {
      // skip deleted instructions
      while (index != end && !getInstructionFromIndex(index))
        index += InstrSlots::NUM;
      if (index == end) break;

      MachineInstr *MI = getInstructionFromIndex(index);
      unsigned SrcReg, DstReg;
      if (tii_->isMoveInstr(*MI, SrcReg, DstReg))
        if (SrcReg == li.reg || DstReg == li.reg)
          continue;
      for (unsigned i = 0; i != MI->getNumOperands(); ++i) {
        MachineOperand& mop = MI->getOperand(i);
        if (!mop.isReg())
          continue;
        unsigned PhysReg = mop.getReg();
        if (PhysReg == 0 || PhysReg == li.reg)
          continue;
        if (TargetRegisterInfo::isVirtualRegister(PhysReg)) {
          if (!vrm.hasPhys(PhysReg))
            continue;
          PhysReg = vrm.getPhys(PhysReg);
        }
        if (PhysReg && tri_->regsOverlap(PhysReg, reg))
          return true;
      }
    }
  }

  return false;
}

void LiveIntervals::printRegName(unsigned reg) const {
  if (TargetRegisterInfo::isPhysicalRegister(reg))
    cerr << tri_->getName(reg);
  else
    cerr << "%reg" << reg;
}

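/// handleVirtualRegisterDef - Update the live interval of the virtual
/// register defined by operand MO of instruction mi. The first def of a vreg
/// starts its interval using LiveVariables kill information; subsequent defs
/// (from two-address rewriting or PHI elimination) split the interval into
/// separate value numbers.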
void LiveIntervals::handleVirtualRegisterDef(MachineBasicBlock *mbb,
                                             MachineBasicBlock::iterator mi,
                                             unsigned MIIdx, MachineOperand& MO,
                                             unsigned MOIdx,
                                             LiveInterval &interval) {
  DOUT << "\t\tregister: "; DEBUG(printRegName(interval.reg));
  LiveVariables::VarInfo& vi = lv_->getVarInfo(interval.reg);

  if (mi->getOpcode() == TargetInstrInfo::IMPLICIT_DEF) {
    DOUT << "is a implicit_def\n";
    return;
  }

  // Virtual registers may be defined multiple times (due to phi
  // elimination and 2-addr elimination).  Much of what we do only has to be
  // done once for the vreg.  We use an empty interval to detect the first
  // time we see a vreg.
  if (interval.empty()) {
    // Get the Idx of the defining instructions.
    unsigned defIndex = getDefIndex(MIIdx);
    // Earlyclobbers move back one.
    if (MO.isEarlyClobber())
      defIndex = getUseIndex(MIIdx);
    VNInfo *ValNo;
    MachineInstr *CopyMI = NULL;
    unsigned SrcReg, DstReg;
    if (mi->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG ||
        mi->getOpcode() == TargetInstrInfo::INSERT_SUBREG ||
        tii_->isMoveInstr(*mi, SrcReg, DstReg))
      CopyMI = mi;
    ValNo = interval.getNextValue(defIndex, CopyMI, VNInfoAllocator);

    assert(ValNo->id == 0 && "First value in interval is not 0?");

    // Loop over all of the blocks that the vreg is defined in.  There are
    // two cases we have to handle here.  The most common case is a vreg
    // whose lifetime is contained within a basic block.  In this case there
    // will be a single kill, in MBB, which comes after the definition.
    if (vi.Kills.size() == 1 && vi.Kills[0]->getParent() == mbb) {
      // FIXME: what about dead vars?
      unsigned killIdx;
      if (vi.Kills[0] != mi)
        killIdx = getUseIndex(getInstructionIndex(vi.Kills[0]))+1;
      else
        killIdx = defIndex+1;

      // If the kill happens after the definition, we have an intra-block
      // live range.
      if (killIdx > defIndex) {
        assert(vi.AliveBlocks.none() &&
               "Shouldn't be alive across any blocks!");
        LiveRange LR(defIndex, killIdx, ValNo);
        interval.addRange(LR);
        DOUT << " +" << LR << "\n";
        interval.addKill(ValNo, killIdx);
        return;
      }
    }

    // The other case we handle is when a virtual register lives to the end
    // of the defining block, potentially live across some blocks, then is
    // live into some number of blocks, but gets killed.  Start by adding a
    // range that goes from this definition to the end of the defining block.
    LiveRange NewLR(defIndex, getMBBEndIdx(mbb)+1, ValNo);
    DOUT << " +" << NewLR;
    interval.addRange(NewLR);

    // Iterate over all of the blocks that the variable is completely
    // live in, adding [instrIndex(begin), instrIndex(end)+4) to the
    // live interval.
    for (int i = vi.AliveBlocks.find_first(); i != -1;
         i = vi.AliveBlocks.find_next(i)) {
      LiveRange LR(getMBBStartIdx(i),
                   getMBBEndIdx(i)+1,  // MBB ends at -1.
                   ValNo);
      interval.addRange(LR);
      DOUT << " +" << LR;
    }

    // Finally, this virtual register is live from the start of any killing
    // block to the 'use' slot of the killing instruction.
    for (unsigned i = 0, e = vi.Kills.size(); i != e; ++i) {
      MachineInstr *Kill = vi.Kills[i];
      unsigned killIdx = getUseIndex(getInstructionIndex(Kill))+1;
      LiveRange LR(getMBBStartIdx(Kill->getParent()),
                   killIdx, ValNo);
      interval.addRange(LR);
      interval.addKill(ValNo, killIdx);
      DOUT << " +" << LR;
    }

  } else {
    // If this is the second time we see a virtual register definition, it
    // must be due to phi elimination or two addr elimination.  If this is
    // the result of two address elimination, then the vreg is one of the
    // def-and-use register operand.
    if (mi->isRegReDefinedByTwoAddr(interval.reg, MOIdx)) {
      // If this is a two-address definition, then we have already processed
      // the live range.  The only problem is that we didn't realize there
      // are actually two values in the live interval.  Because of this we
      // need to take the LiveRegion that defines this register and split it
      // into two values.
      assert(interval.containsOneValue());
      unsigned DefIndex = getDefIndex(interval.getValNumInfo(0)->def);
      unsigned RedefIndex = getDefIndex(MIIdx);
      // Earlyclobbers move back one.
      if (MO.isEarlyClobber())
        RedefIndex = getUseIndex(MIIdx);

      const LiveRange *OldLR = interval.getLiveRangeContaining(RedefIndex-1);
      VNInfo *OldValNo = OldLR->valno;

      // Delete the initial value, which should be short and continuous,
      // because the 2-addr copy must be in the same MBB as the redef.
      interval.removeRange(DefIndex, RedefIndex);

      // Two-address vregs should always only be redefined once.  This means
      // that at this point, there should be exactly one value number in it.
      assert(interval.containsOneValue() && "Unexpected 2-addr liveint!");

      // The new value number (#1) is defined by the instruction we claimed
      // defined value #0.
      VNInfo *ValNo = interval.getNextValue(OldValNo->def, OldValNo->copy,
                                            VNInfoAllocator);

      // Value#0 is now defined by the 2-addr instruction.
      OldValNo->def  = RedefIndex;
      OldValNo->copy = 0;

      // Add the new live interval which replaces the range for the input copy.
      LiveRange LR(DefIndex, RedefIndex, ValNo);
      DOUT << " replace range with " << LR;
      interval.addRange(LR);
      interval.addKill(ValNo, RedefIndex);

      // If this redefinition is dead, we need to add a dummy unit live
      // range covering the def slot.
      if (MO.isDead())
        interval.addRange(LiveRange(RedefIndex, RedefIndex+1, OldValNo));

      DOUT << " RESULT: ";
      interval.print(DOUT, tri_);

    } else {
      // Otherwise, this must be because of phi elimination.  If this is the
      // first redefinition of the vreg that we have seen, go back and change
      // the live range in the PHI block to be a different value number.
      if (interval.containsOneValue()) {
        assert(vi.Kills.size() == 1 &&
               "PHI elimination vreg should have one kill, the PHI itself!");

        // Remove the old range that we now know has an incorrect number.
        VNInfo *VNI = interval.getValNumInfo(0);
        MachineInstr *Killer = vi.Kills[0];
        unsigned Start = getMBBStartIdx(Killer->getParent());
        unsigned End = getUseIndex(getInstructionIndex(Killer))+1;
        DOUT << " Removing [" << Start << "," << End << "] from: ";
        interval.print(DOUT, tri_); DOUT << "\n";
        interval.removeRange(Start, End);
        VNI->hasPHIKill = true;
        DOUT << " RESULT: "; interval.print(DOUT, tri_);

        // Replace the interval with one of a NEW value number.  Note that this
        // value number isn't actually defined by an instruction, weird huh? :)
        LiveRange LR(Start, End, interval.getNextValue(~0, 0, VNInfoAllocator));
        DOUT << " replace range with " << LR;
        interval.addRange(LR);
        interval.addKill(LR.valno, End);
        DOUT << " RESULT: "; interval.print(DOUT, tri_);
      }

      // In the case of PHI elimination, each variable definition is only
      // live until the end of the block.  We've already taken care of the
      // rest of the live range.
      unsigned defIndex = getDefIndex(MIIdx);
      // Earlyclobbers move back one.
      if (MO.isEarlyClobber())
        defIndex = getUseIndex(MIIdx);

      VNInfo *ValNo;
      MachineInstr *CopyMI = NULL;
      unsigned SrcReg, DstReg;
      if (mi->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG ||
          mi->getOpcode() == TargetInstrInfo::INSERT_SUBREG ||
          tii_->isMoveInstr(*mi, SrcReg, DstReg))
        CopyMI = mi;
      ValNo = interval.getNextValue(defIndex, CopyMI, VNInfoAllocator);

      unsigned killIndex = getMBBEndIdx(mbb) + 1;
      LiveRange LR(defIndex, killIndex, ValNo);
      interval.addRange(LR);
      interval.addKill(ValNo, killIndex);
      ValNo->hasPHIKill = true;
      DOUT << " +" << LR;
    }
  }

  DOUT << '\n';
}

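/// handlePhysicalRegisterDef - Add the live range started by a physical
/// register def. Physical register ranges never extend past the defining
/// block: the range runs from the def slot to the first kill or
/// redefinition, or occupies a single slot if the def is dead.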
void LiveIntervals::handlePhysicalRegisterDef(MachineBasicBlock *MBB,
                                              MachineBasicBlock::iterator mi,
                                              unsigned MIIdx,
                                              MachineOperand& MO,
                                              LiveInterval &interval,
                                              MachineInstr *CopyMI) {
  // A physical register cannot be live across basic blocks, so its
  // lifetime must end somewhere in its defining basic block.
  DOUT << "\t\tregister: "; DEBUG(printRegName(interval.reg));

  unsigned baseIndex = MIIdx;
  unsigned start = getDefIndex(baseIndex);
  // Earlyclobbers move back one.
  if (MO.isEarlyClobber())
    start = getUseIndex(MIIdx);
  unsigned end = start;

  // If it is not used after definition, it is considered dead at
  // the instruction defining it. Hence its interval is:
  // [defSlot(def), defSlot(def)+1)
  if (MO.isDead()) {
    DOUT << " dead";
    end = start + 1;
    goto exit;
  }

  // If it is not dead on definition, it must be killed by a
  // subsequent instruction. Hence its interval is:
  // [defSlot(def), useSlot(kill)+1)
  baseIndex += InstrSlots::NUM;
  while (++mi != MBB->end()) {
    while (baseIndex / InstrSlots::NUM < i2miMap_.size() &&
           getInstructionFromIndex(baseIndex) == 0)
      baseIndex += InstrSlots::NUM;
    if (mi->killsRegister(interval.reg, tri_)) {
      DOUT << " killed";
      end = getUseIndex(baseIndex) + 1;
      goto exit;
    } else if (mi->modifiesRegister(interval.reg, tri_)) {
      // Another instruction redefines the register before it is ever read.
      // Then the register is essentially dead at the instruction that defines
      // it. Hence its interval is:
      // [defSlot(def), defSlot(def)+1)
      DOUT << " dead";
      end = start + 1;
      goto exit;
    }

    baseIndex += InstrSlots::NUM;
  }

  // The only case we should have a dead physreg here without a killing
  // instruction, or an instruction where we know it's dead, is if it is
  // live-in to the function and never used.
  assert(!CopyMI && "physreg was not killed in defining block!");
  end = start + 1;

exit:
  assert(start < end && "did not find end of interval?");

  // Already exists? Extend old live interval.
  LiveInterval::iterator OldLR = interval.FindLiveRangeContaining(start);
  VNInfo *ValNo = (OldLR != interval.end())
    ? OldLR->valno : interval.getNextValue(start, CopyMI, VNInfoAllocator);
  LiveRange LR(start, end, ValNo);
  interval.addRange(LR);
  interval.addKill(LR.valno, end);
  DOUT << " +" << LR << '\n';
}

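/// handleRegisterDef - Dispatch a register def to the virtual or physical
/// register handler. For an allocatable physical register, intervals are
/// also created for its sub-registers unless the instruction redefines them
/// explicitly.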
void LiveIntervals::handleRegisterDef(MachineBasicBlock *MBB,
                                      MachineBasicBlock::iterator MI,
                                      unsigned MIIdx,
                                      MachineOperand& MO,
                                      unsigned MOIdx) {
  if (TargetRegisterInfo::isVirtualRegister(MO.getReg()))
    handleVirtualRegisterDef(MBB, MI, MIIdx, MO, MOIdx,
                             getOrCreateInterval(MO.getReg()));
  else if (allocatableRegs_[MO.getReg()]) {
    MachineInstr *CopyMI = NULL;
    unsigned SrcReg, DstReg;
    if (MI->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG ||
        MI->getOpcode() == TargetInstrInfo::INSERT_SUBREG ||
        tii_->isMoveInstr(*MI, SrcReg, DstReg))
      CopyMI = MI;
    handlePhysicalRegisterDef(MBB, MI, MIIdx, MO,
                              getOrCreateInterval(MO.getReg()), CopyMI);
    // Def of a register also defines its sub-registers.
    for (const unsigned* AS = tri_->getSubRegisters(MO.getReg()); *AS; ++AS)
      // If MI also modifies the sub-register explicitly, avoid processing it
      // more than once. Do not pass in TRI here so it checks for exact match.
      if (!MI->modifiesRegister(*AS))
        handlePhysicalRegisterDef(MBB, MI, MIIdx, MO,
                                  getOrCreateInterval(*AS), 0);
  }
}

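/// handleLiveInRegister - Add a live range for a register that is live in to
/// MBB. The range extends from the start of the block to the first kill or
/// redefinition; if neither is found, the register is treated as live
/// through the block (or dead, when it is merely an alias of a live-in
/// register).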
void LiveIntervals::handleLiveInRegister(MachineBasicBlock *MBB,
                                         unsigned MIIdx,
                                         LiveInterval &interval, bool isAlias) {
  DOUT << "\t\tlivein register: "; DEBUG(printRegName(interval.reg));

  // Look for kills. If it reaches a def before it's killed, then it shouldn't
  // be considered a livein.
  MachineBasicBlock::iterator mi = MBB->begin();
  unsigned baseIndex = MIIdx;
  unsigned start = baseIndex;
  while (baseIndex / InstrSlots::NUM < i2miMap_.size() &&
         getInstructionFromIndex(baseIndex) == 0)
    baseIndex += InstrSlots::NUM;
  unsigned end = baseIndex;

  while (mi != MBB->end()) {
    if (mi->killsRegister(interval.reg, tri_)) {
      DOUT << " killed";
      end = getUseIndex(baseIndex) + 1;
      goto exit;
    } else if (mi->modifiesRegister(interval.reg, tri_)) {
      // Another instruction redefines the register before it is ever read.
      // Then the register is essentially dead at the instruction that defines
      // it. Hence its interval is:
      // [defSlot(def), defSlot(def)+1)
      DOUT << " dead";
      end = getDefIndex(start) + 1;
      goto exit;
    }

    baseIndex += InstrSlots::NUM;
    while (baseIndex / InstrSlots::NUM < i2miMap_.size() &&
           getInstructionFromIndex(baseIndex) == 0)
      baseIndex += InstrSlots::NUM;
    ++mi;
  }

exit:
  // Live-in register might not be used at all.
  if (end == MIIdx) {
    if (isAlias) {
      DOUT << " dead";
      end = getDefIndex(MIIdx) + 1;
    } else {
      DOUT << " live through";
      end = baseIndex;
    }
  }

  LiveRange LR(start, end, interval.getNextValue(~0U, 0, VNInfoAllocator));
  interval.addRange(LR);
  interval.addKill(LR.valno, end);
  DOUT << " +" << LR << '\n';
}

/// computeIntervals - Computes the live intervals for virtual
/// registers. For some ordering of the machine instructions [1,N], a
/// live interval is an interval [i, j) where 1 <= i <= j < N for
/// which a variable is live.
void LiveIntervals::computeIntervals() {

  DOUT << "********** COMPUTING LIVE INTERVALS **********\n"
       << "********** Function: "
       << ((Value*)mf_->getFunction())->getName() << '\n';

  for (MachineFunction::iterator MBBI = mf_->begin(), E = mf_->end();
       MBBI != E; ++MBBI) {
    MachineBasicBlock *MBB = MBBI;
    // Track the index of the current machine instr.
    unsigned MIIndex = getMBBStartIdx(MBB);
    DOUT << ((Value*)MBB->getBasicBlock())->getName() << ":\n";

    MachineBasicBlock::iterator MI = MBB->begin(), miEnd = MBB->end();

    // Create intervals for live-ins to this BB first.
    for (MachineBasicBlock::const_livein_iterator LI = MBB->livein_begin(),
           LE = MBB->livein_end(); LI != LE; ++LI) {
      handleLiveInRegister(MBB, MIIndex, getOrCreateInterval(*LI));
      // Multiple live-ins can alias the same register.
      for (const unsigned* AS = tri_->getSubRegisters(*LI); *AS; ++AS)
        if (!hasInterval(*AS))
          handleLiveInRegister(MBB, MIIndex, getOrCreateInterval(*AS),
                               true);
    }

    // Skip over empty initial indices.
    while (MIIndex / InstrSlots::NUM < i2miMap_.size() &&
           getInstructionFromIndex(MIIndex) == 0)
      MIIndex += InstrSlots::NUM;

    for (; MI != miEnd; ++MI) {
      DOUT << MIIndex << "\t" << *MI;

      // Handle defs.
      for (int i = MI->getNumOperands() - 1; i >= 0; --i) {
        MachineOperand &MO = MI->getOperand(i);
        // handle register defs - build intervals
        if (MO.isReg() && MO.getReg() && MO.isDef()) {
          handleRegisterDef(MBB, MI, MIIndex, MO, i);
        }
      }

      // Skip over the empty slots after each instruction.
      unsigned Slots = MI->getDesc().getNumDefs();
      if (Slots == 0)
        Slots = 1;
      MIIndex += InstrSlots::NUM * Slots;

      // Skip over empty indices.
      while (MIIndex / InstrSlots::NUM < i2miMap_.size() &&
             getInstructionFromIndex(MIIndex) == 0)
        MIIndex += InstrSlots::NUM;
    }
  }
}

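/// findLiveInMBBs - Collect into MBBs every basic block whose start index
/// lies within [Start, End]. Returns true if any block was found.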
bool LiveIntervals::findLiveInMBBs(unsigned Start, unsigned End,
                              SmallVectorImpl<MachineBasicBlock*> &MBBs) const {
  std::vector<IdxMBBPair>::const_iterator I =
    std::lower_bound(Idx2MBBMap.begin(), Idx2MBBMap.end(), Start);

  bool ResVal = false;
  while (I != Idx2MBBMap.end()) {
    if (I->first > End)
      break;
    MBBs.push_back(I->second);
    ResVal = true;
    ++I;
  }
  return ResVal;
}

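/// findReachableMBBs - Collect into MBBs the successors of every basic block
/// that lies entirely within [Start, End]. Returns true if any were found.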
bool LiveIntervals::findReachableMBBs(unsigned Start, unsigned End,
                              SmallVectorImpl<MachineBasicBlock*> &MBBs) const {
  std::vector<IdxMBBPair>::const_iterator I =
    std::lower_bound(Idx2MBBMap.begin(), Idx2MBBMap.end(), Start);

  bool ResVal = false;
  while (I != Idx2MBBMap.end()) {
    if (I->first > End)
      break;
    MachineBasicBlock *MBB = I->second;
    if (getMBBEndIdx(MBB) > End)
      break;
    for (MachineBasicBlock::succ_iterator SI = MBB->succ_begin(),
           SE = MBB->succ_end(); SI != SE; ++SI)
      MBBs.push_back(*SI);
    ResVal = true;
    ++I;
  }
  return ResVal;
}

LiveInterval* LiveIntervals::createInterval(unsigned reg) {
  float Weight = TargetRegisterInfo::isPhysicalRegister(reg) ?
                       HUGE_VALF : 0.0F;
  return new LiveInterval(reg, Weight);
}

/// getVNInfoSourceReg - Helper function that parses the specified VNInfo
/// copy field and returns the source register that defines it.
unsigned LiveIntervals::getVNInfoSourceReg(const VNInfo *VNI) const {
  if (!VNI->copy)
    return 0;

  if (VNI->copy->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG)
    return VNI->copy->getOperand(1).getReg();
  if (VNI->copy->getOpcode() == TargetInstrInfo::INSERT_SUBREG)
    return VNI->copy->getOperand(2).getReg();
  unsigned SrcReg, DstReg;
  if (tii_->isMoveInstr(*VNI->copy, SrcReg, DstReg))
    return SrcReg;
  assert(0 && "Unrecognized copy instruction!");
  return 0;
}

//===----------------------------------------------------------------------===//
// Register allocator hooks.
//

/// getReMatImplicitUse - If the remat definition MI has one (for now, we only
/// allow one) virtual register operand, then its uses are implicitly using
/// the register. Returns the virtual register.
unsigned LiveIntervals::getReMatImplicitUse(const LiveInterval &li,
                                            MachineInstr *MI) const {
  unsigned RegOp = 0;
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isUse())
      continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0 || Reg == li.reg)
      continue;
    // FIXME: For now, only remat MI with at most one register operand.
    assert(!RegOp &&
           "Can't rematerialize instruction with multiple register operand!");
    RegOp = MO.getReg();
#ifndef NDEBUG
    break;
#endif
  }
  return RegOp;
}

/// isValNoAvailableAt - Return true if the val# of the specified interval
/// which reaches the given instruction also reaches the specified use index.
bool LiveIntervals::isValNoAvailableAt(const LiveInterval &li, MachineInstr *MI,
                                       unsigned UseIdx) const {
  unsigned Index = getInstructionIndex(MI);
  VNInfo *ValNo = li.FindLiveRangeContaining(Index)->valno;
  LiveInterval::const_iterator UI = li.FindLiveRangeContaining(UseIdx);
  return UI != li.end() && UI->valno == ValNo;
}

/// isReMaterializable - Returns true if the definition MI of the specified
/// val# of the specified interval is re-materializable.
bool LiveIntervals::isReMaterializable(const LiveInterval &li,
                                       const VNInfo *ValNo, MachineInstr *MI,
                                       SmallVectorImpl<LiveInterval*> &SpillIs,
                                       bool &isLoad) {
  if (DisableReMat)
    return false;

  if (MI->getOpcode() == TargetInstrInfo::IMPLICIT_DEF)
    return true;

  int FrameIdx = 0;
  if (tii_->isLoadFromStackSlot(MI, FrameIdx) &&
      mf_->getFrameInfo()->isImmutableObjectIndex(FrameIdx))
    // FIXME: Let target specific isReallyTriviallyReMaterializable determine
    // this but remember this is not safe to fold into a two-address
    // instruction.
    // This is a load from fixed stack slot. It can be rematerialized.
    return true;

  // If the target-specific rules don't identify an instruction as
  // being trivially rematerializable, use some target-independent
  // rules.
  if (!MI->getDesc().isRematerializable() ||
      !tii_->isTriviallyReMaterializable(MI)) {
    if (!EnableAggressiveRemat)
      return false;

    // If the instruction accesses memory but the memoperands have been lost,
    // we can't analyze it.
    const TargetInstrDesc &TID = MI->getDesc();
    if ((TID.mayLoad() || TID.mayStore()) && MI->memoperands_empty())
      return false;

    // Avoid instructions obviously unsafe for remat.
    if (TID.hasUnmodeledSideEffects() || TID.isNotDuplicable())
      return false;

    // If the instruction accesses memory and the memory could be non-constant,
    // assume the instruction is not rematerializable.
    for (std::list<MachineMemOperand>::const_iterator
           I = MI->memoperands_begin(), E = MI->memoperands_end(); I != E; ++I) {
      const MachineMemOperand &MMO = *I;
      if (MMO.isVolatile() || MMO.isStore())
        return false;
      const Value *V = MMO.getValue();
      if (!V)
        return false;
      if (const PseudoSourceValue *PSV = dyn_cast<PseudoSourceValue>(V)) {
        if (!PSV->isConstant(mf_->getFrameInfo()))
          return false;
      } else if (!aa_->pointsToConstantMemory(V))
        return false;
    }

    // If any of the registers accessed are non-constant, conservatively assume
    // the instruction is not rematerializable.
    unsigned ImpUse = 0;
    for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
      const MachineOperand &MO = MI->getOperand(i);
      if (MO.isReg()) {
        unsigned Reg = MO.getReg();
        if (Reg == 0)
          continue;
        if (TargetRegisterInfo::isPhysicalRegister(Reg))
          return false;

        // Only allow one def, and that in the first operand.
        if (MO.isDef() != (i == 0))
          return false;

        // Only allow constant-valued registers.
        bool IsLiveIn = mri_->isLiveIn(Reg);
        MachineRegisterInfo::def_iterator I = mri_->def_begin(Reg),
                                          E = mri_->def_end();

        // For the def, it should be the only def.
        if (MO.isDef() && (next(I) != E || IsLiveIn))
          return false;

        if (MO.isUse()) {
          // Only allow one other register use, as that's all the
          // remat mechanisms support currently.
          if (Reg != li.reg) {
            if (ImpUse == 0)
              ImpUse = Reg;
            else if (Reg != ImpUse)
              return false;
          }
          // For uses, there should be only one associated def.
          if (I != E && (next(I) != E || IsLiveIn))
            return false;
        }
      }
    }
  }

  unsigned ImpUse = getReMatImplicitUse(li, MI);
  if (ImpUse) {
    const LiveInterval &ImpLi = getInterval(ImpUse);
    for (MachineRegisterInfo::use_iterator ri = mri_->use_begin(li.reg),
           re = mri_->use_end(); ri != re; ++ri) {
      MachineInstr *UseMI = &*ri;
      unsigned UseIdx = getInstructionIndex(UseMI);
      if (li.FindLiveRangeContaining(UseIdx)->valno != ValNo)
        continue;
      if (!isValNoAvailableAt(ImpLi, MI, UseIdx))
        return false;
    }

    // If a register operand of the re-materialized instruction is going to
    // be spilled next, then it's not legal to re-materialize this instruction.
    for (unsigned i = 0, e = SpillIs.size(); i != e; ++i)
      if (ImpUse == SpillIs[i]->reg)
        return false;
  }
  return true;
}

/// isReMaterializable - Returns true if the definition MI of the specified
/// val# of the specified interval is re-materializable.
bool LiveIntervals::isReMaterializable(const LiveInterval &li,
                                       const VNInfo *ValNo, MachineInstr *MI) {
  SmallVector<LiveInterval*, 4> Dummy1;
  bool Dummy2;
  return isReMaterializable(li, ValNo, MI, Dummy1, Dummy2);
}

/// isReMaterializable - Returns true if every definition of MI of every
/// val# of the specified interval is re-materializable.
bool LiveIntervals::isReMaterializable(const LiveInterval &li,
                                       SmallVectorImpl<LiveInterval*> &SpillIs,
                                       bool &isLoad) {
  isLoad = false;
  for (LiveInterval::const_vni_iterator i = li.vni_begin(), e = li.vni_end();
       i != e; ++i) {
    const VNInfo *VNI = *i;
    unsigned DefIdx = VNI->def;
    if (DefIdx == ~1U)
      continue; // Dead val#.
    // Is the def for the val# rematerializable?
    if (DefIdx == ~0u)
      return false;
    MachineInstr *ReMatDefMI = getInstructionFromIndex(DefIdx);
    bool DefIsLoad = false;
    if (!ReMatDefMI ||
        !isReMaterializable(li, VNI, ReMatDefMI, SpillIs, DefIsLoad))
      return false;
    isLoad |= DefIsLoad;
  }
  return true;
}

Evan Cheng79a0c1e2008-02-25 08:50:41 +0000997/// FilterFoldedOps - Filter out two-address use operands. Return
998/// true if it finds any issue with the operands that ought to prevent
999/// folding.
1000static bool FilterFoldedOps(MachineInstr *MI,
1001 SmallVector<unsigned, 2> &Ops,
1002 unsigned &MRInfo,
1003 SmallVector<unsigned, 2> &FoldOps) {
Chris Lattner749c6f62008-01-07 07:27:27 +00001004 const TargetInstrDesc &TID = MI->getDesc();
Evan Cheng6e141fd2007-12-12 23:12:09 +00001005
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001006 MRInfo = 0;
Evan Chengaee4af62007-12-02 08:30:39 +00001007 for (unsigned i = 0, e = Ops.size(); i != e; ++i) {
1008 unsigned OpIdx = Ops[i];
Evan Chengd70dbb52008-02-22 09:24:50 +00001009 MachineOperand &MO = MI->getOperand(OpIdx);
Evan Chengaee4af62007-12-02 08:30:39 +00001010 // FIXME: fold subreg use.
Evan Chengd70dbb52008-02-22 09:24:50 +00001011 if (MO.getSubReg())
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001012 return true;
Evan Chengd70dbb52008-02-22 09:24:50 +00001013 if (MO.isDef())
Evan Chengaee4af62007-12-02 08:30:39 +00001014 MRInfo |= (unsigned)VirtRegMap::isMod;
1015 else {
1016 // Filter out two-address use operand(s).
Evan Chengd70dbb52008-02-22 09:24:50 +00001017 if (!MO.isImplicit() &&
1018 TID.getOperandConstraint(OpIdx, TOI::TIED_TO) != -1) {
Evan Chengaee4af62007-12-02 08:30:39 +00001019 MRInfo = VirtRegMap::isModRef;
1020 continue;
1021 }
1022 MRInfo |= (unsigned)VirtRegMap::isRef;
1023 }
1024 FoldOps.push_back(OpIdx);
Evan Chenge62f97c2007-12-01 02:07:52 +00001025 }
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001026 return false;
1027}
1028
1029
1030/// tryFoldMemoryOperand - Attempts to fold either a spill / restore from
1031/// slot / to reg or any rematerialized load into the ith operand of the
1032/// specified MI. If it is successful, MI is updated with the newly created
1033/// MI and true is returned.
1034bool LiveIntervals::tryFoldMemoryOperand(MachineInstr* &MI,
1035 VirtRegMap &vrm, MachineInstr *DefMI,
1036 unsigned InstrIdx,
1037 SmallVector<unsigned, 2> &Ops,
1038 bool isSS, int Slot, unsigned Reg) {
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001039 // If it is an implicit def instruction, just delete it.
Evan Cheng20ccded2008-03-15 00:19:36 +00001040 if (MI->getOpcode() == TargetInstrInfo::IMPLICIT_DEF) {
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001041 RemoveMachineInstrFromMaps(MI);
1042 vrm.RemoveMachineInstrFromMaps(MI);
1043 MI->eraseFromParent();
1044 ++numFolds;
1045 return true;
1046 }
1047
1048 // Filter the list of operand indexes that are to be folded. Abort if
1049 // any operand will prevent folding.
1050 unsigned MRInfo = 0;
1051 SmallVector<unsigned, 2> FoldOps;
1052 if (FilterFoldedOps(MI, Ops, MRInfo, FoldOps))
1053 return false;
Evan Chenge62f97c2007-12-01 02:07:52 +00001054
Evan Cheng427f4c12008-03-31 23:19:51 +00001055 // The only time it's safe to fold into a two-address instruction is when
1056 // it's folding a reload and spill from / into a spill stack slot.
1057 if (DefMI && (MRInfo & VirtRegMap::isMod))
Evan Cheng249ded32008-02-23 03:38:34 +00001058 return false;
1059
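// Try the fold itself: either fold the stack slot access (isSS) or fold the
// rematerialized def instruction DefMI directly into MI.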
Evan Chengf2f8c2a2008-02-08 22:05:27 +00001060 MachineInstr *fmi = isSS ? tii_->foldMemoryOperand(*mf_, MI, FoldOps, Slot)
1061 : tii_->foldMemoryOperand(*mf_, MI, FoldOps, DefMI);
Evan Chengf2fbca62007-11-12 06:35:08 +00001062 if (fmi) {
Evan Chengd3653122008-02-27 03:04:06 +00001063 // Remember this instruction uses the spill slot.
1064 if (isSS) vrm.addSpillSlotUse(Slot, fmi);
1065
Evan Chengf2fbca62007-11-12 06:35:08 +00001066 // Attempt to fold the memory reference into the instruction. If
1067 // we can do this, we don't need to insert spill code.
Evan Chengf2fbca62007-11-12 06:35:08 +00001068 MachineBasicBlock &MBB = *MI->getParent();
Evan Cheng84802932008-01-10 08:24:38 +00001069 if (isSS && !mf_->getFrameInfo()->isImmutableObjectIndex(Slot))
Evan Chengaee4af62007-12-02 08:30:39 +00001070 vrm.virtFolded(Reg, MI, fmi, (VirtRegMap::ModRef)MRInfo);
Evan Cheng81a03822007-11-17 00:40:40 +00001071 vrm.transferSpillPts(MI, fmi);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001072 vrm.transferRestorePts(MI, fmi);
Evan Chengc1f53c72008-03-11 21:34:46 +00001073 vrm.transferEmergencySpills(MI, fmi);
Evan Chengf2fbca62007-11-12 06:35:08 +00001074 mi2iMap_.erase(MI);
Evan Chengcddbb832007-11-30 21:23:43 +00001075 i2miMap_[InstrIdx /InstrSlots::NUM] = fmi;
1076 mi2iMap_[fmi] = InstrIdx;
Evan Chengf2fbca62007-11-12 06:35:08 +00001077 MI = MBB.insert(MBB.erase(MI), fmi);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001078 ++numFolds;
Evan Chengf2fbca62007-11-12 06:35:08 +00001079 return true;
1080 }
1081 return false;
1082}
1083
Evan Cheng018f9b02007-12-05 03:22:34 +00001084/// canFoldMemoryOperand - Returns true if the specified load / store
1085/// folding is possible.
1086bool LiveIntervals::canFoldMemoryOperand(MachineInstr *MI,
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001087 SmallVector<unsigned, 2> &Ops,
Evan Cheng3c75ba82008-04-01 21:37:32 +00001088 bool ReMat) const {
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001089 // Filter the list of operand indexes that are to be folded. Abort if
1090 // any operand will prevent folding.
1091 unsigned MRInfo = 0;
Evan Cheng018f9b02007-12-05 03:22:34 +00001092 SmallVector<unsigned, 2> FoldOps;
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001093 if (FilterFoldedOps(MI, Ops, MRInfo, FoldOps))
1094 return false;
Evan Cheng018f9b02007-12-05 03:22:34 +00001095
Evan Cheng3c75ba82008-04-01 21:37:32 +00001096 // It's only legal to remat for a use, not a def.
1097 if (ReMat && (MRInfo & VirtRegMap::isMod))
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001098 return false;
Evan Cheng018f9b02007-12-05 03:22:34 +00001099
Evan Chengd70dbb52008-02-22 09:24:50 +00001100 return tii_->canFoldMemoryOperand(MI, FoldOps);
1101}
1102
Evan Cheng81a03822007-11-17 00:40:40 +00001103bool LiveIntervals::intervalIsInOneMBB(const LiveInterval &li) const {
1104 SmallPtrSet<MachineBasicBlock*, 4> MBBs;
1105 for (LiveInterval::Ranges::const_iterator
1106 I = li.ranges.begin(), E = li.ranges.end(); I != E; ++I) {
1107 std::vector<IdxMBBPair>::const_iterator II =
1108 std::lower_bound(Idx2MBBMap.begin(), Idx2MBBMap.end(), I->start);
1109 if (II == Idx2MBBMap.end())
1110 continue;
1111 if (I->end > II->first) // crossing a MBB.
1112 return false;
1113 MBBs.insert(II->second);
1114 if (MBBs.size() > 1)
1115 return false;
1116 }
1117 return true;
1118}
1119
Evan Chengd70dbb52008-02-22 09:24:50 +00001120/// rewriteImplicitOps - Rewrite implicit use operands of MI (i.e. uses of
1121/// interval on to-be re-materialized operands of MI) with new register.
1122void LiveIntervals::rewriteImplicitOps(const LiveInterval &li,
1123 MachineInstr *MI, unsigned NewVReg,
1124 VirtRegMap &vrm) {
1125 // There is an implicit use. That means one of the other operands is
1126 // being remat'ed and the remat'ed instruction has li.reg as a
1127 // use operand. Make sure we rewrite that as well.
1128 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
1129 MachineOperand &MO = MI->getOperand(i);
Dan Gohmand735b802008-10-03 15:45:36 +00001130 if (!MO.isReg())
Evan Chengd70dbb52008-02-22 09:24:50 +00001131 continue;
1132 unsigned Reg = MO.getReg();
1133 if (Reg == 0 || TargetRegisterInfo::isPhysicalRegister(Reg))
1134 continue;
1135 if (!vrm.isReMaterialized(Reg))
1136 continue;
1137 MachineInstr *ReMatMI = vrm.getReMaterializedMI(Reg);
Evan Cheng6130f662008-03-05 00:59:57 +00001138 MachineOperand *UseMO = ReMatMI->findRegisterUseOperand(li.reg);
1139 if (UseMO)
1140 UseMO->setReg(NewVReg);
Evan Chengd70dbb52008-02-22 09:24:50 +00001141 }
1142}
1143
Evan Chengf2fbca62007-11-12 06:35:08 +00001144/// rewriteInstructionForSpills, rewriteInstructionsForSpills - Helper functions
1145/// for addIntervalsForSpills to rewrite uses / defs for the given live range.
Evan Cheng018f9b02007-12-05 03:22:34 +00001146bool LiveIntervals::
Evan Chengd70dbb52008-02-22 09:24:50 +00001147rewriteInstructionForSpills(const LiveInterval &li, const VNInfo *VNI,
1148 bool TrySplit, unsigned index, unsigned end, MachineInstr *MI,
Evan Cheng81a03822007-11-17 00:40:40 +00001149 MachineInstr *ReMatOrigDefMI, MachineInstr *ReMatDefMI,
Evan Chengf2fbca62007-11-12 06:35:08 +00001150 unsigned Slot, int LdSlot,
1151 bool isLoad, bool isLoadSS, bool DefIsReMat, bool CanDelete,
Evan Chengd70dbb52008-02-22 09:24:50 +00001152 VirtRegMap &vrm,
Evan Chengf2fbca62007-11-12 06:35:08 +00001153 const TargetRegisterClass* rc,
1154 SmallVector<int, 4> &ReMatIds,
Evan Cheng22f07ff2007-12-11 02:09:15 +00001155 const MachineLoopInfo *loopInfo,
Evan Cheng313d4b82008-02-23 00:33:04 +00001156 unsigned &NewVReg, unsigned ImpUse, bool &HasDef, bool &HasUse,
Owen Anderson28998312008-08-13 22:28:50 +00001157 DenseMap<unsigned,unsigned> &MBBVRegsMap,
Evan Cheng9c3c2212008-06-06 07:54:39 +00001158 std::vector<LiveInterval*> &NewLIs, float &SSWeight) {
1159 MachineBasicBlock *MBB = MI->getParent();
1160 unsigned loopDepth = loopInfo->getLoopDepth(MBB);
Evan Cheng018f9b02007-12-05 03:22:34 +00001161 bool CanFold = false;
Evan Chengf2fbca62007-11-12 06:35:08 +00001162 RestartInstruction:
1163 for (unsigned i = 0; i != MI->getNumOperands(); ++i) {
1164 MachineOperand& mop = MI->getOperand(i);
Dan Gohmand735b802008-10-03 15:45:36 +00001165 if (!mop.isReg())
Evan Chengf2fbca62007-11-12 06:35:08 +00001166 continue;
1167 unsigned Reg = mop.getReg();
1168 unsigned RegI = Reg;
Dan Gohman6f0d0242008-02-10 18:45:23 +00001169 if (Reg == 0 || TargetRegisterInfo::isPhysicalRegister(Reg))
Evan Chengf2fbca62007-11-12 06:35:08 +00001170 continue;
Evan Chengf2fbca62007-11-12 06:35:08 +00001171 if (Reg != li.reg)
1172 continue;
1173
1174 bool TryFold = !DefIsReMat;
Evan Chengcb3c3302007-11-29 23:02:50 +00001175 bool FoldSS = true; // Default behavior unless it's a remat.
Evan Chengf2fbca62007-11-12 06:35:08 +00001176 int FoldSlot = Slot;
1177 if (DefIsReMat) {
1178 // If this is the rematerializable definition MI itself and
1179 // all of its uses are rematerialized, simply delete it.
Evan Cheng81a03822007-11-17 00:40:40 +00001180 if (MI == ReMatOrigDefMI && CanDelete) {
Evan Chengcddbb832007-11-30 21:23:43 +00001181 DOUT << "\t\t\t\tErasing re-materializable def: ";
1182 DOUT << MI << '\n';
Evan Chengf2fbca62007-11-12 06:35:08 +00001183 RemoveMachineInstrFromMaps(MI);
Evan Chengcada2452007-11-28 01:28:46 +00001184 vrm.RemoveMachineInstrFromMaps(MI);
Evan Chengf2fbca62007-11-12 06:35:08 +00001185 MI->eraseFromParent();
1186 break;
1187 }
1188
1189 // If def for this use can't be rematerialized, then try folding.
Evan Cheng0cbb1162007-11-29 01:06:25 +00001190 // If def is rematerializable and it's a load, also try folding.
Evan Chengcb3c3302007-11-29 23:02:50 +00001191 TryFold = !ReMatDefMI || (ReMatDefMI && (MI == ReMatOrigDefMI || isLoad));
Evan Chengf2fbca62007-11-12 06:35:08 +00001192 if (isLoad) {
1193 // Try fold loads (from stack slot, constant pool, etc.) into uses.
1194 FoldSS = isLoadSS;
1195 FoldSlot = LdSlot;
1196 }
1197 }
1198
Evan Chengf2fbca62007-11-12 06:35:08 +00001199 // Scan all of the operands of this instruction rewriting operands
1200 // to use NewVReg instead of li.reg as appropriate. We do this for
1201 // two reasons:
1202 //
1203 // 1. If the instr reads the same spilled vreg multiple times, we
1204 // want to reuse the NewVReg.
1205 // 2. If the instr is a two-addr instruction, we are required to
1206 // keep the src/dst regs pinned.
1207 //
1208 // Keep track of whether we replace a use and/or def so that we can
1209 // create the spill interval with the appropriate range.
Evan Chengcddbb832007-11-30 21:23:43 +00001210
Evan Cheng81a03822007-11-17 00:40:40 +00001211 HasUse = mop.isUse();
1212 HasDef = mop.isDef();
Evan Chengaee4af62007-12-02 08:30:39 +00001213 SmallVector<unsigned, 2> Ops;
1214 Ops.push_back(i);
Evan Chengf2fbca62007-11-12 06:35:08 +00001215 for (unsigned j = i+1, e = MI->getNumOperands(); j != e; ++j) {
Evan Chengaee4af62007-12-02 08:30:39 +00001216 const MachineOperand &MOj = MI->getOperand(j);
Dan Gohmand735b802008-10-03 15:45:36 +00001217 if (!MOj.isReg())
Evan Chengf2fbca62007-11-12 06:35:08 +00001218 continue;
Evan Chengaee4af62007-12-02 08:30:39 +00001219 unsigned RegJ = MOj.getReg();
Dan Gohman6f0d0242008-02-10 18:45:23 +00001220 if (RegJ == 0 || TargetRegisterInfo::isPhysicalRegister(RegJ))
Evan Chengf2fbca62007-11-12 06:35:08 +00001221 continue;
1222 if (RegJ == RegI) {
Evan Chengaee4af62007-12-02 08:30:39 +00001223 Ops.push_back(j);
1224 HasUse |= MOj.isUse();
1225 HasDef |= MOj.isDef();
Evan Chengf2fbca62007-11-12 06:35:08 +00001226 }
1227 }
1228
Evan Cheng79a796c2008-07-12 01:56:02 +00001229 if (HasUse && !li.liveAt(getUseIndex(index)))
1230 // Must be defined by an implicit def. It should not be spilled. Note,
1231 // this is for correctness reasons, e.g.
1232 // 8 %reg1024<def> = IMPLICIT_DEF
1233 // 12 %reg1024<def> = INSERT_SUBREG %reg1024<kill>, %reg1025, 2
1234 // The live range [12, 14) is not part of the r1024 live interval since
1235 // it's defined by an implicit def. It will not conflict with the live
1236 // interval of r1025. Now suppose both registers are spilled; you can
Evan Chengb9890ae2008-07-12 02:22:07 +00001237 // easily see a situation where both registers are reloaded before
Evan Cheng79a796c2008-07-12 01:56:02 +00001238 // the INSERT_SUBREG and their target registers would overlap.
1239 HasUse = false;
1240
Evan Cheng9c3c2212008-06-06 07:54:39 +00001241 // Update stack slot spill weight if we are splitting.
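// getSpillWeight weighs an access more heavily the deeper its enclosing
// loop, so spills and reloads inside hot loops cost correspondingly more.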
Evan Chengc3417602008-06-21 06:45:54 +00001242 float Weight = getSpillWeight(HasDef, HasUse, loopDepth);
Evan Cheng9c3c2212008-06-06 07:54:39 +00001243 if (!TrySplit)
1244 SSWeight += Weight;
1245
David Greene26b86a02008-10-27 17:38:59 +00001246 // Create a new virtual register for the spill interval.
1247 // Create the new register now so we can map the fold instruction
1248 // to the new register so when it is unfolded we get the correct
1249 // answer.
1250 bool CreatedNewVReg = false;
1251 if (NewVReg == 0) {
1252 NewVReg = mri_->createVirtualRegister(rc);
1253 vrm.grow();
1254 CreatedNewVReg = true;
1255 }
1256
Evan Cheng9c3c2212008-06-06 07:54:39 +00001257 if (!TryFold)
1258 CanFold = false;
1259 else {
Evan Cheng018f9b02007-12-05 03:22:34 +00001260 // Do not fold load / store here if we are splitting. We'll find an
1261 // optimal point to insert a load / store later.
1262 if (!TrySplit) {
1263 if (tryFoldMemoryOperand(MI, vrm, ReMatDefMI, index,
David Greene26b86a02008-10-27 17:38:59 +00001264 Ops, FoldSS, FoldSlot, NewVReg)) {
Evan Cheng018f9b02007-12-05 03:22:34 +00001265 // Folding the load/store can completely change the instruction in
1266 // unpredictable ways, rescan it from the beginning.
David Greene26b86a02008-10-27 17:38:59 +00001267
1268 if (FoldSS) {
1269 // We need to give the new vreg the same stack slot as the
1270 // spilled interval.
1271 vrm.assignVirt2StackSlot(NewVReg, FoldSlot);
1272 }
1273
Evan Cheng018f9b02007-12-05 03:22:34 +00001274 HasUse = false;
1275 HasDef = false;
1276 CanFold = false;
Evan Cheng9c3c2212008-06-06 07:54:39 +00001277 if (isRemoved(MI)) {
1278 SSWeight -= Weight;
Evan Cheng7e073ba2008-04-09 20:57:25 +00001279 break;
Evan Cheng9c3c2212008-06-06 07:54:39 +00001280 }
Evan Cheng018f9b02007-12-05 03:22:34 +00001281 goto RestartInstruction;
1282 }
1283 } else {
Evan Cheng9c3c2212008-06-06 07:54:39 +00001284 // We'll try to fold it later if it's profitable.
Evan Cheng3c75ba82008-04-01 21:37:32 +00001285 CanFold = canFoldMemoryOperand(MI, Ops, DefIsReMat);
Evan Cheng018f9b02007-12-05 03:22:34 +00001286 }
Evan Cheng9c3c2212008-06-06 07:54:39 +00001287 }
Evan Chengcddbb832007-11-30 21:23:43 +00001288
Evan Chengcddbb832007-11-30 21:23:43 +00001289 mop.setReg(NewVReg);
Evan Chengd70dbb52008-02-22 09:24:50 +00001290 if (mop.isImplicit())
1291 rewriteImplicitOps(li, MI, NewVReg, vrm);
Evan Chengcddbb832007-11-30 21:23:43 +00001292
1293 // Reuse NewVReg for other reads.
Evan Chengd70dbb52008-02-22 09:24:50 +00001294 for (unsigned j = 0, e = Ops.size(); j != e; ++j) {
1295 MachineOperand &mopj = MI->getOperand(Ops[j]);
1296 mopj.setReg(NewVReg);
1297 if (mopj.isImplicit())
1298 rewriteImplicitOps(li, MI, NewVReg, vrm);
1299 }
Evan Chengcddbb832007-11-30 21:23:43 +00001300
Evan Cheng81a03822007-11-17 00:40:40 +00001301 if (CreatedNewVReg) {
1302 if (DefIsReMat) {
1303 vrm.setVirtIsReMaterialized(NewVReg, ReMatDefMI/*, CanDelete*/);
Evan Chengd70dbb52008-02-22 09:24:50 +00001304 if (ReMatIds[VNI->id] == VirtRegMap::MAX_STACK_SLOT) {
Evan Cheng81a03822007-11-17 00:40:40 +00001305 // Each valnum may have its own remat id.
Evan Chengd70dbb52008-02-22 09:24:50 +00001306 ReMatIds[VNI->id] = vrm.assignVirtReMatId(NewVReg);
Evan Cheng81a03822007-11-17 00:40:40 +00001307 } else {
Evan Chengd70dbb52008-02-22 09:24:50 +00001308 vrm.assignVirtReMatId(NewVReg, ReMatIds[VNI->id]);
Evan Cheng81a03822007-11-17 00:40:40 +00001309 }
1310 if (!CanDelete || (HasUse && HasDef)) {
1311 // If this is a two-addr instruction then its use operands are
1312 // rematerializable but its def is not. It should be assigned a
1313 // stack slot.
1314 vrm.assignVirt2StackSlot(NewVReg, Slot);
1315 }
Evan Chengf2fbca62007-11-12 06:35:08 +00001316 } else {
Evan Chengf2fbca62007-11-12 06:35:08 +00001317 vrm.assignVirt2StackSlot(NewVReg, Slot);
1318 }
Evan Chengcb3c3302007-11-29 23:02:50 +00001319 } else if (HasUse && HasDef &&
1320 vrm.getStackSlot(NewVReg) == VirtRegMap::NO_STACK_SLOT) {
1321 // If this interval hasn't been assigned a stack slot (because earlier
1322 // def is a deleted remat def), do it now.
1323 assert(Slot != VirtRegMap::NO_STACK_SLOT);
1324 vrm.assignVirt2StackSlot(NewVReg, Slot);
Evan Chengf2fbca62007-11-12 06:35:08 +00001325 }
1326
Evan Cheng313d4b82008-02-23 00:33:04 +00001327 // Re-matting an instruction with virtual register use. Add the
1328 // register as an implicit use on the use MI.
1329 if (DefIsReMat && ImpUse)
1330 MI->addOperand(MachineOperand::CreateReg(ImpUse, false, true));
1331
Evan Chengf2fbca62007-11-12 06:35:08 +00001332 // create a new register interval for this spill / remat.
1333 LiveInterval &nI = getOrCreateInterval(NewVReg);
Evan Cheng81a03822007-11-17 00:40:40 +00001334 if (CreatedNewVReg) {
1335 NewLIs.push_back(&nI);
Evan Cheng1953d0c2007-11-29 10:12:14 +00001336 MBBVRegsMap.insert(std::make_pair(MI->getParent()->getNumber(), NewVReg));
Evan Cheng81a03822007-11-17 00:40:40 +00001337 if (TrySplit)
1338 vrm.setIsSplitFromReg(NewVReg, li.reg);
1339 }
Evan Chengf2fbca62007-11-12 06:35:08 +00001340
1341 if (HasUse) {
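// A reload is modeled as defining the value at this instruction's load slot
// and living through its use slot (hence getUseIndex(index)+1).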
Evan Cheng81a03822007-11-17 00:40:40 +00001342 if (CreatedNewVReg) {
1343 LiveRange LR(getLoadIndex(index), getUseIndex(index)+1,
1344 nI.getNextValue(~0U, 0, VNInfoAllocator));
1345 DOUT << " +" << LR;
1346 nI.addRange(LR);
1347 } else {
1348 // Extend the split live interval to this def / use.
1349 unsigned End = getUseIndex(index)+1;
1350 LiveRange LR(nI.ranges[nI.ranges.size()-1].end, End,
1351 nI.getValNumInfo(nI.getNumValNums()-1));
1352 DOUT << " +" << LR;
1353 nI.addRange(LR);
1354 }
Evan Chengf2fbca62007-11-12 06:35:08 +00001355 }
1356 if (HasDef) {
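// A def of the new vreg lives from this instruction's def slot to its store
// slot, where the spill store (if one is needed) will go.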
1357 LiveRange LR(getDefIndex(index), getStoreIndex(index),
1358 nI.getNextValue(~0U, 0, VNInfoAllocator));
1359 DOUT << " +" << LR;
1360 nI.addRange(LR);
1361 }
Evan Cheng81a03822007-11-17 00:40:40 +00001362
Evan Chengf2fbca62007-11-12 06:35:08 +00001363 DOUT << "\t\t\t\tAdded new interval: ";
Dan Gohman6f0d0242008-02-10 18:45:23 +00001364 nI.print(DOUT, tri_);
Evan Chengf2fbca62007-11-12 06:35:08 +00001365 DOUT << '\n';
1366 }
Evan Cheng018f9b02007-12-05 03:22:34 +00001367 return CanFold;
Evan Chengf2fbca62007-11-12 06:35:08 +00001368}
Evan Cheng81a03822007-11-17 00:40:40 +00001369bool LiveIntervals::anyKillInMBBAfterIdx(const LiveInterval &li,
Evan Cheng0cbb1162007-11-29 01:06:25 +00001370 const VNInfo *VNI,
1371 MachineBasicBlock *MBB, unsigned Idx) const {
Evan Cheng81a03822007-11-17 00:40:40 +00001372 unsigned End = getMBBEndIdx(MBB);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001373 for (unsigned j = 0, ee = VNI->kills.size(); j != ee; ++j) {
1374 unsigned KillIdx = VNI->kills[j];
1375 if (KillIdx > Idx && KillIdx < End)
1376 return true;
Evan Cheng81a03822007-11-17 00:40:40 +00001377 }
1378 return false;
1379}
1380
Evan Cheng063284c2008-02-21 00:34:19 +00001381/// RewriteInfo - Keep track of machine instrs that will be rewritten
1382/// during spilling.
Dan Gohman844731a2008-05-13 00:00:25 +00001383namespace {
1384 struct RewriteInfo {
1385 unsigned Index;
1386 MachineInstr *MI;
1387 bool HasUse;
1388 bool HasDef;
1389 RewriteInfo(unsigned i, MachineInstr *mi, bool u, bool d)
1390 : Index(i), MI(mi), HasUse(u), HasDef(d) {}
1391 };
Evan Cheng063284c2008-02-21 00:34:19 +00001392
Dan Gohman844731a2008-05-13 00:00:25 +00001393 struct RewriteInfoCompare {
1394 bool operator()(const RewriteInfo &LHS, const RewriteInfo &RHS) const {
1395 return LHS.Index < RHS.Index;
1396 }
1397 };
1398}
Evan Cheng063284c2008-02-21 00:34:19 +00001399
Evan Chengf2fbca62007-11-12 06:35:08 +00001400void LiveIntervals::
Evan Cheng81a03822007-11-17 00:40:40 +00001401rewriteInstructionsForSpills(const LiveInterval &li, bool TrySplit,
Evan Chengf2fbca62007-11-12 06:35:08 +00001402 LiveInterval::Ranges::const_iterator &I,
Evan Cheng81a03822007-11-17 00:40:40 +00001403 MachineInstr *ReMatOrigDefMI, MachineInstr *ReMatDefMI,
Evan Chengf2fbca62007-11-12 06:35:08 +00001404 unsigned Slot, int LdSlot,
1405 bool isLoad, bool isLoadSS, bool DefIsReMat, bool CanDelete,
Evan Chengd70dbb52008-02-22 09:24:50 +00001406 VirtRegMap &vrm,
Evan Chengf2fbca62007-11-12 06:35:08 +00001407 const TargetRegisterClass* rc,
1408 SmallVector<int, 4> &ReMatIds,
Evan Cheng22f07ff2007-12-11 02:09:15 +00001409 const MachineLoopInfo *loopInfo,
Evan Cheng81a03822007-11-17 00:40:40 +00001410 BitVector &SpillMBBs,
Owen Anderson28998312008-08-13 22:28:50 +00001411 DenseMap<unsigned, std::vector<SRInfo> > &SpillIdxes,
Evan Cheng0cbb1162007-11-29 01:06:25 +00001412 BitVector &RestoreMBBs,
Owen Anderson28998312008-08-13 22:28:50 +00001413 DenseMap<unsigned, std::vector<SRInfo> > &RestoreIdxes,
1414 DenseMap<unsigned,unsigned> &MBBVRegsMap,
Evan Cheng9c3c2212008-06-06 07:54:39 +00001415 std::vector<LiveInterval*> &NewLIs, float &SSWeight) {
Evan Cheng018f9b02007-12-05 03:22:34 +00001416 bool AllCanFold = true;
Evan Cheng81a03822007-11-17 00:40:40 +00001417 unsigned NewVReg = 0;
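// Round the range to whole instructions. Each instruction index owns
// InstrSlots::NUM sub-slots (the load / use / def / store slots used by
// getLoadIndex and friends), so the per-instruction checks below see every
// slot the range touches.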
Evan Cheng063284c2008-02-21 00:34:19 +00001418 unsigned start = getBaseIndex(I->start);
Evan Chengf2fbca62007-11-12 06:35:08 +00001419 unsigned end = getBaseIndex(I->end-1) + InstrSlots::NUM;
Evan Chengf2fbca62007-11-12 06:35:08 +00001420
Evan Cheng063284c2008-02-21 00:34:19 +00001421 // First collect all the def / use in this live range that will be rewritten.
Evan Cheng7e073ba2008-04-09 20:57:25 +00001422 // Make sure they are sorted according to instruction index.
Evan Cheng063284c2008-02-21 00:34:19 +00001423 std::vector<RewriteInfo> RewriteMIs;
Evan Chengd70dbb52008-02-22 09:24:50 +00001424 for (MachineRegisterInfo::reg_iterator ri = mri_->reg_begin(li.reg),
1425 re = mri_->reg_end(); ri != re; ) {
Evan Cheng419852c2008-04-03 16:39:43 +00001426 MachineInstr *MI = &*ri;
Evan Cheng063284c2008-02-21 00:34:19 +00001427 MachineOperand &O = ri.getOperand();
1428 ++ri;
Evan Cheng24d2f8a2008-03-31 07:53:30 +00001429 assert(!O.isImplicit() && "Spilling register that's used as implicit use?");
Evan Cheng063284c2008-02-21 00:34:19 +00001430 unsigned index = getInstructionIndex(MI);
1431 if (index < start || index >= end)
1432 continue;
Evan Cheng79a796c2008-07-12 01:56:02 +00001433 if (O.isUse() && !li.liveAt(getUseIndex(index)))
1434 // Must be defined by an implicit def. It should not be spilled. Note,
1435 // this is for correctness reasons, e.g.
1436 // 8 %reg1024<def> = IMPLICIT_DEF
1437 // 12 %reg1024<def> = INSERT_SUBREG %reg1024<kill>, %reg1025, 2
1438 // The live range [12, 14) is not part of the r1024 live interval since
1439 // it's defined by an implicit def. It will not conflict with the live
1440 // interval of r1025. Now suppose both registers are spilled; you can
Evan Chengb9890ae2008-07-12 02:22:07 +00001441 // easily see a situation where both registers are reloaded before
Evan Cheng79a796c2008-07-12 01:56:02 +00001442 // the INSERT_SUBREG and their target registers would overlap.
1443 continue;
Evan Cheng063284c2008-02-21 00:34:19 +00001444 RewriteMIs.push_back(RewriteInfo(index, MI, O.isUse(), O.isDef()));
1445 }
1446 std::sort(RewriteMIs.begin(), RewriteMIs.end(), RewriteInfoCompare());
1447
Evan Cheng313d4b82008-02-23 00:33:04 +00001448 unsigned ImpUse = DefIsReMat ? getReMatImplicitUse(li, ReMatDefMI) : 0;
Evan Cheng063284c2008-02-21 00:34:19 +00001449 // Now rewrite the defs and uses.
1450 for (unsigned i = 0, e = RewriteMIs.size(); i != e; ) {
1451 RewriteInfo &rwi = RewriteMIs[i];
1452 ++i;
1453 unsigned index = rwi.Index;
1454 bool MIHasUse = rwi.HasUse;
1455 bool MIHasDef = rwi.HasDef;
1456 MachineInstr *MI = rwi.MI;
1457 // If MI defs and/or uses the same register multiple times, then there
1458 // are multiple entries.
Evan Cheng313d4b82008-02-23 00:33:04 +00001459 unsigned NumUses = MIHasUse;
Evan Cheng063284c2008-02-21 00:34:19 +00001460 while (i != e && RewriteMIs[i].MI == MI) {
1461 assert(RewriteMIs[i].Index == index);
Evan Cheng313d4b82008-02-23 00:33:04 +00001462 bool isUse = RewriteMIs[i].HasUse;
1463 if (isUse) ++NumUses;
1464 MIHasUse |= isUse;
Evan Cheng063284c2008-02-21 00:34:19 +00001465 MIHasDef |= RewriteMIs[i].HasDef;
1466 ++i;
1467 }
Evan Cheng81a03822007-11-17 00:40:40 +00001468 MachineBasicBlock *MBB = MI->getParent();
Evan Cheng313d4b82008-02-23 00:33:04 +00001469
Evan Cheng0a891ed2008-05-23 23:00:04 +00001470 if (ImpUse && MI != ReMatDefMI) {
Evan Cheng313d4b82008-02-23 00:33:04 +00001471 // Re-matting an instruction with virtual register use. Update the
Evan Cheng24d2f8a2008-03-31 07:53:30 +00001472 // register interval's spill weight to HUGE_VALF to prevent it from
1473 // being spilled.
Evan Cheng313d4b82008-02-23 00:33:04 +00001474 LiveInterval &ImpLi = getInterval(ImpUse);
Evan Cheng24d2f8a2008-03-31 07:53:30 +00001475 ImpLi.weight = HUGE_VALF;
Evan Cheng313d4b82008-02-23 00:33:04 +00001476 }
1477
Evan Cheng063284c2008-02-21 00:34:19 +00001478 unsigned MBBId = MBB->getNumber();
Evan Cheng018f9b02007-12-05 03:22:34 +00001479 unsigned ThisVReg = 0;
Evan Cheng70306f82007-12-03 09:58:48 +00001480 if (TrySplit) {
Owen Anderson28998312008-08-13 22:28:50 +00001481 DenseMap<unsigned,unsigned>::iterator NVI = MBBVRegsMap.find(MBBId);
Evan Cheng1953d0c2007-11-29 10:12:14 +00001482 if (NVI != MBBVRegsMap.end()) {
Evan Cheng018f9b02007-12-05 03:22:34 +00001483 ThisVReg = NVI->second;
Evan Cheng1953d0c2007-11-29 10:12:14 +00001484 // One common case:
1485 // x = use
1486 // ...
1487 // ...
1488 // def = ...
1489 // = use
1490 // It's better to start a new interval to avoid artificially
1491 // extending the new interval.
Evan Cheng1953d0c2007-11-29 10:12:14 +00001492 if (MIHasDef && !MIHasUse) {
1493 MBBVRegsMap.erase(MBB->getNumber());
Evan Cheng018f9b02007-12-05 03:22:34 +00001494 ThisVReg = 0;
Evan Cheng1953d0c2007-11-29 10:12:14 +00001495 }
1496 }
Evan Chengcada2452007-11-28 01:28:46 +00001497 }
Evan Cheng018f9b02007-12-05 03:22:34 +00001498
1499 bool IsNew = ThisVReg == 0;
1500 if (IsNew) {
1501 // This ends the previous live interval. If all of its def / use
1502 // can be folded, give it a low spill weight.
1503 if (NewVReg && TrySplit && AllCanFold) {
1504 LiveInterval &nI = getOrCreateInterval(NewVReg);
1505 nI.weight /= 10.0F;
1506 }
1507 AllCanFold = true;
1508 }
1509 NewVReg = ThisVReg;
1510
Evan Cheng81a03822007-11-17 00:40:40 +00001511 bool HasDef = false;
1512 bool HasUse = false;
Evan Chengd70dbb52008-02-22 09:24:50 +00001513 bool CanFold = rewriteInstructionForSpills(li, I->valno, TrySplit,
Evan Cheng9c3c2212008-06-06 07:54:39 +00001514 index, end, MI, ReMatOrigDefMI, ReMatDefMI,
1515 Slot, LdSlot, isLoad, isLoadSS, DefIsReMat,
1516 CanDelete, vrm, rc, ReMatIds, loopInfo, NewVReg,
1517 ImpUse, HasDef, HasUse, MBBVRegsMap, NewLIs, SSWeight);
Evan Cheng81a03822007-11-17 00:40:40 +00001518 if (!HasDef && !HasUse)
1519 continue;
1520
Evan Cheng018f9b02007-12-05 03:22:34 +00001521 AllCanFold &= CanFold;
1522
Evan Cheng81a03822007-11-17 00:40:40 +00001523 // Update weight of spill interval.
1524 LiveInterval &nI = getOrCreateInterval(NewVReg);
Evan Cheng70306f82007-12-03 09:58:48 +00001525 if (!TrySplit) {
Evan Cheng81a03822007-11-17 00:40:40 +00001526 // The spill weight is now infinity as it cannot be spilled again.
1527 nI.weight = HUGE_VALF;
Evan Cheng0cbb1162007-11-29 01:06:25 +00001528 continue;
Evan Cheng81a03822007-11-17 00:40:40 +00001529 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00001530
1531 // Keep track of the last def and first use in each MBB.
Evan Cheng0cbb1162007-11-29 01:06:25 +00001532 if (HasDef) {
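// A def only needs a spill store if the value is not killed later in this
// same block; if it is killed here, it never escapes the block.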
1533 if (MI != ReMatOrigDefMI || !CanDelete) {
Evan Cheng0cbb1162007-11-29 01:06:25 +00001534 bool HasKill = false;
1535 if (!HasUse)
1536 HasKill = anyKillInMBBAfterIdx(li, I->valno, MBB, getDefIndex(index));
1537 else {
Evan Cheng1953d0c2007-11-29 10:12:14 +00001538 // If this is a two-address instruction, then this index starts a new VNInfo.
Evan Cheng3f32d652008-06-04 09:18:41 +00001539 const VNInfo *VNI = li.findDefinedVNInfo(getDefIndex(index));
Evan Cheng0cbb1162007-11-29 01:06:25 +00001540 if (VNI)
1541 HasKill = anyKillInMBBAfterIdx(li, VNI, MBB, getDefIndex(index));
1542 }
Owen Anderson28998312008-08-13 22:28:50 +00001543 DenseMap<unsigned, std::vector<SRInfo> >::iterator SII =
Evan Chenge3110d02007-12-01 04:42:39 +00001544 SpillIdxes.find(MBBId);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001545 if (!HasKill) {
Evan Cheng1953d0c2007-11-29 10:12:14 +00001546 if (SII == SpillIdxes.end()) {
1547 std::vector<SRInfo> S;
1548 S.push_back(SRInfo(index, NewVReg, true));
1549 SpillIdxes.insert(std::make_pair(MBBId, S));
1550 } else if (SII->second.back().vreg != NewVReg) {
1551 SII->second.push_back(SRInfo(index, NewVReg, true));
1552 } else if ((int)index > SII->second.back().index) {
Evan Cheng0cbb1162007-11-29 01:06:25 +00001553 // If there is an earlier def and this is a two-address
1554 // instruction, then it's not possible to fold the store (which
1555 // would also fold the load).
Evan Cheng1953d0c2007-11-29 10:12:14 +00001556 SRInfo &Info = SII->second.back();
1557 Info.index = index;
1558 Info.canFold = !HasUse;
Evan Cheng0cbb1162007-11-29 01:06:25 +00001559 }
1560 SpillMBBs.set(MBBId);
Evan Chenge3110d02007-12-01 04:42:39 +00001561 } else if (SII != SpillIdxes.end() &&
1562 SII->second.back().vreg == NewVReg &&
1563 (int)index > SII->second.back().index) {
1564 // There is an earlier def that's not killed (must be two-address).
1565 // The spill is no longer needed.
1566 SII->second.pop_back();
1567 if (SII->second.empty()) {
1568 SpillIdxes.erase(MBBId);
1569 SpillMBBs.reset(MBBId);
1570 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00001571 }
1572 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00001573 }
1574
1575 if (HasUse) {
Owen Anderson28998312008-08-13 22:28:50 +00001576 DenseMap<unsigned, std::vector<SRInfo> >::iterator SII =
Evan Cheng0cbb1162007-11-29 01:06:25 +00001577 SpillIdxes.find(MBBId);
Evan Cheng1953d0c2007-11-29 10:12:14 +00001578 if (SII != SpillIdxes.end() &&
1579 SII->second.back().vreg == NewVReg &&
1580 (int)index > SII->second.back().index)
Evan Cheng0cbb1162007-11-29 01:06:25 +00001581 // There are use(s) following the last def, so it's not safe to fold the spill.
Evan Cheng1953d0c2007-11-29 10:12:14 +00001582 SII->second.back().canFold = false;
Owen Anderson28998312008-08-13 22:28:50 +00001583 DenseMap<unsigned, std::vector<SRInfo> >::iterator RII =
Evan Cheng0cbb1162007-11-29 01:06:25 +00001584 RestoreIdxes.find(MBBId);
Evan Cheng1953d0c2007-11-29 10:12:14 +00001585 if (RII != RestoreIdxes.end() && RII->second.back().vreg == NewVReg)
Evan Cheng0cbb1162007-11-29 01:06:25 +00001586 // If we are splitting live intervals, only fold if it's the first
1587 // use and there isn't another use later in the MBB.
Evan Cheng1953d0c2007-11-29 10:12:14 +00001588 RII->second.back().canFold = false;
Evan Cheng0cbb1162007-11-29 01:06:25 +00001589 else if (IsNew) {
1590 // Only need a reload if there isn't an earlier def / use.
Evan Cheng1953d0c2007-11-29 10:12:14 +00001591 if (RII == RestoreIdxes.end()) {
1592 std::vector<SRInfo> Infos;
1593 Infos.push_back(SRInfo(index, NewVReg, true));
1594 RestoreIdxes.insert(std::make_pair(MBBId, Infos));
1595 } else {
1596 RII->second.push_back(SRInfo(index, NewVReg, true));
1597 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00001598 RestoreMBBs.set(MBBId);
1599 }
1600 }
1601
1602 // Update spill weight.
Evan Cheng22f07ff2007-12-11 02:09:15 +00001603 unsigned loopDepth = loopInfo->getLoopDepth(MBB);
Evan Chengc3417602008-06-21 06:45:54 +00001604 nI.weight += getSpillWeight(HasDef, HasUse, loopDepth);
Evan Chengf2fbca62007-11-12 06:35:08 +00001605 }
Evan Cheng018f9b02007-12-05 03:22:34 +00001606
1607 if (NewVReg && TrySplit && AllCanFold) {
1608 // If all of its def / use can be folded, give it a low spill weight.
1609 LiveInterval &nI = getOrCreateInterval(NewVReg);
1610 nI.weight /= 10.0F;
1611 }
Evan Chengf2fbca62007-11-12 06:35:08 +00001612}
1613
Evan Cheng1953d0c2007-11-29 10:12:14 +00001614bool LiveIntervals::alsoFoldARestore(int Id, int index, unsigned vr,
1615 BitVector &RestoreMBBs,
Owen Anderson28998312008-08-13 22:28:50 +00001616 DenseMap<unsigned,std::vector<SRInfo> > &RestoreIdxes) {
Evan Cheng1953d0c2007-11-29 10:12:14 +00001617 if (!RestoreMBBs[Id])
1618 return false;
1619 std::vector<SRInfo> &Restores = RestoreIdxes[Id];
1620 for (unsigned i = 0, e = Restores.size(); i != e; ++i)
1621 if (Restores[i].index == index &&
1622 Restores[i].vreg == vr &&
1623 Restores[i].canFold)
1624 return true;
1625 return false;
1626}
1627
1628void LiveIntervals::eraseRestoreInfo(int Id, int index, unsigned vr,
1629 BitVector &RestoreMBBs,
Owen Anderson28998312008-08-13 22:28:50 +00001630 DenseMap<unsigned,std::vector<SRInfo> > &RestoreIdxes) {
Evan Cheng1953d0c2007-11-29 10:12:14 +00001631 if (!RestoreMBBs[Id])
1632 return;
1633 std::vector<SRInfo> &Restores = RestoreIdxes[Id];
1634 for (unsigned i = 0, e = Restores.size(); i != e; ++i)
1635 if (Restores[i].index == index && Restores[i].vreg)
1636 Restores[i].index = -1;
1637}
Evan Cheng81a03822007-11-17 00:40:40 +00001638
Evan Cheng4cce6b42008-04-11 17:53:36 +00001639/// handleSpilledImpDefs - Remove IMPLICIT_DEF instructions which are being
1640/// spilled and create empty intervals for their uses.
1641void
1642LiveIntervals::handleSpilledImpDefs(const LiveInterval &li, VirtRegMap &vrm,
1643 const TargetRegisterClass* rc,
1644 std::vector<LiveInterval*> &NewLIs) {
Evan Cheng419852c2008-04-03 16:39:43 +00001645 for (MachineRegisterInfo::reg_iterator ri = mri_->reg_begin(li.reg),
1646 re = mri_->reg_end(); ri != re; ) {
Evan Cheng4cce6b42008-04-11 17:53:36 +00001647 MachineOperand &O = ri.getOperand();
Evan Cheng419852c2008-04-03 16:39:43 +00001648 MachineInstr *MI = &*ri;
1649 ++ri;
Evan Cheng4cce6b42008-04-11 17:53:36 +00001650 if (O.isDef()) {
1651 assert(MI->getOpcode() == TargetInstrInfo::IMPLICIT_DEF &&
1652 "Register def was not rewritten?");
1653 RemoveMachineInstrFromMaps(MI);
1654 vrm.RemoveMachineInstrFromMaps(MI);
1655 MI->eraseFromParent();
1656 } else {
1657 // This must be a use of an implicit_def so it's not part of the live
1658 // interval. Create a new empty live interval for it.
1659 // FIXME: Can we simply erase some of the instructions? e.g. Stores?
1660 unsigned NewVReg = mri_->createVirtualRegister(rc);
1661 vrm.grow();
1662 vrm.setIsImplicitlyDefined(NewVReg);
1663 NewLIs.push_back(&getOrCreateInterval(NewVReg));
1664 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
1665 MachineOperand &MO = MI->getOperand(i);
Dan Gohmand735b802008-10-03 15:45:36 +00001666 if (MO.isReg() && MO.getReg() == li.reg)
Evan Cheng4cce6b42008-04-11 17:53:36 +00001667 MO.setReg(NewVReg);
1668 }
1669 }
Evan Cheng419852c2008-04-03 16:39:43 +00001670 }
1671}
1672
Owen Anderson133f10f2008-08-18 19:52:22 +00001673namespace {
1674 struct LISorter {
1675 bool operator()(LiveInterval* A, LiveInterval* B) {
1676 return A->beginNumber() < B->beginNumber();
1677 }
1678 };
1679}
Evan Cheng81a03822007-11-17 00:40:40 +00001680
Evan Chengf2fbca62007-11-12 06:35:08 +00001681std::vector<LiveInterval*> LiveIntervals::
Owen Andersond6664312008-08-18 18:05:32 +00001682addIntervalsForSpillsFast(const LiveInterval &li,
1683 const MachineLoopInfo *loopInfo,
1684 VirtRegMap &vrm, float& SSWeight) {
Owen Anderson17197312008-08-18 23:41:04 +00001685 unsigned slot = vrm.assignVirt2StackSlot(li.reg);
Owen Andersond6664312008-08-18 18:05:32 +00001686
1687 std::vector<LiveInterval*> added;
1688
1689 assert(li.weight != HUGE_VALF &&
1690 "attempt to spill already spilled interval!");
1691
1692 DOUT << "\t\t\t\tadding intervals for spills for interval: ";
1693 DEBUG(li.dump());
1694 DOUT << '\n';
1695
1696 const TargetRegisterClass* rc = mri_->getRegClass(li.reg);
1697
Owen Anderson9a032932008-08-18 21:20:32 +00001698 SSWeight = 0.0f;
1699
Owen Andersona41e47a2008-08-19 22:12:11 +00001700 MachineRegisterInfo::reg_iterator RI = mri_->reg_begin(li.reg);
1701 while (RI != mri_->reg_end()) {
1702 MachineInstr* MI = &*RI;
1703
1704 SmallVector<unsigned, 2> Indices;
1705 bool HasUse = false;
1706 bool HasDef = false;
1707
1708 for (unsigned i = 0; i != MI->getNumOperands(); ++i) {
1709 MachineOperand& mop = MI->getOperand(i);
Dan Gohmand735b802008-10-03 15:45:36 +00001710 if (!mop.isReg() || mop.getReg() != li.reg) continue;
Owen Andersona41e47a2008-08-19 22:12:11 +00001711
1712 HasUse |= MI->getOperand(i).isUse();
1713 HasDef |= MI->getOperand(i).isDef();
1714
1715 Indices.push_back(i);
1716 }
1717
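// Try to fold the stack slot access directly into MI; if that is not
// possible, rewrite MI to use a fresh vreg and let the spiller insert the
// reload / spill around it.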
1718 if (!tryFoldMemoryOperand(MI, vrm, NULL, getInstructionIndex(MI),
1719 Indices, true, slot, li.reg)) {
1720 unsigned NewVReg = mri_->createVirtualRegister(rc);
Owen Anderson9a032932008-08-18 21:20:32 +00001721 vrm.grow();
Owen Anderson17197312008-08-18 23:41:04 +00001722 vrm.assignVirt2StackSlot(NewVReg, slot);
1723
Owen Andersona41e47a2008-08-19 22:12:11 +00001724 // create a new register for this spill
1725 LiveInterval &nI = getOrCreateInterval(NewVReg);
Owen Andersond6664312008-08-18 18:05:32 +00001726
Owen Andersona41e47a2008-08-19 22:12:11 +00001727 // the spill weight is now infinity as it
1728 // cannot be spilled again
1729 nI.weight = HUGE_VALF;
1730
1731 // Rewrite register operands to use the new vreg.
1732 for (SmallVectorImpl<unsigned>::iterator I = Indices.begin(),
1733 E = Indices.end(); I != E; ++I) {
1734 MI->getOperand(*I).setReg(NewVReg);
1735
1736 if (MI->getOperand(*I).isUse())
1737 MI->getOperand(*I).setIsKill(true);
1738 }
1739
1740 // Fill in the new live interval.
1741 unsigned index = getInstructionIndex(MI);
1742 if (HasUse) {
1743 LiveRange LR(getLoadIndex(index), getUseIndex(index),
1744 nI.getNextValue(~0U, 0, getVNInfoAllocator()));
1745 DOUT << " +" << LR;
1746 nI.addRange(LR);
1747 vrm.addRestorePoint(NewVReg, MI);
1748 }
1749 if (HasDef) {
1750 LiveRange LR(getDefIndex(index), getStoreIndex(index),
1751 nI.getNextValue(~0U, 0, getVNInfoAllocator()));
1752 DOUT << " +" << LR;
1753 nI.addRange(LR);
1754 vrm.addSpillPoint(NewVReg, true, MI);
1755 }
1756
Owen Anderson17197312008-08-18 23:41:04 +00001757 added.push_back(&nI);
Owen Anderson8dc2cbe2008-08-18 18:38:12 +00001758
Owen Andersona41e47a2008-08-19 22:12:11 +00001759 DOUT << "\t\t\t\tadded new interval: ";
1760 DEBUG(nI.dump());
1761 DOUT << '\n';
1762
1763 unsigned loopDepth = loopInfo->getLoopDepth(MI->getParent());
1764 if (HasUse) {
1765 if (HasDef)
1766 SSWeight += getSpillWeight(true, true, loopDepth);
1767 else
1768 SSWeight += getSpillWeight(false, true, loopDepth);
1769 } else
1770 SSWeight += getSpillWeight(true, false, loopDepth);
1771 }
Owen Anderson9a032932008-08-18 21:20:32 +00001772
Owen Anderson9a032932008-08-18 21:20:32 +00001773
Owen Andersona41e47a2008-08-19 22:12:11 +00001774 RI = mri_->reg_begin(li.reg);
Owen Andersond6664312008-08-18 18:05:32 +00001775 }
Owen Andersond6664312008-08-18 18:05:32 +00001776
Owen Andersona41e47a2008-08-19 22:12:11 +00001777 // Clients expect the new intervals to be returned in sorted order.
Owen Anderson133f10f2008-08-18 19:52:22 +00001778 std::sort(added.begin(), added.end(), LISorter());
1779
Owen Andersond6664312008-08-18 18:05:32 +00001780 return added;
1781}
1782
1783std::vector<LiveInterval*> LiveIntervals::
Evan Cheng81a03822007-11-17 00:40:40 +00001784addIntervalsForSpills(const LiveInterval &li,
Evan Chengdc377862008-09-30 15:44:16 +00001785 SmallVectorImpl<LiveInterval*> &SpillIs,
Evan Cheng9c3c2212008-06-06 07:54:39 +00001786 const MachineLoopInfo *loopInfo, VirtRegMap &vrm,
1787 float &SSWeight) {
Owen Andersonae339ba2008-08-19 00:17:30 +00001788
1789 if (EnableFastSpilling)
1790 return addIntervalsForSpillsFast(li, loopInfo, vrm, SSWeight);
1791
Evan Chengf2fbca62007-11-12 06:35:08 +00001792 assert(li.weight != HUGE_VALF &&
1793 "attempt to spill already spilled interval!");
1794
1795 DOUT << "\t\t\t\tadding intervals for spills for interval: ";
Dan Gohman6f0d0242008-02-10 18:45:23 +00001796 li.print(DOUT, tri_);
Evan Chengf2fbca62007-11-12 06:35:08 +00001797 DOUT << '\n';
1798
Evan Cheng9c3c2212008-06-06 07:54:39 +00001799 // Spill slot weight.
1800 SSWeight = 0.0f;
1801
Evan Cheng81a03822007-11-17 00:40:40 +00001802 // Each bit specifies whether a spill is required in the MBB.
1803 BitVector SpillMBBs(mf_->getNumBlockIDs());
Owen Anderson28998312008-08-13 22:28:50 +00001804 DenseMap<unsigned, std::vector<SRInfo> > SpillIdxes;
Evan Cheng0cbb1162007-11-29 01:06:25 +00001805 BitVector RestoreMBBs(mf_->getNumBlockIDs());
Owen Anderson28998312008-08-13 22:28:50 +00001806 DenseMap<unsigned, std::vector<SRInfo> > RestoreIdxes;
1807 DenseMap<unsigned,unsigned> MBBVRegsMap;
Evan Chengf2fbca62007-11-12 06:35:08 +00001808 std::vector<LiveInterval*> NewLIs;
Evan Chengd70dbb52008-02-22 09:24:50 +00001809 const TargetRegisterClass* rc = mri_->getRegClass(li.reg);
Evan Chengf2fbca62007-11-12 06:35:08 +00001810
1811 unsigned NumValNums = li.getNumValNums();
1812 SmallVector<MachineInstr*, 4> ReMatDefs;
1813 ReMatDefs.resize(NumValNums, NULL);
1814 SmallVector<MachineInstr*, 4> ReMatOrigDefs;
1815 ReMatOrigDefs.resize(NumValNums, NULL);
1816 SmallVector<int, 4> ReMatIds;
1817 ReMatIds.resize(NumValNums, VirtRegMap::MAX_STACK_SLOT);
1818 BitVector ReMatDelete(NumValNums);
1819 unsigned Slot = VirtRegMap::MAX_STACK_SLOT;
1820
Evan Cheng81a03822007-11-17 00:40:40 +00001821 // Spilling a split live interval. It cannot be split any further. Also,
1822 // it's guaranteed to be a single val# / range interval.
1823 if (vrm.getPreSplitReg(li.reg)) {
1824 vrm.setIsSplitFromReg(li.reg, 0);
Evan Chengd120ffd2007-12-05 10:24:35 +00001825 // Unset the split kill marker on the last use.
1826 unsigned KillIdx = vrm.getKillPoint(li.reg);
1827 if (KillIdx) {
1828 MachineInstr *KillMI = getInstructionFromIndex(KillIdx);
1829 assert(KillMI && "Last use disappeared?");
1830 int KillOp = KillMI->findRegisterUseOperandIdx(li.reg, true);
1831 assert(KillOp != -1 && "Last use disappeared?");
Chris Lattnerf7382302007-12-30 21:56:09 +00001832 KillMI->getOperand(KillOp).setIsKill(false);
Evan Chengd120ffd2007-12-05 10:24:35 +00001833 }
Evan Chengadf85902007-12-05 09:51:10 +00001834 vrm.removeKillPoint(li.reg);
Evan Cheng81a03822007-11-17 00:40:40 +00001835 bool DefIsReMat = vrm.isReMaterialized(li.reg);
1836 Slot = vrm.getStackSlot(li.reg);
1837 assert(Slot != VirtRegMap::MAX_STACK_SLOT);
1838 MachineInstr *ReMatDefMI = DefIsReMat ?
1839 vrm.getReMaterializedMI(li.reg) : NULL;
1840 int LdSlot = 0;
1841 bool isLoadSS = DefIsReMat && tii_->isLoadFromStackSlot(ReMatDefMI, LdSlot);
1842 bool isLoad = isLoadSS ||
Chris Lattner749c6f62008-01-07 07:27:27 +00001843 (DefIsReMat && ReMatDefMI->getDesc().isSimpleLoad());
Evan Cheng81a03822007-11-17 00:40:40 +00001844 bool IsFirstRange = true;
1845 for (LiveInterval::Ranges::const_iterator
1846 I = li.ranges.begin(), E = li.ranges.end(); I != E; ++I) {
1847 // If this is a split live interval with multiple ranges, it means there
1848 // are two-address instructions that re-defined the value. Only the
1849 // first def can be rematerialized!
1850 if (IsFirstRange) {
Evan Chengcb3c3302007-11-29 23:02:50 +00001851 // Note ReMatOrigDefMI has already been deleted.
Evan Cheng81a03822007-11-17 00:40:40 +00001852 rewriteInstructionsForSpills(li, false, I, NULL, ReMatDefMI,
1853 Slot, LdSlot, isLoad, isLoadSS, DefIsReMat,
Evan Chengd70dbb52008-02-22 09:24:50 +00001854 false, vrm, rc, ReMatIds, loopInfo,
Evan Cheng0cbb1162007-11-29 01:06:25 +00001855 SpillMBBs, SpillIdxes, RestoreMBBs, RestoreIdxes,
Evan Cheng9c3c2212008-06-06 07:54:39 +00001856 MBBVRegsMap, NewLIs, SSWeight);
Evan Cheng81a03822007-11-17 00:40:40 +00001857 } else {
1858 rewriteInstructionsForSpills(li, false, I, NULL, 0,
1859 Slot, 0, false, false, false,
Evan Chengd70dbb52008-02-22 09:24:50 +00001860 false, vrm, rc, ReMatIds, loopInfo,
Evan Cheng0cbb1162007-11-29 01:06:25 +00001861 SpillMBBs, SpillIdxes, RestoreMBBs, RestoreIdxes,
Evan Cheng9c3c2212008-06-06 07:54:39 +00001862 MBBVRegsMap, NewLIs, SSWeight);
Evan Cheng81a03822007-11-17 00:40:40 +00001863 }
1864 IsFirstRange = false;
1865 }
Evan Cheng419852c2008-04-03 16:39:43 +00001866
Evan Cheng9c3c2212008-06-06 07:54:39 +00001867 SSWeight = 0.0f; // Already accounted for when split.
Evan Cheng4cce6b42008-04-11 17:53:36 +00001868 handleSpilledImpDefs(li, vrm, rc, NewLIs);
Evan Cheng81a03822007-11-17 00:40:40 +00001869 return NewLIs;
1870 }
1871
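// Splitting at basic block boundaries is only worthwhile if the interval
// spans more than one block and the split limit has not been reached.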
1872 bool TrySplit = SplitAtBB && !intervalIsInOneMBB(li);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001873 if (SplitLimit != -1 && (int)numSplits >= SplitLimit)
1874 TrySplit = false;
1875 if (TrySplit)
1876 ++numSplits;
Evan Chengf2fbca62007-11-12 06:35:08 +00001877 bool NeedStackSlot = false;
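// Scan the val#s: remember which defs can be rematerialized (cloning the
// def MI so later rewrites do not disturb the copy) and whether any value
// still needs a stack slot.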
1878 for (LiveInterval::const_vni_iterator i = li.vni_begin(), e = li.vni_end();
1879 i != e; ++i) {
1880 const VNInfo *VNI = *i;
1881 unsigned VN = VNI->id;
1882 unsigned DefIdx = VNI->def;
1883 if (DefIdx == ~1U)
1884 continue; // Dead val#.
1885 // Is the def for the val# rematerializable?
Evan Cheng81a03822007-11-17 00:40:40 +00001886 MachineInstr *ReMatDefMI = (DefIdx == ~0u)
1887 ? 0 : getInstructionFromIndex(DefIdx);
Evan Cheng5ef3a042007-12-06 00:01:56 +00001888 bool dummy;
Evan Chengdc377862008-09-30 15:44:16 +00001889 if (ReMatDefMI && isReMaterializable(li, VNI, ReMatDefMI, SpillIs, dummy)) {
Evan Chengf2fbca62007-11-12 06:35:08 +00001890 // Remember how to remat the def of this val#.
Evan Cheng81a03822007-11-17 00:40:40 +00001891 ReMatOrigDefs[VN] = ReMatDefMI;
Dan Gohman2c3f7ae2008-07-17 23:49:46 +00001892 // Original def may be modified so we have to make a copy here.
Evan Cheng1ed99222008-07-19 00:37:25 +00001893 MachineInstr *Clone = mf_->CloneMachineInstr(ReMatDefMI);
1894 ClonedMIs.push_back(Clone);
1895 ReMatDefs[VN] = Clone;
Evan Chengf2fbca62007-11-12 06:35:08 +00001896
1897 bool CanDelete = true;
Evan Chengc3fc7d92007-11-29 09:49:23 +00001898 if (VNI->hasPHIKill) {
1899 // A kill is a phi node, not all of its uses can be rematerialized.
Evan Chengf2fbca62007-11-12 06:35:08 +00001900 // It must not be deleted.
Evan Chengc3fc7d92007-11-29 09:49:23 +00001901 CanDelete = false;
1902 // Need a stack slot if there is any live range where uses cannot be
1903 // rematerialized.
1904 NeedStackSlot = true;
Evan Chengf2fbca62007-11-12 06:35:08 +00001905 }
Evan Chengf2fbca62007-11-12 06:35:08 +00001906 if (CanDelete)
1907 ReMatDelete.set(VN);
1908 } else {
1909 // Need a stack slot if there is any live range where uses cannot be
1910 // rematerialized.
1911 NeedStackSlot = true;
1912 }
1913 }
1914
1915 // One stack slot per live interval.
Evan Cheng81a03822007-11-17 00:40:40 +00001916 if (NeedStackSlot && vrm.getPreSplitReg(li.reg) == 0)
Evan Chengf2fbca62007-11-12 06:35:08 +00001917 Slot = vrm.assignVirt2StackSlot(li.reg);
1918
1919 // Create new intervals and rewrite defs and uses.
1920 for (LiveInterval::Ranges::const_iterator
1921 I = li.ranges.begin(), E = li.ranges.end(); I != E; ++I) {
Evan Cheng81a03822007-11-17 00:40:40 +00001922 MachineInstr *ReMatDefMI = ReMatDefs[I->valno->id];
1923 MachineInstr *ReMatOrigDefMI = ReMatOrigDefs[I->valno->id];
1924 bool DefIsReMat = ReMatDefMI != NULL;
Evan Chengf2fbca62007-11-12 06:35:08 +00001925 bool CanDelete = ReMatDelete[I->valno->id];
1926 int LdSlot = 0;
Evan Cheng81a03822007-11-17 00:40:40 +00001927 bool isLoadSS = DefIsReMat && tii_->isLoadFromStackSlot(ReMatDefMI, LdSlot);
Evan Chengf2fbca62007-11-12 06:35:08 +00001928 bool isLoad = isLoadSS ||
Chris Lattner749c6f62008-01-07 07:27:27 +00001929 (DefIsReMat && ReMatDefMI->getDesc().isSimpleLoad());
Evan Cheng81a03822007-11-17 00:40:40 +00001930 rewriteInstructionsForSpills(li, TrySplit, I, ReMatOrigDefMI, ReMatDefMI,
Evan Cheng0cbb1162007-11-29 01:06:25 +00001931 Slot, LdSlot, isLoad, isLoadSS, DefIsReMat,
Evan Chengd70dbb52008-02-22 09:24:50 +00001932 CanDelete, vrm, rc, ReMatIds, loopInfo,
Evan Cheng0cbb1162007-11-29 01:06:25 +00001933 SpillMBBs, SpillIdxes, RestoreMBBs, RestoreIdxes,
Evan Cheng9c3c2212008-06-06 07:54:39 +00001934 MBBVRegsMap, NewLIs, SSWeight);
Evan Chengf2fbca62007-11-12 06:35:08 +00001935 }
1936
Evan Cheng0cbb1162007-11-29 01:06:25 +00001937 // Insert spills / restores if we are splitting.
Evan Cheng419852c2008-04-03 16:39:43 +00001938 if (!TrySplit) {
Evan Cheng4cce6b42008-04-11 17:53:36 +00001939 handleSpilledImpDefs(li, vrm, rc, NewLIs);
Evan Cheng1953d0c2007-11-29 10:12:14 +00001940 return NewLIs;
Evan Cheng419852c2008-04-03 16:39:43 +00001941 }
Evan Cheng1953d0c2007-11-29 10:12:14 +00001942
Evan Chengb50bb8c2007-12-05 08:16:32 +00001943 SmallPtrSet<LiveInterval*, 4> AddedKill;
Evan Chengaee4af62007-12-02 08:30:39 +00001944 SmallVector<unsigned, 2> Ops;
Evan Cheng1953d0c2007-11-29 10:12:14 +00001945 if (NeedStackSlot) {
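// First pass over blocks that need spill stores: try to fold each store
// into the last def of the value in that block, otherwise record a spill
// point for the spiller.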
1946 int Id = SpillMBBs.find_first();
1947 while (Id != -1) {
Evan Cheng9c3c2212008-06-06 07:54:39 +00001948 MachineBasicBlock *MBB = mf_->getBlockNumbered(Id);
1949 unsigned loopDepth = loopInfo->getLoopDepth(MBB);
Evan Cheng1953d0c2007-11-29 10:12:14 +00001950 std::vector<SRInfo> &spills = SpillIdxes[Id];
1951 for (unsigned i = 0, e = spills.size(); i != e; ++i) {
1952 int index = spills[i].index;
1953 unsigned VReg = spills[i].vreg;
Evan Cheng597d10d2007-12-04 00:32:23 +00001954 LiveInterval &nI = getOrCreateInterval(VReg);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001955 bool isReMat = vrm.isReMaterialized(VReg);
1956 MachineInstr *MI = getInstructionFromIndex(index);
Evan Chengaee4af62007-12-02 08:30:39 +00001957 bool CanFold = false;
1958 bool FoundUse = false;
1959 Ops.clear();
Evan Chengcddbb832007-11-30 21:23:43 +00001960 if (spills[i].canFold) {
Evan Chengaee4af62007-12-02 08:30:39 +00001961 CanFold = true;
Evan Cheng0cbb1162007-11-29 01:06:25 +00001962 for (unsigned j = 0, ee = MI->getNumOperands(); j != ee; ++j) {
1963 MachineOperand &MO = MI->getOperand(j);
Dan Gohmand735b802008-10-03 15:45:36 +00001964 if (!MO.isReg() || MO.getReg() != VReg)
Evan Cheng0cbb1162007-11-29 01:06:25 +00001965 continue;
Evan Chengaee4af62007-12-02 08:30:39 +00001966
1967 Ops.push_back(j);
1968 if (MO.isDef())
Evan Chengcddbb832007-11-30 21:23:43 +00001969 continue;
Evan Chengaee4af62007-12-02 08:30:39 +00001970 if (isReMat ||
1971 (!FoundUse && !alsoFoldARestore(Id, index, VReg,
1972 RestoreMBBs, RestoreIdxes))) {
1973 // MI has two-address uses of the same register. If the use
1974 // isn't the first and only use in the BB, then we can't fold
1975 // it. FIXME: Move this to rewriteInstructionsForSpills.
1976 CanFold = false;
Evan Chengcddbb832007-11-30 21:23:43 +00001977 break;
1978 }
Evan Chengaee4af62007-12-02 08:30:39 +00001979 FoundUse = true;
Evan Cheng0cbb1162007-11-29 01:06:25 +00001980 }
1981 }
1982 // Fold the store into the def if possible.
Evan Chengcddbb832007-11-30 21:23:43 +00001983 bool Folded = false;
Evan Chengaee4af62007-12-02 08:30:39 +00001984 if (CanFold && !Ops.empty()) {
1985 if (tryFoldMemoryOperand(MI, vrm, NULL, index, Ops, true, Slot,VReg)){
Evan Chengcddbb832007-11-30 21:23:43 +00001986 Folded = true;
Evan Chengf38d14f2007-12-05 09:05:34 +00001987 if (FoundUse) {
Evan Chengaee4af62007-12-02 08:30:39 +00001988 // Also folded uses, do not issue a load.
1989 eraseRestoreInfo(Id, index, VReg, RestoreMBBs, RestoreIdxes);
Evan Chengf38d14f2007-12-05 09:05:34 +00001990 nI.removeRange(getLoadIndex(index), getUseIndex(index)+1);
1991 }
Evan Cheng597d10d2007-12-04 00:32:23 +00001992 nI.removeRange(getDefIndex(index), getStoreIndex(index));
Evan Chengcddbb832007-11-30 21:23:43 +00001993 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00001994 }
1995
Evan Cheng7e073ba2008-04-09 20:57:25 +00001996 // Otherwise tell the spiller to issue a spill.
Evan Chengb50bb8c2007-12-05 08:16:32 +00001997 if (!Folded) {
1998 LiveRange *LR = &nI.ranges[nI.ranges.size()-1];
1999 bool isKill = LR->end == getStoreIndex(index);
Evan Chengb0a6f622008-05-20 08:10:37 +00002000 if (!MI->registerDefIsDead(nI.reg))
2001 // No need to spill a dead def.
2002 vrm.addSpillPoint(VReg, isKill, MI);
Evan Chengb50bb8c2007-12-05 08:16:32 +00002003 if (isKill)
2004 AddedKill.insert(&nI);
2005 }
Evan Cheng9c3c2212008-06-06 07:54:39 +00002006
2007 // Update spill slot weight.
2008 if (!isReMat)
Evan Chengc3417602008-06-21 06:45:54 +00002009 SSWeight += getSpillWeight(true, false, loopDepth);
Evan Cheng0cbb1162007-11-29 01:06:25 +00002010 }
Evan Cheng1953d0c2007-11-29 10:12:14 +00002011 Id = SpillMBBs.find_next(Id);
Evan Cheng0cbb1162007-11-29 01:06:25 +00002012 }
Evan Cheng1953d0c2007-11-29 10:12:14 +00002013 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00002014
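// Second pass over blocks that need reloads: fold the load (or the
// rematerialized def, if it is itself a load) into the first use when
// possible, otherwise record a restore point for the spiller.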
Evan Cheng1953d0c2007-11-29 10:12:14 +00002015 int Id = RestoreMBBs.find_first();
2016 while (Id != -1) {
Evan Cheng9c3c2212008-06-06 07:54:39 +00002017 MachineBasicBlock *MBB = mf_->getBlockNumbered(Id);
2018 unsigned loopDepth = loopInfo->getLoopDepth(MBB);
2019
Evan Cheng1953d0c2007-11-29 10:12:14 +00002020 std::vector<SRInfo> &restores = RestoreIdxes[Id];
2021 for (unsigned i = 0, e = restores.size(); i != e; ++i) {
2022 int index = restores[i].index;
2023 if (index == -1)
2024 continue;
2025 unsigned VReg = restores[i].vreg;
Evan Cheng597d10d2007-12-04 00:32:23 +00002026 LiveInterval &nI = getOrCreateInterval(VReg);
Evan Cheng9c3c2212008-06-06 07:54:39 +00002027 bool isReMat = vrm.isReMaterialized(VReg);
Evan Cheng81a03822007-11-17 00:40:40 +00002028 MachineInstr *MI = getInstructionFromIndex(index);
Evan Chengaee4af62007-12-02 08:30:39 +00002029 bool CanFold = false;
2030 Ops.clear();
Evan Chengcddbb832007-11-30 21:23:43 +00002031 if (restores[i].canFold) {
Evan Chengaee4af62007-12-02 08:30:39 +00002032 CanFold = true;
Evan Cheng81a03822007-11-17 00:40:40 +00002033 for (unsigned j = 0, ee = MI->getNumOperands(); j != ee; ++j) {
2034 MachineOperand &MO = MI->getOperand(j);
Dan Gohmand735b802008-10-03 15:45:36 +00002035 if (!MO.isReg() || MO.getReg() != VReg)
Evan Cheng81a03822007-11-17 00:40:40 +00002036 continue;
Evan Chengaee4af62007-12-02 08:30:39 +00002037
Evan Cheng0cbb1162007-11-29 01:06:25 +00002038 if (MO.isDef()) {
Evan Chengaee4af62007-12-02 08:30:39 +00002039 // If this restore were to be folded, it would have been folded
2040 // already.
2041 CanFold = false;
Evan Cheng81a03822007-11-17 00:40:40 +00002042 break;
2043 }
Evan Chengaee4af62007-12-02 08:30:39 +00002044 Ops.push_back(j);
Evan Cheng81a03822007-11-17 00:40:40 +00002045 }
2046 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00002047
2048 // Fold the load into the use if possible.
Evan Chengcddbb832007-11-30 21:23:43 +00002049 bool Folded = false;
Evan Chengaee4af62007-12-02 08:30:39 +00002050 if (CanFold && !Ops.empty()) {
Evan Cheng9c3c2212008-06-06 07:54:39 +00002051 if (!isReMat)
Evan Chengaee4af62007-12-02 08:30:39 +00002052 Folded = tryFoldMemoryOperand(MI, vrm, NULL,index,Ops,true,Slot,VReg);
2053 else {
Evan Cheng0cbb1162007-11-29 01:06:25 +00002054 MachineInstr *ReMatDefMI = vrm.getReMaterializedMI(VReg);
2055 int LdSlot = 0;
2056 bool isLoadSS = tii_->isLoadFromStackSlot(ReMatDefMI, LdSlot);
2057 // If the rematerializable def is a load, also try to fold it.
Chris Lattner749c6f62008-01-07 07:27:27 +00002058 if (isLoadSS || ReMatDefMI->getDesc().isSimpleLoad())
Evan Chengaee4af62007-12-02 08:30:39 +00002059 Folded = tryFoldMemoryOperand(MI, vrm, ReMatDefMI, index,
2060 Ops, isLoadSS, LdSlot, VReg);
Evan Chengd70dbb52008-02-22 09:24:50 +00002061 unsigned ImpUse = getReMatImplicitUse(li, ReMatDefMI);
2062 if (ImpUse) {
2063 // Re-matting an instruction with virtual register use. Add the
2064 // register as an implicit use on the use MI and update the register
Evan Cheng24d2f8a2008-03-31 07:53:30 +00002065 // interval's spill weight to HUGE_VALF to prevent it from being
2066 // spilled.
Evan Chengd70dbb52008-02-22 09:24:50 +00002067 LiveInterval &ImpLi = getInterval(ImpUse);
Evan Cheng24d2f8a2008-03-31 07:53:30 +00002068 ImpLi.weight = HUGE_VALF;
Evan Chengd70dbb52008-02-22 09:24:50 +00002069 MI->addOperand(MachineOperand::CreateReg(ImpUse, false, true));
2070 }
Evan Chengaee4af62007-12-02 08:30:39 +00002071 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00002072 }
2073 // If the fold succeeded, remove the now-unneeded load range from nI.
2074 // Otherwise, tell the spiller to issue a load / rematerialization for us.
Evan Cheng597d10d2007-12-04 00:32:23 +00002075 if (Folded)
2076 nI.removeRange(getLoadIndex(index), getUseIndex(index)+1);
Evan Chengb50bb8c2007-12-05 08:16:32 +00002077 else
Evan Cheng0cbb1162007-11-29 01:06:25 +00002078 vrm.addRestorePoint(VReg, MI);
Evan Cheng9c3c2212008-06-06 07:54:39 +00002079
2080 // Update spill slot weight.
2081 if (!isReMat)
Evan Chengc3417602008-06-21 06:45:54 +00002082 SSWeight += getSpillWeight(false, true, loopDepth);
Evan Cheng81a03822007-11-17 00:40:40 +00002083 }
Evan Cheng1953d0c2007-11-29 10:12:14 +00002084 Id = RestoreMBBs.find_next(Id);
Evan Cheng81a03822007-11-17 00:40:40 +00002085 }
2086
Evan Chengb50bb8c2007-12-05 08:16:32 +00002087 // Finalize intervals: add kills, finalize spill weights, and filter out
2088 // dead intervals.
Evan Cheng597d10d2007-12-04 00:32:23 +00002089 std::vector<LiveInterval*> RetNewLIs;
2090 for (unsigned i = 0, e = NewLIs.size(); i != e; ++i) {
2091 LiveInterval *LI = NewLIs[i];
2092 if (!LI->empty()) {
Owen Anderson496bac52008-07-23 19:47:27 +00002093 LI->weight /= InstrSlots::NUM * getApproximateInstructionCount(*LI);
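      // If no kill was recorded for this interval while rewriting, mark the
      // last use as a kill, unless that use is a non-implicit operand tied to
      // a def.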
Evan Chengb50bb8c2007-12-05 08:16:32 +00002094 if (!AddedKill.count(LI)) {
2095 LiveRange *LR = &LI->ranges[LI->ranges.size()-1];
Evan Chengd120ffd2007-12-05 10:24:35 +00002096 unsigned LastUseIdx = getBaseIndex(LR->end);
2097 MachineInstr *LastUse = getInstructionFromIndex(LastUseIdx);
Evan Cheng6130f662008-03-05 00:59:57 +00002098 int UseIdx = LastUse->findRegisterUseOperandIdx(LI->reg, false);
Evan Chengb50bb8c2007-12-05 08:16:32 +00002099 assert(UseIdx != -1);
Evan Chengd70dbb52008-02-22 09:24:50 +00002100 if (LastUse->getOperand(UseIdx).isImplicit() ||
2101 LastUse->getDesc().getOperandConstraint(UseIdx,TOI::TIED_TO) == -1){
Evan Chengb50bb8c2007-12-05 08:16:32 +00002102 LastUse->getOperand(UseIdx).setIsKill();
Evan Chengd120ffd2007-12-05 10:24:35 +00002103 vrm.addKillPoint(LI->reg, LastUseIdx);
Evan Chengadf85902007-12-05 09:51:10 +00002104 }
Evan Chengb50bb8c2007-12-05 08:16:32 +00002105 }
Evan Cheng597d10d2007-12-04 00:32:23 +00002106 RetNewLIs.push_back(LI);
2107 }
2108 }
Evan Cheng81a03822007-11-17 00:40:40 +00002109
Evan Cheng4cce6b42008-04-11 17:53:36 +00002110 handleSpilledImpDefs(li, vrm, rc, RetNewLIs);
Evan Cheng597d10d2007-12-04 00:32:23 +00002111 return RetNewLIs;
Evan Chengf2fbca62007-11-12 06:35:08 +00002112}
Evan Cheng676dd7c2008-03-11 07:19:34 +00002113
2114/// hasAllocatableSuperReg - Return true if the specified physical register has
2115/// any super register that's allocatable.
2116bool LiveIntervals::hasAllocatableSuperReg(unsigned Reg) const {
2117 for (const unsigned* AS = tri_->getSuperRegisters(Reg); *AS; ++AS)
2118 if (allocatableRegs_[*AS] && hasInterval(*AS))
2119 return true;
2120 return false;
2121}
2122
2123/// getRepresentativeReg - Find the largest super register of the specified
2124/// physical register.
2125unsigned LiveIntervals::getRepresentativeReg(unsigned Reg) const {
2126 // Find the largest super-register that is allocatable.
2127 unsigned BestReg = Reg;
2128 for (const unsigned* AS = tri_->getSuperRegisters(Reg); *AS; ++AS) {
2129 unsigned SuperReg = *AS;
2130 if (!hasAllocatableSuperReg(SuperReg) && hasInterval(SuperReg)) {
2131 BestReg = SuperReg;
2132 break;
2133 }
2134 }
2135 return BestReg;
2136}
2137
2138/// getNumConflictsWithPhysReg - Return the number of uses and defs of the
2139 /// specified interval that conflict with the specified physical register.
2140unsigned LiveIntervals::getNumConflictsWithPhysReg(const LiveInterval &li,
2141 unsigned PhysReg) const {
2142 unsigned NumConflicts = 0;
2143 const LiveInterval &pli = getInterval(getRepresentativeReg(PhysReg));
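  // Count every operand of the virtual register whose instruction falls
  // inside a live range of the physical register's interval.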
2144 for (MachineRegisterInfo::reg_iterator I = mri_->reg_begin(li.reg),
2145 E = mri_->reg_end(); I != E; ++I) {
2146 MachineOperand &O = I.getOperand();
2147 MachineInstr *MI = O.getParent();
2148 unsigned Index = getInstructionIndex(MI);
2149 if (pli.liveAt(Index))
2150 ++NumConflicts;
2151 }
2152 return NumConflicts;
2153}
2154
2155/// spillPhysRegAroundRegDefsUses - Spill the specified physical register
2156/// around all defs and uses of the specified interval.
2157void LiveIntervals::spillPhysRegAroundRegDefsUses(const LiveInterval &li,
2158 unsigned PhysReg, VirtRegMap &vrm) {
2159 unsigned SpillReg = getRepresentativeReg(PhysReg);
2160
2161 for (const unsigned *AS = tri_->getAliasSet(PhysReg); *AS; ++AS)
2162 // If a register aliases PhysReg but is not a sub-register of the chosen
2163 // representative super register, assert, since we can't handle that
2164 // case yet.
2165 assert(*AS == SpillReg || !allocatableRegs_[*AS] ||
2166 tri_->isSuperRegister(*AS, SpillReg));
2167
2168 LiveInterval &pli = getInterval(SpillReg);
2169 SmallPtrSet<MachineInstr*, 8> SeenMIs;
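  // Visit each instruction that uses or defines the virtual register once.
  // Wherever the physical register is live at such an instruction, record an
  // emergency spill and remove that instruction's slots from the physreg's
  // interval and from the intervals of its sub-registers.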
2170 for (MachineRegisterInfo::reg_iterator I = mri_->reg_begin(li.reg),
2171 E = mri_->reg_end(); I != E; ++I) {
2172 MachineOperand &O = I.getOperand();
2173 MachineInstr *MI = O.getParent();
2174 if (SeenMIs.count(MI))
2175 continue;
2176 SeenMIs.insert(MI);
2177 unsigned Index = getInstructionIndex(MI);
2178 if (pli.liveAt(Index)) {
2179 vrm.addEmergencySpill(SpillReg, MI);
2180 pli.removeRange(getLoadIndex(Index), getStoreIndex(Index)+1);
2181 for (const unsigned* AS = tri_->getSubRegisters(SpillReg); *AS; ++AS) {
2182 if (!hasInterval(*AS))
2183 continue;
2184 LiveInterval &spli = getInterval(*AS);
2185 if (spli.liveAt(Index))
2186 spli.removeRange(getLoadIndex(Index), getStoreIndex(Index)+1);
2187 }
2188 }
2189 }
2190}
Owen Andersonc4dc1322008-06-05 17:15:43 +00002191
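/// addLiveRangeToEndOfBlock - Create a live range for reg that extends from
/// the def slot of startInst to the end of startInst's basic block, marking
/// the value as having a PHI kill at the block boundary. A hypothetical
/// caller (not shown in this file), such as a PHI-elimination style pass,
/// might invoke, e.g., LIs->addLiveRangeToEndOfBlock(Reg, CopyMI) after
/// inserting a copy whose value must stay live to the end of its block.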
2192LiveRange LiveIntervals::addLiveRangeToEndOfBlock(unsigned reg,
2193 MachineInstr* startInst) {
2194 LiveInterval& Interval = getOrCreateInterval(reg);
2195 VNInfo* VN = Interval.getNextValue(
2196 getInstructionIndex(startInst) + InstrSlots::DEF,
2197 startInst, getVNInfoAllocator());
2198 VN->hasPHIKill = true;
2199 VN->kills.push_back(getMBBEndIdx(startInst->getParent()));
2200 LiveRange LR(getInstructionIndex(startInst) + InstrSlots::DEF,
2201 getMBBEndIdx(startInst->getParent()) + 1, VN);
2202 Interval.addRange(LR);
2203
2204 return LR;
2205}