//===-- LiveIntervalAnalysis.cpp - Live Interval Analysis -----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the LiveInterval analysis pass which is used
// by the Linear Scan Register allocator. This pass linearizes the
// basic blocks of the function in DFS order and uses the
// LiveVariables pass to conservatively compute live intervals for
// each virtual and physical register.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "liveintervals"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "VirtRegMap.h"
#include "llvm/Value.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/CodeGen/LiveVariables.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/PseudoSourceValue.h"
#include "llvm/Target/TargetRegisterInfo.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/STLExtras.h"
#include <algorithm>
#include <cmath>
using namespace llvm;

// Hidden options to help with debugging.
static cl::opt<bool> DisableReMat("disable-rematerialization",
                                  cl::init(false), cl::Hidden);

static cl::opt<bool> SplitAtBB("split-intervals-at-bb",
                               cl::init(true), cl::Hidden);
static cl::opt<int> SplitLimit("split-limit",
                               cl::init(-1), cl::Hidden);

static cl::opt<bool> EnableAggressiveRemat("aggressive-remat", cl::Hidden);

static cl::opt<bool> EnableFastSpilling("fast-spill",
                                        cl::init(false), cl::Hidden);

STATISTIC(numIntervals, "Number of original intervals");
STATISTIC(numIntervalsAfter, "Number of intervals after coalescing");
STATISTIC(numFolds,    "Number of loads/stores folded into instructions");
STATISTIC(numSplits,   "Number of intervals split");

char LiveIntervals::ID = 0;
static RegisterPass<LiveIntervals> X("liveintervals", "Live Interval Analysis");

void LiveIntervals::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<AliasAnalysis>();
  AU.addPreserved<AliasAnalysis>();
  AU.addPreserved<LiveVariables>();
  AU.addRequired<LiveVariables>();
  AU.addPreservedID(MachineLoopInfoID);
  AU.addPreservedID(MachineDominatorsID);
  AU.addPreservedID(PHIEliminationID);
  AU.addRequiredID(PHIEliminationID);
  AU.addRequiredID(TwoAddressInstructionPassID);
  MachineFunctionPass::getAnalysisUsage(AU);
}

void LiveIntervals::releaseMemory() {
  // Free the live intervals themselves.
  for (DenseMap<unsigned, LiveInterval*>::iterator I = r2iMap_.begin(),
       E = r2iMap_.end(); I != E; ++I)
    delete I->second;

  MBB2IdxMap.clear();
  Idx2MBBMap.clear();
  mi2iMap_.clear();
  i2miMap_.clear();
  r2iMap_.clear();
  // Release VNInfo memory regions after all VNInfo objects are dtor'd.
  VNInfoAllocator.Reset();
  while (!ClonedMIs.empty()) {
    MachineInstr *MI = ClonedMIs.back();
    ClonedMIs.pop_back();
    mf_->DeleteMachineInstr(MI);
  }
}

void LiveIntervals::computeNumbering() {
  Index2MiMap OldI2MI = i2miMap_;
  std::vector<IdxMBBPair> OldI2MBB = Idx2MBBMap;

  Idx2MBBMap.clear();
  MBB2IdxMap.clear();
  mi2iMap_.clear();
  i2miMap_.clear();

  FunctionSize = 0;

  // Number MachineInstrs and MachineBasicBlocks.
  // Initialize MBB indexes to a sentinel.
  MBB2IdxMap.resize(mf_->getNumBlockIDs(), std::make_pair(~0U, ~0U));

  unsigned MIIndex = 0;
  for (MachineFunction::iterator MBB = mf_->begin(), E = mf_->end();
       MBB != E; ++MBB) {
    unsigned StartIdx = MIIndex;

    // Insert an empty slot at the beginning of each block.
    MIIndex += InstrSlots::NUM;
    i2miMap_.push_back(0);

    for (MachineBasicBlock::iterator I = MBB->begin(), E = MBB->end();
         I != E; ++I) {
      bool inserted = mi2iMap_.insert(std::make_pair(I, MIIndex)).second;
      assert(inserted && "multiple MachineInstr -> index mappings");
      i2miMap_.push_back(I);
      MIIndex += InstrSlots::NUM;
      FunctionSize++;

      // Insert an empty slot after every instruction.
      MIIndex += InstrSlots::NUM;
      i2miMap_.push_back(0);
    }

    // Set the MBB2IdxMap entry for this MBB.
    MBB2IdxMap[MBB->getNumber()] = std::make_pair(StartIdx, MIIndex - 1);
    Idx2MBBMap.push_back(std::make_pair(StartIdx, MBB));
  }
  std::sort(Idx2MBBMap.begin(), Idx2MBBMap.end(), Idx2MBBCompare());

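  // If an old numbering existed, every live range and value number that was
  // recorded against the old indices must be rewritten in terms of the new
  // ones.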
  if (!OldI2MI.empty())
    for (iterator OI = begin(), OE = end(); OI != OE; ++OI) {
      for (LiveInterval::iterator LI = OI->second->begin(),
           LE = OI->second->end(); LI != LE; ++LI) {

        // Remap the start index of the live range to the corresponding new
        // number, or our best guess at what it _should_ correspond to if the
        // original instruction has been erased. This is either the following
        // instruction or its predecessor.
        unsigned index = LI->start / InstrSlots::NUM;
        unsigned offset = LI->start % InstrSlots::NUM;
        if (offset == InstrSlots::LOAD) {
          std::vector<IdxMBBPair>::const_iterator I =
            std::lower_bound(OldI2MBB.begin(), OldI2MBB.end(), LI->start);
          // Take the pair containing the index.
          std::vector<IdxMBBPair>::const_iterator J =
            (I == OldI2MBB.end() && OldI2MBB.size() > 0) ? (I-1) : I;

          LI->start = getMBBStartIdx(J->second);
        } else {
          LI->start = mi2iMap_[OldI2MI[index]] + offset;
        }

        // Remap the ending index in the same way that we remapped the start,
        // except for the final step where we always map to the immediately
        // following instruction.
        index = (LI->end - 1) / InstrSlots::NUM;
        offset = LI->end % InstrSlots::NUM;
        if (offset == InstrSlots::LOAD) {
          // VReg dies at end of block.
          std::vector<IdxMBBPair>::const_iterator I =
            std::lower_bound(OldI2MBB.begin(), OldI2MBB.end(), LI->end);
          --I;

          LI->end = getMBBEndIdx(I->second) + 1;
        } else {
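          // The old instruction at this index may have been erased; scan
          // forward to the next surviving instruction, if any.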
          unsigned idx = index;
          while (index < OldI2MI.size() && !OldI2MI[index]) ++index;

          if (index != OldI2MI.size())
            LI->end = mi2iMap_[OldI2MI[index]] + (idx == index ? offset : 0);
          else
            LI->end = InstrSlots::NUM * i2miMap_.size();
        }
      }

      for (LiveInterval::vni_iterator VNI = OI->second->vni_begin(),
           VNE = OI->second->vni_end(); VNI != VNE; ++VNI) {
        VNInfo* vni = *VNI;

        // Remap the VNInfo def index, which works the same as the
        // start indices above. VNs with special sentinel defs
        // don't need to be remapped.
        if (vni->def != ~0U && vni->def != ~1U) {
          unsigned index = vni->def / InstrSlots::NUM;
          unsigned offset = vni->def % InstrSlots::NUM;
          if (offset == InstrSlots::LOAD) {
            std::vector<IdxMBBPair>::const_iterator I =
              std::lower_bound(OldI2MBB.begin(), OldI2MBB.end(), vni->def);
            // Take the pair containing the index.
            std::vector<IdxMBBPair>::const_iterator J =
              (I == OldI2MBB.end() && OldI2MBB.size() > 0) ? (I-1) : I;

            vni->def = getMBBStartIdx(J->second);
          } else {
            vni->def = mi2iMap_[OldI2MI[index]] + offset;
          }
        }

        // Remap the VNInfo kill indices, which works the same as
        // the end indices above.
        for (size_t i = 0; i < vni->kills.size(); ++i) {
          // PHI kills don't need to be remapped.
          if (!vni->kills[i]) continue;

          unsigned index = (vni->kills[i]-1) / InstrSlots::NUM;
          unsigned offset = vni->kills[i] % InstrSlots::NUM;
          if (offset == InstrSlots::STORE) {
            std::vector<IdxMBBPair>::const_iterator I =
              std::lower_bound(OldI2MBB.begin(), OldI2MBB.end(), vni->kills[i]);
            --I;

            vni->kills[i] = getMBBEndIdx(I->second);
          } else {
            unsigned idx = index;
            while (index < OldI2MI.size() && !OldI2MI[index]) ++index;

            if (index != OldI2MI.size())
              vni->kills[i] = mi2iMap_[OldI2MI[index]] +
                              (idx == index ? offset : 0);
            else
              vni->kills[i] = InstrSlots::NUM * i2miMap_.size();
          }
        }
      }
    }
}

/// runOnMachineFunction - Compute live intervals for the whole function.
///
bool LiveIntervals::runOnMachineFunction(MachineFunction &fn) {
  mf_ = &fn;
  mri_ = &mf_->getRegInfo();
  tm_ = &fn.getTarget();
  tri_ = tm_->getRegisterInfo();
  tii_ = tm_->getInstrInfo();
  aa_ = &getAnalysis<AliasAnalysis>();
  lv_ = &getAnalysis<LiveVariables>();
  allocatableRegs_ = tri_->getAllocatableSet(fn);

  computeNumbering();
  computeIntervals();

  numIntervals += getNumIntervals();

  DOUT << "********** INTERVALS **********\n";
  for (iterator I = begin(), E = end(); I != E; ++I) {
    I->second->print(DOUT, tri_);
    DOUT << "\n";
  }

  numIntervalsAfter += getNumIntervals();
  DEBUG(dump());
  return true;
}

/// print - Implement the dump method.
void LiveIntervals::print(std::ostream &O, const Module* ) const {
  O << "********** INTERVALS **********\n";
  for (const_iterator I = begin(), E = end(); I != E; ++I) {
    I->second->print(O, tri_);
    O << "\n";
  }

  O << "********** MACHINEINSTRS **********\n";
  for (MachineFunction::iterator mbbi = mf_->begin(), mbbe = mf_->end();
       mbbi != mbbe; ++mbbi) {
    O << ((Value*)mbbi->getBasicBlock())->getName() << ":\n";
    for (MachineBasicBlock::iterator mii = mbbi->begin(),
           mie = mbbi->end(); mii != mie; ++mii) {
      O << getInstructionIndex(mii) << '\t' << *mii;
    }
  }
}

/// conflictsWithPhysRegDef - Returns true if the specified register
/// is defined during the duration of the specified interval.
bool LiveIntervals::conflictsWithPhysRegDef(const LiveInterval &li,
                                            VirtRegMap &vrm, unsigned reg) {
  for (LiveInterval::Ranges::const_iterator
         I = li.ranges.begin(), E = li.ranges.end(); I != E; ++I) {
    for (unsigned index = getBaseIndex(I->start),
           end = getBaseIndex(I->end-1) + InstrSlots::NUM; index != end;
         index += InstrSlots::NUM) {
      // Skip deleted instructions.
      while (index != end && !getInstructionFromIndex(index))
        index += InstrSlots::NUM;
      if (index == end) break;

      MachineInstr *MI = getInstructionFromIndex(index);
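      // A copy to or from li.reg itself is not considered a conflict.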
      unsigned SrcReg, DstReg;
      if (tii_->isMoveInstr(*MI, SrcReg, DstReg))
        if (SrcReg == li.reg || DstReg == li.reg)
          continue;
      for (unsigned i = 0; i != MI->getNumOperands(); ++i) {
        MachineOperand& mop = MI->getOperand(i);
        if (!mop.isRegister())
          continue;
        unsigned PhysReg = mop.getReg();
        if (PhysReg == 0 || PhysReg == li.reg)
          continue;
        if (TargetRegisterInfo::isVirtualRegister(PhysReg)) {
          if (!vrm.hasPhys(PhysReg))
            continue;
          PhysReg = vrm.getPhys(PhysReg);
        }
        if (PhysReg && tri_->regsOverlap(PhysReg, reg))
          return true;
      }
    }
  }

  return false;
}

void LiveIntervals::printRegName(unsigned reg) const {
  if (TargetRegisterInfo::isPhysicalRegister(reg))
    cerr << tri_->getName(reg);
  else
    cerr << "%reg" << reg;
}

void LiveIntervals::handleVirtualRegisterDef(MachineBasicBlock *mbb,
                                             MachineBasicBlock::iterator mi,
                                             unsigned MIIdx, MachineOperand& MO,
                                             unsigned MOIdx,
                                             LiveInterval &interval) {
  DOUT << "\t\tregister: "; DEBUG(printRegName(interval.reg));
  LiveVariables::VarInfo& vi = lv_->getVarInfo(interval.reg);

  if (mi->getOpcode() == TargetInstrInfo::IMPLICIT_DEF) {
    DOUT << "is an implicit_def\n";
    return;
  }

  // Virtual registers may be defined multiple times (due to phi
  // elimination and 2-addr elimination). Much of what we do only has to be
  // done once for the vreg. We use an empty interval to detect the first
  // time we see a vreg.
  if (interval.empty()) {
    // Get the Idx of the defining instruction.
    unsigned defIndex = getDefIndex(MIIdx);
    VNInfo *ValNo;
    MachineInstr *CopyMI = NULL;
    unsigned SrcReg, DstReg;
    if (mi->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG ||
        mi->getOpcode() == TargetInstrInfo::INSERT_SUBREG ||
        tii_->isMoveInstr(*mi, SrcReg, DstReg))
      CopyMI = mi;
    ValNo = interval.getNextValue(defIndex, CopyMI, VNInfoAllocator);

    assert(ValNo->id == 0 && "First value in interval is not 0?");

    // Loop over all of the blocks that the vreg is defined in. There are
    // two cases we have to handle here. The most common case is a vreg
    // whose lifetime is contained within a basic block. In this case there
    // will be a single kill, in MBB, which comes after the definition.
    if (vi.Kills.size() == 1 && vi.Kills[0]->getParent() == mbb) {
      // FIXME: what about dead vars?
      unsigned killIdx;
      if (vi.Kills[0] != mi)
        killIdx = getUseIndex(getInstructionIndex(vi.Kills[0]))+1;
      else
        killIdx = defIndex+1;

      // If the kill happens after the definition, we have an intra-block
      // live range.
      if (killIdx > defIndex) {
        assert(vi.AliveBlocks.none() &&
               "Shouldn't be alive across any blocks!");
        LiveRange LR(defIndex, killIdx, ValNo);
        interval.addRange(LR);
        DOUT << " +" << LR << "\n";
        interval.addKill(ValNo, killIdx);
        return;
      }
    }

    // The other case we handle is when a virtual register lives to the end
    // of the defining block, potentially live across some blocks, then is
    // live into some number of blocks, but gets killed. Start by adding a
    // range that goes from this definition to the end of the defining block.
    LiveRange NewLR(defIndex, getMBBEndIdx(mbb)+1, ValNo);
    DOUT << " +" << NewLR;
    interval.addRange(NewLR);

    // Iterate over all of the blocks that the variable is completely
    // live in, adding [instrIndex(begin), instrIndex(end)+4) to the
    // live interval.
    for (unsigned i = 0, e = vi.AliveBlocks.size(); i != e; ++i) {
      if (vi.AliveBlocks[i]) {
        LiveRange LR(getMBBStartIdx(i),
                     getMBBEndIdx(i)+1,  // MBB ends at -1.
                     ValNo);
        interval.addRange(LR);
        DOUT << " +" << LR;
      }
    }

    // Finally, this virtual register is live from the start of any killing
    // block to the 'use' slot of the killing instruction.
    for (unsigned i = 0, e = vi.Kills.size(); i != e; ++i) {
      MachineInstr *Kill = vi.Kills[i];
      unsigned killIdx = getUseIndex(getInstructionIndex(Kill))+1;
      LiveRange LR(getMBBStartIdx(Kill->getParent()),
                   killIdx, ValNo);
      interval.addRange(LR);
      interval.addKill(ValNo, killIdx);
      DOUT << " +" << LR;
    }

  } else {
    // If this is the second time we see a virtual register definition, it
    // must be due to phi elimination or two-addr elimination. If this is
    // the result of two-address elimination, then the vreg is one of the
    // def-and-use register operands.
    if (mi->isRegReDefinedByTwoAddr(interval.reg, MOIdx)) {
      // If this is a two-address definition, then we have already processed
      // the live range. The only problem is that we didn't realize there
      // are actually two values in the live interval. Because of this we
      // need to take the live range that defines this register and split it
      // into two values.
      assert(interval.containsOneValue());
      unsigned DefIndex = getDefIndex(interval.getValNumInfo(0)->def);
      unsigned RedefIndex = getDefIndex(MIIdx);

      const LiveRange *OldLR = interval.getLiveRangeContaining(RedefIndex-1);
      VNInfo *OldValNo = OldLR->valno;

      // Delete the initial value, which should be short and continuous,
      // because the 2-addr copy must be in the same MBB as the redef.
      interval.removeRange(DefIndex, RedefIndex);

      // Two-address vregs should always only be redefined once. This means
      // that at this point, there should be exactly one value number in it.
      assert(interval.containsOneValue() && "Unexpected 2-addr liveint!");

      // The new value number (#1) is defined by the instruction we claimed
      // defined value #0.
      VNInfo *ValNo = interval.getNextValue(OldValNo->def, OldValNo->copy,
                                            VNInfoAllocator);

      // Value#0 is now defined by the 2-addr instruction.
      OldValNo->def = RedefIndex;
      OldValNo->copy = 0;

      // Add the new live interval which replaces the range for the input copy.
      LiveRange LR(DefIndex, RedefIndex, ValNo);
      DOUT << " replace range with " << LR;
      interval.addRange(LR);
      interval.addKill(ValNo, RedefIndex);

      // If this redefinition is dead, we need to add a dummy unit live
      // range covering the def slot.
      if (MO.isDead())
        interval.addRange(LiveRange(RedefIndex, RedefIndex+1, OldValNo));

      DOUT << " RESULT: ";
      interval.print(DOUT, tri_);

    } else {
      // Otherwise, this must be because of phi elimination. If this is the
      // first redefinition of the vreg that we have seen, go back and change
      // the live range in the PHI block to be a different value number.
      if (interval.containsOneValue()) {
        assert(vi.Kills.size() == 1 &&
               "PHI elimination vreg should have one kill, the PHI itself!");

        // Remove the old range that we now know has an incorrect number.
        VNInfo *VNI = interval.getValNumInfo(0);
        MachineInstr *Killer = vi.Kills[0];
        unsigned Start = getMBBStartIdx(Killer->getParent());
        unsigned End = getUseIndex(getInstructionIndex(Killer))+1;
        DOUT << " Removing [" << Start << "," << End << "] from: ";
        interval.print(DOUT, tri_); DOUT << "\n";
        interval.removeRange(Start, End);
        VNI->hasPHIKill = true;
        DOUT << " RESULT: "; interval.print(DOUT, tri_);

        // Replace the interval with one of a NEW value number. Note that this
        // value number isn't actually defined by an instruction, weird huh? :)
        LiveRange LR(Start, End, interval.getNextValue(~0, 0, VNInfoAllocator));
        DOUT << " replace range with " << LR;
        interval.addRange(LR);
        interval.addKill(LR.valno, End);
        DOUT << " RESULT: "; interval.print(DOUT, tri_);
      }

      // In the case of PHI elimination, each variable definition is only
      // live until the end of the block. We've already taken care of the
      // rest of the live range.
      unsigned defIndex = getDefIndex(MIIdx);

      VNInfo *ValNo;
      MachineInstr *CopyMI = NULL;
      unsigned SrcReg, DstReg;
      if (mi->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG ||
          mi->getOpcode() == TargetInstrInfo::INSERT_SUBREG ||
          tii_->isMoveInstr(*mi, SrcReg, DstReg))
        CopyMI = mi;
      ValNo = interval.getNextValue(defIndex, CopyMI, VNInfoAllocator);

      unsigned killIndex = getMBBEndIdx(mbb) + 1;
      LiveRange LR(defIndex, killIndex, ValNo);
      interval.addRange(LR);
      interval.addKill(ValNo, killIndex);
      ValNo->hasPHIKill = true;
      DOUT << " +" << LR;
    }
  }

  DOUT << '\n';
}

void LiveIntervals::handlePhysicalRegisterDef(MachineBasicBlock *MBB,
                                              MachineBasicBlock::iterator mi,
                                              unsigned MIIdx,
                                              MachineOperand& MO,
                                              LiveInterval &interval,
                                              MachineInstr *CopyMI) {
  // A physical register cannot be live across basic blocks, so its
  // lifetime must end somewhere in its defining basic block.
  DOUT << "\t\tregister: "; DEBUG(printRegName(interval.reg));

  unsigned baseIndex = MIIdx;
  unsigned start = getDefIndex(baseIndex);
  unsigned end = start;

  // If it is not used after definition, it is considered dead at
  // the instruction defining it. Hence its interval is:
  // [defSlot(def), defSlot(def)+1)
  if (MO.isDead()) {
    DOUT << " dead";
    end = getDefIndex(start) + 1;
    goto exit;
  }

  // If it is not dead on definition, it must be killed by a
  // subsequent instruction. Hence its interval is:
  // [defSlot(def), useSlot(kill)+1)
  baseIndex += InstrSlots::NUM;
  while (++mi != MBB->end()) {
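    // Skip over indices that no longer map to an instruction (empty slots
    // or deleted instructions).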
    while (baseIndex / InstrSlots::NUM < i2miMap_.size() &&
           getInstructionFromIndex(baseIndex) == 0)
      baseIndex += InstrSlots::NUM;
    if (mi->killsRegister(interval.reg, tri_)) {
      DOUT << " killed";
      end = getUseIndex(baseIndex) + 1;
      goto exit;
    } else if (mi->modifiesRegister(interval.reg, tri_)) {
      // Another instruction redefines the register before it is ever read.
      // Then the register is essentially dead at the instruction that defines
      // it. Hence its interval is:
      // [defSlot(def), defSlot(def)+1)
      DOUT << " dead";
      end = getDefIndex(start) + 1;
      goto exit;
    }

    baseIndex += InstrSlots::NUM;
  }

  // The only case we should have a dead physreg here without a killing
  // instruction, or one where we know it's dead, is if it is live-in to the
  // function and never used.
  assert(!CopyMI && "physreg was not killed in defining block!");
  end = getDefIndex(start) + 1;  // It's dead.

exit:
  assert(start < end && "did not find end of interval?");

  // Already exists? Extend old live interval.
  LiveInterval::iterator OldLR = interval.FindLiveRangeContaining(start);
  VNInfo *ValNo = (OldLR != interval.end())
    ? OldLR->valno : interval.getNextValue(start, CopyMI, VNInfoAllocator);
  LiveRange LR(start, end, ValNo);
  interval.addRange(LR);
  interval.addKill(LR.valno, end);
  DOUT << " +" << LR << '\n';
}

void LiveIntervals::handleRegisterDef(MachineBasicBlock *MBB,
                                      MachineBasicBlock::iterator MI,
                                      unsigned MIIdx,
                                      MachineOperand& MO,
                                      unsigned MOIdx) {
  if (TargetRegisterInfo::isVirtualRegister(MO.getReg()))
    handleVirtualRegisterDef(MBB, MI, MIIdx, MO, MOIdx,
                             getOrCreateInterval(MO.getReg()));
  else if (allocatableRegs_[MO.getReg()]) {
    MachineInstr *CopyMI = NULL;
    unsigned SrcReg, DstReg;
    if (MI->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG ||
        MI->getOpcode() == TargetInstrInfo::INSERT_SUBREG ||
        tii_->isMoveInstr(*MI, SrcReg, DstReg))
      CopyMI = MI;
    handlePhysicalRegisterDef(MBB, MI, MIIdx, MO,
                              getOrCreateInterval(MO.getReg()), CopyMI);
    // Def of a register also defines its sub-registers.
    for (const unsigned* AS = tri_->getSubRegisters(MO.getReg()); *AS; ++AS)
      // If MI also modifies the sub-register explicitly, avoid processing it
      // more than once. Do not pass in TRI here so it checks for exact match.
      if (!MI->modifiesRegister(*AS))
        handlePhysicalRegisterDef(MBB, MI, MIIdx, MO,
                                  getOrCreateInterval(*AS), 0);
  }
}

void LiveIntervals::handleLiveInRegister(MachineBasicBlock *MBB,
                                         unsigned MIIdx,
                                         LiveInterval &interval, bool isAlias) {
  DOUT << "\t\tlivein register: "; DEBUG(printRegName(interval.reg));

  // Look for kills; if it reaches a def before it's killed, then it shouldn't
  // be considered a live-in.
  MachineBasicBlock::iterator mi = MBB->begin();
  unsigned baseIndex = MIIdx;
  unsigned start = baseIndex;
  while (baseIndex / InstrSlots::NUM < i2miMap_.size() &&
         getInstructionFromIndex(baseIndex) == 0)
    baseIndex += InstrSlots::NUM;
  unsigned end = baseIndex;

  while (mi != MBB->end()) {
    if (mi->killsRegister(interval.reg, tri_)) {
      DOUT << " killed";
      end = getUseIndex(baseIndex) + 1;
      goto exit;
    } else if (mi->modifiesRegister(interval.reg, tri_)) {
      // Another instruction redefines the register before it is ever read.
      // Then the register is essentially dead at the instruction that defines
      // it. Hence its interval is:
      // [defSlot(def), defSlot(def)+1)
      DOUT << " dead";
      end = getDefIndex(start) + 1;
      goto exit;
    }

    baseIndex += InstrSlots::NUM;
    while (baseIndex / InstrSlots::NUM < i2miMap_.size() &&
           getInstructionFromIndex(baseIndex) == 0)
      baseIndex += InstrSlots::NUM;
    ++mi;
  }

exit:
  // Live-in register might not be used at all.
  if (end == MIIdx) {
    if (isAlias) {
      DOUT << " dead";
      end = getDefIndex(MIIdx) + 1;
    } else {
      DOUT << " live through";
      end = baseIndex;
    }
  }

  LiveRange LR(start, end, interval.getNextValue(~0U, 0, VNInfoAllocator));
  interval.addRange(LR);
  interval.addKill(LR.valno, end);
  DOUT << " +" << LR << '\n';
}

/// computeIntervals - Computes the live intervals for virtual
/// registers. For some ordering of the machine instructions [1,N], a
/// live interval is an interval [i, j), where 1 <= i <= j < N, for
/// which a variable is live.
void LiveIntervals::computeIntervals() {
  AsmsThatEarlyClobber.clear();
  AsmsWithEarlyClobberConflict.clear();

  DOUT << "********** COMPUTING LIVE INTERVALS **********\n"
       << "********** Function: "
       << ((Value*)mf_->getFunction())->getName() << '\n';
  // Track the index of the current machine instr.
  unsigned MIIndex = 0;

  for (MachineFunction::iterator MBBI = mf_->begin(), E = mf_->end();
       MBBI != E; ++MBBI) {
    MachineBasicBlock *MBB = MBBI;
    DOUT << ((Value*)MBB->getBasicBlock())->getName() << ":\n";

    MachineBasicBlock::iterator MI = MBB->begin(), miEnd = MBB->end();

    // Create intervals for live-ins to this BB first.
    for (MachineBasicBlock::const_livein_iterator LI = MBB->livein_begin(),
         LE = MBB->livein_end(); LI != LE; ++LI) {
      handleLiveInRegister(MBB, MIIndex, getOrCreateInterval(*LI));
      // Multiple live-ins can alias the same register.
      for (const unsigned* AS = tri_->getSubRegisters(*LI); *AS; ++AS)
        if (!hasInterval(*AS))
          handleLiveInRegister(MBB, MIIndex, getOrCreateInterval(*AS),
                               true);
    }

    // Skip over empty initial indices.
    while (MIIndex / InstrSlots::NUM < i2miMap_.size() &&
           getInstructionFromIndex(MIIndex) == 0)
      MIIndex += InstrSlots::NUM;

    for (; MI != miEnd; ++MI) {
      DOUT << MIIndex << "\t" << *MI;

      // Handle defs.
      for (int i = MI->getNumOperands() - 1; i >= 0; --i) {
        MachineOperand &MO = MI->getOperand(i);
        // Handle register defs - build intervals.
        if (MO.isRegister() && MO.getReg() && MO.isDef()) {
          handleRegisterDef(MBB, MI, MIIndex, MO, i);
          if (MO.isEarlyClobber()) {
            AsmsThatEarlyClobber.insert(std::make_pair(MO.getReg(), MI));
          }
        }
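        // Record uses that overlap an earlyclobber operand;
        // noEarlyclobberConflict() consults this map so such uses are not
        // assigned the same register as the earlyclobber defs.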
        if (MO.isRegister() && !MO.isDef() &&
            MO.getReg() && TargetRegisterInfo::isVirtualRegister(MO.getReg()) &&
            MO.overlapsEarlyClobber()) {
          AsmsWithEarlyClobberConflict.insert(std::make_pair(MO.getReg(), MI));
        }
      }

      MIIndex += InstrSlots::NUM;

      // Skip over empty indices.
      while (MIIndex / InstrSlots::NUM < i2miMap_.size() &&
             getInstructionFromIndex(MIIndex) == 0)
        MIIndex += InstrSlots::NUM;
    }
  }
}

bool LiveIntervals::findLiveInMBBs(const LiveRange &LR,
                              SmallVectorImpl<MachineBasicBlock*> &MBBs) const {
  std::vector<IdxMBBPair>::const_iterator I =
    std::lower_bound(Idx2MBBMap.begin(), Idx2MBBMap.end(), LR.start);

  bool ResVal = false;
  while (I != Idx2MBBMap.end()) {
    if (LR.end <= I->first)
      break;
    MBBs.push_back(I->second);
    ResVal = true;
    ++I;
  }
  return ResVal;
}

/// noEarlyclobberConflict - Return true if virtual reg VReg has no conflict
/// with hard reg HReg because of earlyclobbers.
///
/// Earlyclobber operands may not be assigned the same register as
/// each other, or as earlyclobber-conflict operands (i.e. those that
/// are non-earlyclobbered inputs to an asm that also has earlyclobbers).
///
/// Thus there are two cases to check for:
/// 1. VReg is an earlyclobber-conflict register and HReg is an earlyclobber
///    register in some asm that also has VReg as an input.
/// 2. VReg is an earlyclobber register and HReg is an earlyclobber-conflict
///    input elsewhere in some asm.
/// In both cases HReg can be assigned by the user, or assigned early in
/// register allocation.
///
/// Dropping the distinction between earlyclobber and earlyclobber-conflict,
/// keeping only one multimap, looks promising, but two earlyclobber-conflict
/// operands may be assigned the same register if they happen to contain the
/// same value, and that implementation would prevent this.
///
bool LiveIntervals::noEarlyclobberConflict(unsigned VReg, VirtRegMap &vrm,
                                           unsigned HReg) {
  typedef std::multimap<unsigned, MachineInstr*>::iterator It;

  // Short circuit the most common case.
  if (AsmsWithEarlyClobberConflict.size() != 0) {
    std::pair<It, It> x = AsmsWithEarlyClobberConflict.equal_range(VReg);
    for (It I = x.first; I != x.second; ++I) {
      MachineInstr* MI = I->second;
      for (int i = MI->getNumOperands() - 1; i >= 0; --i) {
        MachineOperand &MO = MI->getOperand(i);
        if (MO.isRegister() && MO.isEarlyClobber()) {
          unsigned PhysReg = MO.getReg();
          if (PhysReg && TargetRegisterInfo::isVirtualRegister(PhysReg)) {
            if (!vrm.hasPhys(PhysReg))
              continue;
            PhysReg = vrm.getPhys(PhysReg);
          }
          if (PhysReg == HReg)
            return false;
        }
      }
    }
  }
  // Short circuit the most common case.
  if (AsmsThatEarlyClobber.size() != 0) {
    std::pair<It, It> x = AsmsThatEarlyClobber.equal_range(VReg);
    for (It I = x.first; I != x.second; ++I) {
      MachineInstr* MI = I->second;
      for (int i = MI->getNumOperands() - 1; i >= 0; --i) {
        MachineOperand &MO = MI->getOperand(i);
        if (MO.isRegister() && MO.overlapsEarlyClobber()) {
          unsigned PhysReg = MO.getReg();
          if (PhysReg && TargetRegisterInfo::isVirtualRegister(PhysReg)) {
            if (!vrm.hasPhys(PhysReg))
              continue;
            PhysReg = vrm.getPhys(PhysReg);
          }
          if (PhysReg == HReg)
            return false;
        }
      }
    }
  }
  return true;
}

LiveInterval* LiveIntervals::createInterval(unsigned reg) {
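  // Physical register intervals get an effectively infinite spill weight;
  // virtual register intervals start out with a weight of zero.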
  float Weight = TargetRegisterInfo::isPhysicalRegister(reg) ?
                   HUGE_VALF : 0.0F;
  return new LiveInterval(reg, Weight);
}

/// getVNInfoSourceReg - Helper function that parses the specified VNInfo
/// copy field and returns the source register that defines it.
unsigned LiveIntervals::getVNInfoSourceReg(const VNInfo *VNI) const {
  if (!VNI->copy)
    return 0;

  if (VNI->copy->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG)
    return VNI->copy->getOperand(1).getReg();
  if (VNI->copy->getOpcode() == TargetInstrInfo::INSERT_SUBREG)
    return VNI->copy->getOperand(2).getReg();
  unsigned SrcReg, DstReg;
  if (tii_->isMoveInstr(*VNI->copy, SrcReg, DstReg))
    return SrcReg;
  assert(0 && "Unrecognized copy instruction!");
  return 0;
}

//===----------------------------------------------------------------------===//
// Register allocator hooks.
//

/// getReMatImplicitUse - If the remat definition MI has one (for now, we only
/// allow one) virtual register operand, then its uses are implicitly using
/// the register. Returns the virtual register.
unsigned LiveIntervals::getReMatImplicitUse(const LiveInterval &li,
                                            MachineInstr *MI) const {
  unsigned RegOp = 0;
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isRegister() || !MO.isUse())
      continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0 || Reg == li.reg)
      continue;
    // FIXME: For now, only remat MI with at most one register operand.
    assert(!RegOp &&
           "Can't rematerialize instruction with multiple register operand!");
    RegOp = MO.getReg();
#ifndef NDEBUG
    break;
#endif
  }
  return RegOp;
}

/// isValNoAvailableAt - Return true if the val# of the specified interval
/// which reaches the given instruction also reaches the specified use index.
bool LiveIntervals::isValNoAvailableAt(const LiveInterval &li, MachineInstr *MI,
                                       unsigned UseIdx) const {
  unsigned Index = getInstructionIndex(MI);
  VNInfo *ValNo = li.FindLiveRangeContaining(Index)->valno;
  LiveInterval::const_iterator UI = li.FindLiveRangeContaining(UseIdx);
  return UI != li.end() && UI->valno == ValNo;
}

/// isReMaterializable - Returns true if the definition MI of the specified
/// val# of the specified interval is re-materializable.
bool LiveIntervals::isReMaterializable(const LiveInterval &li,
                                       const VNInfo *ValNo, MachineInstr *MI,
                                       bool &isLoad) {
  if (DisableReMat)
    return false;

  if (MI->getOpcode() == TargetInstrInfo::IMPLICIT_DEF)
    return true;

  int FrameIdx = 0;
  if (tii_->isLoadFromStackSlot(MI, FrameIdx) &&
      mf_->getFrameInfo()->isImmutableObjectIndex(FrameIdx))
    // FIXME: Let target-specific isReallyTriviallyReMaterializable determine
    // this, but remember this is not safe to fold into a two-address
    // instruction.
    // This is a load from a fixed stack slot. It can be rematerialized.
    return true;

  // If the target-specific rules don't identify an instruction as
  // being trivially rematerializable, use some target-independent
  // rules.
  if (!MI->getDesc().isRematerializable() ||
      !tii_->isTriviallyReMaterializable(MI)) {
    if (!EnableAggressiveRemat)
      return false;

    // If the instruction accesses memory but the memoperands have been lost,
    // we can't analyze it.
    const TargetInstrDesc &TID = MI->getDesc();
    if ((TID.mayLoad() || TID.mayStore()) && MI->memoperands_empty())
      return false;

    // Avoid instructions obviously unsafe for remat.
    if (TID.hasUnmodeledSideEffects() || TID.isNotDuplicable())
      return false;

    // If the instruction accesses memory and the memory could be non-constant,
    // assume the instruction is not rematerializable.
    for (std::list<MachineMemOperand>::const_iterator
           I = MI->memoperands_begin(), E = MI->memoperands_end();
         I != E; ++I) {
      const MachineMemOperand &MMO = *I;
      if (MMO.isVolatile() || MMO.isStore())
        return false;
      const Value *V = MMO.getValue();
      if (!V)
        return false;
      if (const PseudoSourceValue *PSV = dyn_cast<PseudoSourceValue>(V)) {
        if (!PSV->isConstant(mf_->getFrameInfo()))
          return false;
      } else if (!aa_->pointsToConstantMemory(V))
        return false;
    }

    // If any of the registers accessed are non-constant, conservatively assume
    // the instruction is not rematerializable.
    unsigned ImpUse = 0;
    for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
      const MachineOperand &MO = MI->getOperand(i);
      if (MO.isRegister()) {
        unsigned Reg = MO.getReg();
        if (Reg == 0)
          continue;
        if (TargetRegisterInfo::isPhysicalRegister(Reg))
          return false;

        // Only allow one def, and that in the first operand.
        if (MO.isDef() != (i == 0))
          return false;

        // Only allow constant-valued registers.
        bool IsLiveIn = mri_->isLiveIn(Reg);
        MachineRegisterInfo::def_iterator I = mri_->def_begin(Reg),
                                          E = mri_->def_end();

        // For the def, it should be the only def.
        if (MO.isDef() && (next(I) != E || IsLiveIn))
          return false;

        if (MO.isUse()) {
          // Only allow one other register use, as that's all the
          // remat mechanisms support currently.
          if (Reg != li.reg) {
            if (ImpUse == 0)
              ImpUse = Reg;
            else if (Reg != ImpUse)
              return false;
          }
          // For uses, there should be only one associated def.
          if (I != E && (next(I) != E || IsLiveIn))
            return false;
        }
      }
    }
  }

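  // If the def reads another virtual register, the value of that register as
  // seen at MI must also be available at every use of this val#; otherwise
  // rematerializing at the use would read a different value.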
  unsigned ImpUse = getReMatImplicitUse(li, MI);
  if (ImpUse) {
    const LiveInterval &ImpLi = getInterval(ImpUse);
    for (MachineRegisterInfo::use_iterator ri = mri_->use_begin(li.reg),
         re = mri_->use_end(); ri != re; ++ri) {
      MachineInstr *UseMI = &*ri;
      unsigned UseIdx = getInstructionIndex(UseMI);
      if (li.FindLiveRangeContaining(UseIdx)->valno != ValNo)
        continue;
      if (!isValNoAvailableAt(ImpLi, MI, UseIdx))
        return false;
    }
  }
  return true;
}

/// isReMaterializable - Returns true if the definition MI of every
/// val# of the specified interval is re-materializable.
bool LiveIntervals::isReMaterializable(const LiveInterval &li, bool &isLoad) {
  isLoad = false;
  for (LiveInterval::const_vni_iterator i = li.vni_begin(), e = li.vni_end();
       i != e; ++i) {
    const VNInfo *VNI = *i;
    unsigned DefIdx = VNI->def;
    if (DefIdx == ~1U)
      continue; // Dead val#.
    // Is the def for the val# rematerializable?
    if (DefIdx == ~0u)
      return false;
    MachineInstr *ReMatDefMI = getInstructionFromIndex(DefIdx);
    bool DefIsLoad = false;
    if (!ReMatDefMI ||
        !isReMaterializable(li, VNI, ReMatDefMI, DefIsLoad))
      return false;
    isLoad |= DefIsLoad;
  }
  return true;
}
1018
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001019/// FilterFoldedOps - Filter out two-address use operands. Return
1020/// true if it finds any issue with the operands that ought to prevent
1021/// folding.
1022static bool FilterFoldedOps(MachineInstr *MI,
1023 SmallVector<unsigned, 2> &Ops,
1024 unsigned &MRInfo,
1025 SmallVector<unsigned, 2> &FoldOps) {
Chris Lattner749c6f62008-01-07 07:27:27 +00001026 const TargetInstrDesc &TID = MI->getDesc();
Evan Cheng6e141fd2007-12-12 23:12:09 +00001027
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001028 MRInfo = 0;
Evan Chengaee4af62007-12-02 08:30:39 +00001029 for (unsigned i = 0, e = Ops.size(); i != e; ++i) {
1030 unsigned OpIdx = Ops[i];
Evan Chengd70dbb52008-02-22 09:24:50 +00001031 MachineOperand &MO = MI->getOperand(OpIdx);
Evan Chengaee4af62007-12-02 08:30:39 +00001032 // FIXME: fold subreg use.
Evan Chengd70dbb52008-02-22 09:24:50 +00001033 if (MO.getSubReg())
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001034 return true;
Evan Chengd70dbb52008-02-22 09:24:50 +00001035 if (MO.isDef())
Evan Chengaee4af62007-12-02 08:30:39 +00001036 MRInfo |= (unsigned)VirtRegMap::isMod;
1037 else {
1038 // Filter out two-address use operand(s).
Evan Chengd70dbb52008-02-22 09:24:50 +00001039 if (!MO.isImplicit() &&
1040 TID.getOperandConstraint(OpIdx, TOI::TIED_TO) != -1) {
Evan Chengaee4af62007-12-02 08:30:39 +00001041 MRInfo = VirtRegMap::isModRef;
1042 continue;
1043 }
1044 MRInfo |= (unsigned)VirtRegMap::isRef;
1045 }
1046 FoldOps.push_back(OpIdx);
Evan Chenge62f97c2007-12-01 02:07:52 +00001047 }
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001048 return false;
1049}
1050
1051
1052/// tryFoldMemoryOperand - Attempts to fold either a spill / restore from
1053/// slot / to reg or any rematerialized load into the ith operand of the
1054/// specified MI. If it is successful, MI is updated with the newly created
1055/// MI and the function returns true.
1056bool LiveIntervals::tryFoldMemoryOperand(MachineInstr* &MI,
1057 VirtRegMap &vrm, MachineInstr *DefMI,
1058 unsigned InstrIdx,
1059 SmallVector<unsigned, 2> &Ops,
1060 bool isSS, int Slot, unsigned Reg) {
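  // When isSS is true the fold source is the stack slot Slot (a spill or
  // restore); otherwise DefMI is a rematerialized definition whose load is
  // being folded. Ops holds the operand indices of MI that refer to Reg.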
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001061 // If it is an implicit def instruction, just delete it.
Evan Cheng20ccded2008-03-15 00:19:36 +00001062 if (MI->getOpcode() == TargetInstrInfo::IMPLICIT_DEF) {
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001063 RemoveMachineInstrFromMaps(MI);
1064 vrm.RemoveMachineInstrFromMaps(MI);
1065 MI->eraseFromParent();
1066 ++numFolds;
1067 return true;
1068 }
1069
1070 // Filter the list of operand indexes that are to be folded. Abort if
1071 // any operand will prevent folding.
1072 unsigned MRInfo = 0;
1073 SmallVector<unsigned, 2> FoldOps;
1074 if (FilterFoldedOps(MI, Ops, MRInfo, FoldOps))
1075 return false;
Evan Chenge62f97c2007-12-01 02:07:52 +00001076
Evan Cheng427f4c12008-03-31 23:19:51 +00001077 // The only time it's safe to fold into a two-address instruction is when
1078 // it's folding a reload or spill from / into a spill stack slot.
1079 if (DefMI && (MRInfo & VirtRegMap::isMod))
Evan Cheng249ded32008-02-23 03:38:34 +00001080 return false;
1081
Evan Chengf2f8c2a2008-02-08 22:05:27 +00001082 MachineInstr *fmi = isSS ? tii_->foldMemoryOperand(*mf_, MI, FoldOps, Slot)
1083 : tii_->foldMemoryOperand(*mf_, MI, FoldOps, DefMI);
Evan Chengf2fbca62007-11-12 06:35:08 +00001084 if (fmi) {
Evan Chengd3653122008-02-27 03:04:06 +00001085 // Remember this instruction uses the spill slot.
1086 if (isSS) vrm.addSpillSlotUse(Slot, fmi);
1087
Evan Chengf2fbca62007-11-12 06:35:08 +00001088 // Attempt to fold the memory reference into the instruction. If
1089 // we can do this, we don't need to insert spill code.
Evan Chengf2fbca62007-11-12 06:35:08 +00001090 MachineBasicBlock &MBB = *MI->getParent();
Evan Cheng84802932008-01-10 08:24:38 +00001091 if (isSS && !mf_->getFrameInfo()->isImmutableObjectIndex(Slot))
Evan Chengaee4af62007-12-02 08:30:39 +00001092 vrm.virtFolded(Reg, MI, fmi, (VirtRegMap::ModRef)MRInfo);
Evan Cheng81a03822007-11-17 00:40:40 +00001093 vrm.transferSpillPts(MI, fmi);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001094 vrm.transferRestorePts(MI, fmi);
Evan Chengc1f53c72008-03-11 21:34:46 +00001095 vrm.transferEmergencySpills(MI, fmi);
Evan Chengf2fbca62007-11-12 06:35:08 +00001096 mi2iMap_.erase(MI);
Evan Chengcddbb832007-11-30 21:23:43 +00001097 i2miMap_[InstrIdx /InstrSlots::NUM] = fmi;
1098 mi2iMap_[fmi] = InstrIdx;
Evan Chengf2fbca62007-11-12 06:35:08 +00001099 MI = MBB.insert(MBB.erase(MI), fmi);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001100 ++numFolds;
Evan Chengf2fbca62007-11-12 06:35:08 +00001101 return true;
1102 }
1103 return false;
1104}
1105
Evan Cheng018f9b02007-12-05 03:22:34 +00001106/// canFoldMemoryOperand - Returns true if the specified load / store
1107/// folding is possible.
1108bool LiveIntervals::canFoldMemoryOperand(MachineInstr *MI,
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001109 SmallVector<unsigned, 2> &Ops,
Evan Cheng3c75ba82008-04-01 21:37:32 +00001110 bool ReMat) const {
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001111 // Filter the list of operand indexes that are to be folded. Abort if
1112 // any operand will prevent folding.
1113 unsigned MRInfo = 0;
Evan Cheng018f9b02007-12-05 03:22:34 +00001114 SmallVector<unsigned, 2> FoldOps;
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001115 if (FilterFoldedOps(MI, Ops, MRInfo, FoldOps))
1116 return false;
Evan Cheng018f9b02007-12-05 03:22:34 +00001117
Evan Cheng3c75ba82008-04-01 21:37:32 +00001118 // It's only legal to remat for a use, not a def.
1119 if (ReMat && (MRInfo & VirtRegMap::isMod))
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001120 return false;
Evan Cheng018f9b02007-12-05 03:22:34 +00001121
Evan Chengd70dbb52008-02-22 09:24:50 +00001122 return tii_->canFoldMemoryOperand(MI, FoldOps);
1123}
1124
Evan Cheng81a03822007-11-17 00:40:40 +00001125bool LiveIntervals::intervalIsInOneMBB(const LiveInterval &li) const {
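  // For each live range, binary-search Idx2MBBMap (sorted by MBB start index)
  // for the first block starting at or after the range's start. If the
  // range's end reaches past that block's start it crosses a block boundary;
  // if different ranges resolve to different blocks, the interval also spans
  // more than one MBB.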
1126 SmallPtrSet<MachineBasicBlock*, 4> MBBs;
1127 for (LiveInterval::Ranges::const_iterator
1128 I = li.ranges.begin(), E = li.ranges.end(); I != E; ++I) {
1129 std::vector<IdxMBBPair>::const_iterator II =
1130 std::lower_bound(Idx2MBBMap.begin(), Idx2MBBMap.end(), I->start);
1131 if (II == Idx2MBBMap.end())
1132 continue;
1133 if (I->end > II->first) // crossing a MBB.
1134 return false;
1135 MBBs.insert(II->second);
1136 if (MBBs.size() > 1)
1137 return false;
1138 }
1139 return true;
1140}
1141
Evan Chengd70dbb52008-02-22 09:24:50 +00001142/// rewriteImplicitOps - Rewrite implicit use operands of MI (i.e. uses of the
1143/// interval's register on to-be re-materialized operands of MI) with the new register.
1144void LiveIntervals::rewriteImplicitOps(const LiveInterval &li,
1145 MachineInstr *MI, unsigned NewVReg,
1146 VirtRegMap &vrm) {
1147 // There is an implicit use. That means one of the other operands is
1148 // being remat'ed and the remat'ed instruction has li.reg as a
1149 // use operand. Make sure we rewrite that as well.
1150 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
1151 MachineOperand &MO = MI->getOperand(i);
1152 if (!MO.isRegister())
1153 continue;
1154 unsigned Reg = MO.getReg();
1155 if (Reg == 0 || TargetRegisterInfo::isPhysicalRegister(Reg))
1156 continue;
1157 if (!vrm.isReMaterialized(Reg))
1158 continue;
1159 MachineInstr *ReMatMI = vrm.getReMaterializedMI(Reg);
Evan Cheng6130f662008-03-05 00:59:57 +00001160 MachineOperand *UseMO = ReMatMI->findRegisterUseOperand(li.reg);
1161 if (UseMO)
1162 UseMO->setReg(NewVReg);
Evan Chengd70dbb52008-02-22 09:24:50 +00001163 }
1164}
1165
Evan Chengf2fbca62007-11-12 06:35:08 +00001166/// rewriteInstructionForSpills, rewriteInstructionsForSpills - Helper functions
1167/// for addIntervalsForSpills to rewrite uses / defs for the given live range.
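/// When splitting, rewriteInstructionForSpills returns true if the eventual
/// spill / restore for the rewritten operands is expected to be foldable into
/// MI; rewriteInstructionsForSpills uses this to give new intervals whose
/// every def / use is foldable a lower spill weight.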
Evan Cheng018f9b02007-12-05 03:22:34 +00001168bool LiveIntervals::
Evan Chengd70dbb52008-02-22 09:24:50 +00001169rewriteInstructionForSpills(const LiveInterval &li, const VNInfo *VNI,
1170 bool TrySplit, unsigned index, unsigned end, MachineInstr *MI,
Evan Cheng81a03822007-11-17 00:40:40 +00001171 MachineInstr *ReMatOrigDefMI, MachineInstr *ReMatDefMI,
Evan Chengf2fbca62007-11-12 06:35:08 +00001172 unsigned Slot, int LdSlot,
1173 bool isLoad, bool isLoadSS, bool DefIsReMat, bool CanDelete,
Evan Chengd70dbb52008-02-22 09:24:50 +00001174 VirtRegMap &vrm,
Evan Chengf2fbca62007-11-12 06:35:08 +00001175 const TargetRegisterClass* rc,
1176 SmallVector<int, 4> &ReMatIds,
Evan Cheng22f07ff2007-12-11 02:09:15 +00001177 const MachineLoopInfo *loopInfo,
Evan Cheng313d4b82008-02-23 00:33:04 +00001178 unsigned &NewVReg, unsigned ImpUse, bool &HasDef, bool &HasUse,
Owen Anderson28998312008-08-13 22:28:50 +00001179 DenseMap<unsigned,unsigned> &MBBVRegsMap,
Evan Cheng9c3c2212008-06-06 07:54:39 +00001180 std::vector<LiveInterval*> &NewLIs, float &SSWeight) {
1181 MachineBasicBlock *MBB = MI->getParent();
1182 unsigned loopDepth = loopInfo->getLoopDepth(MBB);
Evan Cheng018f9b02007-12-05 03:22:34 +00001183 bool CanFold = false;
Evan Chengf2fbca62007-11-12 06:35:08 +00001184 RestartInstruction:
1185 for (unsigned i = 0; i != MI->getNumOperands(); ++i) {
1186 MachineOperand& mop = MI->getOperand(i);
1187 if (!mop.isRegister())
1188 continue;
1189 unsigned Reg = mop.getReg();
1190 unsigned RegI = Reg;
Dan Gohman6f0d0242008-02-10 18:45:23 +00001191 if (Reg == 0 || TargetRegisterInfo::isPhysicalRegister(Reg))
Evan Chengf2fbca62007-11-12 06:35:08 +00001192 continue;
Evan Chengf2fbca62007-11-12 06:35:08 +00001193 if (Reg != li.reg)
1194 continue;
1195
1196 bool TryFold = !DefIsReMat;
Evan Chengcb3c3302007-11-29 23:02:50 +00001197 bool FoldSS = true; // Default behavior unless it's a remat.
Evan Chengf2fbca62007-11-12 06:35:08 +00001198 int FoldSlot = Slot;
1199 if (DefIsReMat) {
1200 // If this is the rematerializable definition MI itself and
1201 // all of its uses are rematerialized, simply delete it.
Evan Cheng81a03822007-11-17 00:40:40 +00001202 if (MI == ReMatOrigDefMI && CanDelete) {
Evan Chengcddbb832007-11-30 21:23:43 +00001203 DOUT << "\t\t\t\tErasing re-materializable def: ";
1204 DOUT << MI << '\n';
Evan Chengf2fbca62007-11-12 06:35:08 +00001205 RemoveMachineInstrFromMaps(MI);
Evan Chengcada2452007-11-28 01:28:46 +00001206 vrm.RemoveMachineInstrFromMaps(MI);
Evan Chengf2fbca62007-11-12 06:35:08 +00001207 MI->eraseFromParent();
1208 break;
1209 }
1210
1211 // If def for this use can't be rematerialized, then try folding.
Evan Cheng0cbb1162007-11-29 01:06:25 +00001212 // If def is rematerializable and it's a load, also try folding.
Evan Chengcb3c3302007-11-29 23:02:50 +00001213 TryFold = !ReMatDefMI || (ReMatDefMI && (MI == ReMatOrigDefMI || isLoad));
Evan Chengf2fbca62007-11-12 06:35:08 +00001214 if (isLoad) {
1215 // Try to fold loads (from stack slot, constant pool, etc.) into uses.
1216 FoldSS = isLoadSS;
1217 FoldSlot = LdSlot;
1218 }
1219 }
1220
Evan Chengf2fbca62007-11-12 06:35:08 +00001221 // Scan all of the operands of this instruction rewriting operands
1222 // to use NewVReg instead of li.reg as appropriate. We do this for
1223 // two reasons:
1224 //
1225 // 1. If the instr reads the same spilled vreg multiple times, we
1226 // want to reuse the NewVReg.
1227 // 2. If the instr is a two-addr instruction, we are required to
1228 // keep the src/dst regs pinned.
1229 //
1230 // Keep track of whether we replace a use and/or def so that we can
1231 // create the spill interval with the appropriate range.
Evan Chengcddbb832007-11-30 21:23:43 +00001232
Evan Cheng81a03822007-11-17 00:40:40 +00001233 HasUse = mop.isUse();
1234 HasDef = mop.isDef();
Evan Chengaee4af62007-12-02 08:30:39 +00001235 SmallVector<unsigned, 2> Ops;
1236 Ops.push_back(i);
Evan Chengf2fbca62007-11-12 06:35:08 +00001237 for (unsigned j = i+1, e = MI->getNumOperands(); j != e; ++j) {
Evan Chengaee4af62007-12-02 08:30:39 +00001238 const MachineOperand &MOj = MI->getOperand(j);
1239 if (!MOj.isRegister())
Evan Chengf2fbca62007-11-12 06:35:08 +00001240 continue;
Evan Chengaee4af62007-12-02 08:30:39 +00001241 unsigned RegJ = MOj.getReg();
Dan Gohman6f0d0242008-02-10 18:45:23 +00001242 if (RegJ == 0 || TargetRegisterInfo::isPhysicalRegister(RegJ))
Evan Chengf2fbca62007-11-12 06:35:08 +00001243 continue;
1244 if (RegJ == RegI) {
Evan Chengaee4af62007-12-02 08:30:39 +00001245 Ops.push_back(j);
1246 HasUse |= MOj.isUse();
1247 HasDef |= MOj.isDef();
Evan Chengf2fbca62007-11-12 06:35:08 +00001248 }
1249 }
1250
Evan Cheng79a796c2008-07-12 01:56:02 +00001251 if (HasUse && !li.liveAt(getUseIndex(index)))
1252 // Must be defined by an implicit def. It should not be spilled. Note,
1253 // this is for correctness reasons. e.g.
1254 // 8 %reg1024<def> = IMPLICIT_DEF
1255 // 12 %reg1024<def> = INSERT_SUBREG %reg1024<kill>, %reg1025, 2
1256 // The live range [12, 14) is not part of the r1024 live interval since
1257 // it's defined by an implicit def. It will not conflict with the live
1258 // interval of r1025. Now suppose both registers are spilled; you can
Evan Chengb9890ae2008-07-12 02:22:07 +00001259 // easily see a situation where both registers are reloaded before
Evan Cheng79a796c2008-07-12 01:56:02 +00001260 // the INSERT_SUBREG and both target registers would overlap.
1261 HasUse = false;
1262
Evan Cheng9c3c2212008-06-06 07:54:39 +00001263 // Update the stack slot spill weight now unless we are splitting; when splitting it is added when the spill / restore points are placed.
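      // getSpillWeight factors in HasDef / HasUse and the loop depth of the
      // containing block; deeper loops are expected to yield larger weights.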
Evan Chengc3417602008-06-21 06:45:54 +00001264 float Weight = getSpillWeight(HasDef, HasUse, loopDepth);
Evan Cheng9c3c2212008-06-06 07:54:39 +00001265 if (!TrySplit)
1266 SSWeight += Weight;
1267
1268 if (!TryFold)
1269 CanFold = false;
1270 else {
Evan Cheng018f9b02007-12-05 03:22:34 +00001271 // Do not fold load / store here if we are splitting. We'll find an
1272 // optimal point to insert a load / store later.
1273 if (!TrySplit) {
1274 if (tryFoldMemoryOperand(MI, vrm, ReMatDefMI, index,
1275 Ops, FoldSS, FoldSlot, Reg)) {
1276 // Folding the load/store can completely change the instruction in
1277 // unpredictable ways, rescan it from the beginning.
1278 HasUse = false;
1279 HasDef = false;
1280 CanFold = false;
Evan Cheng9c3c2212008-06-06 07:54:39 +00001281 if (isRemoved(MI)) {
1282 SSWeight -= Weight;
Evan Cheng7e073ba2008-04-09 20:57:25 +00001283 break;
Evan Cheng9c3c2212008-06-06 07:54:39 +00001284 }
Evan Cheng018f9b02007-12-05 03:22:34 +00001285 goto RestartInstruction;
1286 }
1287 } else {
Evan Cheng9c3c2212008-06-06 07:54:39 +00001288 // We'll try to fold it later if it's profitable.
Evan Cheng3c75ba82008-04-01 21:37:32 +00001289 CanFold = canFoldMemoryOperand(MI, Ops, DefIsReMat);
Evan Cheng018f9b02007-12-05 03:22:34 +00001290 }
Evan Cheng9c3c2212008-06-06 07:54:39 +00001291 }
Evan Chengcddbb832007-11-30 21:23:43 +00001292
1293 // Create a new virtual register for the spill interval.
1294 bool CreatedNewVReg = false;
1295 if (NewVReg == 0) {
Evan Chengd70dbb52008-02-22 09:24:50 +00001296 NewVReg = mri_->createVirtualRegister(rc);
Evan Chengcddbb832007-11-30 21:23:43 +00001297 vrm.grow();
1298 CreatedNewVReg = true;
1299 }
1300 mop.setReg(NewVReg);
Evan Chengd70dbb52008-02-22 09:24:50 +00001301 if (mop.isImplicit())
1302 rewriteImplicitOps(li, MI, NewVReg, vrm);
Evan Chengcddbb832007-11-30 21:23:43 +00001303
1304 // Reuse NewVReg for other reads.
Evan Chengd70dbb52008-02-22 09:24:50 +00001305 for (unsigned j = 0, e = Ops.size(); j != e; ++j) {
1306 MachineOperand &mopj = MI->getOperand(Ops[j]);
1307 mopj.setReg(NewVReg);
1308 if (mopj.isImplicit())
1309 rewriteImplicitOps(li, MI, NewVReg, vrm);
1310 }
Evan Chengcddbb832007-11-30 21:23:43 +00001311
Evan Cheng81a03822007-11-17 00:40:40 +00001312 if (CreatedNewVReg) {
1313 if (DefIsReMat) {
1314 vrm.setVirtIsReMaterialized(NewVReg, ReMatDefMI/*, CanDelete*/);
Evan Chengd70dbb52008-02-22 09:24:50 +00001315 if (ReMatIds[VNI->id] == VirtRegMap::MAX_STACK_SLOT) {
Evan Cheng81a03822007-11-17 00:40:40 +00001316 // Each valnum may have its own remat id.
Evan Chengd70dbb52008-02-22 09:24:50 +00001317 ReMatIds[VNI->id] = vrm.assignVirtReMatId(NewVReg);
Evan Cheng81a03822007-11-17 00:40:40 +00001318 } else {
Evan Chengd70dbb52008-02-22 09:24:50 +00001319 vrm.assignVirtReMatId(NewVReg, ReMatIds[VNI->id]);
Evan Cheng81a03822007-11-17 00:40:40 +00001320 }
1321 if (!CanDelete || (HasUse && HasDef)) {
1322 // If this is a two-addr instruction then its use operands are
1323 // rematerializable but its def is not. It should be assigned a
1324 // stack slot.
1325 vrm.assignVirt2StackSlot(NewVReg, Slot);
1326 }
Evan Chengf2fbca62007-11-12 06:35:08 +00001327 } else {
Evan Chengf2fbca62007-11-12 06:35:08 +00001328 vrm.assignVirt2StackSlot(NewVReg, Slot);
1329 }
Evan Chengcb3c3302007-11-29 23:02:50 +00001330 } else if (HasUse && HasDef &&
1331 vrm.getStackSlot(NewVReg) == VirtRegMap::NO_STACK_SLOT) {
1332 // If this interval hasn't been assigned a stack slot (because earlier
1333 // def is a deleted remat def), do it now.
1334 assert(Slot != VirtRegMap::NO_STACK_SLOT);
1335 vrm.assignVirt2StackSlot(NewVReg, Slot);
Evan Chengf2fbca62007-11-12 06:35:08 +00001336 }
1337
Evan Cheng313d4b82008-02-23 00:33:04 +00001338 // Re-matting an instruction with virtual register use. Add the
1339 // register as an implicit use on the use MI.
1340 if (DefIsReMat && ImpUse)
1341 MI->addOperand(MachineOperand::CreateReg(ImpUse, false, true));
1342
Evan Chengf2fbca62007-11-12 06:35:08 +00001343 // create a new register interval for this spill / remat.
1344 LiveInterval &nI = getOrCreateInterval(NewVReg);
Evan Cheng81a03822007-11-17 00:40:40 +00001345 if (CreatedNewVReg) {
1346 NewLIs.push_back(&nI);
Evan Cheng1953d0c2007-11-29 10:12:14 +00001347 MBBVRegsMap.insert(std::make_pair(MI->getParent()->getNumber(), NewVReg));
Evan Cheng81a03822007-11-17 00:40:40 +00001348 if (TrySplit)
1349 vrm.setIsSplitFromReg(NewVReg, li.reg);
1350 }
Evan Chengf2fbca62007-11-12 06:35:08 +00001351
1352 if (HasUse) {
Evan Cheng81a03822007-11-17 00:40:40 +00001353 if (CreatedNewVReg) {
1354 LiveRange LR(getLoadIndex(index), getUseIndex(index)+1,
1355 nI.getNextValue(~0U, 0, VNInfoAllocator));
1356 DOUT << " +" << LR;
1357 nI.addRange(LR);
1358 } else {
1359 // Extend the split live interval to this def / use.
1360 unsigned End = getUseIndex(index)+1;
1361 LiveRange LR(nI.ranges[nI.ranges.size()-1].end, End,
1362 nI.getValNumInfo(nI.getNumValNums()-1));
1363 DOUT << " +" << LR;
1364 nI.addRange(LR);
1365 }
Evan Chengf2fbca62007-11-12 06:35:08 +00001366 }
1367 if (HasDef) {
1368 LiveRange LR(getDefIndex(index), getStoreIndex(index),
1369 nI.getNextValue(~0U, 0, VNInfoAllocator));
1370 DOUT << " +" << LR;
1371 nI.addRange(LR);
1372 }
Evan Cheng81a03822007-11-17 00:40:40 +00001373
Evan Chengf2fbca62007-11-12 06:35:08 +00001374 DOUT << "\t\t\t\tAdded new interval: ";
Dan Gohman6f0d0242008-02-10 18:45:23 +00001375 nI.print(DOUT, tri_);
Evan Chengf2fbca62007-11-12 06:35:08 +00001376 DOUT << '\n';
1377 }
Evan Cheng018f9b02007-12-05 03:22:34 +00001378 return CanFold;
Evan Chengf2fbca62007-11-12 06:35:08 +00001379}
Evan Cheng81a03822007-11-17 00:40:40 +00001380bool LiveIntervals::anyKillInMBBAfterIdx(const LiveInterval &li,
Evan Cheng0cbb1162007-11-29 01:06:25 +00001381 const VNInfo *VNI,
1382 MachineBasicBlock *MBB, unsigned Idx) const {
Evan Cheng81a03822007-11-17 00:40:40 +00001383 unsigned End = getMBBEndIdx(MBB);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001384 for (unsigned j = 0, ee = VNI->kills.size(); j != ee; ++j) {
1385 unsigned KillIdx = VNI->kills[j];
1386 if (KillIdx > Idx && KillIdx < End)
1387 return true;
Evan Cheng81a03822007-11-17 00:40:40 +00001388 }
1389 return false;
1390}
1391
Evan Cheng063284c2008-02-21 00:34:19 +00001392/// RewriteInfo - Keep track of machine instrs that will be rewritten
1393/// during spilling.
Dan Gohman844731a2008-05-13 00:00:25 +00001394namespace {
1395 struct RewriteInfo {
1396 unsigned Index;
1397 MachineInstr *MI;
1398 bool HasUse;
1399 bool HasDef;
1400 RewriteInfo(unsigned i, MachineInstr *mi, bool u, bool d)
1401 : Index(i), MI(mi), HasUse(u), HasDef(d) {}
1402 };
Evan Cheng063284c2008-02-21 00:34:19 +00001403
Dan Gohman844731a2008-05-13 00:00:25 +00001404 struct RewriteInfoCompare {
1405 bool operator()(const RewriteInfo &LHS, const RewriteInfo &RHS) const {
1406 return LHS.Index < RHS.Index;
1407 }
1408 };
1409}
Evan Cheng063284c2008-02-21 00:34:19 +00001410
Evan Chengf2fbca62007-11-12 06:35:08 +00001411void LiveIntervals::
Evan Cheng81a03822007-11-17 00:40:40 +00001412rewriteInstructionsForSpills(const LiveInterval &li, bool TrySplit,
Evan Chengf2fbca62007-11-12 06:35:08 +00001413 LiveInterval::Ranges::const_iterator &I,
Evan Cheng81a03822007-11-17 00:40:40 +00001414 MachineInstr *ReMatOrigDefMI, MachineInstr *ReMatDefMI,
Evan Chengf2fbca62007-11-12 06:35:08 +00001415 unsigned Slot, int LdSlot,
1416 bool isLoad, bool isLoadSS, bool DefIsReMat, bool CanDelete,
Evan Chengd70dbb52008-02-22 09:24:50 +00001417 VirtRegMap &vrm,
Evan Chengf2fbca62007-11-12 06:35:08 +00001418 const TargetRegisterClass* rc,
1419 SmallVector<int, 4> &ReMatIds,
Evan Cheng22f07ff2007-12-11 02:09:15 +00001420 const MachineLoopInfo *loopInfo,
Evan Cheng81a03822007-11-17 00:40:40 +00001421 BitVector &SpillMBBs,
Owen Anderson28998312008-08-13 22:28:50 +00001422 DenseMap<unsigned, std::vector<SRInfo> > &SpillIdxes,
Evan Cheng0cbb1162007-11-29 01:06:25 +00001423 BitVector &RestoreMBBs,
Owen Anderson28998312008-08-13 22:28:50 +00001424 DenseMap<unsigned, std::vector<SRInfo> > &RestoreIdxes,
1425 DenseMap<unsigned,unsigned> &MBBVRegsMap,
Evan Cheng9c3c2212008-06-06 07:54:39 +00001426 std::vector<LiveInterval*> &NewLIs, float &SSWeight) {
Evan Cheng018f9b02007-12-05 03:22:34 +00001427 bool AllCanFold = true;
Evan Cheng81a03822007-11-17 00:40:40 +00001428 unsigned NewVReg = 0;
Evan Cheng063284c2008-02-21 00:34:19 +00001429 unsigned start = getBaseIndex(I->start);
Evan Chengf2fbca62007-11-12 06:35:08 +00001430 unsigned end = getBaseIndex(I->end-1) + InstrSlots::NUM;
Evan Chengf2fbca62007-11-12 06:35:08 +00001431
Evan Cheng063284c2008-02-21 00:34:19 +00001432 // First collect all the defs / uses in this live range that will be rewritten.
Evan Cheng7e073ba2008-04-09 20:57:25 +00001433 // Make sure they are sorted according to instruction index.
Evan Cheng063284c2008-02-21 00:34:19 +00001434 std::vector<RewriteInfo> RewriteMIs;
Evan Chengd70dbb52008-02-22 09:24:50 +00001435 for (MachineRegisterInfo::reg_iterator ri = mri_->reg_begin(li.reg),
1436 re = mri_->reg_end(); ri != re; ) {
Evan Cheng419852c2008-04-03 16:39:43 +00001437 MachineInstr *MI = &*ri;
Evan Cheng063284c2008-02-21 00:34:19 +00001438 MachineOperand &O = ri.getOperand();
1439 ++ri;
Evan Cheng24d2f8a2008-03-31 07:53:30 +00001440 assert(!O.isImplicit() && "Spilling register that's used as implicit use?");
Evan Cheng063284c2008-02-21 00:34:19 +00001441 unsigned index = getInstructionIndex(MI);
1442 if (index < start || index >= end)
1443 continue;
Evan Cheng79a796c2008-07-12 01:56:02 +00001444 if (O.isUse() && !li.liveAt(getUseIndex(index)))
1445 // Must be defined by an implicit def. It should not be spilled. Note,
1446 // this is for correctness reasons. e.g.
1447 // 8 %reg1024<def> = IMPLICIT_DEF
1448 // 12 %reg1024<def> = INSERT_SUBREG %reg1024<kill>, %reg1025, 2
1449 // The live range [12, 14) is not part of the r1024 live interval since
1450 // it's defined by an implicit def. It will not conflict with the live
1451 // interval of r1025. Now suppose both registers are spilled; you can
Evan Chengb9890ae2008-07-12 02:22:07 +00001452 // easily see a situation where both registers are reloaded before
Evan Cheng79a796c2008-07-12 01:56:02 +00001453 // the INSERT_SUBREG and both target registers would overlap.
1454 continue;
Evan Cheng063284c2008-02-21 00:34:19 +00001455 RewriteMIs.push_back(RewriteInfo(index, MI, O.isUse(), O.isDef()));
1456 }
1457 std::sort(RewriteMIs.begin(), RewriteMIs.end(), RewriteInfoCompare());
1458
Evan Cheng313d4b82008-02-23 00:33:04 +00001459 unsigned ImpUse = DefIsReMat ? getReMatImplicitUse(li, ReMatDefMI) : 0;
Evan Cheng063284c2008-02-21 00:34:19 +00001460 // Now rewrite the defs and uses.
1461 for (unsigned i = 0, e = RewriteMIs.size(); i != e; ) {
1462 RewriteInfo &rwi = RewriteMIs[i];
1463 ++i;
1464 unsigned index = rwi.Index;
1465 bool MIHasUse = rwi.HasUse;
1466 bool MIHasDef = rwi.HasDef;
1467 MachineInstr *MI = rwi.MI;
1468 // If MI defs and/or uses the same register multiple times, then there
1469 // are multiple entries.
Evan Cheng313d4b82008-02-23 00:33:04 +00001470 unsigned NumUses = MIHasUse;
Evan Cheng063284c2008-02-21 00:34:19 +00001471 while (i != e && RewriteMIs[i].MI == MI) {
1472 assert(RewriteMIs[i].Index == index);
Evan Cheng313d4b82008-02-23 00:33:04 +00001473 bool isUse = RewriteMIs[i].HasUse;
1474 if (isUse) ++NumUses;
1475 MIHasUse |= isUse;
Evan Cheng063284c2008-02-21 00:34:19 +00001476 MIHasDef |= RewriteMIs[i].HasDef;
1477 ++i;
1478 }
Evan Cheng81a03822007-11-17 00:40:40 +00001479 MachineBasicBlock *MBB = MI->getParent();
Evan Cheng313d4b82008-02-23 00:33:04 +00001480
Evan Cheng0a891ed2008-05-23 23:00:04 +00001481 if (ImpUse && MI != ReMatDefMI) {
Evan Cheng313d4b82008-02-23 00:33:04 +00001482 // Re-matting an instruction with virtual register use. Update the
Evan Cheng24d2f8a2008-03-31 07:53:30 +00001483 // register interval's spill weight to HUGE_VALF to prevent it from
1484 // being spilled.
Evan Cheng313d4b82008-02-23 00:33:04 +00001485 LiveInterval &ImpLi = getInterval(ImpUse);
Evan Cheng24d2f8a2008-03-31 07:53:30 +00001486 ImpLi.weight = HUGE_VALF;
Evan Cheng313d4b82008-02-23 00:33:04 +00001487 }
1488
Evan Cheng063284c2008-02-21 00:34:19 +00001489 unsigned MBBId = MBB->getNumber();
Evan Cheng018f9b02007-12-05 03:22:34 +00001490 unsigned ThisVReg = 0;
Evan Cheng70306f82007-12-03 09:58:48 +00001491 if (TrySplit) {
Owen Anderson28998312008-08-13 22:28:50 +00001492 DenseMap<unsigned,unsigned>::iterator NVI = MBBVRegsMap.find(MBBId);
Evan Cheng1953d0c2007-11-29 10:12:14 +00001493 if (NVI != MBBVRegsMap.end()) {
Evan Cheng018f9b02007-12-05 03:22:34 +00001494 ThisVReg = NVI->second;
Evan Cheng1953d0c2007-11-29 10:12:14 +00001495 // One common case:
1496 // x = use
1497 // ...
1498 // ...
1499 // def = ...
1500 // = use
1501 // It's better to start a new interval to avoid artificially
1502 // extending the new interval.
Evan Cheng1953d0c2007-11-29 10:12:14 +00001503 if (MIHasDef && !MIHasUse) {
1504 MBBVRegsMap.erase(MBB->getNumber());
Evan Cheng018f9b02007-12-05 03:22:34 +00001505 ThisVReg = 0;
Evan Cheng1953d0c2007-11-29 10:12:14 +00001506 }
1507 }
Evan Chengcada2452007-11-28 01:28:46 +00001508 }
Evan Cheng018f9b02007-12-05 03:22:34 +00001509
1510 bool IsNew = ThisVReg == 0;
1511 if (IsNew) {
1512 // This ends the previous live interval. If all of its def / use
1513 // can be folded, give it a low spill weight.
1514 if (NewVReg && TrySplit && AllCanFold) {
1515 LiveInterval &nI = getOrCreateInterval(NewVReg);
1516 nI.weight /= 10.0F;
1517 }
1518 AllCanFold = true;
1519 }
1520 NewVReg = ThisVReg;
1521
Evan Cheng81a03822007-11-17 00:40:40 +00001522 bool HasDef = false;
1523 bool HasUse = false;
Evan Chengd70dbb52008-02-22 09:24:50 +00001524 bool CanFold = rewriteInstructionForSpills(li, I->valno, TrySplit,
Evan Cheng9c3c2212008-06-06 07:54:39 +00001525 index, end, MI, ReMatOrigDefMI, ReMatDefMI,
1526 Slot, LdSlot, isLoad, isLoadSS, DefIsReMat,
1527 CanDelete, vrm, rc, ReMatIds, loopInfo, NewVReg,
1528 ImpUse, HasDef, HasUse, MBBVRegsMap, NewLIs, SSWeight);
Evan Cheng81a03822007-11-17 00:40:40 +00001529 if (!HasDef && !HasUse)
1530 continue;
1531
Evan Cheng018f9b02007-12-05 03:22:34 +00001532 AllCanFold &= CanFold;
1533
Evan Cheng81a03822007-11-17 00:40:40 +00001534 // Update weight of spill interval.
1535 LiveInterval &nI = getOrCreateInterval(NewVReg);
Evan Cheng70306f82007-12-03 09:58:48 +00001536 if (!TrySplit) {
Evan Cheng81a03822007-11-17 00:40:40 +00001537 // The spill weight is now infinity as it cannot be spilled again.
1538 nI.weight = HUGE_VALF;
Evan Cheng0cbb1162007-11-29 01:06:25 +00001539 continue;
Evan Cheng81a03822007-11-17 00:40:40 +00001540 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00001541
1542 // Keep track of the last def and first use in each MBB.
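      // SpillIdxes[MBB] records, for each new vreg spilled in that block, the
      // index of its last def and whether the store there can be folded;
      // RestoreIdxes records the first use that needs a reload. Both are
      // consumed later when the actual spill / restore code is placed.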
Evan Cheng0cbb1162007-11-29 01:06:25 +00001543 if (HasDef) {
1544 if (MI != ReMatOrigDefMI || !CanDelete) {
Evan Cheng0cbb1162007-11-29 01:06:25 +00001545 bool HasKill = false;
1546 if (!HasUse)
1547 HasKill = anyKillInMBBAfterIdx(li, I->valno, MBB, getDefIndex(index));
1548 else {
Evan Cheng1953d0c2007-11-29 10:12:14 +00001549 // If this is a two-address instruction, then this index starts a new VNInfo.
Evan Cheng3f32d652008-06-04 09:18:41 +00001550 const VNInfo *VNI = li.findDefinedVNInfo(getDefIndex(index));
Evan Cheng0cbb1162007-11-29 01:06:25 +00001551 if (VNI)
1552 HasKill = anyKillInMBBAfterIdx(li, VNI, MBB, getDefIndex(index));
1553 }
Owen Anderson28998312008-08-13 22:28:50 +00001554 DenseMap<unsigned, std::vector<SRInfo> >::iterator SII =
Evan Chenge3110d02007-12-01 04:42:39 +00001555 SpillIdxes.find(MBBId);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001556 if (!HasKill) {
Evan Cheng1953d0c2007-11-29 10:12:14 +00001557 if (SII == SpillIdxes.end()) {
1558 std::vector<SRInfo> S;
1559 S.push_back(SRInfo(index, NewVReg, true));
1560 SpillIdxes.insert(std::make_pair(MBBId, S));
1561 } else if (SII->second.back().vreg != NewVReg) {
1562 SII->second.push_back(SRInfo(index, NewVReg, true));
1563 } else if ((int)index > SII->second.back().index) {
Evan Cheng0cbb1162007-11-29 01:06:25 +00001564 // If there is an earlier def and this is a two-address
1565 // instruction, then it's not possible to fold the store (which
1566 // would also fold the load).
Evan Cheng1953d0c2007-11-29 10:12:14 +00001567 SRInfo &Info = SII->second.back();
1568 Info.index = index;
1569 Info.canFold = !HasUse;
Evan Cheng0cbb1162007-11-29 01:06:25 +00001570 }
1571 SpillMBBs.set(MBBId);
Evan Chenge3110d02007-12-01 04:42:39 +00001572 } else if (SII != SpillIdxes.end() &&
1573 SII->second.back().vreg == NewVReg &&
1574 (int)index > SII->second.back().index) {
1575 // There is an earlier def that's not killed (must be two-address).
1576 // The spill is no longer needed.
1577 SII->second.pop_back();
1578 if (SII->second.empty()) {
1579 SpillIdxes.erase(MBBId);
1580 SpillMBBs.reset(MBBId);
1581 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00001582 }
1583 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00001584 }
1585
1586 if (HasUse) {
Owen Anderson28998312008-08-13 22:28:50 +00001587 DenseMap<unsigned, std::vector<SRInfo> >::iterator SII =
Evan Cheng0cbb1162007-11-29 01:06:25 +00001588 SpillIdxes.find(MBBId);
Evan Cheng1953d0c2007-11-29 10:12:14 +00001589 if (SII != SpillIdxes.end() &&
1590 SII->second.back().vreg == NewVReg &&
1591 (int)index > SII->second.back().index)
Evan Cheng0cbb1162007-11-29 01:06:25 +00001592 // Use(s) following the last def, it's not safe to fold the spill.
Evan Cheng1953d0c2007-11-29 10:12:14 +00001593 SII->second.back().canFold = false;
Owen Anderson28998312008-08-13 22:28:50 +00001594 DenseMap<unsigned, std::vector<SRInfo> >::iterator RII =
Evan Cheng0cbb1162007-11-29 01:06:25 +00001595 RestoreIdxes.find(MBBId);
Evan Cheng1953d0c2007-11-29 10:12:14 +00001596 if (RII != RestoreIdxes.end() && RII->second.back().vreg == NewVReg)
Evan Cheng0cbb1162007-11-29 01:06:25 +00001597 // If we are splitting live intervals, only fold if it's the first
1598 // use and there isn't another use later in the MBB.
Evan Cheng1953d0c2007-11-29 10:12:14 +00001599 RII->second.back().canFold = false;
Evan Cheng0cbb1162007-11-29 01:06:25 +00001600 else if (IsNew) {
1601 // Only need a reload if there isn't an earlier def / use.
Evan Cheng1953d0c2007-11-29 10:12:14 +00001602 if (RII == RestoreIdxes.end()) {
1603 std::vector<SRInfo> Infos;
1604 Infos.push_back(SRInfo(index, NewVReg, true));
1605 RestoreIdxes.insert(std::make_pair(MBBId, Infos));
1606 } else {
1607 RII->second.push_back(SRInfo(index, NewVReg, true));
1608 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00001609 RestoreMBBs.set(MBBId);
1610 }
1611 }
1612
1613 // Update spill weight.
Evan Cheng22f07ff2007-12-11 02:09:15 +00001614 unsigned loopDepth = loopInfo->getLoopDepth(MBB);
Evan Chengc3417602008-06-21 06:45:54 +00001615 nI.weight += getSpillWeight(HasDef, HasUse, loopDepth);
Evan Chengf2fbca62007-11-12 06:35:08 +00001616 }
Evan Cheng018f9b02007-12-05 03:22:34 +00001617
1618 if (NewVReg && TrySplit && AllCanFold) {
1619 // If all of its def / use can be folded, give it a low spill weight.
1620 LiveInterval &nI = getOrCreateInterval(NewVReg);
1621 nI.weight /= 10.0F;
1622 }
Evan Chengf2fbca62007-11-12 06:35:08 +00001623}
1624
Evan Cheng1953d0c2007-11-29 10:12:14 +00001625bool LiveIntervals::alsoFoldARestore(int Id, int index, unsigned vr,
1626 BitVector &RestoreMBBs,
Owen Anderson28998312008-08-13 22:28:50 +00001627 DenseMap<unsigned,std::vector<SRInfo> > &RestoreIdxes) {
Evan Cheng1953d0c2007-11-29 10:12:14 +00001628 if (!RestoreMBBs[Id])
1629 return false;
1630 std::vector<SRInfo> &Restores = RestoreIdxes[Id];
1631 for (unsigned i = 0, e = Restores.size(); i != e; ++i)
1632 if (Restores[i].index == index &&
1633 Restores[i].vreg == vr &&
1634 Restores[i].canFold)
1635 return true;
1636 return false;
1637}
1638
1639void LiveIntervals::eraseRestoreInfo(int Id, int index, unsigned vr,
1640 BitVector &RestoreMBBs,
Owen Anderson28998312008-08-13 22:28:50 +00001641 DenseMap<unsigned,std::vector<SRInfo> > &RestoreIdxes) {
Evan Cheng1953d0c2007-11-29 10:12:14 +00001642 if (!RestoreMBBs[Id])
1643 return;
1644 std::vector<SRInfo> &Restores = RestoreIdxes[Id];
1645 for (unsigned i = 0, e = Restores.size(); i != e; ++i)
1646 if (Restores[i].index == index && Restores[i].vreg == vr)
1647 Restores[i].index = -1;
1648}
Evan Cheng81a03822007-11-17 00:40:40 +00001649
Evan Cheng4cce6b42008-04-11 17:53:36 +00001650/// handleSpilledImpDefs - Remove IMPLICIT_DEF instructions which are being
1651/// spilled and create empty intervals for their uses.
1652void
1653LiveIntervals::handleSpilledImpDefs(const LiveInterval &li, VirtRegMap &vrm,
1654 const TargetRegisterClass* rc,
1655 std::vector<LiveInterval*> &NewLIs) {
Evan Cheng419852c2008-04-03 16:39:43 +00001656 for (MachineRegisterInfo::reg_iterator ri = mri_->reg_begin(li.reg),
1657 re = mri_->reg_end(); ri != re; ) {
Evan Cheng4cce6b42008-04-11 17:53:36 +00001658 MachineOperand &O = ri.getOperand();
Evan Cheng419852c2008-04-03 16:39:43 +00001659 MachineInstr *MI = &*ri;
1660 ++ri;
Evan Cheng4cce6b42008-04-11 17:53:36 +00001661 if (O.isDef()) {
1662 assert(MI->getOpcode() == TargetInstrInfo::IMPLICIT_DEF &&
1663 "Register def was not rewritten?");
1664 RemoveMachineInstrFromMaps(MI);
1665 vrm.RemoveMachineInstrFromMaps(MI);
1666 MI->eraseFromParent();
1667 } else {
1668 // This must be a use of an implicit_def so it's not part of the live
1669 // interval. Create a new empty live interval for it.
1670 // FIXME: Can we simply erase some of the instructions? e.g. Stores?
1671 unsigned NewVReg = mri_->createVirtualRegister(rc);
1672 vrm.grow();
1673 vrm.setIsImplicitlyDefined(NewVReg);
1674 NewLIs.push_back(&getOrCreateInterval(NewVReg));
1675 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
1676 MachineOperand &MO = MI->getOperand(i);
Dan Gohman014278e2008-09-13 17:58:21 +00001677 if (MO.isRegister() && MO.getReg() == li.reg)
Evan Cheng4cce6b42008-04-11 17:53:36 +00001678 MO.setReg(NewVReg);
1679 }
1680 }
Evan Cheng419852c2008-04-03 16:39:43 +00001681 }
1682}
1683
Owen Anderson133f10f2008-08-18 19:52:22 +00001684namespace {
1685 struct LISorter {
1686 bool operator()(LiveInterval* A, LiveInterval* B) {
1687 return A->beginNumber() < B->beginNumber();
1688 }
1689 };
1690}
Evan Cheng81a03822007-11-17 00:40:40 +00001691
Evan Chengf2fbca62007-11-12 06:35:08 +00001692std::vector<LiveInterval*> LiveIntervals::
Owen Andersond6664312008-08-18 18:05:32 +00001693addIntervalsForSpillsFast(const LiveInterval &li,
1694 const MachineLoopInfo *loopInfo,
1695 VirtRegMap &vrm, float& SSWeight) {
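  // Fast spilling path (used when EnableFastSpilling is set): give every
  // instruction that touches li.reg its own new virtual register with a tiny,
  // unspillable interval, recording a restore before each use and a spill
  // after each def, unless the memory access can be folded directly into the
  // instruction.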
Owen Anderson17197312008-08-18 23:41:04 +00001696 unsigned slot = vrm.assignVirt2StackSlot(li.reg);
Owen Andersond6664312008-08-18 18:05:32 +00001697
1698 std::vector<LiveInterval*> added;
1699
1700 assert(li.weight != HUGE_VALF &&
1701 "attempt to spill already spilled interval!");
1702
1703 DOUT << "\t\t\t\tadding intervals for spills for interval: ";
1704 DEBUG(li.dump());
1705 DOUT << '\n';
1706
1707 const TargetRegisterClass* rc = mri_->getRegClass(li.reg);
1708
Owen Anderson9a032932008-08-18 21:20:32 +00001709 SSWeight = 0.0f;
1710
Owen Andersona41e47a2008-08-19 22:12:11 +00001711 MachineRegisterInfo::reg_iterator RI = mri_->reg_begin(li.reg);
1712 while (RI != mri_->reg_end()) {
1713 MachineInstr* MI = &*RI;
1714
1715 SmallVector<unsigned, 2> Indices;
1716 bool HasUse = false;
1717 bool HasDef = false;
1718
1719 for (unsigned i = 0; i != MI->getNumOperands(); ++i) {
1720 MachineOperand& mop = MI->getOperand(i);
Dan Gohman014278e2008-09-13 17:58:21 +00001721 if (!mop.isRegister() || mop.getReg() != li.reg) continue;
Owen Andersona41e47a2008-08-19 22:12:11 +00001722
1723 HasUse |= MI->getOperand(i).isUse();
1724 HasDef |= MI->getOperand(i).isDef();
1725
1726 Indices.push_back(i);
1727 }
1728
1729 if (!tryFoldMemoryOperand(MI, vrm, NULL, getInstructionIndex(MI),
1730 Indices, true, slot, li.reg)) {
1731 unsigned NewVReg = mri_->createVirtualRegister(rc);
Owen Anderson9a032932008-08-18 21:20:32 +00001732 vrm.grow();
Owen Anderson17197312008-08-18 23:41:04 +00001733 vrm.assignVirt2StackSlot(NewVReg, slot);
1734
Owen Andersona41e47a2008-08-19 22:12:11 +00001735 // create a new register for this spill
1736 LiveInterval &nI = getOrCreateInterval(NewVReg);
Owen Andersond6664312008-08-18 18:05:32 +00001737
Owen Andersona41e47a2008-08-19 22:12:11 +00001738 // the spill weight is now infinity as it
1739 // cannot be spilled again
1740 nI.weight = HUGE_VALF;
1741
1742 // Rewrite register operands to use the new vreg.
1743 for (SmallVectorImpl<unsigned>::iterator I = Indices.begin(),
1744 E = Indices.end(); I != E; ++I) {
1745 MI->getOperand(*I).setReg(NewVReg);
1746
1747 if (MI->getOperand(*I).isUse())
1748 MI->getOperand(*I).setIsKill(true);
1749 }
1750
1751 // Fill in the new live interval.
1752 unsigned index = getInstructionIndex(MI);
1753 if (HasUse) {
1754 LiveRange LR(getLoadIndex(index), getUseIndex(index),
1755 nI.getNextValue(~0U, 0, getVNInfoAllocator()));
1756 DOUT << " +" << LR;
1757 nI.addRange(LR);
1758 vrm.addRestorePoint(NewVReg, MI);
1759 }
1760 if (HasDef) {
1761 LiveRange LR(getDefIndex(index), getStoreIndex(index),
1762 nI.getNextValue(~0U, 0, getVNInfoAllocator()));
1763 DOUT << " +" << LR;
1764 nI.addRange(LR);
1765 vrm.addSpillPoint(NewVReg, true, MI);
1766 }
1767
Owen Anderson17197312008-08-18 23:41:04 +00001768 added.push_back(&nI);
Owen Anderson8dc2cbe2008-08-18 18:38:12 +00001769
Owen Andersona41e47a2008-08-19 22:12:11 +00001770 DOUT << "\t\t\t\tadded new interval: ";
1771 DEBUG(nI.dump());
1772 DOUT << '\n';
1773
1774 unsigned loopDepth = loopInfo->getLoopDepth(MI->getParent());
1775 if (HasUse) {
1776 if (HasDef)
1777 SSWeight += getSpillWeight(true, true, loopDepth);
1778 else
1779 SSWeight += getSpillWeight(false, true, loopDepth);
1780 } else
1781 SSWeight += getSpillWeight(true, false, loopDepth);
1782 }
Owen Anderson9a032932008-08-18 21:20:32 +00001783
Owen Anderson9a032932008-08-18 21:20:32 +00001784
Owen Andersona41e47a2008-08-19 22:12:11 +00001785 RI = mri_->reg_begin(li.reg);
Owen Andersond6664312008-08-18 18:05:32 +00001786 }
Owen Andersond6664312008-08-18 18:05:32 +00001787
Owen Andersona41e47a2008-08-19 22:12:11 +00001788 // Clients expect the new intervals to be returned in sorted order.
Owen Anderson133f10f2008-08-18 19:52:22 +00001789 std::sort(added.begin(), added.end(), LISorter());
1790
Owen Andersond6664312008-08-18 18:05:32 +00001791 return added;
1792}
1793
1794std::vector<LiveInterval*> LiveIntervals::
Evan Cheng81a03822007-11-17 00:40:40 +00001795addIntervalsForSpills(const LiveInterval &li,
Evan Cheng9c3c2212008-06-06 07:54:39 +00001796 const MachineLoopInfo *loopInfo, VirtRegMap &vrm,
1797 float &SSWeight) {
Owen Andersonae339ba2008-08-19 00:17:30 +00001798
1799 if (EnableFastSpilling)
1800 return addIntervalsForSpillsFast(li, loopInfo, vrm, SSWeight);
1801
Evan Chengf2fbca62007-11-12 06:35:08 +00001802 assert(li.weight != HUGE_VALF &&
1803 "attempt to spill already spilled interval!");
1804
1805 DOUT << "\t\t\t\tadding intervals for spills for interval: ";
Dan Gohman6f0d0242008-02-10 18:45:23 +00001806 li.print(DOUT, tri_);
Evan Chengf2fbca62007-11-12 06:35:08 +00001807 DOUT << '\n';
1808
Evan Cheng9c3c2212008-06-06 07:54:39 +00001809 // Spill slot weight.
1810 SSWeight = 0.0f;
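  // Overall strategy: decide per val# whether its def can be rematerialized,
  // rewrite every def / use of li.reg onto new virtual registers (recording
  // candidate spill / restore points when splitting at basic block
  // boundaries), then place and, where possible, fold the actual spills and
  // restores.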
1811
Evan Cheng81a03822007-11-17 00:40:40 +00001812 // Each bit specifies whether a spill is required in the MBB.
1813 BitVector SpillMBBs(mf_->getNumBlockIDs());
Owen Anderson28998312008-08-13 22:28:50 +00001814 DenseMap<unsigned, std::vector<SRInfo> > SpillIdxes;
Evan Cheng0cbb1162007-11-29 01:06:25 +00001815 BitVector RestoreMBBs(mf_->getNumBlockIDs());
Owen Anderson28998312008-08-13 22:28:50 +00001816 DenseMap<unsigned, std::vector<SRInfo> > RestoreIdxes;
1817 DenseMap<unsigned,unsigned> MBBVRegsMap;
Evan Chengf2fbca62007-11-12 06:35:08 +00001818 std::vector<LiveInterval*> NewLIs;
Evan Chengd70dbb52008-02-22 09:24:50 +00001819 const TargetRegisterClass* rc = mri_->getRegClass(li.reg);
Evan Chengf2fbca62007-11-12 06:35:08 +00001820
1821 unsigned NumValNums = li.getNumValNums();
1822 SmallVector<MachineInstr*, 4> ReMatDefs;
1823 ReMatDefs.resize(NumValNums, NULL);
1824 SmallVector<MachineInstr*, 4> ReMatOrigDefs;
1825 ReMatOrigDefs.resize(NumValNums, NULL);
1826 SmallVector<int, 4> ReMatIds;
1827 ReMatIds.resize(NumValNums, VirtRegMap::MAX_STACK_SLOT);
1828 BitVector ReMatDelete(NumValNums);
1829 unsigned Slot = VirtRegMap::MAX_STACK_SLOT;
1830
Evan Cheng81a03822007-11-17 00:40:40 +00001831 // Spilling a split live interval. It cannot be split any further. It's
1832 // also guaranteed to be a single val# / range interval.
1833 if (vrm.getPreSplitReg(li.reg)) {
1834 vrm.setIsSplitFromReg(li.reg, 0);
Evan Chengd120ffd2007-12-05 10:24:35 +00001835 // Unset the split kill marker on the last use.
1836 unsigned KillIdx = vrm.getKillPoint(li.reg);
1837 if (KillIdx) {
1838 MachineInstr *KillMI = getInstructionFromIndex(KillIdx);
1839 assert(KillMI && "Last use disappeared?");
1840 int KillOp = KillMI->findRegisterUseOperandIdx(li.reg, true);
1841 assert(KillOp != -1 && "Last use disappeared?");
Chris Lattnerf7382302007-12-30 21:56:09 +00001842 KillMI->getOperand(KillOp).setIsKill(false);
Evan Chengd120ffd2007-12-05 10:24:35 +00001843 }
Evan Chengadf85902007-12-05 09:51:10 +00001844 vrm.removeKillPoint(li.reg);
Evan Cheng81a03822007-11-17 00:40:40 +00001845 bool DefIsReMat = vrm.isReMaterialized(li.reg);
1846 Slot = vrm.getStackSlot(li.reg);
1847 assert(Slot != VirtRegMap::MAX_STACK_SLOT);
1848 MachineInstr *ReMatDefMI = DefIsReMat ?
1849 vrm.getReMaterializedMI(li.reg) : NULL;
1850 int LdSlot = 0;
1851 bool isLoadSS = DefIsReMat && tii_->isLoadFromStackSlot(ReMatDefMI, LdSlot);
1852 bool isLoad = isLoadSS ||
Chris Lattner749c6f62008-01-07 07:27:27 +00001853 (DefIsReMat && (ReMatDefMI->getDesc().isSimpleLoad()));
Evan Cheng81a03822007-11-17 00:40:40 +00001854 bool IsFirstRange = true;
1855 for (LiveInterval::Ranges::const_iterator
1856 I = li.ranges.begin(), E = li.ranges.end(); I != E; ++I) {
1857 // If this is a split live interval with multiple ranges, it means there
1858 // are two-address instructions that re-defined the value. Only the
1859 // first def can be rematerialized!
1860 if (IsFirstRange) {
Evan Chengcb3c3302007-11-29 23:02:50 +00001861 // Note ReMatOrigDefMI has already been deleted.
Evan Cheng81a03822007-11-17 00:40:40 +00001862 rewriteInstructionsForSpills(li, false, I, NULL, ReMatDefMI,
1863 Slot, LdSlot, isLoad, isLoadSS, DefIsReMat,
Evan Chengd70dbb52008-02-22 09:24:50 +00001864 false, vrm, rc, ReMatIds, loopInfo,
Evan Cheng0cbb1162007-11-29 01:06:25 +00001865 SpillMBBs, SpillIdxes, RestoreMBBs, RestoreIdxes,
Evan Cheng9c3c2212008-06-06 07:54:39 +00001866 MBBVRegsMap, NewLIs, SSWeight);
Evan Cheng81a03822007-11-17 00:40:40 +00001867 } else {
1868 rewriteInstructionsForSpills(li, false, I, NULL, 0,
1869 Slot, 0, false, false, false,
Evan Chengd70dbb52008-02-22 09:24:50 +00001870 false, vrm, rc, ReMatIds, loopInfo,
Evan Cheng0cbb1162007-11-29 01:06:25 +00001871 SpillMBBs, SpillIdxes, RestoreMBBs, RestoreIdxes,
Evan Cheng9c3c2212008-06-06 07:54:39 +00001872 MBBVRegsMap, NewLIs, SSWeight);
Evan Cheng81a03822007-11-17 00:40:40 +00001873 }
1874 IsFirstRange = false;
1875 }
Evan Cheng419852c2008-04-03 16:39:43 +00001876
Evan Cheng9c3c2212008-06-06 07:54:39 +00001877 SSWeight = 0.0f; // Already accounted for when split.
Evan Cheng4cce6b42008-04-11 17:53:36 +00001878 handleSpilledImpDefs(li, vrm, rc, NewLIs);
Evan Cheng81a03822007-11-17 00:40:40 +00001879 return NewLIs;
1880 }
1881
1882 bool TrySplit = SplitAtBB && !intervalIsInOneMBB(li);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001883 if (SplitLimit != -1 && (int)numSplits >= SplitLimit)
1884 TrySplit = false;
1885 if (TrySplit)
1886 ++numSplits;
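  // When splitting, each basic block the original interval touches gets its
  // own new virtual register; spills are recorded after the last def in a
  // block whose value is live beyond it, and restores before the first use
  // in a block.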
Evan Chengf2fbca62007-11-12 06:35:08 +00001887 bool NeedStackSlot = false;
1888 for (LiveInterval::const_vni_iterator i = li.vni_begin(), e = li.vni_end();
1889 i != e; ++i) {
1890 const VNInfo *VNI = *i;
1891 unsigned VN = VNI->id;
1892 unsigned DefIdx = VNI->def;
1893 if (DefIdx == ~1U)
1894 continue; // Dead val#.
1895 // Is the def for the val# rematerializable?
Evan Cheng81a03822007-11-17 00:40:40 +00001896 MachineInstr *ReMatDefMI = (DefIdx == ~0u)
1897 ? 0 : getInstructionFromIndex(DefIdx);
Evan Cheng5ef3a042007-12-06 00:01:56 +00001898 bool dummy;
1899 if (ReMatDefMI && isReMaterializable(li, VNI, ReMatDefMI, dummy)) {
Evan Chengf2fbca62007-11-12 06:35:08 +00001900 // Remember how to remat the def of this val#.
Evan Cheng81a03822007-11-17 00:40:40 +00001901 ReMatOrigDefs[VN] = ReMatDefMI;
Dan Gohman2c3f7ae2008-07-17 23:49:46 +00001902 // Original def may be modified so we have to make a copy here.
Evan Cheng1ed99222008-07-19 00:37:25 +00001903 MachineInstr *Clone = mf_->CloneMachineInstr(ReMatDefMI);
1904 ClonedMIs.push_back(Clone);
1905 ReMatDefs[VN] = Clone;
Evan Chengf2fbca62007-11-12 06:35:08 +00001906
1907 bool CanDelete = true;
Evan Chengc3fc7d92007-11-29 09:49:23 +00001908 if (VNI->hasPHIKill) {
1909 // A kill is a phi node, so not all of its uses can be rematerialized.
Evan Chengf2fbca62007-11-12 06:35:08 +00001910 // It must not be deleted.
Evan Chengc3fc7d92007-11-29 09:49:23 +00001911 CanDelete = false;
1912 // Need a stack slot if there is any live range where uses cannot be
1913 // rematerialized.
1914 NeedStackSlot = true;
Evan Chengf2fbca62007-11-12 06:35:08 +00001915 }
Evan Chengf2fbca62007-11-12 06:35:08 +00001916 if (CanDelete)
1917 ReMatDelete.set(VN);
1918 } else {
1919 // Need a stack slot if there is any live range where uses cannot be
1920 // rematerialized.
1921 NeedStackSlot = true;
1922 }
1923 }
1924
1925 // One stack slot per live interval.
Evan Cheng81a03822007-11-17 00:40:40 +00001926 if (NeedStackSlot && vrm.getPreSplitReg(li.reg) == 0)
Evan Chengf2fbca62007-11-12 06:35:08 +00001927 Slot = vrm.assignVirt2StackSlot(li.reg);
1928
1929 // Create new intervals and rewrite defs and uses.
1930 for (LiveInterval::Ranges::const_iterator
1931 I = li.ranges.begin(), E = li.ranges.end(); I != E; ++I) {
Evan Cheng81a03822007-11-17 00:40:40 +00001932 MachineInstr *ReMatDefMI = ReMatDefs[I->valno->id];
1933 MachineInstr *ReMatOrigDefMI = ReMatOrigDefs[I->valno->id];
1934 bool DefIsReMat = ReMatDefMI != NULL;
Evan Chengf2fbca62007-11-12 06:35:08 +00001935 bool CanDelete = ReMatDelete[I->valno->id];
1936 int LdSlot = 0;
Evan Cheng81a03822007-11-17 00:40:40 +00001937 bool isLoadSS = DefIsReMat && tii_->isLoadFromStackSlot(ReMatDefMI, LdSlot);
Evan Chengf2fbca62007-11-12 06:35:08 +00001938 bool isLoad = isLoadSS ||
Chris Lattner749c6f62008-01-07 07:27:27 +00001939 (DefIsReMat && ReMatDefMI->getDesc().isSimpleLoad());
Evan Cheng81a03822007-11-17 00:40:40 +00001940 rewriteInstructionsForSpills(li, TrySplit, I, ReMatOrigDefMI, ReMatDefMI,
Evan Cheng0cbb1162007-11-29 01:06:25 +00001941 Slot, LdSlot, isLoad, isLoadSS, DefIsReMat,
Evan Chengd70dbb52008-02-22 09:24:50 +00001942 CanDelete, vrm, rc, ReMatIds, loopInfo,
Evan Cheng0cbb1162007-11-29 01:06:25 +00001943 SpillMBBs, SpillIdxes, RestoreMBBs, RestoreIdxes,
Evan Cheng9c3c2212008-06-06 07:54:39 +00001944 MBBVRegsMap, NewLIs, SSWeight);
Evan Chengf2fbca62007-11-12 06:35:08 +00001945 }
1946
Evan Cheng0cbb1162007-11-29 01:06:25 +00001947 // Insert spills / restores if we are splitting.
Evan Cheng419852c2008-04-03 16:39:43 +00001948 if (!TrySplit) {
Evan Cheng4cce6b42008-04-11 17:53:36 +00001949 handleSpilledImpDefs(li, vrm, rc, NewLIs);
Evan Cheng1953d0c2007-11-29 10:12:14 +00001950 return NewLIs;
Evan Cheng419852c2008-04-03 16:39:43 +00001951 }
Evan Cheng1953d0c2007-11-29 10:12:14 +00001952
Evan Chengb50bb8c2007-12-05 08:16:32 +00001953 SmallPtrSet<LiveInterval*, 4> AddedKill;
Evan Chengaee4af62007-12-02 08:30:39 +00001954 SmallVector<unsigned, 2> Ops;
Evan Cheng1953d0c2007-11-29 10:12:14 +00001955 if (NeedStackSlot) {
1956 int Id = SpillMBBs.find_first();
1957 while (Id != -1) {
Evan Cheng9c3c2212008-06-06 07:54:39 +00001958 MachineBasicBlock *MBB = mf_->getBlockNumbered(Id);
1959 unsigned loopDepth = loopInfo->getLoopDepth(MBB);
Evan Cheng1953d0c2007-11-29 10:12:14 +00001960 std::vector<SRInfo> &spills = SpillIdxes[Id];
1961 for (unsigned i = 0, e = spills.size(); i != e; ++i) {
1962 int index = spills[i].index;
1963 unsigned VReg = spills[i].vreg;
Evan Cheng597d10d2007-12-04 00:32:23 +00001964 LiveInterval &nI = getOrCreateInterval(VReg);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001965 bool isReMat = vrm.isReMaterialized(VReg);
1966 MachineInstr *MI = getInstructionFromIndex(index);
Evan Chengaee4af62007-12-02 08:30:39 +00001967 bool CanFold = false;
1968 bool FoundUse = false;
1969 Ops.clear();
Evan Chengcddbb832007-11-30 21:23:43 +00001970 if (spills[i].canFold) {
Evan Chengaee4af62007-12-02 08:30:39 +00001971 CanFold = true;
Evan Cheng0cbb1162007-11-29 01:06:25 +00001972 for (unsigned j = 0, ee = MI->getNumOperands(); j != ee; ++j) {
1973 MachineOperand &MO = MI->getOperand(j);
1974 if (!MO.isRegister() || MO.getReg() != VReg)
1975 continue;
Evan Chengaee4af62007-12-02 08:30:39 +00001976
1977 Ops.push_back(j);
1978 if (MO.isDef())
Evan Chengcddbb832007-11-30 21:23:43 +00001979 continue;
Evan Chengaee4af62007-12-02 08:30:39 +00001980 if (isReMat ||
1981 (!FoundUse && !alsoFoldARestore(Id, index, VReg,
1982 RestoreMBBs, RestoreIdxes))) {
1983 // MI has two-address uses of the same register. If the use
1984 // isn't the first and only use in the BB, then we can't fold
1985 // it. FIXME: Move this to rewriteInstructionsForSpills.
1986 CanFold = false;
Evan Chengcddbb832007-11-30 21:23:43 +00001987 break;
1988 }
Evan Chengaee4af62007-12-02 08:30:39 +00001989 FoundUse = true;
Evan Cheng0cbb1162007-11-29 01:06:25 +00001990 }
1991 }
1992 // Fold the store into the def if possible.
Evan Chengcddbb832007-11-30 21:23:43 +00001993 bool Folded = false;
Evan Chengaee4af62007-12-02 08:30:39 +00001994 if (CanFold && !Ops.empty()) {
1995 if (tryFoldMemoryOperand(MI, vrm, NULL, index, Ops, true, Slot,VReg)){
Evan Chengcddbb832007-11-30 21:23:43 +00001996 Folded = true;
Evan Chengf38d14f2007-12-05 09:05:34 +0000997 if (FoundUse) {
Evan Chengaee4af62007-12-02 08:30:39 +00001998 // Also folded uses, do not issue a load.
1999 eraseRestoreInfo(Id, index, VReg, RestoreMBBs, RestoreIdxes);
Evan Chengf38d14f2007-12-05 09:05:34 +00002000 nI.removeRange(getLoadIndex(index), getUseIndex(index)+1);
2001 }
Evan Cheng597d10d2007-12-04 00:32:23 +00002002 nI.removeRange(getDefIndex(index), getStoreIndex(index));
Evan Chengcddbb832007-11-30 21:23:43 +00002003 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00002004 }
2005
Evan Cheng7e073ba2008-04-09 20:57:25 +00002006 // Otherwise tell the spiller to issue a spill.
Evan Chengb50bb8c2007-12-05 08:16:32 +00002007 if (!Folded) {
2008 LiveRange *LR = &nI.ranges[nI.ranges.size()-1];
2009 bool isKill = LR->end == getStoreIndex(index);
Evan Chengb0a6f622008-05-20 08:10:37 +00002010 if (!MI->registerDefIsDead(nI.reg))
2011 // No need to spill a dead def.
2012 vrm.addSpillPoint(VReg, isKill, MI);
Evan Chengb50bb8c2007-12-05 08:16:32 +00002013 if (isKill)
2014 AddedKill.insert(&nI);
2015 }
Evan Cheng9c3c2212008-06-06 07:54:39 +00002016
2017 // Update spill slot weight.
2018 if (!isReMat)
Evan Chengc3417602008-06-21 06:45:54 +00002019 SSWeight += getSpillWeight(true, false, loopDepth);
Evan Cheng0cbb1162007-11-29 01:06:25 +00002020 }
Evan Cheng1953d0c2007-11-29 10:12:14 +00002021 Id = SpillMBBs.find_next(Id);
Evan Cheng0cbb1162007-11-29 01:06:25 +00002022 }
Evan Cheng1953d0c2007-11-29 10:12:14 +00002023 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00002024
Evan Cheng1953d0c2007-11-29 10:12:14 +00002025 int Id = RestoreMBBs.find_first();
2026 while (Id != -1) {
Evan Cheng9c3c2212008-06-06 07:54:39 +00002027 MachineBasicBlock *MBB = mf_->getBlockNumbered(Id);
2028 unsigned loopDepth = loopInfo->getLoopDepth(MBB);
2029
Evan Cheng1953d0c2007-11-29 10:12:14 +00002030 std::vector<SRInfo> &restores = RestoreIdxes[Id];
2031 for (unsigned i = 0, e = restores.size(); i != e; ++i) {
2032 int index = restores[i].index;
2033 if (index == -1)
2034 continue;
2035 unsigned VReg = restores[i].vreg;
Evan Cheng597d10d2007-12-04 00:32:23 +00002036 LiveInterval &nI = getOrCreateInterval(VReg);
Evan Cheng9c3c2212008-06-06 07:54:39 +00002037 bool isReMat = vrm.isReMaterialized(VReg);
Evan Cheng81a03822007-11-17 00:40:40 +00002038 MachineInstr *MI = getInstructionFromIndex(index);
Evan Chengaee4af62007-12-02 08:30:39 +00002039 bool CanFold = false;
2040 Ops.clear();
Evan Chengcddbb832007-11-30 21:23:43 +00002041 if (restores[i].canFold) {
Evan Chengaee4af62007-12-02 08:30:39 +00002042 CanFold = true;
Evan Cheng81a03822007-11-17 00:40:40 +00002043 for (unsigned j = 0, ee = MI->getNumOperands(); j != ee; ++j) {
2044 MachineOperand &MO = MI->getOperand(j);
2045 if (!MO.isRegister() || MO.getReg() != VReg)
2046 continue;
Evan Chengaee4af62007-12-02 08:30:39 +00002047
Evan Cheng0cbb1162007-11-29 01:06:25 +00002048 if (MO.isDef()) {
Evan Chengaee4af62007-12-02 08:30:39 +00002049 // If this restore were to be folded, it would have been folded
2050 // already.
2051 CanFold = false;
Evan Cheng81a03822007-11-17 00:40:40 +00002052 break;
2053 }
Evan Chengaee4af62007-12-02 08:30:39 +00002054 Ops.push_back(j);
Evan Cheng81a03822007-11-17 00:40:40 +00002055 }
2056 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00002057
2058 // Fold the load into the use if possible.
Evan Chengcddbb832007-11-30 21:23:43 +00002059 bool Folded = false;
Evan Chengaee4af62007-12-02 08:30:39 +00002060 if (CanFold && !Ops.empty()) {
Evan Cheng9c3c2212008-06-06 07:54:39 +00002061 if (!isReMat)
Evan Chengaee4af62007-12-02 08:30:39 +00002062 Folded = tryFoldMemoryOperand(MI, vrm, NULL,index,Ops,true,Slot,VReg);
2063 else {
Evan Cheng0cbb1162007-11-29 01:06:25 +00002064 MachineInstr *ReMatDefMI = vrm.getReMaterializedMI(VReg);
2065 int LdSlot = 0;
2066 bool isLoadSS = tii_->isLoadFromStackSlot(ReMatDefMI, LdSlot);
2067 // If the rematerializable def is a load, also try to fold it.
Chris Lattner749c6f62008-01-07 07:27:27 +00002068 if (isLoadSS || ReMatDefMI->getDesc().isSimpleLoad())
Evan Chengaee4af62007-12-02 08:30:39 +00002069 Folded = tryFoldMemoryOperand(MI, vrm, ReMatDefMI, index,
2070 Ops, isLoadSS, LdSlot, VReg);
Evan Chengd70dbb52008-02-22 09:24:50 +00002071 unsigned ImpUse = getReMatImplicitUse(li, ReMatDefMI);
2072 if (ImpUse) {
2073 // Re-matting an instruction with virtual register use. Add the
2074 // register as an implicit use on the use MI and update the register
Evan Cheng24d2f8a2008-03-31 07:53:30 +00002075 // interval's spill weight to HUGE_VALF to prevent it from being
2076 // spilled.
Evan Chengd70dbb52008-02-22 09:24:50 +00002077 LiveInterval &ImpLi = getInterval(ImpUse);
Evan Cheng24d2f8a2008-03-31 07:53:30 +00002078 ImpLi.weight = HUGE_VALF;
Evan Chengd70dbb52008-02-22 09:24:50 +00002079 MI->addOperand(MachineOperand::CreateReg(ImpUse, false, true));
2080 }
Evan Chengaee4af62007-12-02 08:30:39 +00002081 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00002082 }
2083 // If folding is not possible / failed, then tell the spiller to issue a
2084 // load / rematerialization for us.
Evan Cheng597d10d2007-12-04 00:32:23 +00002085 if (Folded)
2086 nI.removeRange(getLoadIndex(index), getUseIndex(index)+1);
Evan Chengb50bb8c2007-12-05 08:16:32 +00002087 else
Evan Cheng0cbb1162007-11-29 01:06:25 +00002088 vrm.addRestorePoint(VReg, MI);
Evan Cheng9c3c2212008-06-06 07:54:39 +00002089
2090 // Update spill slot weight.
2091 if (!isReMat)
Evan Chengc3417602008-06-21 06:45:54 +00002092 SSWeight += getSpillWeight(false, true, loopDepth);
Evan Cheng81a03822007-11-17 00:40:40 +00002093 }
Evan Cheng1953d0c2007-11-29 10:12:14 +00002094 Id = RestoreMBBs.find_next(Id);
Evan Cheng81a03822007-11-17 00:40:40 +00002095 }
2096
Evan Chengb50bb8c2007-12-05 08:16:32 +00002097 // Finalize intervals: add kills, finalize spill weights, and filter out
2098 // dead intervals.
Evan Cheng597d10d2007-12-04 00:32:23 +00002099 std::vector<LiveInterval*> RetNewLIs;
2100 for (unsigned i = 0, e = NewLIs.size(); i != e; ++i) {
2101 LiveInterval *LI = NewLIs[i];
2102 if (!LI->empty()) {
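  // Normalize the accumulated spill weight by the interval's approximate
  // length in instruction slots.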
Owen Anderson496bac52008-07-23 19:47:27 +00002103 LI->weight /= InstrSlots::NUM * getApproximateInstructionCount(*LI);
Evan Chengb50bb8c2007-12-05 08:16:32 +00002104 if (!AddedKill.count(LI)) {
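  // No kill was recorded for this interval; mark its last use operand as a
  // kill when the operand is implicit or not tied to a def, and record the
  // kill point in the VirtRegMap.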
2105 LiveRange *LR = &LI->ranges[LI->ranges.size()-1];
Evan Chengd120ffd2007-12-05 10:24:35 +00002106 unsigned LastUseIdx = getBaseIndex(LR->end);
2107 MachineInstr *LastUse = getInstructionFromIndex(LastUseIdx);
Evan Cheng6130f662008-03-05 00:59:57 +00002108 int UseIdx = LastUse->findRegisterUseOperandIdx(LI->reg, false);
Evan Chengb50bb8c2007-12-05 08:16:32 +00002109 assert(UseIdx != -1);
Evan Chengd70dbb52008-02-22 09:24:50 +00002110 if (LastUse->getOperand(UseIdx).isImplicit() ||
2111 LastUse->getDesc().getOperandConstraint(UseIdx,TOI::TIED_TO) == -1){
Evan Chengb50bb8c2007-12-05 08:16:32 +00002112 LastUse->getOperand(UseIdx).setIsKill();
Evan Chengd120ffd2007-12-05 10:24:35 +00002113 vrm.addKillPoint(LI->reg, LastUseIdx);
Evan Chengadf85902007-12-05 09:51:10 +00002114 }
Evan Chengb50bb8c2007-12-05 08:16:32 +00002115 }
Evan Cheng597d10d2007-12-04 00:32:23 +00002116 RetNewLIs.push_back(LI);
2117 }
2118 }
Evan Cheng81a03822007-11-17 00:40:40 +00002119
Evan Cheng4cce6b42008-04-11 17:53:36 +00002120 handleSpilledImpDefs(li, vrm, rc, RetNewLIs);
Evan Cheng597d10d2007-12-04 00:32:23 +00002121 return RetNewLIs;
Evan Chengf2fbca62007-11-12 06:35:08 +00002122}
Evan Cheng676dd7c2008-03-11 07:19:34 +00002123
2124/// hasAllocatableSuperReg - Return true if the specified physical register has
2125/// any super-register that is allocatable and has a live interval.
2126bool LiveIntervals::hasAllocatableSuperReg(unsigned Reg) const {
2127 for (const unsigned* AS = tri_->getSuperRegisters(Reg); *AS; ++AS)
2128 if (allocatableRegs_[*AS] && hasInterval(*AS))
2129 return true;
2130 return false;
2131}
2132
2133/// getRepresentativeReg - Find the largest super register of the specified
2134/// physical register.
2135unsigned LiveIntervals::getRepresentativeReg(unsigned Reg) const {
2136 // Find the largest super-register that is allocatable.
2137 unsigned BestReg = Reg;
2138 for (const unsigned* AS = tri_->getSuperRegisters(Reg); *AS; ++AS) {
2139 unsigned SuperReg = *AS;
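  // Pick the first super-register that has a live interval but no allocatable
  // super-register above it, i.e. the widest register that can stand in for
  // Reg.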
2140 if (!hasAllocatableSuperReg(SuperReg) && hasInterval(SuperReg)) {
2141 BestReg = SuperReg;
2142 break;
2143 }
2144 }
2145 return BestReg;
2146}
2147
2148/// getNumConflictsWithPhysReg - Return the number of uses and defs of the
2149/// specified interval that conflict with the specified physical register.
2150unsigned LiveIntervals::getNumConflictsWithPhysReg(const LiveInterval &li,
2151 unsigned PhysReg) const {
2152 unsigned NumConflicts = 0;
2153 const LiveInterval &pli = getInterval(getRepresentativeReg(PhysReg));
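  // Walk all uses and defs of li.reg and count the operands whose instruction
  // lies within the physical register's live interval.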
2154 for (MachineRegisterInfo::reg_iterator I = mri_->reg_begin(li.reg),
2155 E = mri_->reg_end(); I != E; ++I) {
2156 MachineOperand &O = I.getOperand();
2157 MachineInstr *MI = O.getParent();
2158 unsigned Index = getInstructionIndex(MI);
2159 if (pli.liveAt(Index))
2160 ++NumConflicts;
2161 }
2162 return NumConflicts;
2163}
2164
2165/// spillPhysRegAroundRegDefsUses - Spill the specified physical register
2166/// around all defs and uses of the specified interval.
2167void LiveIntervals::spillPhysRegAroundRegDefsUses(const LiveInterval &li,
2168 unsigned PhysReg, VirtRegMap &vrm) {
2169 unsigned SpillReg = getRepresentativeReg(PhysReg);
2170
2171 for (const unsigned *AS = tri_->getAliasSet(PhysReg); *AS; ++AS)
2172 // Every register that aliases PhysReg must be either the chosen representative
2173 // super-register itself, unallocatable, or a sub-register of that representative.
2174 // Assert since we can't handle any other case yet.
2175 assert(*AS == SpillReg || !allocatableRegs_[*AS] ||
2176 tri_->isSuperRegister(*AS, SpillReg));
2177
2178 LiveInterval &pli = getInterval(SpillReg);
2179 SmallPtrSet<MachineInstr*, 8> SeenMIs;
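  // Visit each instruction that uses or defines li.reg exactly once; wherever
  // the representative physreg is live across it, spill that physreg around
  // the instruction.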
2180 for (MachineRegisterInfo::reg_iterator I = mri_->reg_begin(li.reg),
2181 E = mri_->reg_end(); I != E; ++I) {
2182 MachineOperand &O = I.getOperand();
2183 MachineInstr *MI = O.getParent();
2184 if (SeenMIs.count(MI))
2185 continue;
2186 SeenMIs.insert(MI);
2187 unsigned Index = getInstructionIndex(MI);
2188 if (pli.liveAt(Index)) {
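  // The representative physreg is live at this instruction: request an
  // emergency spill around it and remove this instruction's slots from the
  // physreg interval and any live sub-register intervals.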
2189 vrm.addEmergencySpill(SpillReg, MI);
2190 pli.removeRange(getLoadIndex(Index), getStoreIndex(Index)+1);
2191 for (const unsigned* AS = tri_->getSubRegisters(SpillReg); *AS; ++AS) {
2192 if (!hasInterval(*AS))
2193 continue;
2194 LiveInterval &spli = getInterval(*AS);
2195 if (spli.liveAt(Index))
2196 spli.removeRange(getLoadIndex(Index), getStoreIndex(Index)+1);
2197 }
2198 }
2199 }
2200}
Owen Andersonc4dc1322008-06-05 17:15:43 +00002201
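/// addLiveRangeToEndOfBlock - Create a live range for reg beginning at the
/// def slot of startInst and extending to the end of startInst's basic block.
/// The new value is marked as having a PHI kill at the block boundary.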
2202LiveRange LiveIntervals::addLiveRangeToEndOfBlock(unsigned reg,
2203 MachineInstr* startInst) {
2204 LiveInterval& Interval = getOrCreateInterval(reg);
2205 VNInfo* VN = Interval.getNextValue(
2206 getInstructionIndex(startInst) + InstrSlots::DEF,
2207 startInst, getVNInfoAllocator());
2208 VN->hasPHIKill = true;
2209 VN->kills.push_back(getMBBEndIdx(startInst->getParent()));
2210 LiveRange LR(getInstructionIndex(startInst) + InstrSlots::DEF,
2211 getMBBEndIdx(startInst->getParent()) + 1, VN);
2212 Interval.addRange(LR);
2213
2214 return LR;
2215}