//===-- LiveIntervalAnalysis.cpp - Live Interval Analysis ----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the LiveInterval analysis pass which is used
// by the Linear Scan Register allocator. This pass linearizes the
// basic blocks of the function in DFS order and uses the
// LiveVariables pass to conservatively compute live intervals for
// each virtual and physical register.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "liveintervals"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "VirtRegMap.h"
#include "llvm/Value.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/CodeGen/LiveVariables.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/PseudoSourceValue.h"
#include "llvm/Target/TargetRegisterInfo.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetOptions.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/STLExtras.h"
#include <algorithm>
#include <limits>
#include <cmath>
using namespace llvm;

// Hidden options for help debugging.
static cl::opt<bool> DisableReMat("disable-rematerialization",
                                  cl::init(false), cl::Hidden);

static cl::opt<bool> SplitAtBB("split-intervals-at-bb",
                               cl::init(true), cl::Hidden);
static cl::opt<int> SplitLimit("split-limit",
                               cl::init(-1), cl::Hidden);

static cl::opt<bool> EnableAggressiveRemat("aggressive-remat", cl::Hidden);

static cl::opt<bool> EnableFastSpilling("fast-spill",
                                        cl::init(false), cl::Hidden);

STATISTIC(numIntervals, "Number of original intervals");
STATISTIC(numFolds    , "Number of loads/stores folded into instructions");
STATISTIC(numSplits   , "Number of intervals split");

char LiveIntervals::ID = 0;
static RegisterPass<LiveIntervals> X("liveintervals", "Live Interval Analysis");

void LiveIntervals::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<AliasAnalysis>();
  AU.addPreserved<AliasAnalysis>();
  AU.addPreserved<LiveVariables>();
  AU.addRequired<LiveVariables>();
  AU.addPreservedID(MachineLoopInfoID);
  AU.addPreservedID(MachineDominatorsID);

  if (!StrongPHIElim) {
    AU.addPreservedID(PHIEliminationID);
    AU.addRequiredID(PHIEliminationID);
  }

  AU.addRequiredID(TwoAddressInstructionPassID);
  MachineFunctionPass::getAnalysisUsage(AU);
}

void LiveIntervals::releaseMemory() {
  // Free the live intervals themselves.
  for (DenseMap<unsigned, LiveInterval*>::iterator I = r2iMap_.begin(),
       E = r2iMap_.end(); I != E; ++I)
    delete I->second;

  MBB2IdxMap.clear();
  Idx2MBBMap.clear();
  mi2iMap_.clear();
  i2miMap_.clear();
  r2iMap_.clear();
  // Release VNInfo memory regions after all VNInfo objects are dtor'd.
  VNInfoAllocator.Reset();
  while (!ClonedMIs.empty()) {
    MachineInstr *MI = ClonedMIs.back();
    ClonedMIs.pop_back();
    mf_->DeleteMachineInstr(MI);
  }
}

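/// computeNumbering - Number every MachineInstr and MachineBasicBlock in the
/// function. Each block gets an empty index slot at its start, each
/// instruction gets InstrSlots::NUM consecutive index values, and
/// max(1, NumDefs) empty slots are left after every instruction. If live
/// intervals already exist, their range, def, and kill indices are remapped
/// to the new numbering.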
void LiveIntervals::computeNumbering() {
  Index2MiMap OldI2MI = i2miMap_;
  std::vector<IdxMBBPair> OldI2MBB = Idx2MBBMap;

  Idx2MBBMap.clear();
  MBB2IdxMap.clear();
  mi2iMap_.clear();
  i2miMap_.clear();

  FunctionSize = 0;

  // Number MachineInstrs and MachineBasicBlocks.
  // Initialize MBB indexes to a sentinel.
  MBB2IdxMap.resize(mf_->getNumBlockIDs(), std::make_pair(~0U,~0U));

  unsigned MIIndex = 0;
  for (MachineFunction::iterator MBB = mf_->begin(), E = mf_->end();
       MBB != E; ++MBB) {
    unsigned StartIdx = MIIndex;

    // Insert an empty slot at the beginning of each block.
    MIIndex += InstrSlots::NUM;
    i2miMap_.push_back(0);

    for (MachineBasicBlock::iterator I = MBB->begin(), E = MBB->end();
         I != E; ++I) {
      bool inserted = mi2iMap_.insert(std::make_pair(I, MIIndex)).second;
      assert(inserted && "multiple MachineInstr -> index mappings");
      inserted = true;
      i2miMap_.push_back(I);
      MIIndex += InstrSlots::NUM;
      FunctionSize++;

      // Insert max(1, numdefs) empty slots after every instruction.
      unsigned Slots = I->getDesc().getNumDefs();
      if (Slots == 0)
        Slots = 1;
      MIIndex += InstrSlots::NUM * Slots;
      while (Slots--)
        i2miMap_.push_back(0);
    }

    // Set the MBB2IdxMap entry for this MBB.
    MBB2IdxMap[MBB->getNumber()] = std::make_pair(StartIdx, MIIndex - 1);
    Idx2MBBMap.push_back(std::make_pair(StartIdx, MBB));
  }
  std::sort(Idx2MBBMap.begin(), Idx2MBBMap.end(), Idx2MBBCompare());

  if (!OldI2MI.empty())
    for (iterator OI = begin(), OE = end(); OI != OE; ++OI) {
      for (LiveInterval::iterator LI = OI->second->begin(),
           LE = OI->second->end(); LI != LE; ++LI) {

        // Remap the start index of the live range to the corresponding new
        // number, or our best guess at what it _should_ correspond to if the
        // original instruction has been erased. This is either the following
        // instruction or its predecessor.
        unsigned index = LI->start / InstrSlots::NUM;
        unsigned offset = LI->start % InstrSlots::NUM;
        if (offset == InstrSlots::LOAD) {
          std::vector<IdxMBBPair>::const_iterator I =
                  std::lower_bound(OldI2MBB.begin(), OldI2MBB.end(), LI->start);
          // Take the pair containing the index
          std::vector<IdxMBBPair>::const_iterator J =
                    (I == OldI2MBB.end() && OldI2MBB.size()>0) ? (I-1): I;

          LI->start = getMBBStartIdx(J->second);
        } else {
          LI->start = mi2iMap_[OldI2MI[index]] + offset;
        }

        // Remap the ending index in the same way that we remapped the start,
        // except for the final step where we always map to the immediately
        // following instruction.
        index = (LI->end - 1) / InstrSlots::NUM;
        offset = LI->end % InstrSlots::NUM;
        if (offset == InstrSlots::LOAD) {
          // VReg dies at end of block.
          std::vector<IdxMBBPair>::const_iterator I =
                  std::lower_bound(OldI2MBB.begin(), OldI2MBB.end(), LI->end);
          --I;

          LI->end = getMBBEndIdx(I->second) + 1;
        } else {
          unsigned idx = index;
          while (index < OldI2MI.size() && !OldI2MI[index]) ++index;

          if (index != OldI2MI.size())
            LI->end = mi2iMap_[OldI2MI[index]] + (idx == index ? offset : 0);
          else
            LI->end = InstrSlots::NUM * i2miMap_.size();
        }
      }

      for (LiveInterval::vni_iterator VNI = OI->second->vni_begin(),
           VNE = OI->second->vni_end(); VNI != VNE; ++VNI) {
        VNInfo* vni = *VNI;

        // Remap the VNInfo def index, which works the same as the
        // start indices above. VN's with special sentinel defs
        // don't need to be remapped.
        if (vni->isDefAccurate() && !vni->isUnused()) {
          unsigned index = vni->def / InstrSlots::NUM;
          unsigned offset = vni->def % InstrSlots::NUM;
          if (offset == InstrSlots::LOAD) {
            std::vector<IdxMBBPair>::const_iterator I =
                    std::lower_bound(OldI2MBB.begin(), OldI2MBB.end(), vni->def);
            // Take the pair containing the index
            std::vector<IdxMBBPair>::const_iterator J =
                      (I == OldI2MBB.end() && OldI2MBB.size()>0) ? (I-1): I;

            vni->def = getMBBStartIdx(J->second);
          } else {
            vni->def = mi2iMap_[OldI2MI[index]] + offset;
          }
        }

        // Remap the VNInfo kill indices, which works the same as
        // the end indices above.
        for (size_t i = 0; i < vni->kills.size(); ++i) {
          // PHI kills don't need to be remapped.
          if (!vni->kills[i]) continue;

          unsigned index = (vni->kills[i]-1) / InstrSlots::NUM;
          unsigned offset = vni->kills[i] % InstrSlots::NUM;
          if (offset == InstrSlots::LOAD) {
            std::vector<IdxMBBPair>::const_iterator I =
             std::lower_bound(OldI2MBB.begin(), OldI2MBB.end(), vni->kills[i]);
            --I;

            vni->kills[i] = getMBBEndIdx(I->second);
          } else {
            unsigned idx = index;
            while (index < OldI2MI.size() && !OldI2MI[index]) ++index;

            if (index != OldI2MI.size())
              vni->kills[i] = mi2iMap_[OldI2MI[index]] +
                              (idx == index ? offset : 0);
            else
              vni->kills[i] = InstrSlots::NUM * i2miMap_.size();
          }
        }
      }
    }
}

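/// scaleNumbering - Rescale all instruction and basic block indices by the
/// given factor, updating the MBB index maps, the instruction index maps,
/// and every live interval to match the new numbering.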
void LiveIntervals::scaleNumbering(int factor) {
  // Need to
  //  * scale MBB begin and end points
  //  * scale all ranges.
  //  * Update VNI structures.
  //  * Scale instruction numberings

  // Scale the MBB indices.
  Idx2MBBMap.clear();
  for (MachineFunction::iterator MBB = mf_->begin(), MBBE = mf_->end();
       MBB != MBBE; ++MBB) {
    std::pair<unsigned, unsigned> &mbbIndices = MBB2IdxMap[MBB->getNumber()];
    mbbIndices.first = InstrSlots::scale(mbbIndices.first, factor);
    mbbIndices.second = InstrSlots::scale(mbbIndices.second, factor);
    Idx2MBBMap.push_back(std::make_pair(mbbIndices.first, MBB));
  }
  std::sort(Idx2MBBMap.begin(), Idx2MBBMap.end(), Idx2MBBCompare());

  // Scale the intervals.
  for (iterator LI = begin(), LE = end(); LI != LE; ++LI) {
    LI->second->scaleNumbering(factor);
  }

  // Scale MachineInstrs.
  Mi2IndexMap oldmi2iMap = mi2iMap_;
  unsigned highestSlot = 0;
  for (Mi2IndexMap::iterator MI = oldmi2iMap.begin(), ME = oldmi2iMap.end();
       MI != ME; ++MI) {
    unsigned newSlot = InstrSlots::scale(MI->second, factor);
    mi2iMap_[MI->first] = newSlot;
    highestSlot = std::max(highestSlot, newSlot);
  }

  i2miMap_.clear();
  i2miMap_.resize(highestSlot + 1);
  for (Mi2IndexMap::iterator MI = mi2iMap_.begin(), ME = mi2iMap_.end();
       MI != ME; ++MI) {
    i2miMap_[MI->second] = MI->first;
  }

}

/// runOnMachineFunction - Compute live intervals for the whole function.
///
bool LiveIntervals::runOnMachineFunction(MachineFunction &fn) {
  mf_ = &fn;
  mri_ = &mf_->getRegInfo();
  tm_ = &fn.getTarget();
  tri_ = tm_->getRegisterInfo();
  tii_ = tm_->getInstrInfo();
  aa_ = &getAnalysis<AliasAnalysis>();
  lv_ = &getAnalysis<LiveVariables>();
  allocatableRegs_ = tri_->getAllocatableSet(fn);

  computeNumbering();
  computeIntervals();

  numIntervals += getNumIntervals();

  DEBUG(dump());
  return true;
}

/// print - Implement the dump method.
void LiveIntervals::print(std::ostream &O, const Module* ) const {
  O << "********** INTERVALS **********\n";
  for (const_iterator I = begin(), E = end(); I != E; ++I) {
    I->second->print(O, tri_);
    O << "\n";
  }

  O << "********** MACHINEINSTRS **********\n";
  for (MachineFunction::iterator mbbi = mf_->begin(), mbbe = mf_->end();
       mbbi != mbbe; ++mbbi) {
    O << ((Value*)mbbi->getBasicBlock())->getName() << ":\n";
    for (MachineBasicBlock::iterator mii = mbbi->begin(),
           mie = mbbi->end(); mii != mie; ++mii) {
      O << getInstructionIndex(mii) << '\t' << *mii;
    }
  }
}

/// conflictsWithPhysRegDef - Returns true if the specified register
/// is defined during the duration of the specified interval.
bool LiveIntervals::conflictsWithPhysRegDef(const LiveInterval &li,
                                            VirtRegMap &vrm, unsigned reg) {
  for (LiveInterval::Ranges::const_iterator
         I = li.ranges.begin(), E = li.ranges.end(); I != E; ++I) {
    for (unsigned index = getBaseIndex(I->start),
           end = getBaseIndex(I->end-1) + InstrSlots::NUM; index != end;
         index += InstrSlots::NUM) {
      // skip deleted instructions
      while (index != end && !getInstructionFromIndex(index))
        index += InstrSlots::NUM;
      if (index == end) break;

      MachineInstr *MI = getInstructionFromIndex(index);
      unsigned SrcReg, DstReg, SrcSubReg, DstSubReg;
      if (tii_->isMoveInstr(*MI, SrcReg, DstReg, SrcSubReg, DstSubReg))
        if (SrcReg == li.reg || DstReg == li.reg)
          continue;
      for (unsigned i = 0; i != MI->getNumOperands(); ++i) {
        MachineOperand& mop = MI->getOperand(i);
        if (!mop.isReg())
          continue;
        unsigned PhysReg = mop.getReg();
        if (PhysReg == 0 || PhysReg == li.reg)
          continue;
        if (TargetRegisterInfo::isVirtualRegister(PhysReg)) {
          if (!vrm.hasPhys(PhysReg))
            continue;
          PhysReg = vrm.getPhys(PhysReg);
        }
        if (PhysReg && tri_->regsOverlap(PhysReg, reg))
          return true;
      }
    }
  }

  return false;
}

/// conflictsWithPhysRegRef - Similar to conflictsWithPhysRegDef except
/// it can check use as well.
bool LiveIntervals::conflictsWithPhysRegRef(LiveInterval &li,
                                            unsigned Reg, bool CheckUse,
                                  SmallPtrSet<MachineInstr*,32> &JoinedCopies) {
  for (LiveInterval::Ranges::const_iterator
         I = li.ranges.begin(), E = li.ranges.end(); I != E; ++I) {
    for (unsigned index = getBaseIndex(I->start),
           end = getBaseIndex(I->end-1) + InstrSlots::NUM; index != end;
         index += InstrSlots::NUM) {
      // Skip deleted instructions.
      MachineInstr *MI = 0;
      while (index != end) {
        MI = getInstructionFromIndex(index);
        if (MI)
          break;
        index += InstrSlots::NUM;
      }
      if (index == end) break;

      if (JoinedCopies.count(MI))
        continue;
      for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
        MachineOperand& MO = MI->getOperand(i);
        if (!MO.isReg())
          continue;
        if (MO.isUse() && !CheckUse)
          continue;
        unsigned PhysReg = MO.getReg();
        if (PhysReg == 0 || TargetRegisterInfo::isVirtualRegister(PhysReg))
          continue;
        if (tri_->isSubRegister(Reg, PhysReg))
          return true;
      }
    }
  }

  return false;
}

void LiveIntervals::printRegName(unsigned reg) const {
  if (TargetRegisterInfo::isPhysicalRegister(reg))
    cerr << tri_->getName(reg);
  else
    cerr << "%reg" << reg;
}

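/// handleVirtualRegisterDef - Update the live interval for the virtual
/// register defined by the given operand of mi. The first definition seen
/// builds ranges from the def to each kill; subsequent definitions come from
/// two-address rewriting or PHI elimination and split or extend the existing
/// value numbers accordingly.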
void LiveIntervals::handleVirtualRegisterDef(MachineBasicBlock *mbb,
                                             MachineBasicBlock::iterator mi,
                                             unsigned MIIdx, MachineOperand& MO,
                                             unsigned MOIdx,
                                             LiveInterval &interval) {
  DOUT << "\t\tregister: "; DEBUG(printRegName(interval.reg));
  LiveVariables::VarInfo& vi = lv_->getVarInfo(interval.reg);

  if (mi->getOpcode() == TargetInstrInfo::IMPLICIT_DEF) {
    DOUT << "is an implicit_def\n";
    return;
  }

  // Virtual registers may be defined multiple times (due to phi
  // elimination and 2-addr elimination). Much of what we do only has to be
  // done once for the vreg. We use an empty interval to detect the first
  // time we see a vreg.
  if (interval.empty()) {
    // Get the Idx of the defining instructions.
    unsigned defIndex = getDefIndex(MIIdx);
    // Earlyclobbers move back one.
    if (MO.isEarlyClobber())
      defIndex = getUseIndex(MIIdx);
    VNInfo *ValNo;
    MachineInstr *CopyMI = NULL;
    unsigned SrcReg, DstReg, SrcSubReg, DstSubReg;
    if (mi->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG ||
        mi->getOpcode() == TargetInstrInfo::INSERT_SUBREG ||
        mi->getOpcode() == TargetInstrInfo::SUBREG_TO_REG ||
        tii_->isMoveInstr(*mi, SrcReg, DstReg, SrcSubReg, DstSubReg))
      CopyMI = mi;
    // Earlyclobbers move back one.
    ValNo = interval.getNextValue(defIndex, CopyMI, true, VNInfoAllocator);

    assert(ValNo->id == 0 && "First value in interval is not 0?");

    // Loop over all of the blocks that the vreg is defined in. There are
    // two cases we have to handle here. The most common case is a vreg
    // whose lifetime is contained within a basic block. In this case there
    // will be a single kill, in MBB, which comes after the definition.
    if (vi.Kills.size() == 1 && vi.Kills[0]->getParent() == mbb) {
      // FIXME: what about dead vars?
      unsigned killIdx;
      if (vi.Kills[0] != mi)
        killIdx = getUseIndex(getInstructionIndex(vi.Kills[0]))+1;
      else
        killIdx = defIndex+1;

      // If the kill happens after the definition, we have an intra-block
      // live range.
      if (killIdx > defIndex) {
        assert(vi.AliveBlocks.empty() &&
               "Shouldn't be alive across any blocks!");
        LiveRange LR(defIndex, killIdx, ValNo);
        interval.addRange(LR);
        DOUT << " +" << LR << "\n";
        interval.addKill(ValNo, killIdx);
        return;
      }
    }

    // The other case we handle is when a virtual register lives to the end
    // of the defining block, potentially live across some blocks, then is
    // live into some number of blocks, but gets killed. Start by adding a
    // range that goes from this definition to the end of the defining block.
    LiveRange NewLR(defIndex, getMBBEndIdx(mbb)+1, ValNo);
    DOUT << " +" << NewLR;
    interval.addRange(NewLR);

    // Iterate over all of the blocks that the variable is completely
    // live in, adding [instrIndex(begin), instrIndex(end)+4) to the
    // live interval.
    for (SparseBitVector<>::iterator I = vi.AliveBlocks.begin(),
         E = vi.AliveBlocks.end(); I != E; ++I) {
      LiveRange LR(getMBBStartIdx(*I),
                   getMBBEndIdx(*I)+1,  // MBB ends at -1.
                   ValNo);
      interval.addRange(LR);
      DOUT << " +" << LR;
    }

    // Finally, this virtual register is live from the start of any killing
    // block to the 'use' slot of the killing instruction.
    for (unsigned i = 0, e = vi.Kills.size(); i != e; ++i) {
      MachineInstr *Kill = vi.Kills[i];
      unsigned killIdx = getUseIndex(getInstructionIndex(Kill))+1;
      LiveRange LR(getMBBStartIdx(Kill->getParent()),
                   killIdx, ValNo);
      interval.addRange(LR);
      interval.addKill(ValNo, killIdx);
      DOUT << " +" << LR;
    }

  } else {
    // If this is the second time we see a virtual register definition, it
    // must be due to phi elimination or two addr elimination. If this is
    // the result of two address elimination, then the vreg is one of the
    // def-and-use register operand.
    if (mi->isRegTiedToUseOperand(MOIdx)) {
      // If this is a two-address definition, then we have already processed
      // the live range. The only problem is that we didn't realize there
      // are actually two values in the live interval. Because of this we
      // need to take the LiveRegion that defines this register and split it
      // into two values.
      assert(interval.containsOneValue());
      unsigned DefIndex = getDefIndex(interval.getValNumInfo(0)->def);
      unsigned RedefIndex = getDefIndex(MIIdx);
      if (MO.isEarlyClobber())
        RedefIndex = getUseIndex(MIIdx);

      const LiveRange *OldLR = interval.getLiveRangeContaining(RedefIndex-1);
      VNInfo *OldValNo = OldLR->valno;

      // Delete the initial value, which should be short and continuous,
      // because the 2-addr copy must be in the same MBB as the redef.
      interval.removeRange(DefIndex, RedefIndex);

      // Two-address vregs should always only be redefined once. This means
      // that at this point, there should be exactly one value number in it.
      assert(interval.containsOneValue() && "Unexpected 2-addr liveint!");

      // The new value number (#1) is defined by the instruction we claimed
      // defined value #0.
      VNInfo *ValNo = interval.getNextValue(OldValNo->def, OldValNo->copy,
                                            false, // update at *
                                            VNInfoAllocator);
      ValNo->setFlags(OldValNo->getFlags()); // * <- updating here

      // Value#0 is now defined by the 2-addr instruction.
      OldValNo->def = RedefIndex;
      OldValNo->copy = 0;
      if (MO.isEarlyClobber())
        OldValNo->setHasRedefByEC(true);

      // Add the new live interval which replaces the range for the input copy.
      LiveRange LR(DefIndex, RedefIndex, ValNo);
      DOUT << " replace range with " << LR;
      interval.addRange(LR);
      interval.addKill(ValNo, RedefIndex);

      // If this redefinition is dead, we need to add a dummy unit live
      // range covering the def slot.
      if (MO.isDead())
        interval.addRange(LiveRange(RedefIndex, RedefIndex+1, OldValNo));

      DOUT << " RESULT: ";
      interval.print(DOUT, tri_);

    } else {
      // Otherwise, this must be because of phi elimination. If this is the
      // first redefinition of the vreg that we have seen, go back and change
      // the live range in the PHI block to be a different value number.
      if (interval.containsOneValue()) {
        assert(vi.Kills.size() == 1 &&
               "PHI elimination vreg should have one kill, the PHI itself!");

        // Remove the old range that we now know has an incorrect number.
        VNInfo *VNI = interval.getValNumInfo(0);
        MachineInstr *Killer = vi.Kills[0];
        unsigned Start = getMBBStartIdx(Killer->getParent());
        unsigned End = getUseIndex(getInstructionIndex(Killer))+1;
        DOUT << " Removing [" << Start << "," << End << "] from: ";
        interval.print(DOUT, tri_); DOUT << "\n";
        interval.removeRange(Start, End);
        VNI->setHasPHIKill(true);
        DOUT << " RESULT: "; interval.print(DOUT, tri_);

        // Replace the interval with one of a NEW value number. Note that this
        // value number isn't actually defined by an instruction, weird huh? :)
        LiveRange LR(Start, End,
          interval.getNextValue(mbb->getNumber(), 0, false, VNInfoAllocator));
        LR.valno->setIsPHIDef(true);
        DOUT << " replace range with " << LR;
        interval.addRange(LR);
        interval.addKill(LR.valno, End);
        DOUT << " RESULT: "; interval.print(DOUT, tri_);
      }

      // In the case of PHI elimination, each variable definition is only
      // live until the end of the block. We've already taken care of the
      // rest of the live range.
      unsigned defIndex = getDefIndex(MIIdx);
      if (MO.isEarlyClobber())
        defIndex = getUseIndex(MIIdx);

      VNInfo *ValNo;
      MachineInstr *CopyMI = NULL;
      unsigned SrcReg, DstReg, SrcSubReg, DstSubReg;
      if (mi->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG ||
          mi->getOpcode() == TargetInstrInfo::INSERT_SUBREG ||
          mi->getOpcode() == TargetInstrInfo::SUBREG_TO_REG ||
          tii_->isMoveInstr(*mi, SrcReg, DstReg, SrcSubReg, DstSubReg))
        CopyMI = mi;
      ValNo = interval.getNextValue(defIndex, CopyMI, true, VNInfoAllocator);

      unsigned killIndex = getMBBEndIdx(mbb) + 1;
      LiveRange LR(defIndex, killIndex, ValNo);
      interval.addRange(LR);
      interval.addKill(ValNo, killIndex);
      ValNo->setHasPHIKill(true);
      DOUT << " +" << LR;
    }
  }

  DOUT << '\n';
}

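/// handlePhysicalRegisterDef - Update the live interval for a physical
/// register def. The interval always ends within the defining block: at the
/// killing use, at a later redefinition, or one slot past the def if the
/// register is dead or never read.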
void LiveIntervals::handlePhysicalRegisterDef(MachineBasicBlock *MBB,
                                              MachineBasicBlock::iterator mi,
                                              unsigned MIIdx,
                                              MachineOperand& MO,
                                              LiveInterval &interval,
                                              MachineInstr *CopyMI) {
  // A physical register cannot be live across basic blocks, so its
  // lifetime must end somewhere in its defining basic block.
  DOUT << "\t\tregister: "; DEBUG(printRegName(interval.reg));

  unsigned baseIndex = MIIdx;
  unsigned start = getDefIndex(baseIndex);
  // Earlyclobbers move back one.
  if (MO.isEarlyClobber())
    start = getUseIndex(MIIdx);
  unsigned end = start;

  // If it is not used after definition, it is considered dead at
  // the instruction defining it. Hence its interval is:
  // [defSlot(def), defSlot(def)+1)
  if (MO.isDead()) {
    DOUT << " dead";
    end = start + 1;
    goto exit;
  }

  // If it is not dead on definition, it must be killed by a
  // subsequent instruction. Hence its interval is:
  // [defSlot(def), useSlot(kill)+1)
  baseIndex += InstrSlots::NUM;
  while (++mi != MBB->end()) {
    while (baseIndex / InstrSlots::NUM < i2miMap_.size() &&
           getInstructionFromIndex(baseIndex) == 0)
      baseIndex += InstrSlots::NUM;
    if (mi->killsRegister(interval.reg, tri_)) {
      DOUT << " killed";
      end = getUseIndex(baseIndex) + 1;
      goto exit;
    } else {
      int DefIdx = mi->findRegisterDefOperandIdx(interval.reg, false, tri_);
      if (DefIdx != -1) {
        if (mi->isRegTiedToUseOperand(DefIdx)) {
          // Two-address instruction.
          end = getDefIndex(baseIndex);
          if (mi->getOperand(DefIdx).isEarlyClobber())
            end = getUseIndex(baseIndex);
        } else {
          // Another instruction redefines the register before it is ever read.
          // Then the register is essentially dead at the instruction that
          // defines it. Hence its interval is:
          // [defSlot(def), defSlot(def)+1)
          DOUT << " dead";
          end = start + 1;
        }
        goto exit;
      }
    }

    baseIndex += InstrSlots::NUM;
  }

  // The only case we should have a dead physreg here without a killing
  // instruction, or an instruction where we know it's dead, is if it is
  // live-in to the function and never used. Another possible case is that
  // the implicit use of the physical register has been deleted by the
  // two-address pass.
  end = start + 1;

exit:
  assert(start < end && "did not find end of interval?");

  // Already exists? Extend old live interval.
  LiveInterval::iterator OldLR = interval.FindLiveRangeContaining(start);
  bool Extend = OldLR != interval.end();
  VNInfo *ValNo = Extend
    ? OldLR->valno : interval.getNextValue(start, CopyMI, true, VNInfoAllocator);
  if (MO.isEarlyClobber() && Extend)
    ValNo->setHasRedefByEC(true);
  LiveRange LR(start, end, ValNo);
  interval.addRange(LR);
  interval.addKill(LR.valno, end);
  DOUT << " +" << LR << '\n';
}

void LiveIntervals::handleRegisterDef(MachineBasicBlock *MBB,
                                      MachineBasicBlock::iterator MI,
                                      unsigned MIIdx,
                                      MachineOperand& MO,
                                      unsigned MOIdx) {
  if (TargetRegisterInfo::isVirtualRegister(MO.getReg()))
    handleVirtualRegisterDef(MBB, MI, MIIdx, MO, MOIdx,
                             getOrCreateInterval(MO.getReg()));
  else if (allocatableRegs_[MO.getReg()]) {
    MachineInstr *CopyMI = NULL;
    unsigned SrcReg, DstReg, SrcSubReg, DstSubReg;
    if (MI->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG ||
        MI->getOpcode() == TargetInstrInfo::INSERT_SUBREG ||
        MI->getOpcode() == TargetInstrInfo::SUBREG_TO_REG ||
        tii_->isMoveInstr(*MI, SrcReg, DstReg, SrcSubReg, DstSubReg))
      CopyMI = MI;
    handlePhysicalRegisterDef(MBB, MI, MIIdx, MO,
                              getOrCreateInterval(MO.getReg()), CopyMI);
    // Def of a register also defines its sub-registers.
    for (const unsigned* AS = tri_->getSubRegisters(MO.getReg()); *AS; ++AS)
      // If MI also modifies the sub-register explicitly, avoid processing it
      // more than once. Do not pass in TRI here so it checks for exact match.
      if (!MI->modifiesRegister(*AS))
        handlePhysicalRegisterDef(MBB, MI, MIIdx, MO,
                                  getOrCreateInterval(*AS), 0);
  }
}

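/// handleLiveInRegister - Create a live range for a register that is live in
/// to MBB. The range starts at the beginning of the block and ends at the
/// first kill or redefinition, or runs through the block if neither is seen.
/// isAlias is true when the interval being built is for a sub-register alias
/// of a live-in register.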
void LiveIntervals::handleLiveInRegister(MachineBasicBlock *MBB,
                                         unsigned MIIdx,
                                         LiveInterval &interval, bool isAlias) {
  DOUT << "\t\tlivein register: "; DEBUG(printRegName(interval.reg));

  // Look for kills. If the register reaches a def before it's killed, then it
  // shouldn't be considered a livein.
  MachineBasicBlock::iterator mi = MBB->begin();
  unsigned baseIndex = MIIdx;
  unsigned start = baseIndex;
  while (baseIndex / InstrSlots::NUM < i2miMap_.size() &&
         getInstructionFromIndex(baseIndex) == 0)
    baseIndex += InstrSlots::NUM;
  unsigned end = baseIndex;
  bool SeenDefUse = false;

  while (mi != MBB->end()) {
    if (mi->killsRegister(interval.reg, tri_)) {
      DOUT << " killed";
      end = getUseIndex(baseIndex) + 1;
      SeenDefUse = true;
      break;
    } else if (mi->modifiesRegister(interval.reg, tri_)) {
      // Another instruction redefines the register before it is ever read.
      // Then the register is essentially dead at the instruction that defines
      // it. Hence its interval is:
      // [defSlot(def), defSlot(def)+1)
      DOUT << " dead";
      end = getDefIndex(start) + 1;
      SeenDefUse = true;
      break;
    }

    baseIndex += InstrSlots::NUM;
    ++mi;
    if (mi != MBB->end()) {
      while (baseIndex / InstrSlots::NUM < i2miMap_.size() &&
             getInstructionFromIndex(baseIndex) == 0)
        baseIndex += InstrSlots::NUM;
    }
  }

  // Live-in register might not be used at all.
  if (!SeenDefUse) {
    if (isAlias) {
      DOUT << " dead";
      end = getDefIndex(MIIdx) + 1;
    } else {
      DOUT << " live through";
      end = baseIndex;
    }
  }

  VNInfo *vni =
    interval.getNextValue(MBB->getNumber(), 0, false, VNInfoAllocator);
  vni->setIsPHIDef(true);
  LiveRange LR(start, end, vni);

  interval.addRange(LR);
  interval.addKill(LR.valno, end);
  DOUT << " +" << LR << '\n';
}

/// computeIntervals - Computes the live intervals for virtual
/// registers. For some ordering of the machine instructions [1,N], a
/// live interval is an interval [i, j) where 1 <= i <= j < N for
/// which a variable is live.
void LiveIntervals::computeIntervals() {

  DOUT << "********** COMPUTING LIVE INTERVALS **********\n"
       << "********** Function: "
       << ((Value*)mf_->getFunction())->getName() << '\n';

  for (MachineFunction::iterator MBBI = mf_->begin(), E = mf_->end();
       MBBI != E; ++MBBI) {
    MachineBasicBlock *MBB = MBBI;
    // Track the index of the current machine instr.
    unsigned MIIndex = getMBBStartIdx(MBB);
    DOUT << ((Value*)MBB->getBasicBlock())->getName() << ":\n";

    MachineBasicBlock::iterator MI = MBB->begin(), miEnd = MBB->end();

    // Create intervals for live-ins to this BB first.
    for (MachineBasicBlock::const_livein_iterator LI = MBB->livein_begin(),
           LE = MBB->livein_end(); LI != LE; ++LI) {
      handleLiveInRegister(MBB, MIIndex, getOrCreateInterval(*LI));
      // Multiple live-ins can alias the same register.
      for (const unsigned* AS = tri_->getSubRegisters(*LI); *AS; ++AS)
        if (!hasInterval(*AS))
          handleLiveInRegister(MBB, MIIndex, getOrCreateInterval(*AS),
                               true);
    }

    // Skip over empty initial indices.
    while (MIIndex / InstrSlots::NUM < i2miMap_.size() &&
           getInstructionFromIndex(MIIndex) == 0)
      MIIndex += InstrSlots::NUM;

    for (; MI != miEnd; ++MI) {
      DOUT << MIIndex << "\t" << *MI;

      // Handle defs.
      for (int i = MI->getNumOperands() - 1; i >= 0; --i) {
        MachineOperand &MO = MI->getOperand(i);
        // handle register defs - build intervals
        if (MO.isReg() && MO.getReg() && MO.isDef()) {
          handleRegisterDef(MBB, MI, MIIndex, MO, i);
        }
      }

      // Skip over the empty slots after each instruction.
      unsigned Slots = MI->getDesc().getNumDefs();
      if (Slots == 0)
        Slots = 1;
      MIIndex += InstrSlots::NUM * Slots;

      // Skip over empty indices.
      while (MIIndex / InstrSlots::NUM < i2miMap_.size() &&
             getInstructionFromIndex(MIIndex) == 0)
        MIIndex += InstrSlots::NUM;
    }
  }
}

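/// findLiveInMBBs - Collect into MBBs the basic blocks whose start index
/// falls in the range [Start, End). Returns true if any block was found.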
bool LiveIntervals::findLiveInMBBs(unsigned Start, unsigned End,
                              SmallVectorImpl<MachineBasicBlock*> &MBBs) const {
  std::vector<IdxMBBPair>::const_iterator I =
    std::lower_bound(Idx2MBBMap.begin(), Idx2MBBMap.end(), Start);

  bool ResVal = false;
  while (I != Idx2MBBMap.end()) {
    if (I->first >= End)
      break;
    MBBs.push_back(I->second);
    ResVal = true;
    ++I;
  }
  return ResVal;
}

bool LiveIntervals::findReachableMBBs(unsigned Start, unsigned End,
                              SmallVectorImpl<MachineBasicBlock*> &MBBs) const {
  std::vector<IdxMBBPair>::const_iterator I =
    std::lower_bound(Idx2MBBMap.begin(), Idx2MBBMap.end(), Start);

  bool ResVal = false;
  while (I != Idx2MBBMap.end()) {
    if (I->first > End)
      break;
    MachineBasicBlock *MBB = I->second;
    if (getMBBEndIdx(MBB) > End)
      break;
    for (MachineBasicBlock::succ_iterator SI = MBB->succ_begin(),
           SE = MBB->succ_end(); SI != SE; ++SI)
      MBBs.push_back(*SI);
    ResVal = true;
    ++I;
  }
  return ResVal;
}

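/// createInterval - Allocate a new LiveInterval for the given register, with
/// HUGE_VALF weight for physical registers and 0.0 for virtual registers.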
LiveInterval* LiveIntervals::createInterval(unsigned reg) {
  float Weight = TargetRegisterInfo::isPhysicalRegister(reg) ? HUGE_VALF : 0.0F;
  return new LiveInterval(reg, Weight);
}

/// dupInterval - Duplicate a live interval. The caller is responsible for
/// managing the allocated memory.
LiveInterval* LiveIntervals::dupInterval(LiveInterval *li) {
  LiveInterval *NewLI = createInterval(li->reg);
  NewLI->Copy(*li, mri_, getVNInfoAllocator());
  return NewLI;
}

/// getVNInfoSourceReg - Helper function that parses the specified VNInfo
/// copy field and returns the source register that defines it.
unsigned LiveIntervals::getVNInfoSourceReg(const VNInfo *VNI) const {
  if (!VNI->copy)
    return 0;

  if (VNI->copy->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG) {
    // If it's extracting out of a physical register, return the sub-register.
    unsigned Reg = VNI->copy->getOperand(1).getReg();
    if (TargetRegisterInfo::isPhysicalRegister(Reg))
      Reg = tri_->getSubReg(Reg, VNI->copy->getOperand(2).getImm());
    return Reg;
  } else if (VNI->copy->getOpcode() == TargetInstrInfo::INSERT_SUBREG ||
             VNI->copy->getOpcode() == TargetInstrInfo::SUBREG_TO_REG)
    return VNI->copy->getOperand(2).getReg();

  unsigned SrcReg, DstReg, SrcSubReg, DstSubReg;
  if (tii_->isMoveInstr(*VNI->copy, SrcReg, DstReg, SrcSubReg, DstSubReg))
    return SrcReg;
  assert(0 && "Unrecognized copy instruction!");
  return 0;
}

//===----------------------------------------------------------------------===//
// Register allocator hooks.
//

/// getReMatImplicitUse - If the remat definition MI has one (for now, we only
/// allow one) virtual register operand, then its uses are implicitly using
/// the register. Returns the virtual register.
unsigned LiveIntervals::getReMatImplicitUse(const LiveInterval &li,
                                            MachineInstr *MI) const {
  unsigned RegOp = 0;
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isUse())
      continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0 || Reg == li.reg)
      continue;
    // FIXME: For now, only remat MI with at most one register operand.
    assert(!RegOp &&
           "Can't rematerialize instruction with multiple register operand!");
    RegOp = MO.getReg();
#ifndef NDEBUG
    break;
#endif
  }
  return RegOp;
}

/// isValNoAvailableAt - Return true if the val# of the specified interval
/// which reaches the given instruction also reaches the specified use index.
bool LiveIntervals::isValNoAvailableAt(const LiveInterval &li, MachineInstr *MI,
                                       unsigned UseIdx) const {
  unsigned Index = getInstructionIndex(MI);
  VNInfo *ValNo = li.FindLiveRangeContaining(Index)->valno;
  LiveInterval::const_iterator UI = li.FindLiveRangeContaining(UseIdx);
  return UI != li.end() && UI->valno == ValNo;
}

/// isReMaterializable - Returns true if the definition MI of the specified
/// val# of the specified interval is re-materializable.
bool LiveIntervals::isReMaterializable(const LiveInterval &li,
                                       const VNInfo *ValNo, MachineInstr *MI,
                                       SmallVectorImpl<LiveInterval*> &SpillIs,
                                       bool &isLoad) {
  if (DisableReMat)
    return false;

  if (MI->getOpcode() == TargetInstrInfo::IMPLICIT_DEF)
    return true;

  int FrameIdx = 0;
  if (tii_->isLoadFromStackSlot(MI, FrameIdx) &&
      mf_->getFrameInfo()->isImmutableObjectIndex(FrameIdx))
    // FIXME: Let the target-specific isReallyTriviallyReMaterializable
    // determine this, but remember it is not safe to fold into a two-address
    // instruction.
    // This is a load from a fixed stack slot. It can be rematerialized.
    return true;

Dan Gohman6d69ba82008-07-25 00:02:30 +0000992 // If the target-specific rules don't identify an instruction as
993 // being trivially rematerializable, use some target-independent
994 // rules.
995 if (!MI->getDesc().isRematerializable() ||
996 !tii_->isTriviallyReMaterializable(MI)) {
Dan Gohman4c8f8702008-07-25 15:08:37 +0000997 if (!EnableAggressiveRemat)
998 return false;
Evan Chengd70dbb52008-02-22 09:24:50 +0000999
Dan Gohman0471a792008-07-28 18:43:51 +00001000 // If the instruction accesses memory but the memoperands have been lost,
Dan Gohman6d69ba82008-07-25 00:02:30 +00001001 // we can't analyze it.
1002 const TargetInstrDesc &TID = MI->getDesc();
1003 if ((TID.mayLoad() || TID.mayStore()) && MI->memoperands_empty())
1004 return false;
1005
1006 // Avoid instructions obviously unsafe for remat.
1007 if (TID.hasUnmodeledSideEffects() || TID.isNotDuplicable())
1008 return false;
1009
1010 // If the instruction accesses memory and the memory could be non-constant,
1011 // assume the instruction is not rematerializable.
Evan Chengdc377862008-09-30 15:44:16 +00001012 for (std::list<MachineMemOperand>::const_iterator
1013 I = MI->memoperands_begin(), E = MI->memoperands_end(); I != E; ++I){
Dan Gohman6d69ba82008-07-25 00:02:30 +00001014 const MachineMemOperand &MMO = *I;
1015 if (MMO.isVolatile() || MMO.isStore())
1016 return false;
1017 const Value *V = MMO.getValue();
1018 if (!V)
1019 return false;
1020 if (const PseudoSourceValue *PSV = dyn_cast<PseudoSourceValue>(V)) {
1021 if (!PSV->isConstant(mf_->getFrameInfo()))
Evan Chengd70dbb52008-02-22 09:24:50 +00001022 return false;
Dan Gohman6d69ba82008-07-25 00:02:30 +00001023 } else if (!aa_->pointsToConstantMemory(V))
1024 return false;
1025 }
1026
1027 // If any of the registers accessed are non-constant, conservatively assume
1028 // the instruction is not rematerializable.
1029 unsigned ImpUse = 0;
1030 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
1031 const MachineOperand &MO = MI->getOperand(i);
Dan Gohmand735b802008-10-03 15:45:36 +00001032 if (MO.isReg()) {
Dan Gohman6d69ba82008-07-25 00:02:30 +00001033 unsigned Reg = MO.getReg();
1034 if (Reg == 0)
1035 continue;
1036 if (TargetRegisterInfo::isPhysicalRegister(Reg))
1037 return false;
1038
1039 // Only allow one def, and that in the first operand.
1040 if (MO.isDef() != (i == 0))
1041 return false;
1042
1043 // Only allow constant-valued registers.
1044 bool IsLiveIn = mri_->isLiveIn(Reg);
1045 MachineRegisterInfo::def_iterator I = mri_->def_begin(Reg),
1046 E = mri_->def_end();
1047
Dan Gohmanc93ced5b2008-12-08 04:53:23 +00001048 // For the def, it should be the only def of that register.
Dan Gohman6d69ba82008-07-25 00:02:30 +00001049 if (MO.isDef() && (next(I) != E || IsLiveIn))
1050 return false;
1051
1052 if (MO.isUse()) {
1053        // Only allow one other register use, as that's all the
1054        // remat mechanisms currently support.
1055 if (Reg != li.reg) {
1056 if (ImpUse == 0)
1057 ImpUse = Reg;
1058 else if (Reg != ImpUse)
1059 return false;
1060 }
Dan Gohmanc93ced5b2008-12-08 04:53:23 +00001061 // For the use, there should be only one associated def.
Dan Gohman6d69ba82008-07-25 00:02:30 +00001062 if (I != E && (next(I) != E || IsLiveIn))
1063 return false;
1064 }
Evan Chengd70dbb52008-02-22 09:24:50 +00001065 }
1066 }
Evan Cheng5ef3a042007-12-06 00:01:56 +00001067 }
Evan Chengf2fbca62007-11-12 06:35:08 +00001068
Dan Gohman6d69ba82008-07-25 00:02:30 +00001069 unsigned ImpUse = getReMatImplicitUse(li, MI);
1070 if (ImpUse) {
1071 const LiveInterval &ImpLi = getInterval(ImpUse);
1072 for (MachineRegisterInfo::use_iterator ri = mri_->use_begin(li.reg),
1073 re = mri_->use_end(); ri != re; ++ri) {
1074 MachineInstr *UseMI = &*ri;
1075 unsigned UseIdx = getInstructionIndex(UseMI);
1076 if (li.FindLiveRangeContaining(UseIdx)->valno != ValNo)
1077 continue;
1078 if (!isValNoAvailableAt(ImpLi, MI, UseIdx))
1079 return false;
1080 }
Evan Chengdc377862008-09-30 15:44:16 +00001081
1082 // If a register operand of the re-materialized instruction is going to
1083 // be spilled next, then it's not legal to re-materialize this instruction.
1084 for (unsigned i = 0, e = SpillIs.size(); i != e; ++i)
1085 if (ImpUse == SpillIs[i]->reg)
1086 return false;
Dan Gohman6d69ba82008-07-25 00:02:30 +00001087 }
1088 return true;
Evan Cheng5ef3a042007-12-06 00:01:56 +00001089}
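// A hedged illustration of the aggressive-remat path above (opcode and vregs
// are hypothetical): a load that alias analysis can prove reads constant
// memory, e.g.
//   %reg1027<def> = MOV32rm <ga:@some_constant_table>, ...
// may not be flagged rematerializable by the target, but it has its only def
// in operand 0, no unmodeled side effects, and a non-volatile memory operand
// pointing to constant memory, so -aggressive-remat accepts it.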
1090
Evan Cheng06587492008-10-24 02:05:00 +00001091/// isReMaterializable - Returns true if the definition MI of the specified
1092/// val# of the specified interval is re-materializable.
1093bool LiveIntervals::isReMaterializable(const LiveInterval &li,
1094 const VNInfo *ValNo, MachineInstr *MI) {
1095 SmallVector<LiveInterval*, 4> Dummy1;
1096 bool Dummy2;
1097 return isReMaterializable(li, ValNo, MI, Dummy1, Dummy2);
1098}
1099
Evan Cheng5ef3a042007-12-06 00:01:56 +00001100/// isReMaterializable - Returns true if the definition MI of every val# of
1101/// the specified interval is re-materializable.
Evan Chengdc377862008-09-30 15:44:16 +00001102bool LiveIntervals::isReMaterializable(const LiveInterval &li,
1103 SmallVectorImpl<LiveInterval*> &SpillIs,
1104 bool &isLoad) {
Evan Cheng5ef3a042007-12-06 00:01:56 +00001105 isLoad = false;
1106 for (LiveInterval::const_vni_iterator i = li.vni_begin(), e = li.vni_end();
1107 i != e; ++i) {
1108 const VNInfo *VNI = *i;
Lang Hames857c4e02009-06-17 21:01:20 +00001109 if (VNI->isUnused())
Evan Cheng5ef3a042007-12-06 00:01:56 +00001110 continue; // Dead val#.
1111 // Is the def for the val# rematerializable?
Lang Hames857c4e02009-06-17 21:01:20 +00001112 if (!VNI->isDefAccurate())
Evan Cheng5ef3a042007-12-06 00:01:56 +00001113 return false;
Lang Hames857c4e02009-06-17 21:01:20 +00001114 MachineInstr *ReMatDefMI = getInstructionFromIndex(VNI->def);
Evan Cheng5ef3a042007-12-06 00:01:56 +00001115 bool DefIsLoad = false;
Evan Chengd70dbb52008-02-22 09:24:50 +00001116 if (!ReMatDefMI ||
Evan Chengdc377862008-09-30 15:44:16 +00001117 !isReMaterializable(li, VNI, ReMatDefMI, SpillIs, DefIsLoad))
Evan Cheng5ef3a042007-12-06 00:01:56 +00001118 return false;
1119 isLoad |= DefIsLoad;
Evan Chengf2fbca62007-11-12 06:35:08 +00001120 }
1121 return true;
1122}
1123
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001124/// FilterFoldedOps - Filter out two-address use operands. Return
1125/// true if it finds any issue with the operands that ought to prevent
1126/// folding.
1127static bool FilterFoldedOps(MachineInstr *MI,
1128 SmallVector<unsigned, 2> &Ops,
1129 unsigned &MRInfo,
1130 SmallVector<unsigned, 2> &FoldOps) {
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001131 MRInfo = 0;
Evan Chengaee4af62007-12-02 08:30:39 +00001132 for (unsigned i = 0, e = Ops.size(); i != e; ++i) {
1133 unsigned OpIdx = Ops[i];
Evan Chengd70dbb52008-02-22 09:24:50 +00001134 MachineOperand &MO = MI->getOperand(OpIdx);
Evan Chengaee4af62007-12-02 08:30:39 +00001135 // FIXME: fold subreg use.
Evan Chengd70dbb52008-02-22 09:24:50 +00001136 if (MO.getSubReg())
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001137 return true;
Evan Chengd70dbb52008-02-22 09:24:50 +00001138 if (MO.isDef())
Evan Chengaee4af62007-12-02 08:30:39 +00001139 MRInfo |= (unsigned)VirtRegMap::isMod;
1140 else {
1141 // Filter out two-address use operand(s).
Evan Chenga24752f2009-03-19 20:30:06 +00001142 if (MI->isRegTiedToDefOperand(OpIdx)) {
Evan Chengaee4af62007-12-02 08:30:39 +00001143 MRInfo = VirtRegMap::isModRef;
1144 continue;
1145 }
1146 MRInfo |= (unsigned)VirtRegMap::isRef;
1147 }
1148 FoldOps.push_back(OpIdx);
Evan Chenge62f97c2007-12-01 02:07:52 +00001149 }
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001150 return false;
1151}
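// A minimal sketch, with hypothetical vregs: if %reg1026 is the spilled
// register in a two-address instruction such as
//   %reg1026<def> = ADD32rr %reg1026, %reg1027
// then Ops holds both %reg1026 operands; the def is kept in FoldOps and
// contributes isMod, while the tied use sets MRInfo to isModRef and is
// filtered out by the loop above.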
1152
1153
1154/// tryFoldMemoryOperand - Attempts to fold either a spill or a restore
1155/// (from / to a stack slot) or a rematerialized load into the specified
1156/// operand(s) of MI. If successful, MI is updated with the newly created
1157/// instruction and the function returns true.
1158bool LiveIntervals::tryFoldMemoryOperand(MachineInstr* &MI,
1159 VirtRegMap &vrm, MachineInstr *DefMI,
1160 unsigned InstrIdx,
1161 SmallVector<unsigned, 2> &Ops,
1162 bool isSS, int Slot, unsigned Reg) {
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001163 // If it is an implicit def instruction, just delete it.
Evan Cheng20ccded2008-03-15 00:19:36 +00001164 if (MI->getOpcode() == TargetInstrInfo::IMPLICIT_DEF) {
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001165 RemoveMachineInstrFromMaps(MI);
1166 vrm.RemoveMachineInstrFromMaps(MI);
1167 MI->eraseFromParent();
1168 ++numFolds;
1169 return true;
1170 }
1171
1172 // Filter the list of operand indexes that are to be folded. Abort if
1173 // any operand will prevent folding.
1174 unsigned MRInfo = 0;
1175 SmallVector<unsigned, 2> FoldOps;
1176 if (FilterFoldedOps(MI, Ops, MRInfo, FoldOps))
1177 return false;
Evan Chenge62f97c2007-12-01 02:07:52 +00001178
Evan Cheng427f4c12008-03-31 23:19:51 +00001179  // The only time it's safe to fold into a two-address instruction is when
1180  // we're folding a reload or a spill from / into a spill stack slot.
1181 if (DefMI && (MRInfo & VirtRegMap::isMod))
Evan Cheng249ded32008-02-23 03:38:34 +00001182 return false;
1183
Evan Chengf2f8c2a2008-02-08 22:05:27 +00001184 MachineInstr *fmi = isSS ? tii_->foldMemoryOperand(*mf_, MI, FoldOps, Slot)
1185 : tii_->foldMemoryOperand(*mf_, MI, FoldOps, DefMI);
Evan Chengf2fbca62007-11-12 06:35:08 +00001186 if (fmi) {
Evan Chengd3653122008-02-27 03:04:06 +00001187 // Remember this instruction uses the spill slot.
1188 if (isSS) vrm.addSpillSlotUse(Slot, fmi);
1189
Evan Chengf2fbca62007-11-12 06:35:08 +00001190 // Attempt to fold the memory reference into the instruction. If
1191 // we can do this, we don't need to insert spill code.
Evan Chengf2fbca62007-11-12 06:35:08 +00001192 MachineBasicBlock &MBB = *MI->getParent();
Evan Cheng84802932008-01-10 08:24:38 +00001193 if (isSS && !mf_->getFrameInfo()->isImmutableObjectIndex(Slot))
Evan Chengaee4af62007-12-02 08:30:39 +00001194 vrm.virtFolded(Reg, MI, fmi, (VirtRegMap::ModRef)MRInfo);
Evan Cheng81a03822007-11-17 00:40:40 +00001195 vrm.transferSpillPts(MI, fmi);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001196 vrm.transferRestorePts(MI, fmi);
Evan Chengc1f53c72008-03-11 21:34:46 +00001197 vrm.transferEmergencySpills(MI, fmi);
Evan Chengf2fbca62007-11-12 06:35:08 +00001198 mi2iMap_.erase(MI);
Evan Chengcddbb832007-11-30 21:23:43 +00001199 i2miMap_[InstrIdx /InstrSlots::NUM] = fmi;
1200 mi2iMap_[fmi] = InstrIdx;
Evan Chengf2fbca62007-11-12 06:35:08 +00001201 MI = MBB.insert(MBB.erase(MI), fmi);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001202 ++numFolds;
Evan Chengf2fbca62007-11-12 06:35:08 +00001203 return true;
1204 }
1205 return false;
1206}
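// For illustration only (x86-style opcodes, hypothetical registers and stack
// slot): instead of inserting a separate reload such as
//   %reg1024<def> = MOV32rm <fi#3>, ...
// before a use like
//   %reg1025<def> = ADD32rr %reg1025, %reg1024
// tryFoldMemoryOperand above can let foldMemoryOperand rewrite the use as
//   %reg1025<def> = ADD32rm %reg1025, <fi#3>, ...
// and then updates the index maps to point at the new instruction.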
1207
Evan Cheng018f9b02007-12-05 03:22:34 +00001208/// canFoldMemoryOperand - Returns true if the specified load / store
1209/// folding is possible.
1210bool LiveIntervals::canFoldMemoryOperand(MachineInstr *MI,
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001211 SmallVector<unsigned, 2> &Ops,
Evan Cheng3c75ba82008-04-01 21:37:32 +00001212 bool ReMat) const {
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001213 // Filter the list of operand indexes that are to be folded. Abort if
1214 // any operand will prevent folding.
1215 unsigned MRInfo = 0;
Evan Cheng018f9b02007-12-05 03:22:34 +00001216 SmallVector<unsigned, 2> FoldOps;
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001217 if (FilterFoldedOps(MI, Ops, MRInfo, FoldOps))
1218 return false;
Evan Cheng018f9b02007-12-05 03:22:34 +00001219
Evan Cheng3c75ba82008-04-01 21:37:32 +00001220 // It's only legal to remat for a use, not a def.
1221 if (ReMat && (MRInfo & VirtRegMap::isMod))
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001222 return false;
Evan Cheng018f9b02007-12-05 03:22:34 +00001223
Evan Chengd70dbb52008-02-22 09:24:50 +00001224 return tii_->canFoldMemoryOperand(MI, FoldOps);
1225}
1226
Evan Cheng81a03822007-11-17 00:40:40 +00001227bool LiveIntervals::intervalIsInOneMBB(const LiveInterval &li) const {
1228 SmallPtrSet<MachineBasicBlock*, 4> MBBs;
1229 for (LiveInterval::Ranges::const_iterator
1230 I = li.ranges.begin(), E = li.ranges.end(); I != E; ++I) {
1231 std::vector<IdxMBBPair>::const_iterator II =
1232 std::lower_bound(Idx2MBBMap.begin(), Idx2MBBMap.end(), I->start);
1233 if (II == Idx2MBBMap.end())
1234 continue;
1235 if (I->end > II->first) // crossing a MBB.
1236 return false;
1237 MBBs.insert(II->second);
1238 if (MBBs.size() > 1)
1239 return false;
1240 }
1241 return true;
1242}
1243
Evan Chengd70dbb52008-02-22 09:24:50 +00001244/// rewriteImplicitOps - Rewrite implicit use operands of MI (i.e. uses of the
1245/// interval inside MI's to-be re-materialized operands) with the new register.
1246void LiveIntervals::rewriteImplicitOps(const LiveInterval &li,
1247 MachineInstr *MI, unsigned NewVReg,
1248 VirtRegMap &vrm) {
1249  // There is an implicit use. That means one of the other operands is
1250  // being remat'ed and the remat'ed instruction has li.reg as a
1251  // use operand. Make sure we rewrite that as well.
1252 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
1253 MachineOperand &MO = MI->getOperand(i);
Dan Gohmand735b802008-10-03 15:45:36 +00001254 if (!MO.isReg())
Evan Chengd70dbb52008-02-22 09:24:50 +00001255 continue;
1256 unsigned Reg = MO.getReg();
1257 if (Reg == 0 || TargetRegisterInfo::isPhysicalRegister(Reg))
1258 continue;
1259 if (!vrm.isReMaterialized(Reg))
1260 continue;
1261 MachineInstr *ReMatMI = vrm.getReMaterializedMI(Reg);
Evan Cheng6130f662008-03-05 00:59:57 +00001262 MachineOperand *UseMO = ReMatMI->findRegisterUseOperand(li.reg);
1263 if (UseMO)
1264 UseMO->setReg(NewVReg);
Evan Chengd70dbb52008-02-22 09:24:50 +00001265 }
1266}
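// Sketch with hypothetical vregs: suppose MI also uses %reg1030, and %reg1030
// is marked re-materialized from a template like
//   %reg1030<def> = LEA32r %reg1024, ...
// where %reg1024 is li.reg. When li.reg is renamed to NewVReg in MI, the use
// of %reg1024 inside that stored template must be renamed as well, which is
// what rewriteImplicitOps above does.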
1267
Evan Chengf2fbca62007-11-12 06:35:08 +00001268/// rewriteInstructionForSpills, rewriteInstructionsForSpills - Helper functions
1269/// for addIntervalsForSpills to rewrite uses / defs for the given live range.
Evan Cheng018f9b02007-12-05 03:22:34 +00001270bool LiveIntervals::
Evan Chengd70dbb52008-02-22 09:24:50 +00001271rewriteInstructionForSpills(const LiveInterval &li, const VNInfo *VNI,
1272 bool TrySplit, unsigned index, unsigned end, MachineInstr *MI,
Evan Cheng81a03822007-11-17 00:40:40 +00001273 MachineInstr *ReMatOrigDefMI, MachineInstr *ReMatDefMI,
Evan Chengf2fbca62007-11-12 06:35:08 +00001274 unsigned Slot, int LdSlot,
1275 bool isLoad, bool isLoadSS, bool DefIsReMat, bool CanDelete,
Evan Chengd70dbb52008-02-22 09:24:50 +00001276 VirtRegMap &vrm,
Evan Chengf2fbca62007-11-12 06:35:08 +00001277 const TargetRegisterClass* rc,
1278 SmallVector<int, 4> &ReMatIds,
Evan Cheng22f07ff2007-12-11 02:09:15 +00001279 const MachineLoopInfo *loopInfo,
Evan Cheng313d4b82008-02-23 00:33:04 +00001280 unsigned &NewVReg, unsigned ImpUse, bool &HasDef, bool &HasUse,
Owen Anderson28998312008-08-13 22:28:50 +00001281 DenseMap<unsigned,unsigned> &MBBVRegsMap,
Evan Chengc781a242009-05-03 18:32:42 +00001282 std::vector<LiveInterval*> &NewLIs) {
Evan Cheng018f9b02007-12-05 03:22:34 +00001283 bool CanFold = false;
Evan Chengf2fbca62007-11-12 06:35:08 +00001284 RestartInstruction:
1285 for (unsigned i = 0; i != MI->getNumOperands(); ++i) {
1286 MachineOperand& mop = MI->getOperand(i);
Dan Gohmand735b802008-10-03 15:45:36 +00001287 if (!mop.isReg())
Evan Chengf2fbca62007-11-12 06:35:08 +00001288 continue;
1289 unsigned Reg = mop.getReg();
1290 unsigned RegI = Reg;
Dan Gohman6f0d0242008-02-10 18:45:23 +00001291 if (Reg == 0 || TargetRegisterInfo::isPhysicalRegister(Reg))
Evan Chengf2fbca62007-11-12 06:35:08 +00001292 continue;
Evan Chengf2fbca62007-11-12 06:35:08 +00001293 if (Reg != li.reg)
1294 continue;
1295
1296 bool TryFold = !DefIsReMat;
Evan Chengcb3c3302007-11-29 23:02:50 +00001297 bool FoldSS = true; // Default behavior unless it's a remat.
Evan Chengf2fbca62007-11-12 06:35:08 +00001298 int FoldSlot = Slot;
1299 if (DefIsReMat) {
1300 // If this is the rematerializable definition MI itself and
1301 // all of its uses are rematerialized, simply delete it.
Evan Cheng81a03822007-11-17 00:40:40 +00001302 if (MI == ReMatOrigDefMI && CanDelete) {
Evan Chengcddbb832007-11-30 21:23:43 +00001303        DOUT << "\t\t\t\tErasing re-materializable def: ";
1304 DOUT << MI << '\n';
Evan Chengf2fbca62007-11-12 06:35:08 +00001305 RemoveMachineInstrFromMaps(MI);
Evan Chengcada2452007-11-28 01:28:46 +00001306 vrm.RemoveMachineInstrFromMaps(MI);
Evan Chengf2fbca62007-11-12 06:35:08 +00001307 MI->eraseFromParent();
1308 break;
1309 }
1310
1311 // If def for this use can't be rematerialized, then try folding.
Evan Cheng0cbb1162007-11-29 01:06:25 +00001312 // If def is rematerializable and it's a load, also try folding.
Evan Chengcb3c3302007-11-29 23:02:50 +00001313 TryFold = !ReMatDefMI || (ReMatDefMI && (MI == ReMatOrigDefMI || isLoad));
Evan Chengf2fbca62007-11-12 06:35:08 +00001314 if (isLoad) {
1315        // Try to fold loads (from stack slot, constant pool, etc.) into uses.
1316 FoldSS = isLoadSS;
1317 FoldSlot = LdSlot;
1318 }
1319 }
1320
Evan Chengf2fbca62007-11-12 06:35:08 +00001321 // Scan all of the operands of this instruction rewriting operands
1322 // to use NewVReg instead of li.reg as appropriate. We do this for
1323 // two reasons:
1324 //
1325 // 1. If the instr reads the same spilled vreg multiple times, we
1326 // want to reuse the NewVReg.
1327 // 2. If the instr is a two-addr instruction, we are required to
1328 // keep the src/dst regs pinned.
1329 //
1330 // Keep track of whether we replace a use and/or def so that we can
1331 // create the spill interval with the appropriate range.
Evan Chengcddbb832007-11-30 21:23:43 +00001332
Evan Cheng81a03822007-11-17 00:40:40 +00001333 HasUse = mop.isUse();
1334 HasDef = mop.isDef();
Evan Chengaee4af62007-12-02 08:30:39 +00001335 SmallVector<unsigned, 2> Ops;
1336 Ops.push_back(i);
Evan Chengf2fbca62007-11-12 06:35:08 +00001337 for (unsigned j = i+1, e = MI->getNumOperands(); j != e; ++j) {
Evan Chengaee4af62007-12-02 08:30:39 +00001338 const MachineOperand &MOj = MI->getOperand(j);
Dan Gohmand735b802008-10-03 15:45:36 +00001339 if (!MOj.isReg())
Evan Chengf2fbca62007-11-12 06:35:08 +00001340 continue;
Evan Chengaee4af62007-12-02 08:30:39 +00001341 unsigned RegJ = MOj.getReg();
Dan Gohman6f0d0242008-02-10 18:45:23 +00001342 if (RegJ == 0 || TargetRegisterInfo::isPhysicalRegister(RegJ))
Evan Chengf2fbca62007-11-12 06:35:08 +00001343 continue;
1344 if (RegJ == RegI) {
Evan Chengaee4af62007-12-02 08:30:39 +00001345 Ops.push_back(j);
1346 HasUse |= MOj.isUse();
1347 HasDef |= MOj.isDef();
Evan Chengf2fbca62007-11-12 06:35:08 +00001348 }
1349 }
1350
Evan Cheng79a796c2008-07-12 01:56:02 +00001351 if (HasUse && !li.liveAt(getUseIndex(index)))
1352 // Must be defined by an implicit def. It should not be spilled. Note,
1353      // this is for correctness reasons. e.g.
1354 // 8 %reg1024<def> = IMPLICIT_DEF
1355 // 12 %reg1024<def> = INSERT_SUBREG %reg1024<kill>, %reg1025, 2
1356      // The live range [12, 14) is not part of the r1024 live interval since
1357      // it's defined by an implicit def. It will not conflict with the live
1358      // interval of r1025. Now suppose both registers are spilled; you can
Evan Chengb9890ae2008-07-12 02:22:07 +00001359      // easily see a situation where both registers are reloaded before
Evan Cheng79a796c2008-07-12 01:56:02 +00001360      // the INSERT_SUBREG and the two target registers would overlap.
1361 HasUse = false;
1362
David Greene26b86a02008-10-27 17:38:59 +00001363 // Create a new virtual register for the spill interval.
1364    // Create the new register now so we can map the fold instruction
1365    // to the new register; that way, when it is unfolded, we get the
1366    // correct answer.
1367 bool CreatedNewVReg = false;
1368 if (NewVReg == 0) {
1369 NewVReg = mri_->createVirtualRegister(rc);
1370 vrm.grow();
1371 CreatedNewVReg = true;
1372 }
1373
Evan Cheng9c3c2212008-06-06 07:54:39 +00001374 if (!TryFold)
1375 CanFold = false;
1376 else {
Evan Cheng018f9b02007-12-05 03:22:34 +00001377 // Do not fold load / store here if we are splitting. We'll find an
1378 // optimal point to insert a load / store later.
1379 if (!TrySplit) {
1380 if (tryFoldMemoryOperand(MI, vrm, ReMatDefMI, index,
David Greene26b86a02008-10-27 17:38:59 +00001381 Ops, FoldSS, FoldSlot, NewVReg)) {
Evan Cheng018f9b02007-12-05 03:22:34 +00001382          // Folding the load/store can completely change the instruction in
1383          // unpredictable ways; rescan it from the beginning.
David Greene26b86a02008-10-27 17:38:59 +00001384
1385 if (FoldSS) {
1386 // We need to give the new vreg the same stack slot as the
1387 // spilled interval.
1388 vrm.assignVirt2StackSlot(NewVReg, FoldSlot);
1389 }
1390
Evan Cheng018f9b02007-12-05 03:22:34 +00001391 HasUse = false;
1392 HasDef = false;
1393 CanFold = false;
Evan Chengc781a242009-05-03 18:32:42 +00001394 if (isNotInMIMap(MI))
Evan Cheng7e073ba2008-04-09 20:57:25 +00001395 break;
Evan Cheng018f9b02007-12-05 03:22:34 +00001396 goto RestartInstruction;
1397 }
1398 } else {
Evan Cheng9c3c2212008-06-06 07:54:39 +00001399 // We'll try to fold it later if it's profitable.
Evan Cheng3c75ba82008-04-01 21:37:32 +00001400 CanFold = canFoldMemoryOperand(MI, Ops, DefIsReMat);
Evan Cheng018f9b02007-12-05 03:22:34 +00001401 }
Evan Cheng9c3c2212008-06-06 07:54:39 +00001402 }
Evan Chengcddbb832007-11-30 21:23:43 +00001403
Evan Chengcddbb832007-11-30 21:23:43 +00001404 mop.setReg(NewVReg);
Evan Chengd70dbb52008-02-22 09:24:50 +00001405 if (mop.isImplicit())
1406 rewriteImplicitOps(li, MI, NewVReg, vrm);
Evan Chengcddbb832007-11-30 21:23:43 +00001407
1408 // Reuse NewVReg for other reads.
Evan Chengd70dbb52008-02-22 09:24:50 +00001409 for (unsigned j = 0, e = Ops.size(); j != e; ++j) {
1410 MachineOperand &mopj = MI->getOperand(Ops[j]);
1411 mopj.setReg(NewVReg);
1412 if (mopj.isImplicit())
1413 rewriteImplicitOps(li, MI, NewVReg, vrm);
1414 }
Evan Chengcddbb832007-11-30 21:23:43 +00001415
Evan Cheng81a03822007-11-17 00:40:40 +00001416 if (CreatedNewVReg) {
1417 if (DefIsReMat) {
1418 vrm.setVirtIsReMaterialized(NewVReg, ReMatDefMI/*, CanDelete*/);
Evan Chengd70dbb52008-02-22 09:24:50 +00001419 if (ReMatIds[VNI->id] == VirtRegMap::MAX_STACK_SLOT) {
Evan Cheng81a03822007-11-17 00:40:40 +00001420 // Each valnum may have its own remat id.
Evan Chengd70dbb52008-02-22 09:24:50 +00001421 ReMatIds[VNI->id] = vrm.assignVirtReMatId(NewVReg);
Evan Cheng81a03822007-11-17 00:40:40 +00001422 } else {
Evan Chengd70dbb52008-02-22 09:24:50 +00001423 vrm.assignVirtReMatId(NewVReg, ReMatIds[VNI->id]);
Evan Cheng81a03822007-11-17 00:40:40 +00001424 }
1425 if (!CanDelete || (HasUse && HasDef)) {
1426 // If this is a two-addr instruction then its use operands are
1427 // rematerializable but its def is not. It should be assigned a
1428 // stack slot.
1429 vrm.assignVirt2StackSlot(NewVReg, Slot);
1430 }
Evan Chengf2fbca62007-11-12 06:35:08 +00001431 } else {
Evan Chengf2fbca62007-11-12 06:35:08 +00001432 vrm.assignVirt2StackSlot(NewVReg, Slot);
1433 }
Evan Chengcb3c3302007-11-29 23:02:50 +00001434 } else if (HasUse && HasDef &&
1435 vrm.getStackSlot(NewVReg) == VirtRegMap::NO_STACK_SLOT) {
1436      // If this interval hasn't been assigned a stack slot (because the
1437      // earlier def is a deleted remat def), do it now.
1438 assert(Slot != VirtRegMap::NO_STACK_SLOT);
1439 vrm.assignVirt2StackSlot(NewVReg, Slot);
Evan Chengf2fbca62007-11-12 06:35:08 +00001440 }
1441
Evan Cheng313d4b82008-02-23 00:33:04 +00001442    // Re-matting an instruction with a virtual register use. Add the
1443 // register as an implicit use on the use MI.
1444 if (DefIsReMat && ImpUse)
1445 MI->addOperand(MachineOperand::CreateReg(ImpUse, false, true));
1446
Evan Cheng5b69eba2009-04-21 22:46:52 +00001447 // Create a new register interval for this spill / remat.
Evan Chengf2fbca62007-11-12 06:35:08 +00001448 LiveInterval &nI = getOrCreateInterval(NewVReg);
Evan Cheng81a03822007-11-17 00:40:40 +00001449 if (CreatedNewVReg) {
1450 NewLIs.push_back(&nI);
Evan Cheng1953d0c2007-11-29 10:12:14 +00001451 MBBVRegsMap.insert(std::make_pair(MI->getParent()->getNumber(), NewVReg));
Evan Cheng81a03822007-11-17 00:40:40 +00001452 if (TrySplit)
1453 vrm.setIsSplitFromReg(NewVReg, li.reg);
1454 }
Evan Chengf2fbca62007-11-12 06:35:08 +00001455
1456 if (HasUse) {
Evan Cheng81a03822007-11-17 00:40:40 +00001457 if (CreatedNewVReg) {
1458 LiveRange LR(getLoadIndex(index), getUseIndex(index)+1,
Lang Hames857c4e02009-06-17 21:01:20 +00001459 nI.getNextValue(0, 0, false, VNInfoAllocator));
Evan Cheng81a03822007-11-17 00:40:40 +00001460 DOUT << " +" << LR;
1461 nI.addRange(LR);
1462 } else {
1463 // Extend the split live interval to this def / use.
1464 unsigned End = getUseIndex(index)+1;
1465 LiveRange LR(nI.ranges[nI.ranges.size()-1].end, End,
1466 nI.getValNumInfo(nI.getNumValNums()-1));
1467 DOUT << " +" << LR;
1468 nI.addRange(LR);
1469 }
Evan Chengf2fbca62007-11-12 06:35:08 +00001470 }
1471 if (HasDef) {
1472 LiveRange LR(getDefIndex(index), getStoreIndex(index),
Lang Hames857c4e02009-06-17 21:01:20 +00001473 nI.getNextValue(0, 0, false, VNInfoAllocator));
Evan Chengf2fbca62007-11-12 06:35:08 +00001474 DOUT << " +" << LR;
1475 nI.addRange(LR);
1476 }
Evan Cheng81a03822007-11-17 00:40:40 +00001477
Evan Chengf2fbca62007-11-12 06:35:08 +00001478 DOUT << "\t\t\t\tAdded new interval: ";
Dan Gohman6f0d0242008-02-10 18:45:23 +00001479 nI.print(DOUT, tri_);
Evan Chengf2fbca62007-11-12 06:35:08 +00001480 DOUT << '\n';
1481 }
Evan Cheng018f9b02007-12-05 03:22:34 +00001482 return CanFold;
Evan Chengf2fbca62007-11-12 06:35:08 +00001483}
Evan Cheng81a03822007-11-17 00:40:40 +00001484bool LiveIntervals::anyKillInMBBAfterIdx(const LiveInterval &li,
Evan Cheng0cbb1162007-11-29 01:06:25 +00001485 const VNInfo *VNI,
1486 MachineBasicBlock *MBB, unsigned Idx) const {
Evan Cheng81a03822007-11-17 00:40:40 +00001487 unsigned End = getMBBEndIdx(MBB);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001488 for (unsigned j = 0, ee = VNI->kills.size(); j != ee; ++j) {
1489 unsigned KillIdx = VNI->kills[j];
1490 if (KillIdx > Idx && KillIdx < End)
1491 return true;
Evan Cheng81a03822007-11-17 00:40:40 +00001492 }
1493 return false;
1494}
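// Worked example (made-up indexes): if MBB covers instruction indexes
// [100, 140) and VNI has kills at 108 and 124, then a query with Idx == 116
// returns true (124 lies strictly between 116 and 140), while Idx == 132
// returns false.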
1495
Evan Cheng063284c2008-02-21 00:34:19 +00001496/// RewriteInfo - Keep track of machine instrs that will be rewritten
1497/// during spilling.
Dan Gohman844731a2008-05-13 00:00:25 +00001498namespace {
1499 struct RewriteInfo {
1500 unsigned Index;
1501 MachineInstr *MI;
1502 bool HasUse;
1503 bool HasDef;
1504 RewriteInfo(unsigned i, MachineInstr *mi, bool u, bool d)
1505 : Index(i), MI(mi), HasUse(u), HasDef(d) {}
1506 };
Evan Cheng063284c2008-02-21 00:34:19 +00001507
Dan Gohman844731a2008-05-13 00:00:25 +00001508 struct RewriteInfoCompare {
1509 bool operator()(const RewriteInfo &LHS, const RewriteInfo &RHS) const {
1510 return LHS.Index < RHS.Index;
1511 }
1512 };
1513}
Evan Cheng063284c2008-02-21 00:34:19 +00001514
Evan Chengf2fbca62007-11-12 06:35:08 +00001515void LiveIntervals::
Evan Cheng81a03822007-11-17 00:40:40 +00001516rewriteInstructionsForSpills(const LiveInterval &li, bool TrySplit,
Evan Chengf2fbca62007-11-12 06:35:08 +00001517 LiveInterval::Ranges::const_iterator &I,
Evan Cheng81a03822007-11-17 00:40:40 +00001518 MachineInstr *ReMatOrigDefMI, MachineInstr *ReMatDefMI,
Evan Chengf2fbca62007-11-12 06:35:08 +00001519 unsigned Slot, int LdSlot,
1520 bool isLoad, bool isLoadSS, bool DefIsReMat, bool CanDelete,
Evan Chengd70dbb52008-02-22 09:24:50 +00001521 VirtRegMap &vrm,
Evan Chengf2fbca62007-11-12 06:35:08 +00001522 const TargetRegisterClass* rc,
1523 SmallVector<int, 4> &ReMatIds,
Evan Cheng22f07ff2007-12-11 02:09:15 +00001524 const MachineLoopInfo *loopInfo,
Evan Cheng81a03822007-11-17 00:40:40 +00001525 BitVector &SpillMBBs,
Owen Anderson28998312008-08-13 22:28:50 +00001526 DenseMap<unsigned, std::vector<SRInfo> > &SpillIdxes,
Evan Cheng0cbb1162007-11-29 01:06:25 +00001527 BitVector &RestoreMBBs,
Owen Anderson28998312008-08-13 22:28:50 +00001528 DenseMap<unsigned, std::vector<SRInfo> > &RestoreIdxes,
1529 DenseMap<unsigned,unsigned> &MBBVRegsMap,
Evan Chengc781a242009-05-03 18:32:42 +00001530 std::vector<LiveInterval*> &NewLIs) {
Evan Cheng018f9b02007-12-05 03:22:34 +00001531 bool AllCanFold = true;
Evan Cheng81a03822007-11-17 00:40:40 +00001532 unsigned NewVReg = 0;
Evan Cheng063284c2008-02-21 00:34:19 +00001533 unsigned start = getBaseIndex(I->start);
Evan Chengf2fbca62007-11-12 06:35:08 +00001534 unsigned end = getBaseIndex(I->end-1) + InstrSlots::NUM;
Evan Chengf2fbca62007-11-12 06:35:08 +00001535
Evan Cheng063284c2008-02-21 00:34:19 +00001536  // First collect all the defs / uses in this live range that will be rewritten.
Evan Cheng7e073ba2008-04-09 20:57:25 +00001537 // Make sure they are sorted according to instruction index.
Evan Cheng063284c2008-02-21 00:34:19 +00001538 std::vector<RewriteInfo> RewriteMIs;
Evan Chengd70dbb52008-02-22 09:24:50 +00001539 for (MachineRegisterInfo::reg_iterator ri = mri_->reg_begin(li.reg),
1540 re = mri_->reg_end(); ri != re; ) {
Evan Cheng419852c2008-04-03 16:39:43 +00001541 MachineInstr *MI = &*ri;
Evan Cheng063284c2008-02-21 00:34:19 +00001542 MachineOperand &O = ri.getOperand();
1543 ++ri;
Evan Cheng24d2f8a2008-03-31 07:53:30 +00001544 assert(!O.isImplicit() && "Spilling register that's used as implicit use?");
Evan Cheng063284c2008-02-21 00:34:19 +00001545 unsigned index = getInstructionIndex(MI);
1546 if (index < start || index >= end)
1547 continue;
Evan Cheng79a796c2008-07-12 01:56:02 +00001548 if (O.isUse() && !li.liveAt(getUseIndex(index)))
1549 // Must be defined by an implicit def. It should not be spilled. Note,
1550      // this is for correctness reasons. e.g.
1551 // 8 %reg1024<def> = IMPLICIT_DEF
1552 // 12 %reg1024<def> = INSERT_SUBREG %reg1024<kill>, %reg1025, 2
1553      // The live range [12, 14) is not part of the r1024 live interval since
1554      // it's defined by an implicit def. It will not conflict with the live
1555      // interval of r1025. Now suppose both registers are spilled; you can
Evan Chengb9890ae2008-07-12 02:22:07 +00001556      // easily see a situation where both registers are reloaded before
Evan Cheng79a796c2008-07-12 01:56:02 +00001557      // the INSERT_SUBREG and the two target registers would overlap.
1558 continue;
Evan Cheng063284c2008-02-21 00:34:19 +00001559 RewriteMIs.push_back(RewriteInfo(index, MI, O.isUse(), O.isDef()));
1560 }
1561 std::sort(RewriteMIs.begin(), RewriteMIs.end(), RewriteInfoCompare());
1562
Evan Cheng313d4b82008-02-23 00:33:04 +00001563 unsigned ImpUse = DefIsReMat ? getReMatImplicitUse(li, ReMatDefMI) : 0;
Evan Cheng063284c2008-02-21 00:34:19 +00001564 // Now rewrite the defs and uses.
1565 for (unsigned i = 0, e = RewriteMIs.size(); i != e; ) {
1566 RewriteInfo &rwi = RewriteMIs[i];
1567 ++i;
1568 unsigned index = rwi.Index;
1569 bool MIHasUse = rwi.HasUse;
1570 bool MIHasDef = rwi.HasDef;
1571 MachineInstr *MI = rwi.MI;
1572    // If MI defs and/or uses the same register multiple times, then there
1573 // are multiple entries.
Evan Cheng313d4b82008-02-23 00:33:04 +00001574 unsigned NumUses = MIHasUse;
Evan Cheng063284c2008-02-21 00:34:19 +00001575 while (i != e && RewriteMIs[i].MI == MI) {
1576 assert(RewriteMIs[i].Index == index);
Evan Cheng313d4b82008-02-23 00:33:04 +00001577 bool isUse = RewriteMIs[i].HasUse;
1578 if (isUse) ++NumUses;
1579 MIHasUse |= isUse;
Evan Cheng063284c2008-02-21 00:34:19 +00001580 MIHasDef |= RewriteMIs[i].HasDef;
1581 ++i;
1582 }
Evan Cheng81a03822007-11-17 00:40:40 +00001583 MachineBasicBlock *MBB = MI->getParent();
Evan Cheng313d4b82008-02-23 00:33:04 +00001584
Evan Cheng0a891ed2008-05-23 23:00:04 +00001585 if (ImpUse && MI != ReMatDefMI) {
Evan Cheng313d4b82008-02-23 00:33:04 +00001586      // Re-matting an instruction with a virtual register use. Update the
Evan Cheng24d2f8a2008-03-31 07:53:30 +00001587 // register interval's spill weight to HUGE_VALF to prevent it from
1588 // being spilled.
Evan Cheng313d4b82008-02-23 00:33:04 +00001589 LiveInterval &ImpLi = getInterval(ImpUse);
Evan Cheng24d2f8a2008-03-31 07:53:30 +00001590 ImpLi.weight = HUGE_VALF;
Evan Cheng313d4b82008-02-23 00:33:04 +00001591 }
1592
Evan Cheng063284c2008-02-21 00:34:19 +00001593 unsigned MBBId = MBB->getNumber();
Evan Cheng018f9b02007-12-05 03:22:34 +00001594 unsigned ThisVReg = 0;
Evan Cheng70306f82007-12-03 09:58:48 +00001595 if (TrySplit) {
Owen Anderson28998312008-08-13 22:28:50 +00001596 DenseMap<unsigned,unsigned>::iterator NVI = MBBVRegsMap.find(MBBId);
Evan Cheng1953d0c2007-11-29 10:12:14 +00001597 if (NVI != MBBVRegsMap.end()) {
Evan Cheng018f9b02007-12-05 03:22:34 +00001598 ThisVReg = NVI->second;
Evan Cheng1953d0c2007-11-29 10:12:14 +00001599 // One common case:
1600 // x = use
1601 // ...
1602 // ...
1603 // def = ...
1604 // = use
1605        // It's better to start a new interval to avoid artificially
1606        // extending the new interval.
Evan Cheng1953d0c2007-11-29 10:12:14 +00001607 if (MIHasDef && !MIHasUse) {
1608 MBBVRegsMap.erase(MBB->getNumber());
Evan Cheng018f9b02007-12-05 03:22:34 +00001609 ThisVReg = 0;
Evan Cheng1953d0c2007-11-29 10:12:14 +00001610 }
1611 }
Evan Chengcada2452007-11-28 01:28:46 +00001612 }
Evan Cheng018f9b02007-12-05 03:22:34 +00001613
1614 bool IsNew = ThisVReg == 0;
1615 if (IsNew) {
1616 // This ends the previous live interval. If all of its def / use
1617 // can be folded, give it a low spill weight.
1618 if (NewVReg && TrySplit && AllCanFold) {
1619 LiveInterval &nI = getOrCreateInterval(NewVReg);
1620 nI.weight /= 10.0F;
1621 }
1622 AllCanFold = true;
1623 }
1624 NewVReg = ThisVReg;
1625
Evan Cheng81a03822007-11-17 00:40:40 +00001626 bool HasDef = false;
1627 bool HasUse = false;
Evan Chengd70dbb52008-02-22 09:24:50 +00001628 bool CanFold = rewriteInstructionForSpills(li, I->valno, TrySplit,
Evan Cheng9c3c2212008-06-06 07:54:39 +00001629 index, end, MI, ReMatOrigDefMI, ReMatDefMI,
1630 Slot, LdSlot, isLoad, isLoadSS, DefIsReMat,
1631 CanDelete, vrm, rc, ReMatIds, loopInfo, NewVReg,
Evan Chengc781a242009-05-03 18:32:42 +00001632 ImpUse, HasDef, HasUse, MBBVRegsMap, NewLIs);
Evan Cheng81a03822007-11-17 00:40:40 +00001633 if (!HasDef && !HasUse)
1634 continue;
1635
Evan Cheng018f9b02007-12-05 03:22:34 +00001636 AllCanFold &= CanFold;
1637
Evan Cheng81a03822007-11-17 00:40:40 +00001638 // Update weight of spill interval.
1639 LiveInterval &nI = getOrCreateInterval(NewVReg);
Evan Cheng70306f82007-12-03 09:58:48 +00001640 if (!TrySplit) {
Evan Cheng81a03822007-11-17 00:40:40 +00001641 // The spill weight is now infinity as it cannot be spilled again.
1642 nI.weight = HUGE_VALF;
Evan Cheng0cbb1162007-11-29 01:06:25 +00001643 continue;
Evan Cheng81a03822007-11-17 00:40:40 +00001644 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00001645
1646 // Keep track of the last def and first use in each MBB.
Evan Cheng0cbb1162007-11-29 01:06:25 +00001647 if (HasDef) {
1648 if (MI != ReMatOrigDefMI || !CanDelete) {
Evan Cheng0cbb1162007-11-29 01:06:25 +00001649 bool HasKill = false;
1650 if (!HasUse)
1651 HasKill = anyKillInMBBAfterIdx(li, I->valno, MBB, getDefIndex(index));
1652 else {
Evan Cheng1953d0c2007-11-29 10:12:14 +00001653          // If this is a two-address instruction, then this index starts a new VNInfo.
Evan Cheng3f32d652008-06-04 09:18:41 +00001654 const VNInfo *VNI = li.findDefinedVNInfo(getDefIndex(index));
Evan Cheng0cbb1162007-11-29 01:06:25 +00001655 if (VNI)
1656 HasKill = anyKillInMBBAfterIdx(li, VNI, MBB, getDefIndex(index));
1657 }
Owen Anderson28998312008-08-13 22:28:50 +00001658 DenseMap<unsigned, std::vector<SRInfo> >::iterator SII =
Evan Chenge3110d02007-12-01 04:42:39 +00001659 SpillIdxes.find(MBBId);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001660 if (!HasKill) {
Evan Cheng1953d0c2007-11-29 10:12:14 +00001661 if (SII == SpillIdxes.end()) {
1662 std::vector<SRInfo> S;
1663 S.push_back(SRInfo(index, NewVReg, true));
1664 SpillIdxes.insert(std::make_pair(MBBId, S));
1665 } else if (SII->second.back().vreg != NewVReg) {
1666 SII->second.push_back(SRInfo(index, NewVReg, true));
1667 } else if ((int)index > SII->second.back().index) {
Evan Cheng0cbb1162007-11-29 01:06:25 +00001668 // If there is an earlier def and this is a two-address
1669 // instruction, then it's not possible to fold the store (which
1670 // would also fold the load).
Evan Cheng1953d0c2007-11-29 10:12:14 +00001671 SRInfo &Info = SII->second.back();
1672 Info.index = index;
1673 Info.canFold = !HasUse;
Evan Cheng0cbb1162007-11-29 01:06:25 +00001674 }
1675 SpillMBBs.set(MBBId);
Evan Chenge3110d02007-12-01 04:42:39 +00001676 } else if (SII != SpillIdxes.end() &&
1677 SII->second.back().vreg == NewVReg &&
1678 (int)index > SII->second.back().index) {
1679 // There is an earlier def that's not killed (must be two-address).
1680 // The spill is no longer needed.
1681 SII->second.pop_back();
1682 if (SII->second.empty()) {
1683 SpillIdxes.erase(MBBId);
1684 SpillMBBs.reset(MBBId);
1685 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00001686 }
1687 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00001688 }
1689
1690 if (HasUse) {
Owen Anderson28998312008-08-13 22:28:50 +00001691 DenseMap<unsigned, std::vector<SRInfo> >::iterator SII =
Evan Cheng0cbb1162007-11-29 01:06:25 +00001692 SpillIdxes.find(MBBId);
Evan Cheng1953d0c2007-11-29 10:12:14 +00001693 if (SII != SpillIdxes.end() &&
1694 SII->second.back().vreg == NewVReg &&
1695 (int)index > SII->second.back().index)
Evan Cheng0cbb1162007-11-29 01:06:25 +00001696        // Use(s) follow the last def; it's not safe to fold the spill.
Evan Cheng1953d0c2007-11-29 10:12:14 +00001697 SII->second.back().canFold = false;
Owen Anderson28998312008-08-13 22:28:50 +00001698 DenseMap<unsigned, std::vector<SRInfo> >::iterator RII =
Evan Cheng0cbb1162007-11-29 01:06:25 +00001699 RestoreIdxes.find(MBBId);
Evan Cheng1953d0c2007-11-29 10:12:14 +00001700 if (RII != RestoreIdxes.end() && RII->second.back().vreg == NewVReg)
Evan Cheng0cbb1162007-11-29 01:06:25 +00001701 // If we are splitting live intervals, only fold if it's the first
1702 // use and there isn't another use later in the MBB.
Evan Cheng1953d0c2007-11-29 10:12:14 +00001703 RII->second.back().canFold = false;
Evan Cheng0cbb1162007-11-29 01:06:25 +00001704 else if (IsNew) {
1705 // Only need a reload if there isn't an earlier def / use.
Evan Cheng1953d0c2007-11-29 10:12:14 +00001706 if (RII == RestoreIdxes.end()) {
1707 std::vector<SRInfo> Infos;
1708 Infos.push_back(SRInfo(index, NewVReg, true));
1709 RestoreIdxes.insert(std::make_pair(MBBId, Infos));
1710 } else {
1711 RII->second.push_back(SRInfo(index, NewVReg, true));
1712 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00001713 RestoreMBBs.set(MBBId);
1714 }
1715 }
1716
1717 // Update spill weight.
Evan Cheng22f07ff2007-12-11 02:09:15 +00001718 unsigned loopDepth = loopInfo->getLoopDepth(MBB);
Evan Chengc3417602008-06-21 06:45:54 +00001719 nI.weight += getSpillWeight(HasDef, HasUse, loopDepth);
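      // (Assuming the usual definition of getSpillWeight in this code base,
      // the added amount is roughly (HasDef + HasUse) scaled by 10^loopDepth,
      // so defs and uses in deeper loops make the interval costlier to spill.)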
Evan Chengf2fbca62007-11-12 06:35:08 +00001720 }
Evan Cheng018f9b02007-12-05 03:22:34 +00001721
1722 if (NewVReg && TrySplit && AllCanFold) {
1723 // If all of its def / use can be folded, give it a low spill weight.
1724 LiveInterval &nI = getOrCreateInterval(NewVReg);
1725 nI.weight /= 10.0F;
1726 }
Evan Chengf2fbca62007-11-12 06:35:08 +00001727}
1728
Evan Cheng1953d0c2007-11-29 10:12:14 +00001729bool LiveIntervals::alsoFoldARestore(int Id, int index, unsigned vr,
1730 BitVector &RestoreMBBs,
Owen Anderson28998312008-08-13 22:28:50 +00001731 DenseMap<unsigned,std::vector<SRInfo> > &RestoreIdxes) {
Evan Cheng1953d0c2007-11-29 10:12:14 +00001732 if (!RestoreMBBs[Id])
1733 return false;
1734 std::vector<SRInfo> &Restores = RestoreIdxes[Id];
1735 for (unsigned i = 0, e = Restores.size(); i != e; ++i)
1736 if (Restores[i].index == index &&
1737 Restores[i].vreg == vr &&
1738 Restores[i].canFold)
1739 return true;
1740 return false;
1741}
1742
1743void LiveIntervals::eraseRestoreInfo(int Id, int index, unsigned vr,
1744 BitVector &RestoreMBBs,
Owen Anderson28998312008-08-13 22:28:50 +00001745 DenseMap<unsigned,std::vector<SRInfo> > &RestoreIdxes) {
Evan Cheng1953d0c2007-11-29 10:12:14 +00001746 if (!RestoreMBBs[Id])
1747 return;
1748 std::vector<SRInfo> &Restores = RestoreIdxes[Id];
1749 for (unsigned i = 0, e = Restores.size(); i != e; ++i)
1750 if (Restores[i].index == index && Restores[i].vreg)
1751 Restores[i].index = -1;
1752}
Evan Cheng81a03822007-11-17 00:40:40 +00001753
Evan Cheng4cce6b42008-04-11 17:53:36 +00001754/// handleSpilledImpDefs - Remove IMPLICIT_DEF instructions which are being
1755/// spilled and create empty intervals for their uses.
1756void
1757LiveIntervals::handleSpilledImpDefs(const LiveInterval &li, VirtRegMap &vrm,
1758 const TargetRegisterClass* rc,
1759 std::vector<LiveInterval*> &NewLIs) {
Evan Cheng419852c2008-04-03 16:39:43 +00001760 for (MachineRegisterInfo::reg_iterator ri = mri_->reg_begin(li.reg),
1761 re = mri_->reg_end(); ri != re; ) {
Evan Cheng4cce6b42008-04-11 17:53:36 +00001762 MachineOperand &O = ri.getOperand();
Evan Cheng419852c2008-04-03 16:39:43 +00001763 MachineInstr *MI = &*ri;
1764 ++ri;
Evan Cheng4cce6b42008-04-11 17:53:36 +00001765 if (O.isDef()) {
1766 assert(MI->getOpcode() == TargetInstrInfo::IMPLICIT_DEF &&
1767 "Register def was not rewritten?");
1768 RemoveMachineInstrFromMaps(MI);
1769 vrm.RemoveMachineInstrFromMaps(MI);
1770 MI->eraseFromParent();
1771 } else {
1772      // This must be a use of an implicit_def so it's not part of the live
1773 // interval. Create a new empty live interval for it.
1774 // FIXME: Can we simply erase some of the instructions? e.g. Stores?
1775 unsigned NewVReg = mri_->createVirtualRegister(rc);
1776 vrm.grow();
1777 vrm.setIsImplicitlyDefined(NewVReg);
1778 NewLIs.push_back(&getOrCreateInterval(NewVReg));
1779 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
1780 MachineOperand &MO = MI->getOperand(i);
Dan Gohmand735b802008-10-03 15:45:36 +00001781 if (MO.isReg() && MO.getReg() == li.reg)
Evan Cheng4cce6b42008-04-11 17:53:36 +00001782 MO.setReg(NewVReg);
1783 }
1784 }
Evan Cheng419852c2008-04-03 16:39:43 +00001785 }
1786}
1787
Evan Chengf2fbca62007-11-12 06:35:08 +00001788std::vector<LiveInterval*> LiveIntervals::
Owen Andersond6664312008-08-18 18:05:32 +00001789addIntervalsForSpillsFast(const LiveInterval &li,
1790 const MachineLoopInfo *loopInfo,
Evan Chengc781a242009-05-03 18:32:42 +00001791 VirtRegMap &vrm) {
Owen Anderson17197312008-08-18 23:41:04 +00001792 unsigned slot = vrm.assignVirt2StackSlot(li.reg);
Owen Andersond6664312008-08-18 18:05:32 +00001793
1794 std::vector<LiveInterval*> added;
1795
1796 assert(li.weight != HUGE_VALF &&
1797 "attempt to spill already spilled interval!");
1798
1799 DOUT << "\t\t\t\tadding intervals for spills for interval: ";
1800 DEBUG(li.dump());
1801 DOUT << '\n';
1802
1803 const TargetRegisterClass* rc = mri_->getRegClass(li.reg);
1804
Owen Andersona41e47a2008-08-19 22:12:11 +00001805 MachineRegisterInfo::reg_iterator RI = mri_->reg_begin(li.reg);
1806 while (RI != mri_->reg_end()) {
1807 MachineInstr* MI = &*RI;
1808
1809 SmallVector<unsigned, 2> Indices;
1810 bool HasUse = false;
1811 bool HasDef = false;
1812
1813 for (unsigned i = 0; i != MI->getNumOperands(); ++i) {
1814 MachineOperand& mop = MI->getOperand(i);
Dan Gohmand735b802008-10-03 15:45:36 +00001815 if (!mop.isReg() || mop.getReg() != li.reg) continue;
Owen Andersona41e47a2008-08-19 22:12:11 +00001816
1817 HasUse |= MI->getOperand(i).isUse();
1818 HasDef |= MI->getOperand(i).isDef();
1819
1820 Indices.push_back(i);
1821 }
1822
1823 if (!tryFoldMemoryOperand(MI, vrm, NULL, getInstructionIndex(MI),
1824 Indices, true, slot, li.reg)) {
1825 unsigned NewVReg = mri_->createVirtualRegister(rc);
Owen Anderson9a032932008-08-18 21:20:32 +00001826 vrm.grow();
Owen Anderson17197312008-08-18 23:41:04 +00001827 vrm.assignVirt2StackSlot(NewVReg, slot);
1828
Owen Andersona41e47a2008-08-19 22:12:11 +00001829 // create a new register for this spill
1830 LiveInterval &nI = getOrCreateInterval(NewVReg);
Owen Andersond6664312008-08-18 18:05:32 +00001831
Owen Andersona41e47a2008-08-19 22:12:11 +00001832      // The spill weight is now infinity as it cannot be spilled again.
1834 nI.weight = HUGE_VALF;
1835
1836 // Rewrite register operands to use the new vreg.
1837 for (SmallVectorImpl<unsigned>::iterator I = Indices.begin(),
1838 E = Indices.end(); I != E; ++I) {
1839 MI->getOperand(*I).setReg(NewVReg);
1840
1841 if (MI->getOperand(*I).isUse())
1842 MI->getOperand(*I).setIsKill(true);
1843 }
1844
1845 // Fill in the new live interval.
1846 unsigned index = getInstructionIndex(MI);
1847 if (HasUse) {
1848 LiveRange LR(getLoadIndex(index), getUseIndex(index),
Lang Hames857c4e02009-06-17 21:01:20 +00001849 nI.getNextValue(0, 0, false, getVNInfoAllocator()));
Owen Andersona41e47a2008-08-19 22:12:11 +00001850 DOUT << " +" << LR;
1851 nI.addRange(LR);
1852 vrm.addRestorePoint(NewVReg, MI);
1853 }
1854 if (HasDef) {
1855 LiveRange LR(getDefIndex(index), getStoreIndex(index),
Lang Hames857c4e02009-06-17 21:01:20 +00001856 nI.getNextValue(0, 0, false, getVNInfoAllocator()));
Owen Andersona41e47a2008-08-19 22:12:11 +00001857 DOUT << " +" << LR;
1858 nI.addRange(LR);
1859 vrm.addSpillPoint(NewVReg, true, MI);
1860 }
1861
Owen Anderson17197312008-08-18 23:41:04 +00001862 added.push_back(&nI);
Owen Anderson8dc2cbe2008-08-18 18:38:12 +00001863
Owen Andersona41e47a2008-08-19 22:12:11 +00001864 DOUT << "\t\t\t\tadded new interval: ";
1865 DEBUG(nI.dump());
1866 DOUT << '\n';
Owen Andersona41e47a2008-08-19 22:12:11 +00001867 }
Owen Anderson9a032932008-08-18 21:20:32 +00001868
Owen Anderson9a032932008-08-18 21:20:32 +00001869
Owen Andersona41e47a2008-08-19 22:12:11 +00001870 RI = mri_->reg_begin(li.reg);
Owen Andersond6664312008-08-18 18:05:32 +00001871 }
Owen Andersond6664312008-08-18 18:05:32 +00001872
1873 return added;
1874}
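// Note (descriptive summary, not from the original comments): the fast path
// above assigns a single stack slot for the whole interval and, whenever a
// fold fails, creates a fresh un-spillable vreg per instruction with explicit
// spill / restore points; it does not attempt re-materialization or interval
// splitting the way addIntervalsForSpills below does.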
1875
1876std::vector<LiveInterval*> LiveIntervals::
Evan Cheng81a03822007-11-17 00:40:40 +00001877addIntervalsForSpills(const LiveInterval &li,
Evan Chengdc377862008-09-30 15:44:16 +00001878 SmallVectorImpl<LiveInterval*> &SpillIs,
Evan Chengc781a242009-05-03 18:32:42 +00001879 const MachineLoopInfo *loopInfo, VirtRegMap &vrm) {
Owen Andersonae339ba2008-08-19 00:17:30 +00001880
1881 if (EnableFastSpilling)
Evan Chengc781a242009-05-03 18:32:42 +00001882 return addIntervalsForSpillsFast(li, loopInfo, vrm);
Owen Andersonae339ba2008-08-19 00:17:30 +00001883
Evan Chengf2fbca62007-11-12 06:35:08 +00001884 assert(li.weight != HUGE_VALF &&
1885 "attempt to spill already spilled interval!");
1886
1887 DOUT << "\t\t\t\tadding intervals for spills for interval: ";
Dan Gohman6f0d0242008-02-10 18:45:23 +00001888 li.print(DOUT, tri_);
Evan Chengf2fbca62007-11-12 06:35:08 +00001889 DOUT << '\n';
1890
Evan Cheng72eeb942008-12-05 17:00:16 +00001891  // Each bit specifies whether a spill is required in the MBB.
Evan Cheng81a03822007-11-17 00:40:40 +00001892 BitVector SpillMBBs(mf_->getNumBlockIDs());
Owen Anderson28998312008-08-13 22:28:50 +00001893 DenseMap<unsigned, std::vector<SRInfo> > SpillIdxes;
Evan Cheng0cbb1162007-11-29 01:06:25 +00001894 BitVector RestoreMBBs(mf_->getNumBlockIDs());
Owen Anderson28998312008-08-13 22:28:50 +00001895 DenseMap<unsigned, std::vector<SRInfo> > RestoreIdxes;
1896 DenseMap<unsigned,unsigned> MBBVRegsMap;
Evan Chengf2fbca62007-11-12 06:35:08 +00001897 std::vector<LiveInterval*> NewLIs;
Evan Chengd70dbb52008-02-22 09:24:50 +00001898 const TargetRegisterClass* rc = mri_->getRegClass(li.reg);
Evan Chengf2fbca62007-11-12 06:35:08 +00001899
1900 unsigned NumValNums = li.getNumValNums();
1901 SmallVector<MachineInstr*, 4> ReMatDefs;
1902 ReMatDefs.resize(NumValNums, NULL);
1903 SmallVector<MachineInstr*, 4> ReMatOrigDefs;
1904 ReMatOrigDefs.resize(NumValNums, NULL);
1905 SmallVector<int, 4> ReMatIds;
1906 ReMatIds.resize(NumValNums, VirtRegMap::MAX_STACK_SLOT);
1907 BitVector ReMatDelete(NumValNums);
1908 unsigned Slot = VirtRegMap::MAX_STACK_SLOT;
1909
Evan Cheng81a03822007-11-17 00:40:40 +00001910  // Spilling a split live interval. It cannot be split any further. Also,
1911  // it's guaranteed to be a single val# / range interval.
1912 if (vrm.getPreSplitReg(li.reg)) {
1913 vrm.setIsSplitFromReg(li.reg, 0);
Evan Chengd120ffd2007-12-05 10:24:35 +00001914 // Unset the split kill marker on the last use.
1915 unsigned KillIdx = vrm.getKillPoint(li.reg);
1916 if (KillIdx) {
1917 MachineInstr *KillMI = getInstructionFromIndex(KillIdx);
1918 assert(KillMI && "Last use disappeared?");
1919 int KillOp = KillMI->findRegisterUseOperandIdx(li.reg, true);
1920 assert(KillOp != -1 && "Last use disappeared?");
Chris Lattnerf7382302007-12-30 21:56:09 +00001921 KillMI->getOperand(KillOp).setIsKill(false);
Evan Chengd120ffd2007-12-05 10:24:35 +00001922 }
Evan Chengadf85902007-12-05 09:51:10 +00001923 vrm.removeKillPoint(li.reg);
Evan Cheng81a03822007-11-17 00:40:40 +00001924 bool DefIsReMat = vrm.isReMaterialized(li.reg);
1925 Slot = vrm.getStackSlot(li.reg);
1926 assert(Slot != VirtRegMap::MAX_STACK_SLOT);
1927 MachineInstr *ReMatDefMI = DefIsReMat ?
1928 vrm.getReMaterializedMI(li.reg) : NULL;
1929 int LdSlot = 0;
1930 bool isLoadSS = DefIsReMat && tii_->isLoadFromStackSlot(ReMatDefMI, LdSlot);
1931 bool isLoad = isLoadSS ||
Dan Gohman15511cf2008-12-03 18:15:48 +00001932 (DefIsReMat && (ReMatDefMI->getDesc().canFoldAsLoad()));
Evan Cheng81a03822007-11-17 00:40:40 +00001933 bool IsFirstRange = true;
1934 for (LiveInterval::Ranges::const_iterator
1935 I = li.ranges.begin(), E = li.ranges.end(); I != E; ++I) {
1936 // If this is a split live interval with multiple ranges, it means there
1937 // are two-address instructions that re-defined the value. Only the
1938 // first def can be rematerialized!
1939 if (IsFirstRange) {
Evan Chengcb3c3302007-11-29 23:02:50 +00001940 // Note ReMatOrigDefMI has already been deleted.
Evan Cheng81a03822007-11-17 00:40:40 +00001941 rewriteInstructionsForSpills(li, false, I, NULL, ReMatDefMI,
1942 Slot, LdSlot, isLoad, isLoadSS, DefIsReMat,
Evan Chengd70dbb52008-02-22 09:24:50 +00001943 false, vrm, rc, ReMatIds, loopInfo,
Evan Cheng0cbb1162007-11-29 01:06:25 +00001944 SpillMBBs, SpillIdxes, RestoreMBBs, RestoreIdxes,
Evan Chengc781a242009-05-03 18:32:42 +00001945 MBBVRegsMap, NewLIs);
Evan Cheng81a03822007-11-17 00:40:40 +00001946 } else {
1947 rewriteInstructionsForSpills(li, false, I, NULL, 0,
1948 Slot, 0, false, false, false,
Evan Chengd70dbb52008-02-22 09:24:50 +00001949 false, vrm, rc, ReMatIds, loopInfo,
Evan Cheng0cbb1162007-11-29 01:06:25 +00001950 SpillMBBs, SpillIdxes, RestoreMBBs, RestoreIdxes,
Evan Chengc781a242009-05-03 18:32:42 +00001951 MBBVRegsMap, NewLIs);
Evan Cheng81a03822007-11-17 00:40:40 +00001952 }
1953 IsFirstRange = false;
1954 }
Evan Cheng419852c2008-04-03 16:39:43 +00001955
Evan Cheng4cce6b42008-04-11 17:53:36 +00001956 handleSpilledImpDefs(li, vrm, rc, NewLIs);
Evan Cheng81a03822007-11-17 00:40:40 +00001957 return NewLIs;
1958 }
1959
1960 bool TrySplit = SplitAtBB && !intervalIsInOneMBB(li);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001961 if (SplitLimit != -1 && (int)numSplits >= SplitLimit)
1962 TrySplit = false;
1963 if (TrySplit)
1964 ++numSplits;
Evan Chengf2fbca62007-11-12 06:35:08 +00001965 bool NeedStackSlot = false;
1966 for (LiveInterval::const_vni_iterator i = li.vni_begin(), e = li.vni_end();
1967 i != e; ++i) {
1968 const VNInfo *VNI = *i;
1969 unsigned VN = VNI->id;
Lang Hames857c4e02009-06-17 21:01:20 +00001970 if (VNI->isUnused())
Evan Chengf2fbca62007-11-12 06:35:08 +00001971 continue; // Dead val#.
1972 // Is the def for the val# rematerializable?
Lang Hames857c4e02009-06-17 21:01:20 +00001973 MachineInstr *ReMatDefMI = VNI->isDefAccurate()
1974 ? getInstructionFromIndex(VNI->def) : 0;
Evan Cheng5ef3a042007-12-06 00:01:56 +00001975 bool dummy;
Evan Chengdc377862008-09-30 15:44:16 +00001976 if (ReMatDefMI && isReMaterializable(li, VNI, ReMatDefMI, SpillIs, dummy)) {
Evan Chengf2fbca62007-11-12 06:35:08 +00001977 // Remember how to remat the def of this val#.
Evan Cheng81a03822007-11-17 00:40:40 +00001978 ReMatOrigDefs[VN] = ReMatDefMI;
Dan Gohman2c3f7ae2008-07-17 23:49:46 +00001979 // Original def may be modified so we have to make a copy here.
Evan Cheng1ed99222008-07-19 00:37:25 +00001980 MachineInstr *Clone = mf_->CloneMachineInstr(ReMatDefMI);
1981 ClonedMIs.push_back(Clone);
1982 ReMatDefs[VN] = Clone;
Evan Chengf2fbca62007-11-12 06:35:08 +00001983
1984 bool CanDelete = true;
Lang Hames857c4e02009-06-17 21:01:20 +00001985 if (VNI->hasPHIKill()) {
Evan Chengc3fc7d92007-11-29 09:49:23 +00001986      // A kill is a phi node, so not all of its uses can be rematerialized.
Evan Chengf2fbca62007-11-12 06:35:08 +00001987 // It must not be deleted.
Evan Chengc3fc7d92007-11-29 09:49:23 +00001988 CanDelete = false;
1989 // Need a stack slot if there is any live range where uses cannot be
1990 // rematerialized.
1991 NeedStackSlot = true;
Evan Chengf2fbca62007-11-12 06:35:08 +00001992 }
Evan Chengf2fbca62007-11-12 06:35:08 +00001993 if (CanDelete)
1994 ReMatDelete.set(VN);
1995 } else {
1996 // Need a stack slot if there is any live range where uses cannot be
1997 // rematerialized.
1998 NeedStackSlot = true;
1999 }
2000 }
2001
2002 // One stack slot per live interval.
Owen Andersonb98bbb72009-03-26 18:53:38 +00002003 if (NeedStackSlot && vrm.getPreSplitReg(li.reg) == 0) {
2004 if (vrm.getStackSlot(li.reg) == VirtRegMap::NO_STACK_SLOT)
2005 Slot = vrm.assignVirt2StackSlot(li.reg);
2006
2007 // This case only occurs when the prealloc splitter has already assigned
2008 // a stack slot to this vreg.
2009 else
2010 Slot = vrm.getStackSlot(li.reg);
2011 }
Evan Chengf2fbca62007-11-12 06:35:08 +00002012
2013 // Create new intervals and rewrite defs and uses.
2014 for (LiveInterval::Ranges::const_iterator
2015 I = li.ranges.begin(), E = li.ranges.end(); I != E; ++I) {
Evan Cheng81a03822007-11-17 00:40:40 +00002016 MachineInstr *ReMatDefMI = ReMatDefs[I->valno->id];
2017 MachineInstr *ReMatOrigDefMI = ReMatOrigDefs[I->valno->id];
2018 bool DefIsReMat = ReMatDefMI != NULL;
Evan Chengf2fbca62007-11-12 06:35:08 +00002019 bool CanDelete = ReMatDelete[I->valno->id];
2020 int LdSlot = 0;
Evan Cheng81a03822007-11-17 00:40:40 +00002021 bool isLoadSS = DefIsReMat && tii_->isLoadFromStackSlot(ReMatDefMI, LdSlot);
Evan Chengf2fbca62007-11-12 06:35:08 +00002022 bool isLoad = isLoadSS ||
Dan Gohman15511cf2008-12-03 18:15:48 +00002023 (DefIsReMat && ReMatDefMI->getDesc().canFoldAsLoad());
Evan Cheng81a03822007-11-17 00:40:40 +00002024 rewriteInstructionsForSpills(li, TrySplit, I, ReMatOrigDefMI, ReMatDefMI,
Evan Cheng0cbb1162007-11-29 01:06:25 +00002025 Slot, LdSlot, isLoad, isLoadSS, DefIsReMat,
Evan Chengd70dbb52008-02-22 09:24:50 +00002026 CanDelete, vrm, rc, ReMatIds, loopInfo,
Evan Cheng0cbb1162007-11-29 01:06:25 +00002027 SpillMBBs, SpillIdxes, RestoreMBBs, RestoreIdxes,
Evan Chengc781a242009-05-03 18:32:42 +00002028 MBBVRegsMap, NewLIs);
Evan Chengf2fbca62007-11-12 06:35:08 +00002029 }
2030
Evan Cheng0cbb1162007-11-29 01:06:25 +00002031 // Insert spills / restores if we are splitting.
Evan Cheng419852c2008-04-03 16:39:43 +00002032 if (!TrySplit) {
Evan Cheng4cce6b42008-04-11 17:53:36 +00002033 handleSpilledImpDefs(li, vrm, rc, NewLIs);
Evan Cheng1953d0c2007-11-29 10:12:14 +00002034 return NewLIs;
Evan Cheng419852c2008-04-03 16:39:43 +00002035 }
Evan Cheng1953d0c2007-11-29 10:12:14 +00002036
Evan Chengb50bb8c2007-12-05 08:16:32 +00002037 SmallPtrSet<LiveInterval*, 4> AddedKill;
Evan Chengaee4af62007-12-02 08:30:39 +00002038 SmallVector<unsigned, 2> Ops;
Evan Cheng1953d0c2007-11-29 10:12:14 +00002039 if (NeedStackSlot) {
2040 int Id = SpillMBBs.find_first();
2041 while (Id != -1) {
2042 std::vector<SRInfo> &spills = SpillIdxes[Id];
2043 for (unsigned i = 0, e = spills.size(); i != e; ++i) {
2044 int index = spills[i].index;
2045 unsigned VReg = spills[i].vreg;
Evan Cheng597d10d2007-12-04 00:32:23 +00002046 LiveInterval &nI = getOrCreateInterval(VReg);
Evan Cheng0cbb1162007-11-29 01:06:25 +00002047 bool isReMat = vrm.isReMaterialized(VReg);
2048 MachineInstr *MI = getInstructionFromIndex(index);
Evan Chengaee4af62007-12-02 08:30:39 +00002049 bool CanFold = false;
2050 bool FoundUse = false;
2051 Ops.clear();
        if (spills[i].canFold) {
          CanFold = true;
          for (unsigned j = 0, ee = MI->getNumOperands(); j != ee; ++j) {
            MachineOperand &MO = MI->getOperand(j);
            if (!MO.isReg() || MO.getReg() != VReg)
              continue;

            Ops.push_back(j);
            if (MO.isDef())
              continue;
            if (isReMat ||
                (!FoundUse && !alsoFoldARestore(Id, index, VReg,
                                                RestoreMBBs, RestoreIdxes))) {
              // MI has two-address uses of the same register. If the use
              // isn't the first and only use in the BB, then we can't fold
              // it. FIXME: Move this to rewriteInstructionsForSpills.
              CanFold = false;
              break;
            }
            FoundUse = true;
          }
        }
        // Fold the store into the def if possible.
        bool Folded = false;
        if (CanFold && !Ops.empty()) {
          if (tryFoldMemoryOperand(MI, vrm, NULL, index, Ops, true,
                                   Slot, VReg)) {
            Folded = true;
            if (FoundUse) {
              // The use operands were folded as well; do not issue a load.
              eraseRestoreInfo(Id, index, VReg, RestoreMBBs, RestoreIdxes);
              nI.removeRange(getLoadIndex(index), getUseIndex(index)+1);
            }
            nI.removeRange(getDefIndex(index), getStoreIndex(index));
          }
        }

        // Otherwise tell the spiller to issue a spill.
        if (!Folded) {
          LiveRange *LR = &nI.ranges[nI.ranges.size()-1];
          bool isKill = LR->end == getStoreIndex(index);
          if (!MI->registerDefIsDead(nI.reg))
            // No need to spill a dead def.
            vrm.addSpillPoint(VReg, isKill, MI);
          if (isKill)
            AddedKill.insert(&nI);
        }
      }
      Id = SpillMBBs.find_next(Id);
    }
  }

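  // Now handle the recorded restore points: fold the reload or the
  // rematerialized def into the using instruction when possible; otherwise
  // ask the spiller to issue a load / rematerialization.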
  int Id = RestoreMBBs.find_first();
  while (Id != -1) {
    std::vector<SRInfo> &restores = RestoreIdxes[Id];
    for (unsigned i = 0, e = restores.size(); i != e; ++i) {
      int index = restores[i].index;
      if (index == -1)
        continue;
      unsigned VReg = restores[i].vreg;
      LiveInterval &nI = getOrCreateInterval(VReg);
      bool isReMat = vrm.isReMaterialized(VReg);
      MachineInstr *MI = getInstructionFromIndex(index);
      bool CanFold = false;
      Ops.clear();
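      // Collect the operand indices of MI that read VReg; a def of VReg here
      // means the restore cannot be folded.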
      if (restores[i].canFold) {
        CanFold = true;
        for (unsigned j = 0, ee = MI->getNumOperands(); j != ee; ++j) {
          MachineOperand &MO = MI->getOperand(j);
          if (!MO.isReg() || MO.getReg() != VReg)
            continue;

          if (MO.isDef()) {
            // If this restore were to be folded, it would have been folded
            // already.
            CanFold = false;
            break;
          }
          Ops.push_back(j);
        }
      }

      // Fold the load into the use if possible.
      bool Folded = false;
      if (CanFold && !Ops.empty()) {
        if (!isReMat)
          Folded = tryFoldMemoryOperand(MI, vrm, NULL, index, Ops, true,
                                        Slot, VReg);
        else {
          MachineInstr *ReMatDefMI = vrm.getReMaterializedMI(VReg);
          int LdSlot = 0;
          bool isLoadSS = tii_->isLoadFromStackSlot(ReMatDefMI, LdSlot);
          // If the rematerializable def is a load, also try to fold it.
          if (isLoadSS || ReMatDefMI->getDesc().canFoldAsLoad())
            Folded = tryFoldMemoryOperand(MI, vrm, ReMatDefMI, index,
                                          Ops, isLoadSS, LdSlot, VReg);
          if (!Folded) {
            unsigned ImpUse = getReMatImplicitUse(li, ReMatDefMI);
            if (ImpUse) {
              // Re-matting an instruction with a virtual register use. Add
              // the register as an implicit use on the use MI and update the
              // register interval's spill weight to HUGE_VALF to prevent it
              // from being spilled.
              LiveInterval &ImpLi = getInterval(ImpUse);
              ImpLi.weight = HUGE_VALF;
              MI->addOperand(MachineOperand::CreateReg(ImpUse, false, true));
            }
          }
        }
      }
      // If folding is not possible / failed, then tell the spiller to issue a
      // load / rematerialization for us.
      if (Folded)
        nI.removeRange(getLoadIndex(index), getUseIndex(index)+1);
      else
        vrm.addRestorePoint(VReg, MI);
    }
    Id = RestoreMBBs.find_next(Id);
  }

  // Finalize the new intervals: add kill flags, normalize spill weights, and
  // filter out dead intervals.
  std::vector<LiveInterval*> RetNewLIs;
  for (unsigned i = 0, e = NewLIs.size(); i != e; ++i) {
    LiveInterval *LI = NewLIs[i];
    if (!LI->empty()) {
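      // Scale the spill weight down by the interval's approximate length in
      // instruction slots.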
      LI->weight /= InstrSlots::NUM * getApproximateInstructionCount(*LI);
      if (!AddedKill.count(LI)) {
        LiveRange *LR = &LI->ranges[LI->ranges.size()-1];
        unsigned LastUseIdx = getBaseIndex(LR->end);
        MachineInstr *LastUse = getInstructionFromIndex(LastUseIdx);
        int UseIdx = LastUse->findRegisterUseOperandIdx(LI->reg, false);
        assert(UseIdx != -1);
        if (!LastUse->isRegTiedToDefOperand(UseIdx)) {
          LastUse->getOperand(UseIdx).setIsKill();
          vrm.addKillPoint(LI->reg, LastUseIdx);
        }
      }
      RetNewLIs.push_back(LI);
    }
  }

  handleSpilledImpDefs(li, vrm, rc, RetNewLIs);
  return RetNewLIs;
}

/// hasAllocatableSuperReg - Return true if the specified physical register has
/// any super register that's allocatable.
bool LiveIntervals::hasAllocatableSuperReg(unsigned Reg) const {
  for (const unsigned* AS = tri_->getSuperRegisters(Reg); *AS; ++AS)
    if (allocatableRegs_[*AS] && hasInterval(*AS))
      return true;
  return false;
}

/// getRepresentativeReg - Find the largest super register of the specified
/// physical register.
unsigned LiveIntervals::getRepresentativeReg(unsigned Reg) const {
  // Find the largest super-register that is allocatable.
  unsigned BestReg = Reg;
  for (const unsigned* AS = tri_->getSuperRegisters(Reg); *AS; ++AS) {
    unsigned SuperReg = *AS;
    if (!hasAllocatableSuperReg(SuperReg) && hasInterval(SuperReg)) {
      BestReg = SuperReg;
      break;
    }
  }
  return BestReg;
}

/// getNumConflictsWithPhysReg - Return the number of uses and defs of the
/// specified interval that conflict with the specified physical register.
unsigned LiveIntervals::getNumConflictsWithPhysReg(const LiveInterval &li,
                                                   unsigned PhysReg) const {
  unsigned NumConflicts = 0;
  const LiveInterval &pli = getInterval(getRepresentativeReg(PhysReg));
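  // Count every operand occurrence of li.reg whose instruction falls inside
  // the physical register's live interval.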
  for (MachineRegisterInfo::reg_iterator I = mri_->reg_begin(li.reg),
         E = mri_->reg_end(); I != E; ++I) {
    MachineOperand &O = I.getOperand();
    MachineInstr *MI = O.getParent();
    unsigned Index = getInstructionIndex(MI);
    if (pli.liveAt(Index))
      ++NumConflicts;
  }
  return NumConflicts;
}

/// spillPhysRegAroundRegDefsUses - Spill the specified physical register
/// around all defs and uses of the specified interval. Return true if the
/// physical register's live interval was cut around at least one of them.
bool LiveIntervals::spillPhysRegAroundRegDefsUses(const LiveInterval &li,
                                                  unsigned PhysReg,
                                                  VirtRegMap &vrm) {
  unsigned SpillReg = getRepresentativeReg(PhysReg);

  for (const unsigned *AS = tri_->getAliasSet(PhysReg); *AS; ++AS)
    // Registers that alias PhysReg but are not sub-registers of the chosen
    // representative super-register cannot be handled yet; assert that none
    // exist.
    assert(*AS == SpillReg || !allocatableRegs_[*AS] || !hasInterval(*AS) ||
           tri_->isSuperRegister(*AS, SpillReg));

  bool Cut = false;
  LiveInterval &pli = getInterval(SpillReg);
  SmallPtrSet<MachineInstr*, 8> SeenMIs;
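  // Visit each instruction that references li.reg once; wherever the
  // representative register is live across such an instruction, record an
  // emergency spill and carve the instruction's slots out of its live
  // interval (or report a fatal error if the range cannot be cut).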
  for (MachineRegisterInfo::reg_iterator I = mri_->reg_begin(li.reg),
         E = mri_->reg_end(); I != E; ++I) {
    MachineOperand &O = I.getOperand();
    MachineInstr *MI = O.getParent();
    if (SeenMIs.count(MI))
      continue;
    SeenMIs.insert(MI);
    unsigned Index = getInstructionIndex(MI);
    if (pli.liveAt(Index)) {
      vrm.addEmergencySpill(SpillReg, MI);
      unsigned StartIdx = getLoadIndex(Index);
      unsigned EndIdx = getStoreIndex(Index)+1;
      if (pli.isInOneLiveRange(StartIdx, EndIdx)) {
        pli.removeRange(StartIdx, EndIdx);
        Cut = true;
      } else {
        cerr << "Ran out of registers during register allocation!\n";
        if (MI->getOpcode() == TargetInstrInfo::INLINEASM) {
          cerr << "Please check your inline asm statement for invalid "
               << "constraints:\n";
          MI->print(cerr.stream(), tm_);
        }
        exit(1);
      }
      for (const unsigned* AS = tri_->getSubRegisters(SpillReg); *AS; ++AS) {
        if (!hasInterval(*AS))
          continue;
        LiveInterval &spli = getInterval(*AS);
        if (spli.liveAt(Index))
          spli.removeRange(getLoadIndex(Index), getStoreIndex(Index)+1);
      }
    }
  }
  return Cut;
}

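/// addLiveRangeToEndOfBlock - Create a new value number for reg defined by
/// startInst and add a live range from startInst's def slot to the end of its
/// basic block.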
LiveRange LiveIntervals::addLiveRangeToEndOfBlock(unsigned reg,
                                                  MachineInstr* startInst) {
  LiveInterval& Interval = getOrCreateInterval(reg);
  VNInfo* VN = Interval.getNextValue(
    getInstructionIndex(startInst) + InstrSlots::DEF,
    startInst, true, getVNInfoAllocator());
  VN->setHasPHIKill(true);
  VN->kills.push_back(getMBBEndIdx(startInst->getParent()));
  LiveRange LR(getInstructionIndex(startInst) + InstrSlots::DEF,
               getMBBEndIdx(startInst->getParent()) + 1, VN);
  Interval.addRange(LR);

  return LR;
}