//===-- LiveIntervalAnalysis.cpp - Live Interval Analysis -----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the LiveInterval analysis pass which is used
// by the Linear Scan Register allocator. This pass linearizes the
// basic blocks of the function in DFS order and uses the
// LiveVariables pass to conservatively compute live intervals for
// each virtual and physical register.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "liveintervals"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "VirtRegMap.h"
#include "llvm/Value.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/CodeGen/LiveVariables.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/PseudoSourceValue.h"
#include "llvm/Target/TargetRegisterInfo.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetOptions.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/STLExtras.h"
#include <algorithm>
#include <limits>
#include <cmath>
using namespace llvm;

// Hidden options for help debugging.
static cl::opt<bool> DisableReMat("disable-rematerialization",
                                  cl::init(false), cl::Hidden);

static cl::opt<bool> SplitAtBB("split-intervals-at-bb",
                               cl::init(true), cl::Hidden);
static cl::opt<int> SplitLimit("split-limit",
                               cl::init(-1), cl::Hidden);

static cl::opt<bool> EnableAggressiveRemat("aggressive-remat", cl::Hidden);

static cl::opt<bool> EnableFastSpilling("fast-spill",
                                        cl::init(false), cl::Hidden);

STATISTIC(numIntervals, "Number of original intervals");
STATISTIC(numFolds    , "Number of loads/stores folded into instructions");
STATISTIC(numSplits   , "Number of intervals split");

char LiveIntervals::ID = 0;
static RegisterPass<LiveIntervals> X("liveintervals", "Live Interval Analysis");

void LiveIntervals::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<AliasAnalysis>();
  AU.addPreserved<AliasAnalysis>();
  AU.addPreserved<LiveVariables>();
  AU.addRequired<LiveVariables>();
  AU.addPreservedID(MachineLoopInfoID);
  AU.addPreservedID(MachineDominatorsID);

  if (!StrongPHIElim) {
    AU.addPreservedID(PHIEliminationID);
    AU.addRequiredID(PHIEliminationID);
  }

  AU.addRequiredID(TwoAddressInstructionPassID);
  MachineFunctionPass::getAnalysisUsage(AU);
}

void LiveIntervals::releaseMemory() {
  // Free the live intervals themselves.
  for (DenseMap<unsigned, LiveInterval*>::iterator I = r2iMap_.begin(),
       E = r2iMap_.end(); I != E; ++I)
    delete I->second;

  MBB2IdxMap.clear();
  Idx2MBBMap.clear();
  mi2iMap_.clear();
  i2miMap_.clear();
  r2iMap_.clear();
  // Release VNInfo memory regions after all VNInfo objects are dtor'd.
  VNInfoAllocator.Reset();
  while (!ClonedMIs.empty()) {
    MachineInstr *MI = ClonedMIs.back();
    ClonedMIs.pop_back();
    mf_->DeleteMachineInstr(MI);
  }
}

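// A note on the numbering scheme used by computeNumbering() below: every
// MachineInstr is assigned InstrSlots::NUM consecutive indices, one per
// sub-slot (load, use, def, store).  One empty instruction-sized slot is
// reserved at the start of each block, and max(1, #defs) empty slots are
// left after each instruction so that later passes can insert code without
// forcing a full renumbering.  As a rough sketch, assuming InstrSlots::NUM
// is 4 (as the "+4" in comments further down suggests), a block with two
// single-def instructions is numbered:
//
//   indices  0..3    empty block-entry slot
//   indices  4..7    first instruction,    8..11  empty slot after it
//   indices 12..15   second instruction,  16..19  empty slot after it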
void LiveIntervals::computeNumbering() {
  Index2MiMap OldI2MI = i2miMap_;
  std::vector<IdxMBBPair> OldI2MBB = Idx2MBBMap;

  Idx2MBBMap.clear();
  MBB2IdxMap.clear();
  mi2iMap_.clear();
  i2miMap_.clear();

  FunctionSize = 0;

  // Number MachineInstrs and MachineBasicBlocks.
  // Initialize MBB indexes to a sentinel.
  MBB2IdxMap.resize(mf_->getNumBlockIDs(), std::make_pair(~0U,~0U));

  unsigned MIIndex = 0;
  for (MachineFunction::iterator MBB = mf_->begin(), E = mf_->end();
       MBB != E; ++MBB) {
    unsigned StartIdx = MIIndex;

    // Insert an empty slot at the beginning of each block.
    MIIndex += InstrSlots::NUM;
    i2miMap_.push_back(0);

    for (MachineBasicBlock::iterator I = MBB->begin(), E = MBB->end();
         I != E; ++I) {
      bool inserted = mi2iMap_.insert(std::make_pair(I, MIIndex)).second;
      assert(inserted && "multiple MachineInstr -> index mappings");
      inserted = true;
      i2miMap_.push_back(I);
      MIIndex += InstrSlots::NUM;
      FunctionSize++;

      // Insert max(1, numdefs) empty slots after every instruction.
      unsigned Slots = I->getDesc().getNumDefs();
      if (Slots == 0)
        Slots = 1;
      MIIndex += InstrSlots::NUM * Slots;
      while (Slots--)
        i2miMap_.push_back(0);
    }

    // Set the MBB2IdxMap entry for this MBB.
    MBB2IdxMap[MBB->getNumber()] = std::make_pair(StartIdx, MIIndex - 1);
    Idx2MBBMap.push_back(std::make_pair(StartIdx, MBB));
  }
  std::sort(Idx2MBBMap.begin(), Idx2MBBMap.end(), Idx2MBBCompare());

  if (!OldI2MI.empty())
    for (iterator OI = begin(), OE = end(); OI != OE; ++OI) {
      for (LiveInterval::iterator LI = OI->second->begin(),
           LE = OI->second->end(); LI != LE; ++LI) {

        // Remap the start index of the live range to the corresponding new
        // number, or our best guess at what it _should_ correspond to if the
        // original instruction has been erased. This is either the following
        // instruction or its predecessor.
        unsigned index = LI->start / InstrSlots::NUM;
        unsigned offset = LI->start % InstrSlots::NUM;
        if (offset == InstrSlots::LOAD) {
          std::vector<IdxMBBPair>::const_iterator I =
            std::lower_bound(OldI2MBB.begin(), OldI2MBB.end(), LI->start);
          // Take the pair containing the index
          std::vector<IdxMBBPair>::const_iterator J =
            (I == OldI2MBB.end() && OldI2MBB.size()>0) ? (I-1): I;

          LI->start = getMBBStartIdx(J->second);
        } else {
          LI->start = mi2iMap_[OldI2MI[index]] + offset;
        }

        // Remap the ending index in the same way that we remapped the start,
        // except for the final step where we always map to the immediately
        // following instruction.
        index = (LI->end - 1) / InstrSlots::NUM;
        offset = LI->end % InstrSlots::NUM;
        if (offset == InstrSlots::LOAD) {
          // VReg dies at end of block.
          std::vector<IdxMBBPair>::const_iterator I =
            std::lower_bound(OldI2MBB.begin(), OldI2MBB.end(), LI->end);
          --I;

          LI->end = getMBBEndIdx(I->second) + 1;
        } else {
          unsigned idx = index;
          while (index < OldI2MI.size() && !OldI2MI[index]) ++index;

          if (index != OldI2MI.size())
            LI->end = mi2iMap_[OldI2MI[index]] + (idx == index ? offset : 0);
          else
            LI->end = InstrSlots::NUM * i2miMap_.size();
        }
      }

      for (LiveInterval::vni_iterator VNI = OI->second->vni_begin(),
           VNE = OI->second->vni_end(); VNI != VNE; ++VNI) {
        VNInfo* vni = *VNI;

        // Remap the VNInfo def index, which works the same as the
        // start indices above. VN's with special sentinel defs
        // don't need to be remapped.
        if (vni->isDefAccurate() && !vni->isUnused()) {
          unsigned index = vni->def / InstrSlots::NUM;
          unsigned offset = vni->def % InstrSlots::NUM;
          if (offset == InstrSlots::LOAD) {
            std::vector<IdxMBBPair>::const_iterator I =
              std::lower_bound(OldI2MBB.begin(), OldI2MBB.end(), vni->def);
            // Take the pair containing the index
            std::vector<IdxMBBPair>::const_iterator J =
              (I == OldI2MBB.end() && OldI2MBB.size()>0) ? (I-1): I;

            vni->def = getMBBStartIdx(J->second);
          } else {
            vni->def = mi2iMap_[OldI2MI[index]] + offset;
          }
        }

        // Remap the VNInfo kill indices, which works the same as
        // the end indices above.
        for (size_t i = 0; i < vni->kills.size(); ++i) {
          // PHI kills don't need to be remapped.
          if (!vni->kills[i]) continue;

          unsigned index = (vni->kills[i]-1) / InstrSlots::NUM;
          unsigned offset = vni->kills[i] % InstrSlots::NUM;
          if (offset == InstrSlots::LOAD) {
            std::vector<IdxMBBPair>::const_iterator I =
              std::lower_bound(OldI2MBB.begin(), OldI2MBB.end(), vni->kills[i]);
            --I;

            vni->kills[i] = getMBBEndIdx(I->second);
          } else {
            unsigned idx = index;
            while (index < OldI2MI.size() && !OldI2MI[index]) ++index;

            if (index != OldI2MI.size())
              vni->kills[i] = mi2iMap_[OldI2MI[index]] +
                              (idx == index ? offset : 0);
            else
              vni->kills[i] = InstrSlots::NUM * i2miMap_.size();
          }
        }
      }
    }
}

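/// scaleNumbering - Rescale every index maintained by this analysis (MBB
/// index ranges, live ranges, and the instruction<->index maps) by the given
/// factor; the per-index arithmetic is delegated to InstrSlots::scale and
/// LiveInterval::scaleNumbering.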
void LiveIntervals::scaleNumbering(int factor) {
  // Need to
  //  * scale MBB begin and end points
  //  * scale all ranges.
  //  * Update VNI structures.
  //  * Scale instruction numberings

  // Scale the MBB indices.
  Idx2MBBMap.clear();
  for (MachineFunction::iterator MBB = mf_->begin(), MBBE = mf_->end();
       MBB != MBBE; ++MBB) {
    std::pair<unsigned, unsigned> &mbbIndices = MBB2IdxMap[MBB->getNumber()];
    mbbIndices.first = InstrSlots::scale(mbbIndices.first, factor);
    mbbIndices.second = InstrSlots::scale(mbbIndices.second, factor);
    Idx2MBBMap.push_back(std::make_pair(mbbIndices.first, MBB));
  }
  std::sort(Idx2MBBMap.begin(), Idx2MBBMap.end(), Idx2MBBCompare());

  // Scale the intervals.
  for (iterator LI = begin(), LE = end(); LI != LE; ++LI) {
    LI->second->scaleNumbering(factor);
  }

  // Scale MachineInstrs.
  Mi2IndexMap oldmi2iMap = mi2iMap_;
  unsigned highestSlot = 0;
  for (Mi2IndexMap::iterator MI = oldmi2iMap.begin(), ME = oldmi2iMap.end();
       MI != ME; ++MI) {
    unsigned newSlot = InstrSlots::scale(MI->second, factor);
    mi2iMap_[MI->first] = newSlot;
    highestSlot = std::max(highestSlot, newSlot);
  }

  i2miMap_.clear();
  i2miMap_.resize(highestSlot + 1);
  for (Mi2IndexMap::iterator MI = mi2iMap_.begin(), ME = mi2iMap_.end();
       MI != ME; ++MI) {
    i2miMap_[MI->second] = MI->first;
  }
}

/// runOnMachineFunction - Compute live intervals for the whole function.
///
bool LiveIntervals::runOnMachineFunction(MachineFunction &fn) {
  mf_ = &fn;
  mri_ = &mf_->getRegInfo();
  tm_ = &fn.getTarget();
  tri_ = tm_->getRegisterInfo();
  tii_ = tm_->getInstrInfo();
  aa_ = &getAnalysis<AliasAnalysis>();
  lv_ = &getAnalysis<LiveVariables>();
  allocatableRegs_ = tri_->getAllocatableSet(fn);

  computeNumbering();
  computeIntervals();

  numIntervals += getNumIntervals();

  DEBUG(dump());
  return true;
}

/// print - Implement the dump method.
void LiveIntervals::print(std::ostream &O, const Module* ) const {
  O << "********** INTERVALS **********\n";
  for (const_iterator I = begin(), E = end(); I != E; ++I) {
    I->second->print(O, tri_);
    O << "\n";
  }

  O << "********** MACHINEINSTRS **********\n";
  for (MachineFunction::iterator mbbi = mf_->begin(), mbbe = mf_->end();
       mbbi != mbbe; ++mbbi) {
    O << ((Value*)mbbi->getBasicBlock())->getName() << ":\n";
    for (MachineBasicBlock::iterator mii = mbbi->begin(),
           mie = mbbi->end(); mii != mie; ++mii) {
      O << getInstructionIndex(mii) << '\t' << *mii;
    }
  }
}

/// conflictsWithPhysRegDef - Returns true if the specified register
/// is defined during the duration of the specified interval.
bool LiveIntervals::conflictsWithPhysRegDef(const LiveInterval &li,
                                            VirtRegMap &vrm, unsigned reg) {
  for (LiveInterval::Ranges::const_iterator
         I = li.ranges.begin(), E = li.ranges.end(); I != E; ++I) {
    for (unsigned index = getBaseIndex(I->start),
           end = getBaseIndex(I->end-1) + InstrSlots::NUM; index != end;
         index += InstrSlots::NUM) {
      // skip deleted instructions
      while (index != end && !getInstructionFromIndex(index))
        index += InstrSlots::NUM;
      if (index == end) break;

      MachineInstr *MI = getInstructionFromIndex(index);
      unsigned SrcReg, DstReg, SrcSubReg, DstSubReg;
      if (tii_->isMoveInstr(*MI, SrcReg, DstReg, SrcSubReg, DstSubReg))
        if (SrcReg == li.reg || DstReg == li.reg)
          continue;
      for (unsigned i = 0; i != MI->getNumOperands(); ++i) {
        MachineOperand& mop = MI->getOperand(i);
        if (!mop.isReg())
          continue;
        unsigned PhysReg = mop.getReg();
        if (PhysReg == 0 || PhysReg == li.reg)
          continue;
        if (TargetRegisterInfo::isVirtualRegister(PhysReg)) {
          if (!vrm.hasPhys(PhysReg))
            continue;
          PhysReg = vrm.getPhys(PhysReg);
        }
        if (PhysReg && tri_->regsOverlap(PhysReg, reg))
          return true;
      }
    }
  }

  return false;
}

/// conflictsWithPhysRegRef - Similar to conflictsWithPhysRegDef except
/// it can check use as well.
bool LiveIntervals::conflictsWithPhysRegRef(LiveInterval &li,
                                            unsigned Reg, bool CheckUse,
                                  SmallPtrSet<MachineInstr*,32> &JoinedCopies) {
  for (LiveInterval::Ranges::const_iterator
         I = li.ranges.begin(), E = li.ranges.end(); I != E; ++I) {
    for (unsigned index = getBaseIndex(I->start),
           end = getBaseIndex(I->end-1) + InstrSlots::NUM; index != end;
         index += InstrSlots::NUM) {
      // Skip deleted instructions.
      MachineInstr *MI = 0;
      while (index != end) {
        MI = getInstructionFromIndex(index);
        if (MI)
          break;
        index += InstrSlots::NUM;
      }
      if (index == end) break;

      if (JoinedCopies.count(MI))
        continue;
      for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
        MachineOperand& MO = MI->getOperand(i);
        if (!MO.isReg())
          continue;
        if (MO.isUse() && !CheckUse)
          continue;
        unsigned PhysReg = MO.getReg();
        if (PhysReg == 0 || TargetRegisterInfo::isVirtualRegister(PhysReg))
          continue;
        if (tri_->isSubRegister(Reg, PhysReg))
          return true;
      }
    }
  }

  return false;
}


void LiveIntervals::printRegName(unsigned reg) const {
  if (TargetRegisterInfo::isPhysicalRegister(reg))
    cerr << tri_->getName(reg);
  else
    cerr << "%reg" << reg;
}

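/// handleVirtualRegisterDef - Update the live interval of a virtual register
/// for a single def: operand MO (operand index MOIdx) of instruction mi,
/// which lives at index MIIdx in basic block mbb.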
void LiveIntervals::handleVirtualRegisterDef(MachineBasicBlock *mbb,
                                             MachineBasicBlock::iterator mi,
                                             unsigned MIIdx, MachineOperand& MO,
                                             unsigned MOIdx,
                                             LiveInterval &interval) {
  DOUT << "\t\tregister: "; DEBUG(printRegName(interval.reg));
  LiveVariables::VarInfo& vi = lv_->getVarInfo(interval.reg);

  if (mi->getOpcode() == TargetInstrInfo::IMPLICIT_DEF) {
    DOUT << "is an implicit_def\n";
    return;
  }

  // Virtual registers may be defined multiple times (due to phi
  // elimination and 2-addr elimination). Much of what we do only has to be
  // done once for the vreg. We use an empty interval to detect the first
  // time we see a vreg.
  if (interval.empty()) {
    // Get the Idx of the defining instructions.
    unsigned defIndex = getDefIndex(MIIdx);
    // Earlyclobbers move back one.
    if (MO.isEarlyClobber())
      defIndex = getUseIndex(MIIdx);
    VNInfo *ValNo;
    MachineInstr *CopyMI = NULL;
    unsigned SrcReg, DstReg, SrcSubReg, DstSubReg;
    if (mi->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG ||
        mi->getOpcode() == TargetInstrInfo::INSERT_SUBREG ||
        mi->getOpcode() == TargetInstrInfo::SUBREG_TO_REG ||
        tii_->isMoveInstr(*mi, SrcReg, DstReg, SrcSubReg, DstSubReg))
      CopyMI = mi;
    ValNo = interval.getNextValue(defIndex, CopyMI, true, VNInfoAllocator);

    assert(ValNo->id == 0 && "First value in interval is not 0?");

    // Loop over all of the blocks that the vreg is defined in.  There are
    // two cases we have to handle here.  The most common case is a vreg
    // whose lifetime is contained within a basic block.  In this case there
    // will be a single kill, in MBB, which comes after the definition.
    if (vi.Kills.size() == 1 && vi.Kills[0]->getParent() == mbb) {
      // FIXME: what about dead vars?
      unsigned killIdx;
      if (vi.Kills[0] != mi)
        killIdx = getUseIndex(getInstructionIndex(vi.Kills[0]))+1;
      else
        killIdx = defIndex+1;

      // If the kill happens after the definition, we have an intra-block
      // live range.
      if (killIdx > defIndex) {
        assert(vi.AliveBlocks.empty() &&
               "Shouldn't be alive across any blocks!");
        LiveRange LR(defIndex, killIdx, ValNo);
        interval.addRange(LR);
        DOUT << " +" << LR << "\n";
        interval.addKill(ValNo, killIdx);
        return;
      }
    }

    // The other case we handle is when a virtual register lives to the end
    // of the defining block, potentially live across some blocks, then is
    // live into some number of blocks, but gets killed.  Start by adding a
    // range that goes from this definition to the end of the defining block.
    LiveRange NewLR(defIndex, getMBBEndIdx(mbb)+1, ValNo);
    DOUT << " +" << NewLR;
    interval.addRange(NewLR);

    // Iterate over all of the blocks that the variable is completely
    // live in, adding [instrIndex(begin), instrIndex(end)+4) to the
    // live interval.
    for (SparseBitVector<>::iterator I = vi.AliveBlocks.begin(),
             E = vi.AliveBlocks.end(); I != E; ++I) {
      LiveRange LR(getMBBStartIdx(*I),
                   getMBBEndIdx(*I)+1,  // MBB ends at -1.
                   ValNo);
      interval.addRange(LR);
      DOUT << " +" << LR;
    }

    // Finally, this virtual register is live from the start of any killing
    // block to the 'use' slot of the killing instruction.
    for (unsigned i = 0, e = vi.Kills.size(); i != e; ++i) {
      MachineInstr *Kill = vi.Kills[i];
      unsigned killIdx = getUseIndex(getInstructionIndex(Kill))+1;
      LiveRange LR(getMBBStartIdx(Kill->getParent()),
                   killIdx, ValNo);
      interval.addRange(LR);
      interval.addKill(ValNo, killIdx);
      DOUT << " +" << LR;
    }

  } else {
    // If this is the second time we see a virtual register definition, it
    // must be due to phi elimination or two addr elimination.  If this is
    // the result of two address elimination, then the vreg is one of the
    // def-and-use register operand.
    if (mi->isRegTiedToUseOperand(MOIdx)) {
      // If this is a two-address definition, then we have already processed
      // the live range.  The only problem is that we didn't realize there
      // are actually two values in the live interval.  Because of this we
      // need to take the LiveRegion that defines this register and split it
      // into two values.
      assert(interval.containsOneValue());
      unsigned DefIndex = getDefIndex(interval.getValNumInfo(0)->def);
      unsigned RedefIndex = getDefIndex(MIIdx);
      if (MO.isEarlyClobber())
        RedefIndex = getUseIndex(MIIdx);

      const LiveRange *OldLR = interval.getLiveRangeContaining(RedefIndex-1);
      VNInfo *OldValNo = OldLR->valno;

      // Delete the initial value, which should be short and continuous,
      // because the 2-addr copy must be in the same MBB as the redef.
      interval.removeRange(DefIndex, RedefIndex);

      // Two-address vregs should always only be redefined once.  This means
      // that at this point, there should be exactly one value number in it.
      assert(interval.containsOneValue() && "Unexpected 2-addr liveint!");

      // The new value number (#1) is defined by the instruction we claimed
      // defined value #0.
      VNInfo *ValNo = interval.getNextValue(OldValNo->def, OldValNo->copy,
                                            false, // update at *
                                            VNInfoAllocator);
      ValNo->setFlags(OldValNo->getFlags()); // * <- updating here

      // Value#0 is now defined by the 2-addr instruction.
      OldValNo->def  = RedefIndex;
      OldValNo->copy = 0;
      if (MO.isEarlyClobber())
        OldValNo->setHasRedefByEC(true);

      // Add the new live interval which replaces the range for the input copy.
      LiveRange LR(DefIndex, RedefIndex, ValNo);
      DOUT << " replace range with " << LR;
      interval.addRange(LR);
      interval.addKill(ValNo, RedefIndex);

      // If this redefinition is dead, we need to add a dummy unit live
      // range covering the def slot.
      if (MO.isDead())
        interval.addRange(LiveRange(RedefIndex, RedefIndex+1, OldValNo));

      DOUT << " RESULT: ";
      interval.print(DOUT, tri_);

    } else {
      // Otherwise, this must be because of phi elimination.  If this is the
      // first redefinition of the vreg that we have seen, go back and change
      // the live range in the PHI block to be a different value number.
      if (interval.containsOneValue()) {
        assert(vi.Kills.size() == 1 &&
               "PHI elimination vreg should have one kill, the PHI itself!");

        // Remove the old range that we now know has an incorrect number.
        VNInfo *VNI = interval.getValNumInfo(0);
        MachineInstr *Killer = vi.Kills[0];
        unsigned Start = getMBBStartIdx(Killer->getParent());
        unsigned End = getUseIndex(getInstructionIndex(Killer))+1;
        DOUT << " Removing [" << Start << "," << End << "] from: ";
        interval.print(DOUT, tri_); DOUT << "\n";
        interval.removeRange(Start, End);
        VNI->setHasPHIKill(true);
        DOUT << " RESULT: "; interval.print(DOUT, tri_);

        // Replace the interval with one of a NEW value number.  Note that this
        // value number isn't actually defined by an instruction, weird huh? :)
        LiveRange LR(Start, End,
                     interval.getNextValue(Start, 0, false, VNInfoAllocator));
        LR.valno->setIsPHIDef(true);
        DOUT << " replace range with " << LR;
        interval.addRange(LR);
        interval.addKill(LR.valno, End);
        DOUT << " RESULT: "; interval.print(DOUT, tri_);
      }

      // In the case of PHI elimination, each variable definition is only
      // live until the end of the block.  We've already taken care of the
      // rest of the live range.
      unsigned defIndex = getDefIndex(MIIdx);
      if (MO.isEarlyClobber())
        defIndex = getUseIndex(MIIdx);

      VNInfo *ValNo;
      MachineInstr *CopyMI = NULL;
      unsigned SrcReg, DstReg, SrcSubReg, DstSubReg;
      if (mi->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG ||
          mi->getOpcode() == TargetInstrInfo::INSERT_SUBREG ||
          mi->getOpcode() == TargetInstrInfo::SUBREG_TO_REG ||
          tii_->isMoveInstr(*mi, SrcReg, DstReg, SrcSubReg, DstSubReg))
        CopyMI = mi;
      ValNo = interval.getNextValue(defIndex, CopyMI, true, VNInfoAllocator);

      unsigned killIndex = getMBBEndIdx(mbb) + 1;
      LiveRange LR(defIndex, killIndex, ValNo);
      interval.addRange(LR);
      interval.addKill(ValNo, killIndex);
      ValNo->setHasPHIKill(true);
      DOUT << " +" << LR;
    }
  }

  DOUT << '\n';
}

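/// handlePhysicalRegisterDef - Add the live range created by a def of a
/// physical register.  Since such a range cannot extend past the defining
/// block, it runs from the def slot to the first kill or redefinition found
/// below, or to the slot just past the def if the register is dead or never
/// touched again.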
void LiveIntervals::handlePhysicalRegisterDef(MachineBasicBlock *MBB,
                                              MachineBasicBlock::iterator mi,
                                              unsigned MIIdx,
                                              MachineOperand& MO,
                                              LiveInterval &interval,
                                              MachineInstr *CopyMI) {
  // A physical register cannot be live across basic blocks, so its
  // lifetime must end somewhere in its defining basic block.
  DOUT << "\t\tregister: "; DEBUG(printRegName(interval.reg));

  unsigned baseIndex = MIIdx;
  unsigned start = getDefIndex(baseIndex);
  // Earlyclobbers move back one.
  if (MO.isEarlyClobber())
    start = getUseIndex(MIIdx);
  unsigned end = start;

  // If it is not used after definition, it is considered dead at
  // the instruction defining it. Hence its interval is:
  // [defSlot(def), defSlot(def)+1)
  if (MO.isDead()) {
    DOUT << " dead";
    end = start + 1;
    goto exit;
  }

  // If it is not dead on definition, it must be killed by a
  // subsequent instruction. Hence its interval is:
  // [defSlot(def), useSlot(kill)+1)
  baseIndex += InstrSlots::NUM;
  while (++mi != MBB->end()) {
    while (baseIndex / InstrSlots::NUM < i2miMap_.size() &&
           getInstructionFromIndex(baseIndex) == 0)
      baseIndex += InstrSlots::NUM;
    if (mi->killsRegister(interval.reg, tri_)) {
      DOUT << " killed";
      end = getUseIndex(baseIndex) + 1;
      goto exit;
    } else {
      int DefIdx = mi->findRegisterDefOperandIdx(interval.reg, false, tri_);
      if (DefIdx != -1) {
        if (mi->isRegTiedToUseOperand(DefIdx)) {
          // Two-address instruction.
          end = getDefIndex(baseIndex);
          if (mi->getOperand(DefIdx).isEarlyClobber())
            end = getUseIndex(baseIndex);
        } else {
          // Another instruction redefines the register before it is ever read.
          // Then the register is essentially dead at the instruction that
          // defines it. Hence its interval is:
          // [defSlot(def), defSlot(def)+1)
          DOUT << " dead";
          end = start + 1;
        }
        goto exit;
      }
    }

    baseIndex += InstrSlots::NUM;
  }

  // The only case we should have a dead physreg here without a killing
  // instruction, or an instruction where we know it's dead, is if it is
  // live-in to the function and never used.  Another possible case is that
  // the implicit use of the physical register has been deleted by the
  // two-address pass.
  end = start + 1;

exit:
  assert(start < end && "did not find end of interval?");

  // Already exists? Extend old live interval.
  LiveInterval::iterator OldLR = interval.FindLiveRangeContaining(start);
  bool Extend = OldLR != interval.end();
  VNInfo *ValNo = Extend
    ? OldLR->valno : interval.getNextValue(start, CopyMI, true, VNInfoAllocator);
  if (MO.isEarlyClobber() && Extend)
    ValNo->setHasRedefByEC(true);
  LiveRange LR(start, end, ValNo);
  interval.addRange(LR);
  interval.addKill(LR.valno, end);
  DOUT << " +" << LR << '\n';
}

void LiveIntervals::handleRegisterDef(MachineBasicBlock *MBB,
                                      MachineBasicBlock::iterator MI,
                                      unsigned MIIdx,
                                      MachineOperand& MO,
                                      unsigned MOIdx) {
  if (TargetRegisterInfo::isVirtualRegister(MO.getReg()))
    handleVirtualRegisterDef(MBB, MI, MIIdx, MO, MOIdx,
                             getOrCreateInterval(MO.getReg()));
  else if (allocatableRegs_[MO.getReg()]) {
    MachineInstr *CopyMI = NULL;
    unsigned SrcReg, DstReg, SrcSubReg, DstSubReg;
    if (MI->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG ||
        MI->getOpcode() == TargetInstrInfo::INSERT_SUBREG ||
        MI->getOpcode() == TargetInstrInfo::SUBREG_TO_REG ||
        tii_->isMoveInstr(*MI, SrcReg, DstReg, SrcSubReg, DstSubReg))
      CopyMI = MI;
    handlePhysicalRegisterDef(MBB, MI, MIIdx, MO,
                              getOrCreateInterval(MO.getReg()), CopyMI);
    // Def of a register also defines its sub-registers.
    for (const unsigned* AS = tri_->getSubRegisters(MO.getReg()); *AS; ++AS)
      // If MI also modifies the sub-register explicitly, avoid processing it
      // more than once. Do not pass in TRI here so it checks for exact match.
      if (!MI->modifiesRegister(*AS))
        handlePhysicalRegisterDef(MBB, MI, MIIdx, MO,
                                  getOrCreateInterval(*AS), 0);
  }
}

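/// handleLiveInRegister - Add the live range for a register that is live-in
/// to MBB (or, when isAlias is true, for an alias of a live-in register).
/// The range starts at the block's first index and runs to the first kill or
/// redefinition; if neither is found it extends through the whole block,
/// except for an alias, which is then treated as dead at the block entry.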
void LiveIntervals::handleLiveInRegister(MachineBasicBlock *MBB,
                                         unsigned MIIdx,
                                         LiveInterval &interval, bool isAlias) {
  DOUT << "\t\tlivein register: "; DEBUG(printRegName(interval.reg));

  // Look for kills, if it reaches a def before it's killed, then it shouldn't
  // be considered a livein.
  MachineBasicBlock::iterator mi = MBB->begin();
  unsigned baseIndex = MIIdx;
  unsigned start = baseIndex;
  while (baseIndex / InstrSlots::NUM < i2miMap_.size() &&
         getInstructionFromIndex(baseIndex) == 0)
    baseIndex += InstrSlots::NUM;
  unsigned end = baseIndex;
  bool SeenDefUse = false;

  while (mi != MBB->end()) {
    if (mi->killsRegister(interval.reg, tri_)) {
      DOUT << " killed";
      end = getUseIndex(baseIndex) + 1;
      SeenDefUse = true;
      break;
    } else if (mi->modifiesRegister(interval.reg, tri_)) {
      // Another instruction redefines the register before it is ever read.
      // Then the register is essentially dead at the instruction that
      // defines it. Hence its interval is:
      // [defSlot(def), defSlot(def)+1)
      DOUT << " dead";
      end = getDefIndex(start) + 1;
      SeenDefUse = true;
      break;
    }

    baseIndex += InstrSlots::NUM;
    ++mi;
    if (mi != MBB->end()) {
      while (baseIndex / InstrSlots::NUM < i2miMap_.size() &&
             getInstructionFromIndex(baseIndex) == 0)
        baseIndex += InstrSlots::NUM;
    }
  }

  // Live-in register might not be used at all.
  if (!SeenDefUse) {
    if (isAlias) {
      DOUT << " dead";
      end = getDefIndex(MIIdx) + 1;
    } else {
      DOUT << " live through";
      end = baseIndex;
    }
  }

  VNInfo *vni = interval.getNextValue(start, 0, false, VNInfoAllocator);
  vni->setIsPHIDef(true);
  LiveRange LR(start, end, vni);

  interval.addRange(LR);
  interval.addKill(LR.valno, end);
  DOUT << " +" << LR << '\n';
}

/// computeIntervals - Compute the live intervals for virtual
/// registers.  For some ordering of the machine instructions [1,N], a
/// live interval is an interval [i, j) where 1 <= i <= j < N for
/// which a variable is live.
void LiveIntervals::computeIntervals() {

  DOUT << "********** COMPUTING LIVE INTERVALS **********\n"
       << "********** Function: "
       << ((Value*)mf_->getFunction())->getName() << '\n';

  for (MachineFunction::iterator MBBI = mf_->begin(), E = mf_->end();
       MBBI != E; ++MBBI) {
    MachineBasicBlock *MBB = MBBI;
    // Track the index of the current machine instr.
    unsigned MIIndex = getMBBStartIdx(MBB);
    DOUT << ((Value*)MBB->getBasicBlock())->getName() << ":\n";

    MachineBasicBlock::iterator MI = MBB->begin(), miEnd = MBB->end();

    // Create intervals for live-ins to this BB first.
    for (MachineBasicBlock::const_livein_iterator LI = MBB->livein_begin(),
           LE = MBB->livein_end(); LI != LE; ++LI) {
      handleLiveInRegister(MBB, MIIndex, getOrCreateInterval(*LI));
      // Multiple live-ins can alias the same register.
      for (const unsigned* AS = tri_->getSubRegisters(*LI); *AS; ++AS)
        if (!hasInterval(*AS))
          handleLiveInRegister(MBB, MIIndex, getOrCreateInterval(*AS),
                               true);
    }

    // Skip over empty initial indices.
    while (MIIndex / InstrSlots::NUM < i2miMap_.size() &&
           getInstructionFromIndex(MIIndex) == 0)
      MIIndex += InstrSlots::NUM;

    for (; MI != miEnd; ++MI) {
      DOUT << MIIndex << "\t" << *MI;

      // Handle defs.
      for (int i = MI->getNumOperands() - 1; i >= 0; --i) {
        MachineOperand &MO = MI->getOperand(i);
        // handle register defs - build intervals
        if (MO.isReg() && MO.getReg() && MO.isDef()) {
          handleRegisterDef(MBB, MI, MIIndex, MO, i);
        }
      }

      // Skip over the empty slots after each instruction.
      unsigned Slots = MI->getDesc().getNumDefs();
      if (Slots == 0)
        Slots = 1;
      MIIndex += InstrSlots::NUM * Slots;

      // Skip over empty indices.
      while (MIIndex / InstrSlots::NUM < i2miMap_.size() &&
             getInstructionFromIndex(MIIndex) == 0)
        MIIndex += InstrSlots::NUM;
    }
  }
}

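/// findLiveInMBBs - Collect every basic block whose start index falls inside
/// [Start, End) into MBBs.  Returns true if any block was added.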
bool LiveIntervals::findLiveInMBBs(unsigned Start, unsigned End,
                              SmallVectorImpl<MachineBasicBlock*> &MBBs) const {
  std::vector<IdxMBBPair>::const_iterator I =
    std::lower_bound(Idx2MBBMap.begin(), Idx2MBBMap.end(), Start);

  bool ResVal = false;
  while (I != Idx2MBBMap.end()) {
    if (I->first >= End)
      break;
    MBBs.push_back(I->second);
    ResVal = true;
    ++I;
  }
  return ResVal;
}

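/// findReachableMBBs - For every basic block lying entirely within
/// [Start, End], add its successors to MBBs.  Returns true if any such block
/// was found.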
bool LiveIntervals::findReachableMBBs(unsigned Start, unsigned End,
                              SmallVectorImpl<MachineBasicBlock*> &MBBs) const {
  std::vector<IdxMBBPair>::const_iterator I =
    std::lower_bound(Idx2MBBMap.begin(), Idx2MBBMap.end(), Start);

  bool ResVal = false;
  while (I != Idx2MBBMap.end()) {
    if (I->first > End)
      break;
    MachineBasicBlock *MBB = I->second;
    if (getMBBEndIdx(MBB) > End)
      break;
    for (MachineBasicBlock::succ_iterator SI = MBB->succ_begin(),
           SE = MBB->succ_end(); SI != SE; ++SI)
      MBBs.push_back(*SI);
    ResVal = true;
    ++I;
  }
  return ResVal;
}

LiveInterval* LiveIntervals::createInterval(unsigned reg) {
  float Weight = TargetRegisterInfo::isPhysicalRegister(reg) ? HUGE_VALF : 0.0F;
  return new LiveInterval(reg, Weight);
}

/// dupInterval - Duplicate a live interval. The caller is responsible for
/// managing the allocated memory.
LiveInterval* LiveIntervals::dupInterval(LiveInterval *li) {
  LiveInterval *NewLI = createInterval(li->reg);
  NewLI->Copy(*li, mri_, getVNInfoAllocator());
  return NewLI;
}

/// getVNInfoSourceReg - Helper function that parses the specified VNInfo
/// copy field and returns the source register that defines it.
unsigned LiveIntervals::getVNInfoSourceReg(const VNInfo *VNI) const {
  if (!VNI->copy)
    return 0;

  if (VNI->copy->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG) {
    // If it's extracting out of a physical register, return the sub-register.
    unsigned Reg = VNI->copy->getOperand(1).getReg();
    if (TargetRegisterInfo::isPhysicalRegister(Reg))
      Reg = tri_->getSubReg(Reg, VNI->copy->getOperand(2).getImm());
    return Reg;
  } else if (VNI->copy->getOpcode() == TargetInstrInfo::INSERT_SUBREG ||
             VNI->copy->getOpcode() == TargetInstrInfo::SUBREG_TO_REG)
    return VNI->copy->getOperand(2).getReg();

  unsigned SrcReg, DstReg, SrcSubReg, DstSubReg;
  if (tii_->isMoveInstr(*VNI->copy, SrcReg, DstReg, SrcSubReg, DstSubReg))
    return SrcReg;
  assert(0 && "Unrecognized copy instruction!");
  return 0;
}

//===----------------------------------------------------------------------===//
// Register allocator hooks.
//

/// getReMatImplicitUse - If the remat definition MI has one (for now, we only
/// allow one) virtual register operand, then its uses are implicitly using
/// the register. Returns the virtual register.
unsigned LiveIntervals::getReMatImplicitUse(const LiveInterval &li,
                                            MachineInstr *MI) const {
  unsigned RegOp = 0;
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isUse())
      continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0 || Reg == li.reg)
      continue;
    // FIXME: For now, only remat MI with at most one register operand.
    assert(!RegOp &&
           "Can't rematerialize instruction with multiple register operand!");
    RegOp = MO.getReg();
#ifndef NDEBUG
    break;
#endif
  }
  return RegOp;
}

/// isValNoAvailableAt - Return true if the val# of the specified interval
/// which reaches the given instruction also reaches the specified use index.
bool LiveIntervals::isValNoAvailableAt(const LiveInterval &li, MachineInstr *MI,
                                       unsigned UseIdx) const {
  unsigned Index = getInstructionIndex(MI);
  VNInfo *ValNo = li.FindLiveRangeContaining(Index)->valno;
  LiveInterval::const_iterator UI = li.FindLiveRangeContaining(UseIdx);
  return UI != li.end() && UI->valno == ValNo;
}

/// isReMaterializable - Returns true if the definition MI of the specified
/// val# of the specified interval is re-materializable.
bool LiveIntervals::isReMaterializable(const LiveInterval &li,
                                       const VNInfo *ValNo, MachineInstr *MI,
                                       SmallVectorImpl<LiveInterval*> &SpillIs,
                                       bool &isLoad) {
  if (DisableReMat)
    return false;

  if (MI->getOpcode() == TargetInstrInfo::IMPLICIT_DEF)
    return true;

  int FrameIdx = 0;
  if (tii_->isLoadFromStackSlot(MI, FrameIdx) &&
      mf_->getFrameInfo()->isImmutableObjectIndex(FrameIdx))
    // FIXME: Let target-specific isReallyTriviallyReMaterializable determine
    // this but remember this is not safe to fold into a two-address
    // instruction.
    // This is a load from fixed stack slot. It can be rematerialized.
    return true;

  // If the target-specific rules don't identify an instruction as
  // being trivially rematerializable, use some target-independent
  // rules.
  if (!MI->getDesc().isRematerializable() ||
      !tii_->isTriviallyReMaterializable(MI)) {
    if (!EnableAggressiveRemat)
      return false;

    // If the instruction accesses memory but the memoperands have been lost,
    // we can't analyze it.
    const TargetInstrDesc &TID = MI->getDesc();
    if ((TID.mayLoad() || TID.mayStore()) && MI->memoperands_empty())
      return false;

    // Avoid instructions obviously unsafe for remat.
    if (TID.hasUnmodeledSideEffects() || TID.isNotDuplicable())
      return false;

    // If the instruction accesses memory and the memory could be non-constant,
    // assume the instruction is not rematerializable.
    for (std::list<MachineMemOperand>::const_iterator
           I = MI->memoperands_begin(), E = MI->memoperands_end(); I != E; ++I){
      const MachineMemOperand &MMO = *I;
      if (MMO.isVolatile() || MMO.isStore())
        return false;
      const Value *V = MMO.getValue();
      if (!V)
        return false;
      if (const PseudoSourceValue *PSV = dyn_cast<PseudoSourceValue>(V)) {
        if (!PSV->isConstant(mf_->getFrameInfo()))
          return false;
      } else if (!aa_->pointsToConstantMemory(V))
        return false;
    }

    // If any of the registers accessed are non-constant, conservatively assume
    // the instruction is not rematerializable.
    unsigned ImpUse = 0;
    for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
      const MachineOperand &MO = MI->getOperand(i);
      if (MO.isReg()) {
        unsigned Reg = MO.getReg();
        if (Reg == 0)
          continue;
        if (TargetRegisterInfo::isPhysicalRegister(Reg))
          return false;

        // Only allow one def, and that in the first operand.
        if (MO.isDef() != (i == 0))
          return false;

        // Only allow constant-valued registers.
        bool IsLiveIn = mri_->isLiveIn(Reg);
        MachineRegisterInfo::def_iterator I = mri_->def_begin(Reg),
                                          E = mri_->def_end();

        // For the def, it should be the only def of that register.
        if (MO.isDef() && (next(I) != E || IsLiveIn))
          return false;

        if (MO.isUse()) {
1051        // Only allow one other register use, as that's all the
1052        // remat mechanisms currently support.
1053 if (Reg != li.reg) {
1054 if (ImpUse == 0)
1055 ImpUse = Reg;
1056 else if (Reg != ImpUse)
1057 return false;
1058 }
Dan Gohmanc93ced5b2008-12-08 04:53:23 +00001059 // For the use, there should be only one associated def.
Dan Gohman6d69ba82008-07-25 00:02:30 +00001060 if (I != E && (next(I) != E || IsLiveIn))
1061 return false;
1062 }
Evan Chengd70dbb52008-02-22 09:24:50 +00001063 }
1064 }
Evan Cheng5ef3a042007-12-06 00:01:56 +00001065 }
Evan Chengf2fbca62007-11-12 06:35:08 +00001066
Dan Gohman6d69ba82008-07-25 00:02:30 +00001067 unsigned ImpUse = getReMatImplicitUse(li, MI);
1068 if (ImpUse) {
1069 const LiveInterval &ImpLi = getInterval(ImpUse);
1070 for (MachineRegisterInfo::use_iterator ri = mri_->use_begin(li.reg),
1071 re = mri_->use_end(); ri != re; ++ri) {
1072 MachineInstr *UseMI = &*ri;
1073 unsigned UseIdx = getInstructionIndex(UseMI);
1074 if (li.FindLiveRangeContaining(UseIdx)->valno != ValNo)
1075 continue;
1076 if (!isValNoAvailableAt(ImpLi, MI, UseIdx))
1077 return false;
1078 }
Evan Chengdc377862008-09-30 15:44:16 +00001079
1080 // If a register operand of the re-materialized instruction is going to
1081 // be spilled next, then it's not legal to re-materialize this instruction.
1082 for (unsigned i = 0, e = SpillIs.size(); i != e; ++i)
1083 if (ImpUse == SpillIs[i]->reg)
1084 return false;
Dan Gohman6d69ba82008-07-25 00:02:30 +00001085 }
1086 return true;
Evan Cheng5ef3a042007-12-06 00:01:56 +00001087}
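
// A sketch of what the aggressive path (-aggressive-remat) can accept (the
// opcode and operands are only illustrative):
//   %reg1027<def> = MOV32rm ...   ; non-volatile load from constant memory
// Such an instruction qualifies when every memoperand is a constant
// PseudoSourceValue or memory that AliasAnalysis reports as constant, all of
// its register operands are virtual, and its only def is the first operand.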
1088
Evan Cheng06587492008-10-24 02:05:00 +00001089/// isReMaterializable - Returns true if the definition MI of the specified
1090/// val# of the specified interval is re-materializable.
1091bool LiveIntervals::isReMaterializable(const LiveInterval &li,
1092 const VNInfo *ValNo, MachineInstr *MI) {
1093 SmallVector<LiveInterval*, 4> Dummy1;
1094 bool Dummy2;
1095 return isReMaterializable(li, ValNo, MI, Dummy1, Dummy2);
1096}
1097
Evan Cheng5ef3a042007-12-06 00:01:56 +00001098/// isReMaterializable - Returns true if every definition of MI of every
1099/// val# of the specified interval is re-materializable.
Evan Chengdc377862008-09-30 15:44:16 +00001100bool LiveIntervals::isReMaterializable(const LiveInterval &li,
1101 SmallVectorImpl<LiveInterval*> &SpillIs,
1102 bool &isLoad) {
Evan Cheng5ef3a042007-12-06 00:01:56 +00001103 isLoad = false;
1104 for (LiveInterval::const_vni_iterator i = li.vni_begin(), e = li.vni_end();
1105 i != e; ++i) {
1106 const VNInfo *VNI = *i;
Lang Hames857c4e02009-06-17 21:01:20 +00001107 if (VNI->isUnused())
Evan Cheng5ef3a042007-12-06 00:01:56 +00001108 continue; // Dead val#.
1109 // Is the def for the val# rematerializable?
Lang Hames857c4e02009-06-17 21:01:20 +00001110 if (!VNI->isDefAccurate())
Evan Cheng5ef3a042007-12-06 00:01:56 +00001111 return false;
Lang Hames857c4e02009-06-17 21:01:20 +00001112 MachineInstr *ReMatDefMI = getInstructionFromIndex(VNI->def);
Evan Cheng5ef3a042007-12-06 00:01:56 +00001113 bool DefIsLoad = false;
Evan Chengd70dbb52008-02-22 09:24:50 +00001114 if (!ReMatDefMI ||
Evan Chengdc377862008-09-30 15:44:16 +00001115 !isReMaterializable(li, VNI, ReMatDefMI, SpillIs, DefIsLoad))
Evan Cheng5ef3a042007-12-06 00:01:56 +00001116 return false;
1117 isLoad |= DefIsLoad;
Evan Chengf2fbca62007-11-12 06:35:08 +00001118 }
1119 return true;
1120}
1121
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001122/// FilterFoldedOps - Filter out two-address use operands. Return
1123/// true if it finds any issue with the operands that ought to prevent
1124/// folding.
1125static bool FilterFoldedOps(MachineInstr *MI,
1126 SmallVector<unsigned, 2> &Ops,
1127 unsigned &MRInfo,
1128 SmallVector<unsigned, 2> &FoldOps) {
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001129 MRInfo = 0;
Evan Chengaee4af62007-12-02 08:30:39 +00001130 for (unsigned i = 0, e = Ops.size(); i != e; ++i) {
1131 unsigned OpIdx = Ops[i];
Evan Chengd70dbb52008-02-22 09:24:50 +00001132 MachineOperand &MO = MI->getOperand(OpIdx);
Evan Chengaee4af62007-12-02 08:30:39 +00001133 // FIXME: fold subreg use.
Evan Chengd70dbb52008-02-22 09:24:50 +00001134 if (MO.getSubReg())
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001135 return true;
Evan Chengd70dbb52008-02-22 09:24:50 +00001136 if (MO.isDef())
Evan Chengaee4af62007-12-02 08:30:39 +00001137 MRInfo |= (unsigned)VirtRegMap::isMod;
1138 else {
1139 // Filter out two-address use operand(s).
Evan Chenga24752f2009-03-19 20:30:06 +00001140 if (MI->isRegTiedToDefOperand(OpIdx)) {
Evan Chengaee4af62007-12-02 08:30:39 +00001141 MRInfo = VirtRegMap::isModRef;
1142 continue;
1143 }
1144 MRInfo |= (unsigned)VirtRegMap::isRef;
1145 }
1146 FoldOps.push_back(OpIdx);
Evan Chenge62f97c2007-12-01 02:07:52 +00001147 }
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001148 return false;
1149}
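
// For instance, given a two-address instruction such as
//   %reg1024<def> = ADD32ri %reg1024<kill>, 8
// (an illustrative opcode), the tied use of %reg1024 is not pushed onto
// FoldOps; MRInfo is set to isModRef instead, so callers know any fold would
// have to cover both the reload and the store of the slot.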
1150
1151
1152/// tryFoldMemoryOperand - Attempts to fold either a spill / restore from
1153/// slot / to reg or any rematerialized load into the ith operand of the
1154/// specified MI. If it is successful, MI is updated with the newly created
1155/// MI and true is returned.
1156bool LiveIntervals::tryFoldMemoryOperand(MachineInstr* &MI,
1157 VirtRegMap &vrm, MachineInstr *DefMI,
1158 unsigned InstrIdx,
1159 SmallVector<unsigned, 2> &Ops,
1160 bool isSS, int Slot, unsigned Reg) {
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001161 // If it is an implicit def instruction, just delete it.
Evan Cheng20ccded2008-03-15 00:19:36 +00001162 if (MI->getOpcode() == TargetInstrInfo::IMPLICIT_DEF) {
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001163 RemoveMachineInstrFromMaps(MI);
1164 vrm.RemoveMachineInstrFromMaps(MI);
1165 MI->eraseFromParent();
1166 ++numFolds;
1167 return true;
1168 }
1169
1170 // Filter the list of operand indexes that are to be folded. Abort if
1171 // any operand will prevent folding.
1172 unsigned MRInfo = 0;
1173 SmallVector<unsigned, 2> FoldOps;
1174 if (FilterFoldedOps(MI, Ops, MRInfo, FoldOps))
1175 return false;
Evan Chenge62f97c2007-12-01 02:07:52 +00001176
Evan Cheng427f4c12008-03-31 23:19:51 +00001177  // The only time it's safe to fold into a two-address instruction is when
1178  // it's folding a reload from / a spill into a spill stack slot.
1179 if (DefMI && (MRInfo & VirtRegMap::isMod))
Evan Cheng249ded32008-02-23 03:38:34 +00001180 return false;
1181
Evan Chengf2f8c2a2008-02-08 22:05:27 +00001182 MachineInstr *fmi = isSS ? tii_->foldMemoryOperand(*mf_, MI, FoldOps, Slot)
1183 : tii_->foldMemoryOperand(*mf_, MI, FoldOps, DefMI);
Evan Chengf2fbca62007-11-12 06:35:08 +00001184 if (fmi) {
Evan Chengd3653122008-02-27 03:04:06 +00001185 // Remember this instruction uses the spill slot.
1186 if (isSS) vrm.addSpillSlotUse(Slot, fmi);
1187
Evan Chengf2fbca62007-11-12 06:35:08 +00001188 // Attempt to fold the memory reference into the instruction. If
1189 // we can do this, we don't need to insert spill code.
Evan Chengf2fbca62007-11-12 06:35:08 +00001190 MachineBasicBlock &MBB = *MI->getParent();
Evan Cheng84802932008-01-10 08:24:38 +00001191 if (isSS && !mf_->getFrameInfo()->isImmutableObjectIndex(Slot))
Evan Chengaee4af62007-12-02 08:30:39 +00001192 vrm.virtFolded(Reg, MI, fmi, (VirtRegMap::ModRef)MRInfo);
Evan Cheng81a03822007-11-17 00:40:40 +00001193 vrm.transferSpillPts(MI, fmi);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001194 vrm.transferRestorePts(MI, fmi);
Evan Chengc1f53c72008-03-11 21:34:46 +00001195 vrm.transferEmergencySpills(MI, fmi);
Evan Chengf2fbca62007-11-12 06:35:08 +00001196 mi2iMap_.erase(MI);
Evan Chengcddbb832007-11-30 21:23:43 +00001197 i2miMap_[InstrIdx /InstrSlots::NUM] = fmi;
1198 mi2iMap_[fmi] = InstrIdx;
Evan Chengf2fbca62007-11-12 06:35:08 +00001199 MI = MBB.insert(MBB.erase(MI), fmi);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001200 ++numFolds;
Evan Chengf2fbca62007-11-12 06:35:08 +00001201 return true;
1202 }
1203 return false;
1204}
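
// A sketch of the kind of fold this enables (x86 opcodes and slot numbers
// purely for illustration), with %reg1024 spilled to stack slot <fi#2>:
// rather than reloading
//   %reg1024<def> = MOV32rm <fi#2> ...
//   %reg1025<def> = ADD32rr %reg1025<kill>, %reg1024
// the use is rewritten in place as
//   %reg1025<def> = ADD32rm %reg1025<kill>, <fi#2> ...
// and the index maps are updated to point at the folded instruction.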
1205
Evan Cheng018f9b02007-12-05 03:22:34 +00001206/// canFoldMemoryOperand - Returns true if the specified load / store
1207/// folding is possible.
1208bool LiveIntervals::canFoldMemoryOperand(MachineInstr *MI,
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001209 SmallVector<unsigned, 2> &Ops,
Evan Cheng3c75ba82008-04-01 21:37:32 +00001210 bool ReMat) const {
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001211 // Filter the list of operand indexes that are to be folded. Abort if
1212 // any operand will prevent folding.
1213 unsigned MRInfo = 0;
Evan Cheng018f9b02007-12-05 03:22:34 +00001214 SmallVector<unsigned, 2> FoldOps;
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001215 if (FilterFoldedOps(MI, Ops, MRInfo, FoldOps))
1216 return false;
Evan Cheng018f9b02007-12-05 03:22:34 +00001217
Evan Cheng3c75ba82008-04-01 21:37:32 +00001218 // It's only legal to remat for a use, not a def.
1219 if (ReMat && (MRInfo & VirtRegMap::isMod))
Evan Cheng79a0c1e2008-02-25 08:50:41 +00001220 return false;
Evan Cheng018f9b02007-12-05 03:22:34 +00001221
Evan Chengd70dbb52008-02-22 09:24:50 +00001222 return tii_->canFoldMemoryOperand(MI, FoldOps);
1223}
1224
Evan Cheng81a03822007-11-17 00:40:40 +00001225bool LiveIntervals::intervalIsInOneMBB(const LiveInterval &li) const {
1226 SmallPtrSet<MachineBasicBlock*, 4> MBBs;
1227 for (LiveInterval::Ranges::const_iterator
1228 I = li.ranges.begin(), E = li.ranges.end(); I != E; ++I) {
1229 std::vector<IdxMBBPair>::const_iterator II =
1230 std::lower_bound(Idx2MBBMap.begin(), Idx2MBBMap.end(), I->start);
1231 if (II == Idx2MBBMap.end())
1232 continue;
1233    if (I->end > II->first)  // crossing an MBB.
1234 return false;
1235 MBBs.insert(II->second);
1236 if (MBBs.size() > 1)
1237 return false;
1238 }
1239 return true;
1240}
1241
Evan Chengd70dbb52008-02-22 09:24:50 +00001242/// rewriteImplicitOps - Rewrite implicit use operands of MI (i.e. uses of the
1243/// interval's register on to-be re-materialized operands of MI) with the new register.
1244void LiveIntervals::rewriteImplicitOps(const LiveInterval &li,
1245 MachineInstr *MI, unsigned NewVReg,
1246 VirtRegMap &vrm) {
1247  // There is an implicit use. That means one of the other operands is
1248  // being remat'ed and the remat'ed instruction has li.reg as a
1249  // use operand. Make sure we rewrite that as well.
1250 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
1251 MachineOperand &MO = MI->getOperand(i);
Dan Gohmand735b802008-10-03 15:45:36 +00001252 if (!MO.isReg())
Evan Chengd70dbb52008-02-22 09:24:50 +00001253 continue;
1254 unsigned Reg = MO.getReg();
1255 if (Reg == 0 || TargetRegisterInfo::isPhysicalRegister(Reg))
1256 continue;
1257 if (!vrm.isReMaterialized(Reg))
1258 continue;
1259 MachineInstr *ReMatMI = vrm.getReMaterializedMI(Reg);
Evan Cheng6130f662008-03-05 00:59:57 +00001260 MachineOperand *UseMO = ReMatMI->findRegisterUseOperand(li.reg);
1261 if (UseMO)
1262 UseMO->setReg(NewVReg);
Evan Chengd70dbb52008-02-22 09:24:50 +00001263 }
1264}
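
// Concretely (register numbers only illustrative): if MI uses %reg1026,
// %reg1026 is marked re-materialized, and its recorded remat instruction
// reads li.reg, then that remat instruction must now read NewVReg instead;
// otherwise the value re-issued at this point would be computed from the
// old, about-to-be-replaced register.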
1265
Evan Chengf2fbca62007-11-12 06:35:08 +00001266/// rewriteInstructionForSpills, rewriteInstructionsForSpills - Helper functions
1267/// for addIntervalsForSpills to rewrite uses / defs for the given live range.
Evan Cheng018f9b02007-12-05 03:22:34 +00001268bool LiveIntervals::
Evan Chengd70dbb52008-02-22 09:24:50 +00001269rewriteInstructionForSpills(const LiveInterval &li, const VNInfo *VNI,
1270 bool TrySplit, unsigned index, unsigned end, MachineInstr *MI,
Evan Cheng81a03822007-11-17 00:40:40 +00001271 MachineInstr *ReMatOrigDefMI, MachineInstr *ReMatDefMI,
Evan Chengf2fbca62007-11-12 06:35:08 +00001272 unsigned Slot, int LdSlot,
1273 bool isLoad, bool isLoadSS, bool DefIsReMat, bool CanDelete,
Evan Chengd70dbb52008-02-22 09:24:50 +00001274 VirtRegMap &vrm,
Evan Chengf2fbca62007-11-12 06:35:08 +00001275 const TargetRegisterClass* rc,
1276 SmallVector<int, 4> &ReMatIds,
Evan Cheng22f07ff2007-12-11 02:09:15 +00001277 const MachineLoopInfo *loopInfo,
Evan Cheng313d4b82008-02-23 00:33:04 +00001278 unsigned &NewVReg, unsigned ImpUse, bool &HasDef, bool &HasUse,
Owen Anderson28998312008-08-13 22:28:50 +00001279 DenseMap<unsigned,unsigned> &MBBVRegsMap,
Evan Chengc781a242009-05-03 18:32:42 +00001280 std::vector<LiveInterval*> &NewLIs) {
Evan Cheng018f9b02007-12-05 03:22:34 +00001281 bool CanFold = false;
Evan Chengf2fbca62007-11-12 06:35:08 +00001282 RestartInstruction:
1283 for (unsigned i = 0; i != MI->getNumOperands(); ++i) {
1284 MachineOperand& mop = MI->getOperand(i);
Dan Gohmand735b802008-10-03 15:45:36 +00001285 if (!mop.isReg())
Evan Chengf2fbca62007-11-12 06:35:08 +00001286 continue;
1287 unsigned Reg = mop.getReg();
1288 unsigned RegI = Reg;
Dan Gohman6f0d0242008-02-10 18:45:23 +00001289 if (Reg == 0 || TargetRegisterInfo::isPhysicalRegister(Reg))
Evan Chengf2fbca62007-11-12 06:35:08 +00001290 continue;
Evan Chengf2fbca62007-11-12 06:35:08 +00001291 if (Reg != li.reg)
1292 continue;
1293
1294 bool TryFold = !DefIsReMat;
Evan Chengcb3c3302007-11-29 23:02:50 +00001295 bool FoldSS = true; // Default behavior unless it's a remat.
Evan Chengf2fbca62007-11-12 06:35:08 +00001296 int FoldSlot = Slot;
1297 if (DefIsReMat) {
1298 // If this is the rematerializable definition MI itself and
1299 // all of its uses are rematerialized, simply delete it.
Evan Cheng81a03822007-11-17 00:40:40 +00001300 if (MI == ReMatOrigDefMI && CanDelete) {
Evan Chengcddbb832007-11-30 21:23:43 +00001301        DOUT << "\t\t\t\tErasing re-materializable def: ";
1302 DOUT << MI << '\n';
Evan Chengf2fbca62007-11-12 06:35:08 +00001303 RemoveMachineInstrFromMaps(MI);
Evan Chengcada2452007-11-28 01:28:46 +00001304 vrm.RemoveMachineInstrFromMaps(MI);
Evan Chengf2fbca62007-11-12 06:35:08 +00001305 MI->eraseFromParent();
1306 break;
1307 }
1308
1309 // If def for this use can't be rematerialized, then try folding.
Evan Cheng0cbb1162007-11-29 01:06:25 +00001310 // If def is rematerializable and it's a load, also try folding.
Evan Chengcb3c3302007-11-29 23:02:50 +00001311 TryFold = !ReMatDefMI || (ReMatDefMI && (MI == ReMatOrigDefMI || isLoad));
Evan Chengf2fbca62007-11-12 06:35:08 +00001312 if (isLoad) {
1313 // Try fold loads (from stack slot, constant pool, etc.) into uses.
1314 FoldSS = isLoadSS;
1315 FoldSlot = LdSlot;
1316 }
1317 }
1318
Evan Chengf2fbca62007-11-12 06:35:08 +00001319 // Scan all of the operands of this instruction rewriting operands
1320 // to use NewVReg instead of li.reg as appropriate. We do this for
1321 // two reasons:
1322 //
1323 // 1. If the instr reads the same spilled vreg multiple times, we
1324 // want to reuse the NewVReg.
1325 // 2. If the instr is a two-addr instruction, we are required to
1326 // keep the src/dst regs pinned.
1327 //
1328 // Keep track of whether we replace a use and/or def so that we can
1329 // create the spill interval with the appropriate range.
Evan Chengcddbb832007-11-30 21:23:43 +00001330
Evan Cheng81a03822007-11-17 00:40:40 +00001331 HasUse = mop.isUse();
1332 HasDef = mop.isDef();
Evan Chengaee4af62007-12-02 08:30:39 +00001333 SmallVector<unsigned, 2> Ops;
1334 Ops.push_back(i);
Evan Chengf2fbca62007-11-12 06:35:08 +00001335 for (unsigned j = i+1, e = MI->getNumOperands(); j != e; ++j) {
Evan Chengaee4af62007-12-02 08:30:39 +00001336 const MachineOperand &MOj = MI->getOperand(j);
Dan Gohmand735b802008-10-03 15:45:36 +00001337 if (!MOj.isReg())
Evan Chengf2fbca62007-11-12 06:35:08 +00001338 continue;
Evan Chengaee4af62007-12-02 08:30:39 +00001339 unsigned RegJ = MOj.getReg();
Dan Gohman6f0d0242008-02-10 18:45:23 +00001340 if (RegJ == 0 || TargetRegisterInfo::isPhysicalRegister(RegJ))
Evan Chengf2fbca62007-11-12 06:35:08 +00001341 continue;
1342 if (RegJ == RegI) {
Evan Chengaee4af62007-12-02 08:30:39 +00001343 Ops.push_back(j);
1344 HasUse |= MOj.isUse();
1345 HasDef |= MOj.isDef();
Evan Chengf2fbca62007-11-12 06:35:08 +00001346 }
1347 }
1348
Evan Cheng79a796c2008-07-12 01:56:02 +00001349 if (HasUse && !li.liveAt(getUseIndex(index)))
1350 // Must be defined by an implicit def. It should not be spilled. Note,
1351        // this is for correctness reasons, e.g.
1352 // 8 %reg1024<def> = IMPLICIT_DEF
1353 // 12 %reg1024<def> = INSERT_SUBREG %reg1024<kill>, %reg1025, 2
1354        // The live range [12, 14) is not part of the r1024 live interval since
1355        // it's defined by an implicit def. It will not conflict with the live
1356        // interval of r1025. Now suppose both registers are spilled; you can
Evan Chengb9890ae2008-07-12 02:22:07 +00001357        // easily see a situation where both registers are reloaded before
Evan Cheng79a796c2008-07-12 01:56:02 +00001358        // the INSERT_SUBREG and the two target registers would overlap.
1359 HasUse = false;
1360
David Greene26b86a02008-10-27 17:38:59 +00001361 // Create a new virtual register for the spill interval.
1362 // Create the new register now so we can map the fold instruction
1363 // to the new register so when it is unfolded we get the correct
1364 // answer.
1365 bool CreatedNewVReg = false;
1366 if (NewVReg == 0) {
1367 NewVReg = mri_->createVirtualRegister(rc);
1368 vrm.grow();
1369 CreatedNewVReg = true;
1370 }
1371
Evan Cheng9c3c2212008-06-06 07:54:39 +00001372 if (!TryFold)
1373 CanFold = false;
1374 else {
Evan Cheng018f9b02007-12-05 03:22:34 +00001375 // Do not fold load / store here if we are splitting. We'll find an
1376 // optimal point to insert a load / store later.
1377 if (!TrySplit) {
1378 if (tryFoldMemoryOperand(MI, vrm, ReMatDefMI, index,
David Greene26b86a02008-10-27 17:38:59 +00001379 Ops, FoldSS, FoldSlot, NewVReg)) {
Evan Cheng018f9b02007-12-05 03:22:34 +00001380 // Folding the load/store can completely change the instruction in
1381          // unpredictable ways; rescan it from the beginning.
David Greene26b86a02008-10-27 17:38:59 +00001382
1383 if (FoldSS) {
1384 // We need to give the new vreg the same stack slot as the
1385 // spilled interval.
1386 vrm.assignVirt2StackSlot(NewVReg, FoldSlot);
1387 }
1388
Evan Cheng018f9b02007-12-05 03:22:34 +00001389 HasUse = false;
1390 HasDef = false;
1391 CanFold = false;
Evan Chengc781a242009-05-03 18:32:42 +00001392 if (isNotInMIMap(MI))
Evan Cheng7e073ba2008-04-09 20:57:25 +00001393 break;
Evan Cheng018f9b02007-12-05 03:22:34 +00001394 goto RestartInstruction;
1395 }
1396 } else {
Evan Cheng9c3c2212008-06-06 07:54:39 +00001397 // We'll try to fold it later if it's profitable.
Evan Cheng3c75ba82008-04-01 21:37:32 +00001398 CanFold = canFoldMemoryOperand(MI, Ops, DefIsReMat);
Evan Cheng018f9b02007-12-05 03:22:34 +00001399 }
Evan Cheng9c3c2212008-06-06 07:54:39 +00001400 }
Evan Chengcddbb832007-11-30 21:23:43 +00001401
Evan Chengcddbb832007-11-30 21:23:43 +00001402 mop.setReg(NewVReg);
Evan Chengd70dbb52008-02-22 09:24:50 +00001403 if (mop.isImplicit())
1404 rewriteImplicitOps(li, MI, NewVReg, vrm);
Evan Chengcddbb832007-11-30 21:23:43 +00001405
1406 // Reuse NewVReg for other reads.
Evan Chengd70dbb52008-02-22 09:24:50 +00001407 for (unsigned j = 0, e = Ops.size(); j != e; ++j) {
1408 MachineOperand &mopj = MI->getOperand(Ops[j]);
1409 mopj.setReg(NewVReg);
1410 if (mopj.isImplicit())
1411 rewriteImplicitOps(li, MI, NewVReg, vrm);
1412 }
Evan Chengcddbb832007-11-30 21:23:43 +00001413
Evan Cheng81a03822007-11-17 00:40:40 +00001414 if (CreatedNewVReg) {
1415 if (DefIsReMat) {
1416 vrm.setVirtIsReMaterialized(NewVReg, ReMatDefMI/*, CanDelete*/);
Evan Chengd70dbb52008-02-22 09:24:50 +00001417 if (ReMatIds[VNI->id] == VirtRegMap::MAX_STACK_SLOT) {
Evan Cheng81a03822007-11-17 00:40:40 +00001418 // Each valnum may have its own remat id.
Evan Chengd70dbb52008-02-22 09:24:50 +00001419 ReMatIds[VNI->id] = vrm.assignVirtReMatId(NewVReg);
Evan Cheng81a03822007-11-17 00:40:40 +00001420 } else {
Evan Chengd70dbb52008-02-22 09:24:50 +00001421 vrm.assignVirtReMatId(NewVReg, ReMatIds[VNI->id]);
Evan Cheng81a03822007-11-17 00:40:40 +00001422 }
1423 if (!CanDelete || (HasUse && HasDef)) {
1424 // If this is a two-addr instruction then its use operands are
1425 // rematerializable but its def is not. It should be assigned a
1426 // stack slot.
1427 vrm.assignVirt2StackSlot(NewVReg, Slot);
1428 }
Evan Chengf2fbca62007-11-12 06:35:08 +00001429 } else {
Evan Chengf2fbca62007-11-12 06:35:08 +00001430 vrm.assignVirt2StackSlot(NewVReg, Slot);
1431 }
Evan Chengcb3c3302007-11-29 23:02:50 +00001432 } else if (HasUse && HasDef &&
1433 vrm.getStackSlot(NewVReg) == VirtRegMap::NO_STACK_SLOT) {
1434 // If this interval hasn't been assigned a stack slot (because earlier
1435 // def is a deleted remat def), do it now.
1436 assert(Slot != VirtRegMap::NO_STACK_SLOT);
1437 vrm.assignVirt2StackSlot(NewVReg, Slot);
Evan Chengf2fbca62007-11-12 06:35:08 +00001438 }
1439
Evan Cheng313d4b82008-02-23 00:33:04 +00001440 // Re-matting an instruction with virtual register use. Add the
1441 // register as an implicit use on the use MI.
1442 if (DefIsReMat && ImpUse)
1443 MI->addOperand(MachineOperand::CreateReg(ImpUse, false, true));
1444
Evan Cheng5b69eba2009-04-21 22:46:52 +00001445 // Create a new register interval for this spill / remat.
Evan Chengf2fbca62007-11-12 06:35:08 +00001446 LiveInterval &nI = getOrCreateInterval(NewVReg);
Evan Cheng81a03822007-11-17 00:40:40 +00001447 if (CreatedNewVReg) {
1448 NewLIs.push_back(&nI);
Evan Cheng1953d0c2007-11-29 10:12:14 +00001449 MBBVRegsMap.insert(std::make_pair(MI->getParent()->getNumber(), NewVReg));
Evan Cheng81a03822007-11-17 00:40:40 +00001450 if (TrySplit)
1451 vrm.setIsSplitFromReg(NewVReg, li.reg);
1452 }
Evan Chengf2fbca62007-11-12 06:35:08 +00001453
1454 if (HasUse) {
Evan Cheng81a03822007-11-17 00:40:40 +00001455 if (CreatedNewVReg) {
1456 LiveRange LR(getLoadIndex(index), getUseIndex(index)+1,
Lang Hames857c4e02009-06-17 21:01:20 +00001457 nI.getNextValue(0, 0, false, VNInfoAllocator));
Evan Cheng81a03822007-11-17 00:40:40 +00001458 DOUT << " +" << LR;
1459 nI.addRange(LR);
1460 } else {
1461 // Extend the split live interval to this def / use.
1462 unsigned End = getUseIndex(index)+1;
1463 LiveRange LR(nI.ranges[nI.ranges.size()-1].end, End,
1464 nI.getValNumInfo(nI.getNumValNums()-1));
1465 DOUT << " +" << LR;
1466 nI.addRange(LR);
1467 }
Evan Chengf2fbca62007-11-12 06:35:08 +00001468 }
1469 if (HasDef) {
1470 LiveRange LR(getDefIndex(index), getStoreIndex(index),
Lang Hames857c4e02009-06-17 21:01:20 +00001471 nI.getNextValue(0, 0, false, VNInfoAllocator));
Evan Chengf2fbca62007-11-12 06:35:08 +00001472 DOUT << " +" << LR;
1473 nI.addRange(LR);
1474 }
Evan Cheng81a03822007-11-17 00:40:40 +00001475
Evan Chengf2fbca62007-11-12 06:35:08 +00001476 DOUT << "\t\t\t\tAdded new interval: ";
Dan Gohman6f0d0242008-02-10 18:45:23 +00001477 nI.print(DOUT, tri_);
Evan Chengf2fbca62007-11-12 06:35:08 +00001478 DOUT << '\n';
1479 }
Evan Cheng018f9b02007-12-05 03:22:34 +00001480 return CanFold;
Evan Chengf2fbca62007-11-12 06:35:08 +00001481}
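
// A sketch of the net effect on one rewritten instruction (opcode, register
// and slot numbers are only illustrative): spilling %reg1024 to <fi#3> turns
//   %reg1024<def> = ADD32ri %reg1024<kill>, 42
// into
//   %reg1030<def> = ADD32ri %reg1030<kill>, 42
// where the fresh %reg1030 is assigned <fi#3> and receives the short
// [load, use+1) and [def, store) ranges added above; the spiller later
// materializes the actual reload and store around it.
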
Evan Cheng81a03822007-11-17 00:40:40 +00001482bool LiveIntervals::anyKillInMBBAfterIdx(const LiveInterval &li,
Evan Cheng0cbb1162007-11-29 01:06:25 +00001483 const VNInfo *VNI,
1484 MachineBasicBlock *MBB, unsigned Idx) const {
Evan Cheng81a03822007-11-17 00:40:40 +00001485 unsigned End = getMBBEndIdx(MBB);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001486 for (unsigned j = 0, ee = VNI->kills.size(); j != ee; ++j) {
1487 unsigned KillIdx = VNI->kills[j];
1488 if (KillIdx > Idx && KillIdx < End)
1489 return true;
Evan Cheng81a03822007-11-17 00:40:40 +00001490 }
1491 return false;
1492}
1493
Evan Cheng063284c2008-02-21 00:34:19 +00001494/// RewriteInfo - Keep track of machine instrs that will be rewritten
1495/// during spilling.
Dan Gohman844731a2008-05-13 00:00:25 +00001496namespace {
1497 struct RewriteInfo {
1498 unsigned Index;
1499 MachineInstr *MI;
1500 bool HasUse;
1501 bool HasDef;
1502 RewriteInfo(unsigned i, MachineInstr *mi, bool u, bool d)
1503 : Index(i), MI(mi), HasUse(u), HasDef(d) {}
1504 };
Evan Cheng063284c2008-02-21 00:34:19 +00001505
Dan Gohman844731a2008-05-13 00:00:25 +00001506 struct RewriteInfoCompare {
1507 bool operator()(const RewriteInfo &LHS, const RewriteInfo &RHS) const {
1508 return LHS.Index < RHS.Index;
1509 }
1510 };
1511}
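
// The sort below groups all RewriteInfo entries for the same instruction
// together (they share the same index), so the rewrite loop can collapse,
// for example, the pair (40, MI, use) and (40, MI, def) produced by an
// instruction that both reads and writes li.reg into a single rewrite of MI.
// (The index value 40 is just an illustrative number.)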
Evan Cheng063284c2008-02-21 00:34:19 +00001512
Evan Chengf2fbca62007-11-12 06:35:08 +00001513void LiveIntervals::
Evan Cheng81a03822007-11-17 00:40:40 +00001514rewriteInstructionsForSpills(const LiveInterval &li, bool TrySplit,
Evan Chengf2fbca62007-11-12 06:35:08 +00001515 LiveInterval::Ranges::const_iterator &I,
Evan Cheng81a03822007-11-17 00:40:40 +00001516 MachineInstr *ReMatOrigDefMI, MachineInstr *ReMatDefMI,
Evan Chengf2fbca62007-11-12 06:35:08 +00001517 unsigned Slot, int LdSlot,
1518 bool isLoad, bool isLoadSS, bool DefIsReMat, bool CanDelete,
Evan Chengd70dbb52008-02-22 09:24:50 +00001519 VirtRegMap &vrm,
Evan Chengf2fbca62007-11-12 06:35:08 +00001520 const TargetRegisterClass* rc,
1521 SmallVector<int, 4> &ReMatIds,
Evan Cheng22f07ff2007-12-11 02:09:15 +00001522 const MachineLoopInfo *loopInfo,
Evan Cheng81a03822007-11-17 00:40:40 +00001523 BitVector &SpillMBBs,
Owen Anderson28998312008-08-13 22:28:50 +00001524 DenseMap<unsigned, std::vector<SRInfo> > &SpillIdxes,
Evan Cheng0cbb1162007-11-29 01:06:25 +00001525 BitVector &RestoreMBBs,
Owen Anderson28998312008-08-13 22:28:50 +00001526 DenseMap<unsigned, std::vector<SRInfo> > &RestoreIdxes,
1527 DenseMap<unsigned,unsigned> &MBBVRegsMap,
Evan Chengc781a242009-05-03 18:32:42 +00001528 std::vector<LiveInterval*> &NewLIs) {
Evan Cheng018f9b02007-12-05 03:22:34 +00001529 bool AllCanFold = true;
Evan Cheng81a03822007-11-17 00:40:40 +00001530 unsigned NewVReg = 0;
Evan Cheng063284c2008-02-21 00:34:19 +00001531 unsigned start = getBaseIndex(I->start);
Evan Chengf2fbca62007-11-12 06:35:08 +00001532 unsigned end = getBaseIndex(I->end-1) + InstrSlots::NUM;
Evan Chengf2fbca62007-11-12 06:35:08 +00001533
Evan Cheng063284c2008-02-21 00:34:19 +00001534  // First collect all the defs / uses in this live range that will be rewritten.
Evan Cheng7e073ba2008-04-09 20:57:25 +00001535 // Make sure they are sorted according to instruction index.
Evan Cheng063284c2008-02-21 00:34:19 +00001536 std::vector<RewriteInfo> RewriteMIs;
Evan Chengd70dbb52008-02-22 09:24:50 +00001537 for (MachineRegisterInfo::reg_iterator ri = mri_->reg_begin(li.reg),
1538 re = mri_->reg_end(); ri != re; ) {
Evan Cheng419852c2008-04-03 16:39:43 +00001539 MachineInstr *MI = &*ri;
Evan Cheng063284c2008-02-21 00:34:19 +00001540 MachineOperand &O = ri.getOperand();
1541 ++ri;
Evan Cheng24d2f8a2008-03-31 07:53:30 +00001542 assert(!O.isImplicit() && "Spilling register that's used as implicit use?");
Evan Cheng063284c2008-02-21 00:34:19 +00001543 unsigned index = getInstructionIndex(MI);
1544 if (index < start || index >= end)
1545 continue;
Evan Cheng79a796c2008-07-12 01:56:02 +00001546 if (O.isUse() && !li.liveAt(getUseIndex(index)))
1547 // Must be defined by an implicit def. It should not be spilled. Note,
1548      // this is for correctness reasons, e.g.
1549 // 8 %reg1024<def> = IMPLICIT_DEF
1550 // 12 %reg1024<def> = INSERT_SUBREG %reg1024<kill>, %reg1025, 2
1551      // The live range [12, 14) is not part of the r1024 live interval since
1552      // it's defined by an implicit def. It will not conflict with the live
1553      // interval of r1025. Now suppose both registers are spilled; you can
Evan Chengb9890ae2008-07-12 02:22:07 +00001554      // easily see a situation where both registers are reloaded before
Evan Cheng79a796c2008-07-12 01:56:02 +00001555      // the INSERT_SUBREG and the two target registers would overlap.
1556 continue;
Evan Cheng063284c2008-02-21 00:34:19 +00001557 RewriteMIs.push_back(RewriteInfo(index, MI, O.isUse(), O.isDef()));
1558 }
1559 std::sort(RewriteMIs.begin(), RewriteMIs.end(), RewriteInfoCompare());
1560
Evan Cheng313d4b82008-02-23 00:33:04 +00001561 unsigned ImpUse = DefIsReMat ? getReMatImplicitUse(li, ReMatDefMI) : 0;
Evan Cheng063284c2008-02-21 00:34:19 +00001562 // Now rewrite the defs and uses.
1563 for (unsigned i = 0, e = RewriteMIs.size(); i != e; ) {
1564 RewriteInfo &rwi = RewriteMIs[i];
1565 ++i;
1566 unsigned index = rwi.Index;
1567 bool MIHasUse = rwi.HasUse;
1568 bool MIHasDef = rwi.HasDef;
1569 MachineInstr *MI = rwi.MI;
1570    // If MI defs and/or uses the same register multiple times, then there
1571 // are multiple entries.
Evan Cheng313d4b82008-02-23 00:33:04 +00001572 unsigned NumUses = MIHasUse;
Evan Cheng063284c2008-02-21 00:34:19 +00001573 while (i != e && RewriteMIs[i].MI == MI) {
1574 assert(RewriteMIs[i].Index == index);
Evan Cheng313d4b82008-02-23 00:33:04 +00001575 bool isUse = RewriteMIs[i].HasUse;
1576 if (isUse) ++NumUses;
1577 MIHasUse |= isUse;
Evan Cheng063284c2008-02-21 00:34:19 +00001578 MIHasDef |= RewriteMIs[i].HasDef;
1579 ++i;
1580 }
Evan Cheng81a03822007-11-17 00:40:40 +00001581 MachineBasicBlock *MBB = MI->getParent();
Evan Cheng313d4b82008-02-23 00:33:04 +00001582
Evan Cheng0a891ed2008-05-23 23:00:04 +00001583 if (ImpUse && MI != ReMatDefMI) {
Evan Cheng313d4b82008-02-23 00:33:04 +00001584 // Re-matting an instruction with virtual register use. Update the
Evan Cheng24d2f8a2008-03-31 07:53:30 +00001585 // register interval's spill weight to HUGE_VALF to prevent it from
1586 // being spilled.
Evan Cheng313d4b82008-02-23 00:33:04 +00001587 LiveInterval &ImpLi = getInterval(ImpUse);
Evan Cheng24d2f8a2008-03-31 07:53:30 +00001588 ImpLi.weight = HUGE_VALF;
Evan Cheng313d4b82008-02-23 00:33:04 +00001589 }
1590
Evan Cheng063284c2008-02-21 00:34:19 +00001591 unsigned MBBId = MBB->getNumber();
Evan Cheng018f9b02007-12-05 03:22:34 +00001592 unsigned ThisVReg = 0;
Evan Cheng70306f82007-12-03 09:58:48 +00001593 if (TrySplit) {
Owen Anderson28998312008-08-13 22:28:50 +00001594 DenseMap<unsigned,unsigned>::iterator NVI = MBBVRegsMap.find(MBBId);
Evan Cheng1953d0c2007-11-29 10:12:14 +00001595 if (NVI != MBBVRegsMap.end()) {
Evan Cheng018f9b02007-12-05 03:22:34 +00001596 ThisVReg = NVI->second;
Evan Cheng1953d0c2007-11-29 10:12:14 +00001597 // One common case:
1598 // x = use
1599 // ...
1600 // ...
1601 // def = ...
1602 // = use
1603        // It's better to start a new interval to avoid artificially
1604        // extending the new interval.
Evan Cheng1953d0c2007-11-29 10:12:14 +00001605 if (MIHasDef && !MIHasUse) {
1606 MBBVRegsMap.erase(MBB->getNumber());
Evan Cheng018f9b02007-12-05 03:22:34 +00001607 ThisVReg = 0;
Evan Cheng1953d0c2007-11-29 10:12:14 +00001608 }
1609 }
Evan Chengcada2452007-11-28 01:28:46 +00001610 }
Evan Cheng018f9b02007-12-05 03:22:34 +00001611
1612 bool IsNew = ThisVReg == 0;
1613 if (IsNew) {
1614 // This ends the previous live interval. If all of its def / use
1615 // can be folded, give it a low spill weight.
1616 if (NewVReg && TrySplit && AllCanFold) {
1617 LiveInterval &nI = getOrCreateInterval(NewVReg);
1618 nI.weight /= 10.0F;
1619 }
1620 AllCanFold = true;
1621 }
1622 NewVReg = ThisVReg;
1623
Evan Cheng81a03822007-11-17 00:40:40 +00001624 bool HasDef = false;
1625 bool HasUse = false;
Evan Chengd70dbb52008-02-22 09:24:50 +00001626 bool CanFold = rewriteInstructionForSpills(li, I->valno, TrySplit,
Evan Cheng9c3c2212008-06-06 07:54:39 +00001627 index, end, MI, ReMatOrigDefMI, ReMatDefMI,
1628 Slot, LdSlot, isLoad, isLoadSS, DefIsReMat,
1629 CanDelete, vrm, rc, ReMatIds, loopInfo, NewVReg,
Evan Chengc781a242009-05-03 18:32:42 +00001630 ImpUse, HasDef, HasUse, MBBVRegsMap, NewLIs);
Evan Cheng81a03822007-11-17 00:40:40 +00001631 if (!HasDef && !HasUse)
1632 continue;
1633
Evan Cheng018f9b02007-12-05 03:22:34 +00001634 AllCanFold &= CanFold;
1635
Evan Cheng81a03822007-11-17 00:40:40 +00001636 // Update weight of spill interval.
1637 LiveInterval &nI = getOrCreateInterval(NewVReg);
Evan Cheng70306f82007-12-03 09:58:48 +00001638 if (!TrySplit) {
Evan Cheng81a03822007-11-17 00:40:40 +00001639 // The spill weight is now infinity as it cannot be spilled again.
1640 nI.weight = HUGE_VALF;
Evan Cheng0cbb1162007-11-29 01:06:25 +00001641 continue;
Evan Cheng81a03822007-11-17 00:40:40 +00001642 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00001643
1644 // Keep track of the last def and first use in each MBB.
Evan Cheng0cbb1162007-11-29 01:06:25 +00001645 if (HasDef) {
1646 if (MI != ReMatOrigDefMI || !CanDelete) {
Evan Cheng0cbb1162007-11-29 01:06:25 +00001647 bool HasKill = false;
1648 if (!HasUse)
1649 HasKill = anyKillInMBBAfterIdx(li, I->valno, MBB, getDefIndex(index));
1650 else {
Evan Cheng1953d0c2007-11-29 10:12:14 +00001651          // If this is two-address code, then this index starts a new VNInfo.
Evan Cheng3f32d652008-06-04 09:18:41 +00001652 const VNInfo *VNI = li.findDefinedVNInfo(getDefIndex(index));
Evan Cheng0cbb1162007-11-29 01:06:25 +00001653 if (VNI)
1654 HasKill = anyKillInMBBAfterIdx(li, VNI, MBB, getDefIndex(index));
1655 }
Owen Anderson28998312008-08-13 22:28:50 +00001656 DenseMap<unsigned, std::vector<SRInfo> >::iterator SII =
Evan Chenge3110d02007-12-01 04:42:39 +00001657 SpillIdxes.find(MBBId);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001658 if (!HasKill) {
Evan Cheng1953d0c2007-11-29 10:12:14 +00001659 if (SII == SpillIdxes.end()) {
1660 std::vector<SRInfo> S;
1661 S.push_back(SRInfo(index, NewVReg, true));
1662 SpillIdxes.insert(std::make_pair(MBBId, S));
1663 } else if (SII->second.back().vreg != NewVReg) {
1664 SII->second.push_back(SRInfo(index, NewVReg, true));
1665 } else if ((int)index > SII->second.back().index) {
Evan Cheng0cbb1162007-11-29 01:06:25 +00001666 // If there is an earlier def and this is a two-address
1667 // instruction, then it's not possible to fold the store (which
1668 // would also fold the load).
Evan Cheng1953d0c2007-11-29 10:12:14 +00001669 SRInfo &Info = SII->second.back();
1670 Info.index = index;
1671 Info.canFold = !HasUse;
Evan Cheng0cbb1162007-11-29 01:06:25 +00001672 }
1673 SpillMBBs.set(MBBId);
Evan Chenge3110d02007-12-01 04:42:39 +00001674 } else if (SII != SpillIdxes.end() &&
1675 SII->second.back().vreg == NewVReg &&
1676 (int)index > SII->second.back().index) {
1677 // There is an earlier def that's not killed (must be two-address).
1678 // The spill is no longer needed.
1679 SII->second.pop_back();
1680 if (SII->second.empty()) {
1681 SpillIdxes.erase(MBBId);
1682 SpillMBBs.reset(MBBId);
1683 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00001684 }
1685 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00001686 }
1687
1688 if (HasUse) {
Owen Anderson28998312008-08-13 22:28:50 +00001689 DenseMap<unsigned, std::vector<SRInfo> >::iterator SII =
Evan Cheng0cbb1162007-11-29 01:06:25 +00001690 SpillIdxes.find(MBBId);
Evan Cheng1953d0c2007-11-29 10:12:14 +00001691 if (SII != SpillIdxes.end() &&
1692 SII->second.back().vreg == NewVReg &&
1693 (int)index > SII->second.back().index)
Evan Cheng0cbb1162007-11-29 01:06:25 +00001694        // Use(s) follow the last def; it's not safe to fold the spill.
Evan Cheng1953d0c2007-11-29 10:12:14 +00001695 SII->second.back().canFold = false;
Owen Anderson28998312008-08-13 22:28:50 +00001696 DenseMap<unsigned, std::vector<SRInfo> >::iterator RII =
Evan Cheng0cbb1162007-11-29 01:06:25 +00001697 RestoreIdxes.find(MBBId);
Evan Cheng1953d0c2007-11-29 10:12:14 +00001698 if (RII != RestoreIdxes.end() && RII->second.back().vreg == NewVReg)
Evan Cheng0cbb1162007-11-29 01:06:25 +00001699 // If we are splitting live intervals, only fold if it's the first
1700 // use and there isn't another use later in the MBB.
Evan Cheng1953d0c2007-11-29 10:12:14 +00001701 RII->second.back().canFold = false;
Evan Cheng0cbb1162007-11-29 01:06:25 +00001702 else if (IsNew) {
1703 // Only need a reload if there isn't an earlier def / use.
Evan Cheng1953d0c2007-11-29 10:12:14 +00001704 if (RII == RestoreIdxes.end()) {
1705 std::vector<SRInfo> Infos;
1706 Infos.push_back(SRInfo(index, NewVReg, true));
1707 RestoreIdxes.insert(std::make_pair(MBBId, Infos));
1708 } else {
1709 RII->second.push_back(SRInfo(index, NewVReg, true));
1710 }
Evan Cheng0cbb1162007-11-29 01:06:25 +00001711 RestoreMBBs.set(MBBId);
1712 }
1713 }
1714
1715 // Update spill weight.
Evan Cheng22f07ff2007-12-11 02:09:15 +00001716 unsigned loopDepth = loopInfo->getLoopDepth(MBB);
Evan Chengc3417602008-06-21 06:45:54 +00001717 nI.weight += getSpillWeight(HasDef, HasUse, loopDepth);
Evan Chengf2fbca62007-11-12 06:35:08 +00001718 }
Evan Cheng018f9b02007-12-05 03:22:34 +00001719
1720 if (NewVReg && TrySplit && AllCanFold) {
1721 // If all of its def / use can be folded, give it a low spill weight.
1722 LiveInterval &nI = getOrCreateInterval(NewVReg);
1723 nI.weight /= 10.0F;
1724 }
Evan Chengf2fbca62007-11-12 06:35:08 +00001725}
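
// After this walk over the live range, SpillIdxes[MBB] records the last def
// of each new vreg in that block (a candidate spill point, with canFold set
// when the store could be folded) and RestoreIdxes[MBB] records the first
// use (a candidate reload point); addIntervalsForSpills below consumes these
// side tables to fold or emit the actual spill / restore code.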
1726
Evan Cheng1953d0c2007-11-29 10:12:14 +00001727bool LiveIntervals::alsoFoldARestore(int Id, int index, unsigned vr,
1728 BitVector &RestoreMBBs,
Owen Anderson28998312008-08-13 22:28:50 +00001729 DenseMap<unsigned,std::vector<SRInfo> > &RestoreIdxes) {
Evan Cheng1953d0c2007-11-29 10:12:14 +00001730 if (!RestoreMBBs[Id])
1731 return false;
1732 std::vector<SRInfo> &Restores = RestoreIdxes[Id];
1733 for (unsigned i = 0, e = Restores.size(); i != e; ++i)
1734 if (Restores[i].index == index &&
1735 Restores[i].vreg == vr &&
1736 Restores[i].canFold)
1737 return true;
1738 return false;
1739}
1740
1741void LiveIntervals::eraseRestoreInfo(int Id, int index, unsigned vr,
1742 BitVector &RestoreMBBs,
Owen Anderson28998312008-08-13 22:28:50 +00001743 DenseMap<unsigned,std::vector<SRInfo> > &RestoreIdxes) {
Evan Cheng1953d0c2007-11-29 10:12:14 +00001744 if (!RestoreMBBs[Id])
1745 return;
1746 std::vector<SRInfo> &Restores = RestoreIdxes[Id];
1747 for (unsigned i = 0, e = Restores.size(); i != e; ++i)
1748 if (Restores[i].index == index && Restores[i].vreg)
1749 Restores[i].index = -1;
1750}
Evan Cheng81a03822007-11-17 00:40:40 +00001751
Evan Cheng4cce6b42008-04-11 17:53:36 +00001752/// handleSpilledImpDefs - Remove IMPLICIT_DEF instructions which are being
1753/// spilled and create empty intervals for their uses.
1754void
1755LiveIntervals::handleSpilledImpDefs(const LiveInterval &li, VirtRegMap &vrm,
1756 const TargetRegisterClass* rc,
1757 std::vector<LiveInterval*> &NewLIs) {
Evan Cheng419852c2008-04-03 16:39:43 +00001758 for (MachineRegisterInfo::reg_iterator ri = mri_->reg_begin(li.reg),
1759 re = mri_->reg_end(); ri != re; ) {
Evan Cheng4cce6b42008-04-11 17:53:36 +00001760 MachineOperand &O = ri.getOperand();
Evan Cheng419852c2008-04-03 16:39:43 +00001761 MachineInstr *MI = &*ri;
1762 ++ri;
Evan Cheng4cce6b42008-04-11 17:53:36 +00001763 if (O.isDef()) {
1764 assert(MI->getOpcode() == TargetInstrInfo::IMPLICIT_DEF &&
1765 "Register def was not rewritten?");
1766 RemoveMachineInstrFromMaps(MI);
1767 vrm.RemoveMachineInstrFromMaps(MI);
1768 MI->eraseFromParent();
1769 } else {
1770      // This must be a use of an implicit_def so it's not part of the live
1771 // interval. Create a new empty live interval for it.
1772 // FIXME: Can we simply erase some of the instructions? e.g. Stores?
1773 unsigned NewVReg = mri_->createVirtualRegister(rc);
1774 vrm.grow();
1775 vrm.setIsImplicitlyDefined(NewVReg);
1776 NewLIs.push_back(&getOrCreateInterval(NewVReg));
1777 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
1778 MachineOperand &MO = MI->getOperand(i);
Dan Gohmand735b802008-10-03 15:45:36 +00001779 if (MO.isReg() && MO.getReg() == li.reg)
Evan Cheng4cce6b42008-04-11 17:53:36 +00001780 MO.setReg(NewVReg);
1781 }
1782 }
Evan Cheng419852c2008-04-03 16:39:43 +00001783 }
1784}
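
// In other words, for the IMPLICIT_DEF / INSERT_SUBREG pattern discussed
// earlier, the IMPLICIT_DEF itself is simply deleted once its result is
// being spilled, and each surviving use of that undefined value gets a
// fresh, empty interval so it places no real constraint on allocation.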
1785
Evan Chengf2fbca62007-11-12 06:35:08 +00001786std::vector<LiveInterval*> LiveIntervals::
Owen Andersond6664312008-08-18 18:05:32 +00001787addIntervalsForSpillsFast(const LiveInterval &li,
1788 const MachineLoopInfo *loopInfo,
Evan Chengc781a242009-05-03 18:32:42 +00001789 VirtRegMap &vrm) {
Owen Anderson17197312008-08-18 23:41:04 +00001790 unsigned slot = vrm.assignVirt2StackSlot(li.reg);
Owen Andersond6664312008-08-18 18:05:32 +00001791
1792 std::vector<LiveInterval*> added;
1793
1794 assert(li.weight != HUGE_VALF &&
1795 "attempt to spill already spilled interval!");
1796
1797 DOUT << "\t\t\t\tadding intervals for spills for interval: ";
1798 DEBUG(li.dump());
1799 DOUT << '\n';
1800
1801 const TargetRegisterClass* rc = mri_->getRegClass(li.reg);
1802
Owen Andersona41e47a2008-08-19 22:12:11 +00001803 MachineRegisterInfo::reg_iterator RI = mri_->reg_begin(li.reg);
1804 while (RI != mri_->reg_end()) {
1805 MachineInstr* MI = &*RI;
1806
1807 SmallVector<unsigned, 2> Indices;
1808 bool HasUse = false;
1809 bool HasDef = false;
1810
1811 for (unsigned i = 0; i != MI->getNumOperands(); ++i) {
1812 MachineOperand& mop = MI->getOperand(i);
Dan Gohmand735b802008-10-03 15:45:36 +00001813 if (!mop.isReg() || mop.getReg() != li.reg) continue;
Owen Andersona41e47a2008-08-19 22:12:11 +00001814
1815 HasUse |= MI->getOperand(i).isUse();
1816 HasDef |= MI->getOperand(i).isDef();
1817
1818 Indices.push_back(i);
1819 }
1820
1821 if (!tryFoldMemoryOperand(MI, vrm, NULL, getInstructionIndex(MI),
1822 Indices, true, slot, li.reg)) {
1823 unsigned NewVReg = mri_->createVirtualRegister(rc);
Owen Anderson9a032932008-08-18 21:20:32 +00001824 vrm.grow();
Owen Anderson17197312008-08-18 23:41:04 +00001825 vrm.assignVirt2StackSlot(NewVReg, slot);
1826
Owen Andersona41e47a2008-08-19 22:12:11 +00001827 // create a new register for this spill
1828 LiveInterval &nI = getOrCreateInterval(NewVReg);
Owen Andersond6664312008-08-18 18:05:32 +00001829
Owen Andersona41e47a2008-08-19 22:12:11 +00001830 // the spill weight is now infinity as it
1831 // cannot be spilled again
1832 nI.weight = HUGE_VALF;
1833
1834 // Rewrite register operands to use the new vreg.
1835 for (SmallVectorImpl<unsigned>::iterator I = Indices.begin(),
1836 E = Indices.end(); I != E; ++I) {
1837 MI->getOperand(*I).setReg(NewVReg);
1838
1839 if (MI->getOperand(*I).isUse())
1840 MI->getOperand(*I).setIsKill(true);
1841 }
1842
1843 // Fill in the new live interval.
1844 unsigned index = getInstructionIndex(MI);
1845 if (HasUse) {
1846 LiveRange LR(getLoadIndex(index), getUseIndex(index),
Lang Hames857c4e02009-06-17 21:01:20 +00001847 nI.getNextValue(0, 0, false, getVNInfoAllocator()));
Owen Andersona41e47a2008-08-19 22:12:11 +00001848 DOUT << " +" << LR;
1849 nI.addRange(LR);
1850 vrm.addRestorePoint(NewVReg, MI);
1851 }
1852 if (HasDef) {
1853 LiveRange LR(getDefIndex(index), getStoreIndex(index),
Lang Hames857c4e02009-06-17 21:01:20 +00001854 nI.getNextValue(0, 0, false, getVNInfoAllocator()));
Owen Andersona41e47a2008-08-19 22:12:11 +00001855 DOUT << " +" << LR;
1856 nI.addRange(LR);
1857 vrm.addSpillPoint(NewVReg, true, MI);
1858 }
1859
Owen Anderson17197312008-08-18 23:41:04 +00001860 added.push_back(&nI);
Owen Anderson8dc2cbe2008-08-18 18:38:12 +00001861
Owen Andersona41e47a2008-08-19 22:12:11 +00001862 DOUT << "\t\t\t\tadded new interval: ";
1863 DEBUG(nI.dump());
1864 DOUT << '\n';
Owen Andersona41e47a2008-08-19 22:12:11 +00001865 }
Owen Anderson9a032932008-08-18 21:20:32 +00001866
Owen Anderson9a032932008-08-18 21:20:32 +00001867
Owen Andersona41e47a2008-08-19 22:12:11 +00001868 RI = mri_->reg_begin(li.reg);
Owen Andersond6664312008-08-18 18:05:32 +00001869 }
Owen Andersond6664312008-08-18 18:05:32 +00001870
1871 return added;
1872}
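
// In this fast path every def / use of li.reg that cannot be folded simply
// gets its own tiny interval around the instruction, with a restore point
// recorded before a use and a spill point after a def; none of the splitting
// or rematerialization analysis done by addIntervalsForSpills below applies.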
1873
1874std::vector<LiveInterval*> LiveIntervals::
Evan Cheng81a03822007-11-17 00:40:40 +00001875addIntervalsForSpills(const LiveInterval &li,
Evan Chengdc377862008-09-30 15:44:16 +00001876 SmallVectorImpl<LiveInterval*> &SpillIs,
Evan Chengc781a242009-05-03 18:32:42 +00001877 const MachineLoopInfo *loopInfo, VirtRegMap &vrm) {
Owen Andersonae339ba2008-08-19 00:17:30 +00001878
1879 if (EnableFastSpilling)
Evan Chengc781a242009-05-03 18:32:42 +00001880 return addIntervalsForSpillsFast(li, loopInfo, vrm);
Owen Andersonae339ba2008-08-19 00:17:30 +00001881
Evan Chengf2fbca62007-11-12 06:35:08 +00001882 assert(li.weight != HUGE_VALF &&
1883 "attempt to spill already spilled interval!");
1884
1885 DOUT << "\t\t\t\tadding intervals for spills for interval: ";
Dan Gohman6f0d0242008-02-10 18:45:23 +00001886 li.print(DOUT, tri_);
Evan Chengf2fbca62007-11-12 06:35:08 +00001887 DOUT << '\n';
1888
Evan Cheng72eeb942008-12-05 17:00:16 +00001889  // Each bit specifies whether a spill is required in the MBB.
Evan Cheng81a03822007-11-17 00:40:40 +00001890 BitVector SpillMBBs(mf_->getNumBlockIDs());
Owen Anderson28998312008-08-13 22:28:50 +00001891 DenseMap<unsigned, std::vector<SRInfo> > SpillIdxes;
Evan Cheng0cbb1162007-11-29 01:06:25 +00001892 BitVector RestoreMBBs(mf_->getNumBlockIDs());
Owen Anderson28998312008-08-13 22:28:50 +00001893 DenseMap<unsigned, std::vector<SRInfo> > RestoreIdxes;
1894 DenseMap<unsigned,unsigned> MBBVRegsMap;
Evan Chengf2fbca62007-11-12 06:35:08 +00001895 std::vector<LiveInterval*> NewLIs;
Evan Chengd70dbb52008-02-22 09:24:50 +00001896 const TargetRegisterClass* rc = mri_->getRegClass(li.reg);
Evan Chengf2fbca62007-11-12 06:35:08 +00001897
1898 unsigned NumValNums = li.getNumValNums();
1899 SmallVector<MachineInstr*, 4> ReMatDefs;
1900 ReMatDefs.resize(NumValNums, NULL);
1901 SmallVector<MachineInstr*, 4> ReMatOrigDefs;
1902 ReMatOrigDefs.resize(NumValNums, NULL);
1903 SmallVector<int, 4> ReMatIds;
1904 ReMatIds.resize(NumValNums, VirtRegMap::MAX_STACK_SLOT);
1905 BitVector ReMatDelete(NumValNums);
1906 unsigned Slot = VirtRegMap::MAX_STACK_SLOT;
1907
Evan Cheng81a03822007-11-17 00:40:40 +00001908 // Spilling a split live interval. It cannot be split any further. Also,
1909  // it's guaranteed to be a single val# / range interval.
1910 if (vrm.getPreSplitReg(li.reg)) {
1911 vrm.setIsSplitFromReg(li.reg, 0);
Evan Chengd120ffd2007-12-05 10:24:35 +00001912 // Unset the split kill marker on the last use.
1913 unsigned KillIdx = vrm.getKillPoint(li.reg);
1914 if (KillIdx) {
1915 MachineInstr *KillMI = getInstructionFromIndex(KillIdx);
1916 assert(KillMI && "Last use disappeared?");
1917 int KillOp = KillMI->findRegisterUseOperandIdx(li.reg, true);
1918 assert(KillOp != -1 && "Last use disappeared?");
Chris Lattnerf7382302007-12-30 21:56:09 +00001919 KillMI->getOperand(KillOp).setIsKill(false);
Evan Chengd120ffd2007-12-05 10:24:35 +00001920 }
Evan Chengadf85902007-12-05 09:51:10 +00001921 vrm.removeKillPoint(li.reg);
Evan Cheng81a03822007-11-17 00:40:40 +00001922 bool DefIsReMat = vrm.isReMaterialized(li.reg);
1923 Slot = vrm.getStackSlot(li.reg);
1924 assert(Slot != VirtRegMap::MAX_STACK_SLOT);
1925 MachineInstr *ReMatDefMI = DefIsReMat ?
1926 vrm.getReMaterializedMI(li.reg) : NULL;
1927 int LdSlot = 0;
1928 bool isLoadSS = DefIsReMat && tii_->isLoadFromStackSlot(ReMatDefMI, LdSlot);
1929 bool isLoad = isLoadSS ||
Dan Gohman15511cf2008-12-03 18:15:48 +00001930 (DefIsReMat && (ReMatDefMI->getDesc().canFoldAsLoad()));
Evan Cheng81a03822007-11-17 00:40:40 +00001931 bool IsFirstRange = true;
1932 for (LiveInterval::Ranges::const_iterator
1933 I = li.ranges.begin(), E = li.ranges.end(); I != E; ++I) {
1934 // If this is a split live interval with multiple ranges, it means there
1935 // are two-address instructions that re-defined the value. Only the
1936 // first def can be rematerialized!
1937 if (IsFirstRange) {
Evan Chengcb3c3302007-11-29 23:02:50 +00001938 // Note ReMatOrigDefMI has already been deleted.
Evan Cheng81a03822007-11-17 00:40:40 +00001939 rewriteInstructionsForSpills(li, false, I, NULL, ReMatDefMI,
1940 Slot, LdSlot, isLoad, isLoadSS, DefIsReMat,
Evan Chengd70dbb52008-02-22 09:24:50 +00001941 false, vrm, rc, ReMatIds, loopInfo,
Evan Cheng0cbb1162007-11-29 01:06:25 +00001942 SpillMBBs, SpillIdxes, RestoreMBBs, RestoreIdxes,
Evan Chengc781a242009-05-03 18:32:42 +00001943 MBBVRegsMap, NewLIs);
Evan Cheng81a03822007-11-17 00:40:40 +00001944 } else {
1945 rewriteInstructionsForSpills(li, false, I, NULL, 0,
1946 Slot, 0, false, false, false,
Evan Chengd70dbb52008-02-22 09:24:50 +00001947 false, vrm, rc, ReMatIds, loopInfo,
Evan Cheng0cbb1162007-11-29 01:06:25 +00001948 SpillMBBs, SpillIdxes, RestoreMBBs, RestoreIdxes,
Evan Chengc781a242009-05-03 18:32:42 +00001949 MBBVRegsMap, NewLIs);
Evan Cheng81a03822007-11-17 00:40:40 +00001950 }
1951 IsFirstRange = false;
1952 }
Evan Cheng419852c2008-04-03 16:39:43 +00001953
Evan Cheng4cce6b42008-04-11 17:53:36 +00001954 handleSpilledImpDefs(li, vrm, rc, NewLIs);
Evan Cheng81a03822007-11-17 00:40:40 +00001955 return NewLIs;
1956 }
1957
1958 bool TrySplit = SplitAtBB && !intervalIsInOneMBB(li);
Evan Cheng0cbb1162007-11-29 01:06:25 +00001959 if (SplitLimit != -1 && (int)numSplits >= SplitLimit)
1960 TrySplit = false;
1961 if (TrySplit)
1962 ++numSplits;
Evan Chengf2fbca62007-11-12 06:35:08 +00001963 bool NeedStackSlot = false;
1964 for (LiveInterval::const_vni_iterator i = li.vni_begin(), e = li.vni_end();
1965 i != e; ++i) {
1966 const VNInfo *VNI = *i;
1967 unsigned VN = VNI->id;
Lang Hames857c4e02009-06-17 21:01:20 +00001968 if (VNI->isUnused())
Evan Chengf2fbca62007-11-12 06:35:08 +00001969 continue; // Dead val#.
1970 // Is the def for the val# rematerializable?
Lang Hames857c4e02009-06-17 21:01:20 +00001971 MachineInstr *ReMatDefMI = VNI->isDefAccurate()
1972 ? getInstructionFromIndex(VNI->def) : 0;
Evan Cheng5ef3a042007-12-06 00:01:56 +00001973 bool dummy;
Evan Chengdc377862008-09-30 15:44:16 +00001974 if (ReMatDefMI && isReMaterializable(li, VNI, ReMatDefMI, SpillIs, dummy)) {
Evan Chengf2fbca62007-11-12 06:35:08 +00001975 // Remember how to remat the def of this val#.
Evan Cheng81a03822007-11-17 00:40:40 +00001976 ReMatOrigDefs[VN] = ReMatDefMI;
Dan Gohman2c3f7ae2008-07-17 23:49:46 +00001977 // Original def may be modified so we have to make a copy here.
Evan Cheng1ed99222008-07-19 00:37:25 +00001978 MachineInstr *Clone = mf_->CloneMachineInstr(ReMatDefMI);
1979 ClonedMIs.push_back(Clone);
1980 ReMatDefs[VN] = Clone;
Evan Chengf2fbca62007-11-12 06:35:08 +00001981
1982 bool CanDelete = true;
Lang Hames857c4e02009-06-17 21:01:20 +00001983 if (VNI->hasPHIKill()) {
Evan Chengc3fc7d92007-11-29 09:49:23 +00001984        // A kill is a phi node; not all of its uses can be rematerialized.
Evan Chengf2fbca62007-11-12 06:35:08 +00001985 // It must not be deleted.
Evan Chengc3fc7d92007-11-29 09:49:23 +00001986 CanDelete = false;
1987 // Need a stack slot if there is any live range where uses cannot be
1988 // rematerialized.
1989 NeedStackSlot = true;
Evan Chengf2fbca62007-11-12 06:35:08 +00001990 }
Evan Chengf2fbca62007-11-12 06:35:08 +00001991 if (CanDelete)
1992 ReMatDelete.set(VN);
1993 } else {
1994 // Need a stack slot if there is any live range where uses cannot be
1995 // rematerialized.
1996 NeedStackSlot = true;
1997 }
1998 }
1999
2000 // One stack slot per live interval.
Owen Andersonb98bbb72009-03-26 18:53:38 +00002001 if (NeedStackSlot && vrm.getPreSplitReg(li.reg) == 0) {
2002 if (vrm.getStackSlot(li.reg) == VirtRegMap::NO_STACK_SLOT)
2003 Slot = vrm.assignVirt2StackSlot(li.reg);
2004
2005 // This case only occurs when the prealloc splitter has already assigned
2006 // a stack slot to this vreg.
2007 else
2008 Slot = vrm.getStackSlot(li.reg);
2009 }
Evan Chengf2fbca62007-11-12 06:35:08 +00002010
2011 // Create new intervals and rewrite defs and uses.
2012 for (LiveInterval::Ranges::const_iterator
2013 I = li.ranges.begin(), E = li.ranges.end(); I != E; ++I) {
Evan Cheng81a03822007-11-17 00:40:40 +00002014 MachineInstr *ReMatDefMI = ReMatDefs[I->valno->id];
2015 MachineInstr *ReMatOrigDefMI = ReMatOrigDefs[I->valno->id];
2016 bool DefIsReMat = ReMatDefMI != NULL;
Evan Chengf2fbca62007-11-12 06:35:08 +00002017 bool CanDelete = ReMatDelete[I->valno->id];
2018 int LdSlot = 0;
Evan Cheng81a03822007-11-17 00:40:40 +00002019 bool isLoadSS = DefIsReMat && tii_->isLoadFromStackSlot(ReMatDefMI, LdSlot);
Evan Chengf2fbca62007-11-12 06:35:08 +00002020 bool isLoad = isLoadSS ||
Dan Gohman15511cf2008-12-03 18:15:48 +00002021 (DefIsReMat && ReMatDefMI->getDesc().canFoldAsLoad());
Evan Cheng81a03822007-11-17 00:40:40 +00002022 rewriteInstructionsForSpills(li, TrySplit, I, ReMatOrigDefMI, ReMatDefMI,
Evan Cheng0cbb1162007-11-29 01:06:25 +00002023 Slot, LdSlot, isLoad, isLoadSS, DefIsReMat,
Evan Chengd70dbb52008-02-22 09:24:50 +00002024 CanDelete, vrm, rc, ReMatIds, loopInfo,
Evan Cheng0cbb1162007-11-29 01:06:25 +00002025 SpillMBBs, SpillIdxes, RestoreMBBs, RestoreIdxes,
Evan Chengc781a242009-05-03 18:32:42 +00002026 MBBVRegsMap, NewLIs);
Evan Chengf2fbca62007-11-12 06:35:08 +00002027 }
2028
Evan Cheng0cbb1162007-11-29 01:06:25 +00002029 // Insert spills / restores if we are splitting.
Evan Cheng419852c2008-04-03 16:39:43 +00002030 if (!TrySplit) {
Evan Cheng4cce6b42008-04-11 17:53:36 +00002031 handleSpilledImpDefs(li, vrm, rc, NewLIs);
Evan Cheng1953d0c2007-11-29 10:12:14 +00002032 return NewLIs;
Evan Cheng419852c2008-04-03 16:39:43 +00002033 }
Evan Cheng1953d0c2007-11-29 10:12:14 +00002034
  SmallPtrSet<LiveInterval*, 4> AddedKill;
  SmallVector<unsigned, 2> Ops;
  if (NeedStackSlot) {
    int Id = SpillMBBs.find_first();
    while (Id != -1) {
      std::vector<SRInfo> &spills = SpillIdxes[Id];
      for (unsigned i = 0, e = spills.size(); i != e; ++i) {
        int index = spills[i].index;
        unsigned VReg = spills[i].vreg;
        LiveInterval &nI = getOrCreateInterval(VReg);
        bool isReMat = vrm.isReMaterialized(VReg);
        MachineInstr *MI = getInstructionFromIndex(index);
        bool CanFold = false;
        bool FoundUse = false;
        Ops.clear();
        if (spills[i].canFold) {
          CanFold = true;
          for (unsigned j = 0, ee = MI->getNumOperands(); j != ee; ++j) {
            MachineOperand &MO = MI->getOperand(j);
            if (!MO.isReg() || MO.getReg() != VReg)
              continue;

            Ops.push_back(j);
            if (MO.isDef())
              continue;
            if (isReMat ||
                (!FoundUse && !alsoFoldARestore(Id, index, VReg,
                                                RestoreMBBs, RestoreIdxes))) {
              // MI has two-address uses of the same register. If the use
              // isn't the first and only use in the BB, then we can't fold
              // it. FIXME: Move this to rewriteInstructionsForSpills.
              CanFold = false;
              break;
            }
            FoundUse = true;
          }
        }
        // Fold the store into the def if possible.
        bool Folded = false;
        if (CanFold && !Ops.empty()) {
          if (tryFoldMemoryOperand(MI, vrm, NULL, index, Ops, true, Slot,
                                   VReg)) {
            Folded = true;
            if (FoundUse) {
              // Also folded uses, do not issue a load.
              eraseRestoreInfo(Id, index, VReg, RestoreMBBs, RestoreIdxes);
              nI.removeRange(getLoadIndex(index), getUseIndex(index)+1);
            }
            nI.removeRange(getDefIndex(index), getStoreIndex(index));
          }
        }

        // Otherwise tell the spiller to issue a spill.
        if (!Folded) {
          LiveRange *LR = &nI.ranges[nI.ranges.size()-1];
          bool isKill = LR->end == getStoreIndex(index);
          if (!MI->registerDefIsDead(nI.reg))
            // No need to spill a dead def.
            vrm.addSpillPoint(VReg, isKill, MI);
          if (isKill)
            AddedKill.insert(&nI);
        }
      }
      Id = SpillMBBs.find_next(Id);
    }
  }

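  // Now walk the recorded restore points.  Each restore is folded into its
  // use when possible (either as a load from the stack slot or, for a
  // rematerializable def that is itself a load, by folding that load);
  // otherwise the spiller is told to emit a reload or rematerialization.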
  int Id = RestoreMBBs.find_first();
  while (Id != -1) {
    std::vector<SRInfo> &restores = RestoreIdxes[Id];
    for (unsigned i = 0, e = restores.size(); i != e; ++i) {
      int index = restores[i].index;
      if (index == -1)
        continue;
      unsigned VReg = restores[i].vreg;
      LiveInterval &nI = getOrCreateInterval(VReg);
      bool isReMat = vrm.isReMaterialized(VReg);
      MachineInstr *MI = getInstructionFromIndex(index);
      bool CanFold = false;
      Ops.clear();
      if (restores[i].canFold) {
        CanFold = true;
        for (unsigned j = 0, ee = MI->getNumOperands(); j != ee; ++j) {
          MachineOperand &MO = MI->getOperand(j);
          if (!MO.isReg() || MO.getReg() != VReg)
            continue;

          if (MO.isDef()) {
            // If this restore were to be folded, it would have been folded
            // already.
            CanFold = false;
            break;
          }
          Ops.push_back(j);
        }
      }

      // Fold the load into the use if possible.
      bool Folded = false;
      if (CanFold && !Ops.empty()) {
        if (!isReMat)
          Folded = tryFoldMemoryOperand(MI, vrm, NULL, index, Ops, true,
                                        Slot, VReg);
        else {
          MachineInstr *ReMatDefMI = vrm.getReMaterializedMI(VReg);
          int LdSlot = 0;
          bool isLoadSS = tii_->isLoadFromStackSlot(ReMatDefMI, LdSlot);
          // If the rematerializable def is a load, also try to fold it.
          if (isLoadSS || ReMatDefMI->getDesc().canFoldAsLoad())
            Folded = tryFoldMemoryOperand(MI, vrm, ReMatDefMI, index,
                                          Ops, isLoadSS, LdSlot, VReg);
          if (!Folded) {
            unsigned ImpUse = getReMatImplicitUse(li, ReMatDefMI);
            if (ImpUse) {
              // Re-matting an instruction with a virtual register use. Add
              // the register as an implicit use on the use MI and update the
              // register interval's spill weight to HUGE_VALF to prevent it
              // from being spilled.
              LiveInterval &ImpLi = getInterval(ImpUse);
              ImpLi.weight = HUGE_VALF;
              MI->addOperand(MachineOperand::CreateReg(ImpUse, false, true));
            }
          }
        }
      }
      // If folding is not possible / failed, then tell the spiller to issue a
      // load / rematerialization for us.
      if (Folded)
        nI.removeRange(getLoadIndex(index), getUseIndex(index)+1);
      else
        vrm.addRestorePoint(VReg, MI);
    }
    Id = RestoreMBBs.find_next(Id);
  }

  // Finalize intervals: add kills, normalize spill weights, and filter out
  // dead intervals.
  std::vector<LiveInterval*> RetNewLIs;
  for (unsigned i = 0, e = NewLIs.size(); i != e; ++i) {
    LiveInterval *LI = NewLIs[i];
    if (!LI->empty()) {
      LI->weight /= InstrSlots::NUM * getApproximateInstructionCount(*LI);
      if (!AddedKill.count(LI)) {
        LiveRange *LR = &LI->ranges[LI->ranges.size()-1];
        unsigned LastUseIdx = getBaseIndex(LR->end);
        MachineInstr *LastUse = getInstructionFromIndex(LastUseIdx);
        int UseIdx = LastUse->findRegisterUseOperandIdx(LI->reg, false);
        assert(UseIdx != -1);
        if (!LastUse->isRegTiedToDefOperand(UseIdx)) {
          LastUse->getOperand(UseIdx).setIsKill();
          vrm.addKillPoint(LI->reg, LastUseIdx);
        }
      }
      RetNewLIs.push_back(LI);
    }
  }

  handleSpilledImpDefs(li, vrm, rc, RetNewLIs);
  return RetNewLIs;
}

/// hasAllocatableSuperReg - Return true if the specified physical register has
/// any super register that's allocatable.
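/// (For example, on x86-64 AX has EAX and RAX as super registers.)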
bool LiveIntervals::hasAllocatableSuperReg(unsigned Reg) const {
  for (const unsigned* AS = tri_->getSuperRegisters(Reg); *AS; ++AS)
    if (allocatableRegs_[*AS] && hasInterval(*AS))
      return true;
  return false;
}

/// getRepresentativeReg - Find the largest super register of the specified
/// physical register.
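/// Returns Reg itself if no suitable super register is found.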
unsigned LiveIntervals::getRepresentativeReg(unsigned Reg) const {
  // Find the largest super-register that is allocatable.
  unsigned BestReg = Reg;
  for (const unsigned* AS = tri_->getSuperRegisters(Reg); *AS; ++AS) {
    unsigned SuperReg = *AS;
    if (!hasAllocatableSuperReg(SuperReg) && hasInterval(SuperReg)) {
      BestReg = SuperReg;
      break;
    }
  }
  return BestReg;
}

/// getNumConflictsWithPhysReg - Return the number of uses and defs of the
/// specified interval that conflict with the specified physical register.
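/// A conflict is an operand of li whose instruction executes at a point where
/// the physical register (via its representative super register) is live.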
unsigned LiveIntervals::getNumConflictsWithPhysReg(const LiveInterval &li,
                                                   unsigned PhysReg) const {
  unsigned NumConflicts = 0;
  const LiveInterval &pli = getInterval(getRepresentativeReg(PhysReg));
  for (MachineRegisterInfo::reg_iterator I = mri_->reg_begin(li.reg),
         E = mri_->reg_end(); I != E; ++I) {
    MachineOperand &O = I.getOperand();
    MachineInstr *MI = O.getParent();
    unsigned Index = getInstructionIndex(MI);
    if (pli.liveAt(Index))
      ++NumConflicts;
  }
  return NumConflicts;
}

/// spillPhysRegAroundRegDefsUses - Spill the specified physical register
/// around all defs and uses of the specified interval. Return true if the
/// physical register's live interval was cut around at least one of them.
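/// This is an emergency mechanism: each def or use of li that overlaps the
/// physical register's live range gets an emergency spill point recorded in
/// the VirtRegMap so the spiller can free the register around it.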
bool LiveIntervals::spillPhysRegAroundRegDefsUses(const LiveInterval &li,
                                                  unsigned PhysReg,
                                                  VirtRegMap &vrm) {
  unsigned SpillReg = getRepresentativeReg(PhysReg);

  for (const unsigned *AS = tri_->getAliasSet(PhysReg); *AS; ++AS)
    // If any register aliases PhysReg but is not a sub-register of the chosen
    // representative super register, we can't handle it yet, so assert.
    assert(*AS == SpillReg || !allocatableRegs_[*AS] || !hasInterval(*AS) ||
           tri_->isSuperRegister(*AS, SpillReg));

  bool Cut = false;
  LiveInterval &pli = getInterval(SpillReg);
  SmallPtrSet<MachineInstr*, 8> SeenMIs;
  for (MachineRegisterInfo::reg_iterator I = mri_->reg_begin(li.reg),
         E = mri_->reg_end(); I != E; ++I) {
    MachineOperand &O = I.getOperand();
    MachineInstr *MI = O.getParent();
    if (SeenMIs.count(MI))
      continue;
    SeenMIs.insert(MI);
    unsigned Index = getInstructionIndex(MI);
    if (pli.liveAt(Index)) {
      vrm.addEmergencySpill(SpillReg, MI);
      unsigned StartIdx = getLoadIndex(Index);
      unsigned EndIdx = getStoreIndex(Index)+1;
      if (pli.isInOneLiveRange(StartIdx, EndIdx)) {
        pli.removeRange(StartIdx, EndIdx);
        Cut = true;
      } else {
        cerr << "Ran out of registers during register allocation!\n";
        if (MI->getOpcode() == TargetInstrInfo::INLINEASM) {
          cerr << "Please check your inline asm statement for invalid "
               << "constraints:\n";
          MI->print(cerr.stream(), tm_);
        }
        exit(1);
      }
      for (const unsigned* AS = tri_->getSubRegisters(SpillReg); *AS; ++AS) {
        if (!hasInterval(*AS))
          continue;
        LiveInterval &spli = getInterval(*AS);
        if (spli.liveAt(Index))
          spli.removeRange(getLoadIndex(Index), getStoreIndex(Index)+1);
      }
    }
  }
  return Cut;
}

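/// addLiveRangeToEndOfBlock - Create a new value number for reg defined by
/// startInst and add a live range from startInst's def slot to the end of its
/// basic block. The new value is marked as killed by a PHI at the block end.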
LiveRange LiveIntervals::addLiveRangeToEndOfBlock(unsigned reg,
                                                  MachineInstr* startInst) {
  LiveInterval& Interval = getOrCreateInterval(reg);
  VNInfo* VN = Interval.getNextValue(
    getInstructionIndex(startInst) + InstrSlots::DEF,
    startInst, true, getVNInfoAllocator());
  VN->setHasPHIKill(true);
  VN->kills.push_back(getMBBEndIdx(startInst->getParent()));
  LiveRange LR(getInstructionIndex(startInst) + InstrSlots::DEF,
               getMBBEndIdx(startInst->getParent()) + 1, VN);
  Interval.addRange(LR);

  return LR;
}
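
// A minimal usage sketch (hypothetical caller, not part of this file): a
// PHI-elimination style pass that inserts a copy at the bottom of a
// predecessor block and needs the copy's destination to stay live to the
// block end might do something like:
//
//   unsigned NewVReg = MRI.createVirtualRegister(RC);   // RC is assumed
//   MachineInstr *Copy = ...;  // copy inserted before the block terminator
//   LiveRange LR = LIs->addLiveRangeToEndOfBlock(NewVReg, Copy);
//
// Here MRI, RC, and LIs stand for the caller's MachineRegisterInfo, target
// register class, and LiveIntervals analysis, respectively.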