//===-- RegAllocGreedy.cpp - greedy register allocator --------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the RAGreedy function pass for register allocation in
// optimized builds.
//
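// The allocator works in rounds driven by selectOrSplit(): it first tries to
// assign a free physical register, then tries to reassign or evict a single
// lighter interference, then tries region or local live range splitting, and
// finally spills either the interferences or the register itself.
//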
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "regalloc"
#include "AllocationOrder.h"
#include "LiveIntervalUnion.h"
#include "LiveRangeEdit.h"
#include "RegAllocBase.h"
#include "Spiller.h"
#include "SpillPlacement.h"
#include "SplitKit.h"
#include "VirtRegMap.h"
#include "VirtRegRewriter.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Function.h"
#include "llvm/PassAnalysisSupport.h"
#include "llvm/CodeGen/CalcSpillWeights.h"
#include "llvm/CodeGen/EdgeBundles.h"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "llvm/CodeGen/LiveStackAnalysis.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/CodeGen/MachineLoopRanges.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/RegAllocRegistry.h"
#include "llvm/CodeGen/RegisterCoalescer.h"
#include "llvm/Target/TargetOptions.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Support/Timer.h"

using namespace llvm;

static RegisterRegAlloc greedyRegAlloc("greedy", "greedy register allocator",
                                       createGreedyRegisterAllocator);

namespace {
class RAGreedy : public MachineFunctionPass, public RegAllocBase {
  // context
  MachineFunction *MF;
  BitVector ReservedRegs;

  // analyses
  SlotIndexes *Indexes;
  LiveStacks *LS;
  MachineDominatorTree *DomTree;
  MachineLoopInfo *Loops;
  MachineLoopRanges *LoopRanges;
  EdgeBundles *Bundles;
  SpillPlacement *SpillPlacer;

  // state
  std::auto_ptr<Spiller> SpillerInstance;
  std::auto_ptr<SplitAnalysis> SA;

  // splitting state.

  /// All basic blocks where the current register is live.
  SmallVector<SpillPlacement::BlockConstraint, 8> SpillConstraints;

  /// For every instruction in SA->UseSlots, store the previous non-copy
  /// instruction.
  SmallVector<SlotIndex, 8> PrevSlot;

public:
  RAGreedy();

  /// Return the pass name.
  virtual const char* getPassName() const {
    return "Greedy Register Allocator";
  }

  /// RAGreedy analysis usage.
  virtual void getAnalysisUsage(AnalysisUsage &AU) const;

  virtual void releaseMemory();

  virtual Spiller &spiller() { return *SpillerInstance; }

  virtual float getPriority(LiveInterval *LI);

  virtual unsigned selectOrSplit(LiveInterval&,
                                 SmallVectorImpl<LiveInterval*>&);

  /// Perform register allocation.
  virtual bool runOnMachineFunction(MachineFunction &mf);

  static char ID;

private:
  bool checkUncachedInterference(LiveInterval&, unsigned);
  LiveInterval *getSingleInterference(LiveInterval&, unsigned);
  bool reassignVReg(LiveInterval &InterferingVReg, unsigned OldPhysReg);
  float calcInterferenceWeight(LiveInterval&, unsigned);
  float calcInterferenceInfo(LiveInterval&, unsigned);
  float calcGlobalSplitCost(const BitVector&);
  void splitAroundRegion(LiveInterval&, unsigned, const BitVector&,
                         SmallVectorImpl<LiveInterval*>&);
  void calcGapWeights(unsigned, SmallVectorImpl<float>&);
  SlotIndex getPrevMappedIndex(const MachineInstr*);
  void calcPrevSlots();
  unsigned nextSplitPoint(unsigned);

  unsigned tryReassignOrEvict(LiveInterval&, AllocationOrder&,
                              SmallVectorImpl<LiveInterval*>&);
  unsigned tryRegionSplit(LiveInterval&, AllocationOrder&,
                          SmallVectorImpl<LiveInterval*>&);
  unsigned tryLocalSplit(LiveInterval&, AllocationOrder&,
                         SmallVectorImpl<LiveInterval*>&);
  unsigned trySplit(LiveInterval&, AllocationOrder&,
                    SmallVectorImpl<LiveInterval*>&);
  unsigned trySpillInterferences(LiveInterval&, AllocationOrder&,
                                 SmallVectorImpl<LiveInterval*>&);
};
} // end anonymous namespace

char RAGreedy::ID = 0;

FunctionPass* llvm::createGreedyRegisterAllocator() {
  return new RAGreedy();
}

RAGreedy::RAGreedy(): MachineFunctionPass(ID) {
  initializeSlotIndexesPass(*PassRegistry::getPassRegistry());
  initializeLiveIntervalsPass(*PassRegistry::getPassRegistry());
  initializeSlotIndexesPass(*PassRegistry::getPassRegistry());
  initializeStrongPHIEliminationPass(*PassRegistry::getPassRegistry());
  initializeRegisterCoalescerAnalysisGroup(*PassRegistry::getPassRegistry());
  initializeCalculateSpillWeightsPass(*PassRegistry::getPassRegistry());
  initializeLiveStacksPass(*PassRegistry::getPassRegistry());
  initializeMachineDominatorTreePass(*PassRegistry::getPassRegistry());
  initializeMachineLoopInfoPass(*PassRegistry::getPassRegistry());
  initializeMachineLoopRangesPass(*PassRegistry::getPassRegistry());
  initializeVirtRegMapPass(*PassRegistry::getPassRegistry());
  initializeEdgeBundlesPass(*PassRegistry::getPassRegistry());
  initializeSpillPlacementPass(*PassRegistry::getPassRegistry());
}

void RAGreedy::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesCFG();
  AU.addRequired<AliasAnalysis>();
  AU.addPreserved<AliasAnalysis>();
  AU.addRequired<LiveIntervals>();
  AU.addRequired<SlotIndexes>();
  AU.addPreserved<SlotIndexes>();
  if (StrongPHIElim)
    AU.addRequiredID(StrongPHIEliminationID);
  AU.addRequiredTransitive<RegisterCoalescer>();
  AU.addRequired<CalculateSpillWeights>();
  AU.addRequired<LiveStacks>();
  AU.addPreserved<LiveStacks>();
  AU.addRequired<MachineDominatorTree>();
  AU.addPreserved<MachineDominatorTree>();
  AU.addRequired<MachineLoopInfo>();
  AU.addPreserved<MachineLoopInfo>();
  AU.addRequired<MachineLoopRanges>();
  AU.addPreserved<MachineLoopRanges>();
  AU.addRequired<VirtRegMap>();
  AU.addPreserved<VirtRegMap>();
  AU.addRequired<EdgeBundles>();
  AU.addRequired<SpillPlacement>();
  MachineFunctionPass::getAnalysisUsage(AU);
}

void RAGreedy::releaseMemory() {
  SpillerInstance.reset(0);
  RegAllocBase::releaseMemory();
}

float RAGreedy::getPriority(LiveInterval *LI) {
  float Priority = LI->weight;

  // Prioritize hinted registers so they are allocated first.
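  // Note that Hint is left default-constructed below and never filled in from
  // the register allocation hints, so the boost does not currently fire.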
  std::pair<unsigned, unsigned> Hint;
  if (Hint.first || Hint.second) {
    // The hint can be target specific, a virtual register, or a physreg.
    Priority *= 2;

    // Prefer physreg hints above anything else.
    if (Hint.first == 0 && TargetRegisterInfo::isPhysicalRegister(Hint.second))
      Priority *= 2;
  }
  return Priority;
}


//===----------------------------------------------------------------------===//
//                         Register Reassignment
//===----------------------------------------------------------------------===//

// Check interference without using the cache.
bool RAGreedy::checkUncachedInterference(LiveInterval &VirtReg,
                                         unsigned PhysReg) {
  for (const unsigned *AliasI = TRI->getOverlaps(PhysReg); *AliasI; ++AliasI) {
    LiveIntervalUnion::Query subQ(&VirtReg, &PhysReg2LiveUnion[*AliasI]);
    if (subQ.checkInterference())
      return true;
  }
  return false;
}

/// getSingleInterference - Return the single interfering virtual register
/// assigned to PhysReg. Return 0 if more than one virtual register is
/// interfering.
LiveInterval *RAGreedy::getSingleInterference(LiveInterval &VirtReg,
                                              unsigned PhysReg) {
  // Check physreg and aliases.
  LiveInterval *Interference = 0;
  for (const unsigned *AliasI = TRI->getOverlaps(PhysReg); *AliasI; ++AliasI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AliasI);
    if (Q.checkInterference()) {
      if (Interference)
        return 0;
      Q.collectInterferingVRegs(1);
      if (!Q.seenAllInterferences())
        return 0;
      Interference = Q.interferingVRegs().front();
    }
  }
  return Interference;
}

// Attempt to reassign this virtual register to a different physical register.
//
// FIXME: we are not yet caching these "second-level" interferences discovered
// in the sub-queries. These interferences can change with each call to
// selectOrSplit. However, we could implement a "may-interfere" cache that
// could be conservatively dirtied when we reassign or split.
//
// FIXME: This may result in a lot of alias queries. We could summarize alias
// live intervals in their parent register's live union, but it's messy.
bool RAGreedy::reassignVReg(LiveInterval &InterferingVReg,
                            unsigned WantedPhysReg) {
  assert(TargetRegisterInfo::isVirtualRegister(InterferingVReg.reg) &&
         "Can only reassign virtual registers");
  assert(TRI->regsOverlap(WantedPhysReg, VRM->getPhys(InterferingVReg.reg)) &&
         "inconsistent phys reg assignment");

  AllocationOrder Order(InterferingVReg.reg, *VRM, ReservedRegs);
  while (unsigned PhysReg = Order.next()) {
    // Don't reassign to a WantedPhysReg alias.
    if (TRI->regsOverlap(PhysReg, WantedPhysReg))
      continue;

    if (checkUncachedInterference(InterferingVReg, PhysReg))
      continue;

    // Reassign the interfering virtual reg to this physical reg.
    unsigned OldAssign = VRM->getPhys(InterferingVReg.reg);
    DEBUG(dbgs() << "reassigning: " << InterferingVReg << " from " <<
          TRI->getName(OldAssign) << " to " << TRI->getName(PhysReg) << '\n');
    unassign(InterferingVReg, OldAssign);
    assign(InterferingVReg, PhysReg);
    return true;
  }
  return false;
}

/// tryReassignOrEvict - Try to reassign a single interference to a different
/// physreg, or evict a single interference with a lower spill weight.
/// @param VirtReg Currently unassigned virtual register.
/// @param Order   Physregs to try.
/// @return        Physreg to assign VirtReg, or 0.
unsigned RAGreedy::tryReassignOrEvict(LiveInterval &VirtReg,
                                      AllocationOrder &Order,
                                      SmallVectorImpl<LiveInterval*> &NewVRegs){
  NamedRegionTimer T("Reassign", TimerGroupName, TimePassesIsEnabled);

  // Keep track of the lightest single interference seen so far.
  float BestWeight = VirtReg.weight;
  LiveInterval *BestVirt = 0;
  unsigned BestPhys = 0;

  Order.rewind();
  while (unsigned PhysReg = Order.next()) {
    LiveInterval *InterferingVReg = getSingleInterference(VirtReg, PhysReg);
    if (!InterferingVReg)
      continue;
    if (TargetRegisterInfo::isPhysicalRegister(InterferingVReg->reg))
      continue;
    if (reassignVReg(*InterferingVReg, PhysReg))
      return PhysReg;

    // Cannot reassign, is this an eviction candidate?
    if (InterferingVReg->weight < BestWeight) {
      BestVirt = InterferingVReg;
      BestPhys = PhysReg;
      BestWeight = InterferingVReg->weight;
    }
  }

  // Nothing reassigned, can we evict a lighter single interference?
  if (BestVirt) {
    DEBUG(dbgs() << "evicting lighter " << *BestVirt << '\n');
    unassign(*BestVirt, VRM->getPhys(BestVirt->reg));
    NewVRegs.push_back(BestVirt);
    return BestPhys;
  }

  return 0;
}


//===----------------------------------------------------------------------===//
//                              Region Splitting
//===----------------------------------------------------------------------===//

/// calcInterferenceInfo - Compute per-block incoming and outgoing constraints
/// when considering interference from PhysReg. Also compute an optimistic local
/// cost of this interference pattern.
///
/// The final cost of a split is the local cost + global cost of preferences
/// broken by SpillPlacement.
///
float RAGreedy::calcInterferenceInfo(LiveInterval &VirtReg, unsigned PhysReg) {
  // Reset interference dependent info.
  SpillConstraints.resize(SA->LiveBlocks.size());
  for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
    SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
    SpillPlacement::BlockConstraint &BC = SpillConstraints[i];
    BC.Number = BI.MBB->getNumber();
    BC.Entry = (BI.Uses && BI.LiveIn) ?
      SpillPlacement::PrefReg : SpillPlacement::DontCare;
    BC.Exit = (BI.Uses && BI.LiveOut) ?
      SpillPlacement::PrefReg : SpillPlacement::DontCare;
    BI.OverlapEntry = BI.OverlapExit = false;
  }

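  // Each PhysReg alias is scanned twice below: the first pass marks block
  // entries/exits that must spill (interference live-in at the block start,
  // or interference live at or after the last split point of a live-out
  // block), and the second pass rewinds the iterator to classify the
  // remaining blocks as PrefSpill and to compute the Overlap* flags.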
  // Add interference info from each PhysReg alias.
  for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
    if (!query(VirtReg, *AI).checkInterference())
      continue;
    LiveIntervalUnion::SegmentIter IntI =
      PhysReg2LiveUnion[*AI].find(VirtReg.beginIndex());
    if (!IntI.valid())
      continue;

    // Determine which blocks have interference live in or after the last split
    // point.
    for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
      SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
      SpillPlacement::BlockConstraint &BC = SpillConstraints[i];
      SlotIndex Start, Stop;
      tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);

      // Skip interference-free blocks.
      if (IntI.start() >= Stop)
        continue;

      // Is the interference live-in?
      if (BI.LiveIn) {
        IntI.advanceTo(Start);
        if (!IntI.valid())
          break;
        if (IntI.start() <= Start)
          BC.Entry = SpillPlacement::MustSpill;
      }

      // Is the interference overlapping the last split point?
      if (BI.LiveOut) {
        if (IntI.stop() < BI.LastSplitPoint)
          IntI.advanceTo(BI.LastSplitPoint.getPrevSlot());
        if (!IntI.valid())
          break;
        if (IntI.start() < Stop)
          BC.Exit = SpillPlacement::MustSpill;
      }
    }

    // Rewind iterator and check other interferences.
    IntI.find(VirtReg.beginIndex());
    for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
      SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
      SpillPlacement::BlockConstraint &BC = SpillConstraints[i];
      SlotIndex Start, Stop;
      tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);

      // Skip interference-free blocks.
      if (IntI.start() >= Stop)
        continue;

      // Handle transparent blocks with interference separately.
      // Transparent blocks never incur any fixed cost.
      if (BI.LiveThrough && !BI.Uses) {
        IntI.advanceTo(Start);
        if (!IntI.valid())
          break;
        if (IntI.start() >= Stop)
          continue;

        if (BC.Entry != SpillPlacement::MustSpill)
          BC.Entry = SpillPlacement::PrefSpill;
        if (BC.Exit != SpillPlacement::MustSpill)
          BC.Exit = SpillPlacement::PrefSpill;
        continue;
      }

      // Now we only have blocks with uses left.
      // Check if the interference overlaps the uses.
      assert(BI.Uses && "Non-transparent block without any uses");

      // Check interference on entry.
      if (BI.LiveIn && BC.Entry != SpillPlacement::MustSpill) {
        IntI.advanceTo(Start);
        if (!IntI.valid())
          break;
        // Not live in, but before the first use.
        if (IntI.start() < BI.FirstUse)
          BC.Entry = SpillPlacement::PrefSpill;
      }

      // Does interference overlap the uses in the entry segment
      // [FirstUse;Kill)?
      if (BI.LiveIn && !BI.OverlapEntry) {
        IntI.advanceTo(BI.FirstUse);
        if (!IntI.valid())
          break;
        // A live-through interval has no kill.
        // Check [FirstUse;LastUse) instead.
        if (IntI.start() < (BI.LiveThrough ? BI.LastUse : BI.Kill))
          BI.OverlapEntry = true;
      }

      // Does interference overlap the uses in the exit segment [Def;LastUse)?
      if (BI.LiveOut && !BI.LiveThrough && !BI.OverlapExit) {
        IntI.advanceTo(BI.Def);
        if (!IntI.valid())
          break;
        if (IntI.start() < BI.LastUse)
          BI.OverlapExit = true;
      }

      // Check interference on exit.
      if (BI.LiveOut && BC.Exit != SpillPlacement::MustSpill) {
        // Check interference between LastUse and Stop.
        if (BC.Exit != SpillPlacement::PrefSpill) {
          IntI.advanceTo(BI.LastUse);
          if (!IntI.valid())
            break;
          if (IntI.start() < Stop)
            BC.Exit = SpillPlacement::PrefSpill;
        }
      }
    }
  }

  // Accumulate a local cost of this interference pattern.
  float LocalCost = 0;
  for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
    SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
    if (!BI.Uses)
      continue;
    SpillPlacement::BlockConstraint &BC = SpillConstraints[i];
    unsigned Inserts = 0;

    // Do we need spill code for the entry segment?
    if (BI.LiveIn)
      Inserts += BI.OverlapEntry || BC.Entry != SpillPlacement::PrefReg;

    // For the exit segment?
    if (BI.LiveOut)
      Inserts += BI.OverlapExit || BC.Exit != SpillPlacement::PrefReg;

    // The local cost of spill code in this block is the block frequency times
    // the number of spill instructions inserted.
    if (Inserts)
      LocalCost += Inserts * SpillPlacer->getBlockFrequency(BI.MBB);
  }
  DEBUG(dbgs() << "Local cost of " << PrintReg(PhysReg, TRI) << " = "
               << LocalCost << '\n');
  return LocalCost;
}

/// calcGlobalSplitCost - Return the global split cost of following the split
/// pattern in LiveBundles. This cost should be added to the local cost of the
/// interference pattern in SpillConstraints.
///
float RAGreedy::calcGlobalSplitCost(const BitVector &LiveBundles) {
  float GlobalCost = 0;
  for (unsigned i = 0, e = SpillConstraints.size(); i != e; ++i) {
    SpillPlacement::BlockConstraint &BC = SpillConstraints[i];
    unsigned Inserts = 0;
    // Broken entry preference?
    Inserts += LiveBundles[Bundles->getBundle(BC.Number, 0)] !=
               (BC.Entry == SpillPlacement::PrefReg);
    // Broken exit preference?
    Inserts += LiveBundles[Bundles->getBundle(BC.Number, 1)] !=
               (BC.Exit == SpillPlacement::PrefReg);
    if (Inserts)
      GlobalCost +=
        Inserts * SpillPlacer->getBlockFrequency(SA->LiveBlocks[i].MBB);
  }
  DEBUG(dbgs() << "Global cost = " << GlobalCost << '\n');
  return GlobalCost;
}

/// splitAroundRegion - Split VirtReg around the region determined by
/// LiveBundles. Make an effort to avoid interference from PhysReg.
///
/// The 'register' interval is going to contain as many uses as possible while
/// avoiding interference. The 'stack' interval is the complement constructed by
/// SplitEditor. It will contain the rest.
///
void RAGreedy::splitAroundRegion(LiveInterval &VirtReg, unsigned PhysReg,
                                 const BitVector &LiveBundles,
                                 SmallVectorImpl<LiveInterval*> &NewVRegs) {
  DEBUG({
    dbgs() << "Splitting around region for " << PrintReg(PhysReg, TRI)
           << " with bundles";
    for (int i = LiveBundles.find_first(); i>=0; i = LiveBundles.find_next(i))
      dbgs() << " EB#" << i;
    dbgs() << ".\n";
  });

  // First compute interference ranges in the live blocks.
  typedef std::pair<SlotIndex, SlotIndex> IndexPair;
  SmallVector<IndexPair, 8> InterferenceRanges;
  InterferenceRanges.resize(SA->LiveBlocks.size());
  for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
    if (!query(VirtReg, *AI).checkInterference())
      continue;
    LiveIntervalUnion::SegmentIter IntI =
      PhysReg2LiveUnion[*AI].find(VirtReg.beginIndex());
    if (!IntI.valid())
      continue;
    for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
      const SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
      IndexPair &IP = InterferenceRanges[i];
      SlotIndex Start, Stop;
      tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);
      // Skip interference-free blocks.
      if (IntI.start() >= Stop)
        continue;

      // First interference in block.
      if (BI.LiveIn) {
        IntI.advanceTo(Start);
        if (!IntI.valid())
          break;
        if (IntI.start() >= Stop)
          continue;
        if (!IP.first.isValid() || IntI.start() < IP.first)
          IP.first = IntI.start();
      }

      // Last interference in block.
      if (BI.LiveOut) {
        IntI.advanceTo(Stop);
        if (!IntI.valid() || IntI.start() >= Stop)
          --IntI;
        if (IntI.stop() <= Start)
          continue;
        if (!IP.second.isValid() || IntI.stop() > IP.second)
          IP.second = IntI.stop();
      }
    }
  }

  SmallVector<LiveInterval*, 4> SpillRegs;
  LiveRangeEdit LREdit(VirtReg, NewVRegs, SpillRegs);
  SplitEditor SE(*SA, *LIS, *VRM, *DomTree, LREdit);

  // Create the main cross-block interval.
  SE.openIntv();

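  // In the two loops below, RegIn and RegOut indicate whether SpillPlacement
  // put the bundle entering or leaving each block in the 'register' region;
  // blocks whose bundles ended up in the 'stack' region get their values
  // reloaded and spilled around the uses instead.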
  // First add all defs that are live out of a block.
  for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
    SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
    bool RegIn = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 1)];

    // Should the register be live out?
    if (!BI.LiveOut || !RegOut)
      continue;

    IndexPair &IP = InterferenceRanges[i];
    SlotIndex Start, Stop;
    tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);

    DEBUG(dbgs() << "BB#" << BI.MBB->getNumber() << " -> EB#"
                 << Bundles->getBundle(BI.MBB->getNumber(), 1)
                 << " intf [" << IP.first << ';' << IP.second << ')');

    // The interference interval should either be invalid or overlap MBB.
    assert((!IP.first.isValid() || IP.first < Stop) && "Bad interference");
    assert((!IP.second.isValid() || IP.second > Start) && "Bad interference");

    // Check interference leaving the block.
    if (!IP.second.isValid()) {
      // Block is interference-free.
      DEBUG(dbgs() << ", no interference");
      if (!BI.Uses) {
        assert(BI.LiveThrough && "No uses, but not live through block?");
        // Block is live-through without interference.
        DEBUG(dbgs() << ", no uses"
                     << (RegIn ? ", live-through.\n" : ", stack in.\n"));
        if (!RegIn)
          SE.enterIntvAtEnd(*BI.MBB);
        continue;
      }
      if (!BI.LiveThrough) {
        DEBUG(dbgs() << ", not live-through.\n");
        SE.useIntv(SE.enterIntvBefore(BI.Def), Stop);
        continue;
      }
      if (!RegIn) {
        // Block is live-through, but entry bundle is on the stack.
        // Reload just before the first use.
        DEBUG(dbgs() << ", not live-in, enter before first use.\n");
        SE.useIntv(SE.enterIntvBefore(BI.FirstUse), Stop);
        continue;
      }
      DEBUG(dbgs() << ", live-through.\n");
      continue;
    }

    // Block has interference.
    DEBUG(dbgs() << ", interference to " << IP.second);

    if (!BI.LiveThrough && IP.second <= BI.Def) {
      // The interference doesn't reach the outgoing segment.
      DEBUG(dbgs() << " doesn't affect def from " << BI.Def << '\n');
      SE.useIntv(BI.Def, Stop);
      continue;
    }

    if (!BI.Uses) {
      // No uses in block, avoid interference by reloading as late as possible.
      DEBUG(dbgs() << ", no uses.\n");
      SlotIndex SegStart = SE.enterIntvAtEnd(*BI.MBB);
      assert(SegStart >= IP.second && "Couldn't avoid interference");
      continue;
    }

    if (IP.second.getBoundaryIndex() < BI.LastUse) {
      // There are interference-free uses at the end of the block.
      // Find the first use that can get the live-out register.
      SmallVectorImpl<SlotIndex>::const_iterator UI =
        std::lower_bound(SA->UseSlots.begin(), SA->UseSlots.end(),
                         IP.second.getBoundaryIndex());
      assert(UI != SA->UseSlots.end() && "Couldn't find last use");
      SlotIndex Use = *UI;
      assert(Use <= BI.LastUse && "Couldn't find last use");
      // Only attempt a split before the last split point.
      if (Use.getBaseIndex() <= BI.LastSplitPoint) {
        DEBUG(dbgs() << ", free use at " << Use << ".\n");
        SlotIndex SegStart = SE.enterIntvBefore(Use);
        assert(SegStart >= IP.second && "Couldn't avoid interference");
        assert(SegStart < BI.LastSplitPoint && "Impossible split point");
        SE.useIntv(SegStart, Stop);
        continue;
      }
    }

    // Interference is after the last use.
    DEBUG(dbgs() << " after last use.\n");
    SlotIndex SegStart = SE.enterIntvAtEnd(*BI.MBB);
    assert(SegStart >= IP.second && "Couldn't avoid interference");
  }

  // Now all defs leading to live bundles are handled, do everything else.
  for (unsigned i = 0, e = SA->LiveBlocks.size(); i != e; ++i) {
    SplitAnalysis::BlockInfo &BI = SA->LiveBlocks[i];
    bool RegIn = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 1)];

    // Is the register live-in?
    if (!BI.LiveIn || !RegIn)
      continue;

    // We have an incoming register. Check for interference.
    IndexPair &IP = InterferenceRanges[i];
    SlotIndex Start, Stop;
    tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);

    DEBUG(dbgs() << "EB#" << Bundles->getBundle(BI.MBB->getNumber(), 0)
                 << " -> BB#" << BI.MBB->getNumber());

    // Check interference entering the block.
    if (!IP.first.isValid()) {
      // Block is interference-free.
      DEBUG(dbgs() << ", no interference");
      if (!BI.Uses) {
        assert(BI.LiveThrough && "No uses, but not live through block?");
        // Block is live-through without interference.
        if (RegOut) {
          DEBUG(dbgs() << ", no uses, live-through.\n");
          SE.useIntv(Start, Stop);
        } else {
          DEBUG(dbgs() << ", no uses, stack-out.\n");
          SE.leaveIntvAtTop(*BI.MBB);
        }
        continue;
      }
      if (!BI.LiveThrough) {
        DEBUG(dbgs() << ", killed in block.\n");
        SE.useIntv(Start, SE.leaveIntvAfter(BI.Kill));
        continue;
      }
      if (!RegOut) {
        // Block is live-through, but exit bundle is on the stack.
        // Spill immediately after the last use.
        if (BI.LastUse < BI.LastSplitPoint) {
          DEBUG(dbgs() << ", uses, stack-out.\n");
          SE.useIntv(Start, SE.leaveIntvAfter(BI.LastUse));
          continue;
        }
        // The last use is after the last split point, it is probably an
        // indirect jump.
        DEBUG(dbgs() << ", uses at " << BI.LastUse << " after split point "
                     << BI.LastSplitPoint << ", stack-out.\n");
        SlotIndex SegEnd = SE.leaveIntvBefore(BI.LastSplitPoint);
        SE.useIntv(Start, SegEnd);
        // Run a double interval from the split to the last use.
        // This makes it possible to spill the complement without affecting the
        // indirect branch.
        SE.overlapIntv(SegEnd, BI.LastUse);
        continue;
      }
      // Register is live-through.
      DEBUG(dbgs() << ", uses, live-through.\n");
      SE.useIntv(Start, Stop);
      continue;
    }

    // Block has interference.
    DEBUG(dbgs() << ", interference from " << IP.first);

    if (!BI.LiveThrough && IP.first >= BI.Kill) {
      // The interference doesn't reach the outgoing segment.
      DEBUG(dbgs() << " doesn't affect kill at " << BI.Kill << '\n');
      SE.useIntv(Start, BI.Kill);
      continue;
    }

    if (!BI.Uses) {
      // No uses in block, avoid interference by spilling as soon as possible.
      DEBUG(dbgs() << ", no uses.\n");
      SlotIndex SegEnd = SE.leaveIntvAtTop(*BI.MBB);
      assert(SegEnd <= IP.first && "Couldn't avoid interference");
      continue;
    }
    if (IP.first.getBaseIndex() > BI.FirstUse) {
      // There are interference-free uses at the beginning of the block.
      // Find the last use that can get the register.
      SmallVectorImpl<SlotIndex>::const_iterator UI =
        std::lower_bound(SA->UseSlots.begin(), SA->UseSlots.end(),
                         IP.first.getBaseIndex());
      assert(UI != SA->UseSlots.begin() && "Couldn't find first use");
      SlotIndex Use = (--UI)->getBoundaryIndex();
      DEBUG(dbgs() << ", free use at " << *UI << ".\n");
      SlotIndex SegEnd = SE.leaveIntvAfter(Use);
      assert(SegEnd <= IP.first && "Couldn't avoid interference");
      SE.useIntv(Start, SegEnd);
      continue;
    }

    // Interference is before the first use.
    DEBUG(dbgs() << " before first use.\n");
    SlotIndex SegEnd = SE.leaveIntvAtTop(*BI.MBB);
    assert(SegEnd <= IP.first && "Couldn't avoid interference");
  }

  SE.closeIntv();

  // FIXME: Should we be more aggressive about splitting the stack region into
  // per-block segments? The current approach allows the stack region to
  // separate into connected components. Some components may be allocatable.
  SE.finish();

  if (VerifyEnabled) {
    MF->verify(this, "After splitting live range around region");

#ifndef NDEBUG
    // Make sure that at least one of the new intervals can allocate to PhysReg.
    // That was the whole point of splitting the live range.
    bool found = false;
    for (LiveRangeEdit::iterator I = LREdit.begin(), E = LREdit.end(); I != E;
         ++I)
      if (!checkUncachedInterference(**I, PhysReg)) {
        found = true;
        break;
      }
    assert(found && "No allocatable intervals after pointless splitting");
#endif
  }
}

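/// tryRegionSplit - Try to split VirtReg around a region spanning multiple
/// basic blocks. For every candidate PhysReg, combine the local interference
/// cost with the global cost of the bundle placement computed by
/// SpillPlacement, and split around the cheapest candidate region. New
/// intervals are returned in NewVRegs; the function itself always returns 0.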
unsigned RAGreedy::tryRegionSplit(LiveInterval &VirtReg, AllocationOrder &Order,
                                  SmallVectorImpl<LiveInterval*> &NewVRegs) {
  BitVector LiveBundles, BestBundles;
  float BestCost = 0;
  unsigned BestReg = 0;
  Order.rewind();
  while (unsigned PhysReg = Order.next()) {
    float Cost = calcInterferenceInfo(VirtReg, PhysReg);
    if (BestReg && Cost >= BestCost)
      continue;

    SpillPlacer->placeSpills(SpillConstraints, LiveBundles);
    // No live bundles, defer to splitSingleBlocks().
    if (!LiveBundles.any())
      continue;

    Cost += calcGlobalSplitCost(LiveBundles);
    if (!BestReg || Cost < BestCost) {
      BestReg = PhysReg;
      BestCost = Cost;
      BestBundles.swap(LiveBundles);
    }
  }

  if (!BestReg)
    return 0;

  splitAroundRegion(VirtReg, BestReg, BestBundles, NewVRegs);
  return 0;
}


//===----------------------------------------------------------------------===//
//                              Local Splitting
//===----------------------------------------------------------------------===//


/// calcGapWeights - Compute the maximum spill weight that needs to be evicted
/// in order to use PhysReg between two entries in SA->UseSlots.
///
/// GapWeight[i] represents the gap between UseSlots[i] and UseSlots[i+1].
///
void RAGreedy::calcGapWeights(unsigned PhysReg,
                              SmallVectorImpl<float> &GapWeight) {
  assert(SA->LiveBlocks.size() == 1 && "Not a local interval");
  const SplitAnalysis::BlockInfo &BI = SA->LiveBlocks.front();
  const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
  const unsigned NumGaps = Uses.size()-1;

  // Start and end points for the interference check.
  SlotIndex StartIdx = BI.LiveIn ? BI.FirstUse.getBaseIndex() : BI.FirstUse;
  SlotIndex StopIdx = BI.LiveOut ? BI.LastUse.getBoundaryIndex() : BI.LastUse;

  GapWeight.assign(NumGaps, 0.0f);

  // Add interference from each overlapping register.
  for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
    if (!query(const_cast<LiveInterval&>(SA->getParent()), *AI)
           .checkInterference())
      continue;

    // We know that VirtReg is a continuous interval from FirstUse to LastUse,
    // so we don't need InterferenceQuery.
    //
    // Interference that overlaps an instruction is counted in both gaps
    // surrounding the instruction. The exception is interference before
    // StartIdx and after StopIdx.
    //
    LiveIntervalUnion::SegmentIter IntI = PhysReg2LiveUnion[*AI].find(StartIdx);
    for (unsigned Gap = 0; IntI.valid() && IntI.start() < StopIdx; ++IntI) {
      // Skip the gaps before IntI.
      while (Uses[Gap+1].getBoundaryIndex() < IntI.start())
        if (++Gap == NumGaps)
          break;
      if (Gap == NumGaps)
        break;

      // Update the gaps covered by IntI.
      const float weight = IntI.value()->weight;
      for (; Gap != NumGaps; ++Gap) {
        GapWeight[Gap] = std::max(GapWeight[Gap], weight);
        if (Uses[Gap+1].getBaseIndex() >= IntI.stop())
          break;
      }
      if (Gap == NumGaps)
        break;
    }
  }
}

/// getPrevMappedIndex - Return the slot index of the last non-copy instruction
/// before MI that has a slot index. If MI is the first mapped instruction in
/// its block, return the block start index instead.
///
SlotIndex RAGreedy::getPrevMappedIndex(const MachineInstr *MI) {
  assert(MI && "Missing MachineInstr");
  const MachineBasicBlock *MBB = MI->getParent();
  MachineBasicBlock::const_iterator B = MBB->begin(), I = MI;
  while (I != B)
    if (!(--I)->isDebugValue() && !I->isCopy())
      return Indexes->getInstructionIndex(I);
  return Indexes->getMBBStartIdx(MBB);
}

/// calcPrevSlots - Fill in the PrevSlot array with the index of the previous
/// real non-copy instruction for each instruction in SA->UseSlots.
///
void RAGreedy::calcPrevSlots() {
  const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
  PrevSlot.clear();
  PrevSlot.reserve(Uses.size());
  for (unsigned i = 0, e = Uses.size(); i != e; ++i) {
    const MachineInstr *MI = Indexes->getInstructionFromIndex(Uses[i]);
    PrevSlot.push_back(getPrevMappedIndex(MI).getDefIndex());
  }
}

/// nextSplitPoint - Find the next index into SA->UseSlots > i such that it may
/// be beneficial to split before UseSlots[i].
///
/// 0 is always a valid split point
unsigned RAGreedy::nextSplitPoint(unsigned i) {
  const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
  const unsigned Size = Uses.size();
  assert(i != Size && "No split points after the end");
  // Allow split before i when Uses[i] is not adjacent to the previous use.
  while (++i != Size && PrevSlot[i].getBaseIndex() <= Uses[i-1].getBaseIndex())
    ;
  return i;
}

/// tryLocalSplit - Try to split VirtReg into smaller intervals inside its only
/// basic block.
///
unsigned RAGreedy::tryLocalSplit(LiveInterval &VirtReg, AllocationOrder &Order,
                                 SmallVectorImpl<LiveInterval*> &NewVRegs) {
  assert(SA->LiveBlocks.size() == 1 && "Not a local interval");
  const SplitAnalysis::BlockInfo &BI = SA->LiveBlocks.front();

  // Note that it is possible to have an interval that is live-in or live-out
  // while only covering a single block - A phi-def can use undef values from
  // predecessors, and the block could be a single-block loop.
  // We don't bother doing anything clever about such a case, we simply assume
  // that the interval is continuous from FirstUse to LastUse. We should make
  // sure that we don't do anything illegal to such an interval, though.

  const SmallVectorImpl<SlotIndex> &Uses = SA->UseSlots;
  if (Uses.size() <= 2)
    return 0;
  const unsigned NumGaps = Uses.size()-1;

  DEBUG({
    dbgs() << "tryLocalSplit: ";
    for (unsigned i = 0, e = Uses.size(); i != e; ++i)
      dbgs() << ' ' << SA->UseSlots[i];
    dbgs() << '\n';
  });

  // For every use, find the previous mapped non-copy instruction.
  // We use this to detect valid split points, and to estimate new interval
  // sizes.
  calcPrevSlots();

  unsigned BestBefore = NumGaps;
  unsigned BestAfter = 0;
  float BestDiff = 0;

  const float blockFreq = SpillPlacer->getBlockFrequency(BI.MBB);
  SmallVector<float, 8> GapWeight;

  Order.rewind();
  while (unsigned PhysReg = Order.next()) {
    // Keep track of the largest spill weight that would need to be evicted in
    // order to make use of PhysReg between UseSlots[i] and UseSlots[i+1].
    calcGapWeights(PhysReg, GapWeight);

    // Try to find the best sequence of gaps to close.
    // The new spill weight must be larger than any gap interference.

    // We will split before Uses[SplitBefore] and after Uses[SplitAfter].
    unsigned SplitBefore = 0, SplitAfter = nextSplitPoint(1) - 1;

    // MaxGap should always be max(GapWeight[SplitBefore..SplitAfter-1]).
    // It is the spill weight that needs to be evicted.
    float MaxGap = GapWeight[0];
    for (unsigned i = 1; i != SplitAfter; ++i)
      MaxGap = std::max(MaxGap, GapWeight[i]);

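    // Search the split windows [SplitBefore, SplitAfter]: a window is a
    // candidate when the estimated weight of the new interval beats MaxGap,
    // the worst interference in the gaps it covers. Otherwise shrink the
    // window from the left, and keep extending it to the right until the
    // whole local interval has been considered.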
    for (;;) {
      // Live before/after split?
      const bool LiveBefore = SplitBefore != 0 || BI.LiveIn;
      const bool LiveAfter = SplitAfter != NumGaps || BI.LiveOut;

      DEBUG(dbgs() << PrintReg(PhysReg, TRI) << ' '
                   << Uses[SplitBefore] << '-' << Uses[SplitAfter]
                   << " i=" << MaxGap);

      // Stop before the interval gets so big we wouldn't be making progress.
      if (!LiveBefore && !LiveAfter) {
        DEBUG(dbgs() << " all\n");
        break;
      }
      // Should the interval be extended or shrunk?
      bool Shrink = true;
      if (MaxGap < HUGE_VALF) {
        // Estimate the new spill weight.
        //
        // Each instruction reads and writes the register, except the first
        // instr doesn't read when !FirstLive, and the last instr doesn't write
        // when !LastLive.
        //
        // We will be inserting copies before and after, so the total number of
        // reads and writes is 2 * EstUses.
        //
        const unsigned EstUses = 2*(SplitAfter - SplitBefore) +
                                 2*(LiveBefore + LiveAfter);

        // Try to guess the size of the new interval. This should be trivial,
        // but the slot index of an inserted copy can be a lot smaller than the
        // instruction it is inserted before if there are many dead indexes
        // between them.
        //
        // We measure the distance from the instruction before SplitBefore to
        // get a conservative estimate.
        //
        // The final distance can still be different if inserting copies
        // triggers a slot index renumbering.
        //
        const float EstWeight = normalizeSpillWeight(blockFreq * EstUses,
                              PrevSlot[SplitBefore].distance(Uses[SplitAfter]));
        // Would this split be possible to allocate?
        // Never allocate all gaps, we wouldn't be making progress.
        float Diff = EstWeight - MaxGap;
        DEBUG(dbgs() << " w=" << EstWeight << " d=" << Diff);
        if (Diff > 0) {
          Shrink = false;
          if (Diff > BestDiff) {
            DEBUG(dbgs() << " (best)");
            BestDiff = Diff;
            BestBefore = SplitBefore;
            BestAfter = SplitAfter;
          }
        }
      }

      // Try to shrink.
      if (Shrink) {
        SplitBefore = nextSplitPoint(SplitBefore);
        if (SplitBefore < SplitAfter) {
          DEBUG(dbgs() << " shrink\n");
          // Recompute the max when necessary.
          if (GapWeight[SplitBefore - 1] >= MaxGap) {
            MaxGap = GapWeight[SplitBefore];
            for (unsigned i = SplitBefore + 1; i != SplitAfter; ++i)
              MaxGap = std::max(MaxGap, GapWeight[i]);
          }
          continue;
        }
        MaxGap = 0;
      }

      // Try to extend the interval.
      if (SplitAfter >= NumGaps) {
        DEBUG(dbgs() << " end\n");
        break;
      }

      DEBUG(dbgs() << " extend\n");
      for (unsigned e = nextSplitPoint(SplitAfter + 1) - 1;
           SplitAfter != e; ++SplitAfter)
        MaxGap = std::max(MaxGap, GapWeight[SplitAfter]);
      continue;
    }
  }

  // Didn't find any candidates?
  if (BestBefore == NumGaps)
    return 0;

  DEBUG(dbgs() << "Best local split range: " << Uses[BestBefore]
               << '-' << Uses[BestAfter] << ", " << BestDiff
               << ", " << (BestAfter - BestBefore + 1) << " instrs\n");

  SmallVector<LiveInterval*, 4> SpillRegs;
  LiveRangeEdit LREdit(VirtReg, NewVRegs, SpillRegs);
  SplitEditor SE(*SA, *LIS, *VRM, *DomTree, LREdit);

  SE.openIntv();
  SlotIndex SegStart = SE.enterIntvBefore(Uses[BestBefore]);
  SlotIndex SegStop = SE.leaveIntvAfter(Uses[BestAfter]);
  SE.useIntv(SegStart, SegStop);
  SE.closeIntv();
  SE.finish();

  return 0;
}

//===----------------------------------------------------------------------===//
//                          Live Range Splitting
//===----------------------------------------------------------------------===//

/// trySplit - Try to split VirtReg or one of its interferences, making it
/// assignable.
/// @return Physreg when VirtReg may be assigned and/or new NewVRegs.
unsigned RAGreedy::trySplit(LiveInterval &VirtReg, AllocationOrder &Order,
                            SmallVectorImpl<LiveInterval*>&NewVRegs) {
  NamedRegionTimer T("Splitter", TimerGroupName, TimePassesIsEnabled);
  SA->analyze(&VirtReg);

  // Local intervals are handled separately.
  if (LIS->intervalIsInOneMBB(VirtReg))
    return tryLocalSplit(VirtReg, Order, NewVRegs);

  // First try to split around a region spanning multiple blocks.
  unsigned PhysReg = tryRegionSplit(VirtReg, Order, NewVRegs);
  if (PhysReg || !NewVRegs.empty())
    return PhysReg;

  // Then isolate blocks with multiple uses.
  SplitAnalysis::BlockPtrSet Blocks;
  if (SA->getMultiUseBlocks(Blocks)) {
    SmallVector<LiveInterval*, 4> SpillRegs;
    LiveRangeEdit LREdit(VirtReg, NewVRegs, SpillRegs);
    SplitEditor(*SA, *LIS, *VRM, *DomTree, LREdit).splitSingleBlocks(Blocks);
    if (VerifyEnabled)
      MF->verify(this, "After splitting live range around basic blocks");
  }

  // Don't assign any physregs.
  return 0;
}


//===----------------------------------------------------------------------===//
//                                 Spilling
//===----------------------------------------------------------------------===//

/// calcInterferenceWeight - Calculate the combined spill weight of
/// interferences when assigning VirtReg to PhysReg.
float RAGreedy::calcInterferenceWeight(LiveInterval &VirtReg, unsigned PhysReg){
  float Sum = 0;
  for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AI);
    Q.collectInterferingVRegs();
    if (Q.seenUnspillableVReg())
      return HUGE_VALF;
    for (unsigned i = 0, e = Q.interferingVRegs().size(); i != e; ++i)
      Sum += Q.interferingVRegs()[i]->weight;
  }
  return Sum;
}

/// trySpillInterferences - Try to spill interfering registers instead of the
/// current one. Only do it if the accumulated spill weight is smaller than the
/// current spill weight.
unsigned RAGreedy::trySpillInterferences(LiveInterval &VirtReg,
                                         AllocationOrder &Order,
                                     SmallVectorImpl<LiveInterval*> &NewVRegs) {
  NamedRegionTimer T("Spill Interference", TimerGroupName, TimePassesIsEnabled);
  unsigned BestPhys = 0;
  float BestWeight = 0;

  Order.rewind();
  while (unsigned PhysReg = Order.next()) {
    float Weight = calcInterferenceWeight(VirtReg, PhysReg);
    if (Weight == HUGE_VALF || Weight >= VirtReg.weight)
      continue;
    if (!BestPhys || Weight < BestWeight)
      BestPhys = PhysReg, BestWeight = Weight;
  }

  // No candidates found.
  if (!BestPhys)
    return 0;

  // Collect all interfering registers.
  SmallVector<LiveInterval*, 8> Spills;
  for (const unsigned *AI = TRI->getOverlaps(BestPhys); *AI; ++AI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AI);
    Spills.append(Q.interferingVRegs().begin(), Q.interferingVRegs().end());
    for (unsigned i = 0, e = Q.interferingVRegs().size(); i != e; ++i) {
      LiveInterval *VReg = Q.interferingVRegs()[i];
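      // Deassign each interference now; the spill code for all of them is
      // generated in the loop below.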
      unassign(*VReg, *AI);
    }
  }

  // Spill them all.
  DEBUG(dbgs() << "spilling " << Spills.size() << " interferences with weight "
               << BestWeight << '\n');
  for (unsigned i = 0, e = Spills.size(); i != e; ++i)
    spiller().spill(Spills[i], NewVRegs, Spills);
  return BestPhys;
}


//===----------------------------------------------------------------------===//
//                            Main Entry Point
//===----------------------------------------------------------------------===//

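/// selectOrSplit - Pick a physical register for VirtReg, or split/spill it.
/// Returns the chosen physreg, or 0 when no direct assignment was made; in
/// that case any new live ranges produced by splitting or spilling are
/// returned in NewVRegs for the base class to queue up again.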
unsigned RAGreedy::selectOrSplit(LiveInterval &VirtReg,
                                 SmallVectorImpl<LiveInterval*> &NewVRegs) {
  // First try assigning a free register.
  AllocationOrder Order(VirtReg.reg, *VRM, ReservedRegs);
  while (unsigned PhysReg = Order.next()) {
    if (!checkPhysRegInterference(VirtReg, PhysReg))
      return PhysReg;
  }

  // Try to reassign interferences.
  if (unsigned PhysReg = tryReassignOrEvict(VirtReg, Order, NewVRegs))
    return PhysReg;

  assert(NewVRegs.empty() && "Cannot append to existing NewVRegs");

  // Try splitting VirtReg or interferences.
  unsigned PhysReg = trySplit(VirtReg, Order, NewVRegs);
  if (PhysReg || !NewVRegs.empty())
    return PhysReg;

  // Try to spill another interfering reg with less spill weight.
  PhysReg = trySpillInterferences(VirtReg, Order, NewVRegs);
  if (PhysReg)
    return PhysReg;

  // Finally spill VirtReg itself.
  NamedRegionTimer T("Spiller", TimerGroupName, TimePassesIsEnabled);
  SmallVector<LiveInterval*, 1> pendingSpills;
  spiller().spill(&VirtReg, NewVRegs, pendingSpills);

  // The live virtual register requesting allocation was spilled, so tell
  // the caller not to allocate anything during this round.
  return 0;
}

bool RAGreedy::runOnMachineFunction(MachineFunction &mf) {
  DEBUG(dbgs() << "********** GREEDY REGISTER ALLOCATION **********\n"
               << "********** Function: "
               << ((Value*)mf.getFunction())->getName() << '\n');

  MF = &mf;
  if (VerifyEnabled)
    MF->verify(this, "Before greedy register allocator");

  RegAllocBase::init(getAnalysis<VirtRegMap>(), getAnalysis<LiveIntervals>());
  Indexes = &getAnalysis<SlotIndexes>();
  DomTree = &getAnalysis<MachineDominatorTree>();
  ReservedRegs = TRI->getReservedRegs(*MF);
  SpillerInstance.reset(createInlineSpiller(*this, *MF, *VRM));
  Loops = &getAnalysis<MachineLoopInfo>();
  LoopRanges = &getAnalysis<MachineLoopRanges>();
  Bundles = &getAnalysis<EdgeBundles>();
  SpillPlacer = &getAnalysis<SpillPlacement>();

  SA.reset(new SplitAnalysis(*MF, *LIS, *Loops));

  allocatePhysRegs();
  addMBBLiveIns(MF);
  LIS->addKillFlags();

  // Run rewriter
  {
    NamedRegionTimer T("Rewriter", TimerGroupName, TimePassesIsEnabled);
    std::auto_ptr<VirtRegRewriter> rewriter(createVirtRegRewriter());
    rewriter->runOnMachineFunction(*MF, *VRM, LIS);
  }

  // The pass output is in VirtRegMap. Release all the transient data.
  releaseMemory();

  return true;
}