//===-- RegAllocGreedy.cpp - greedy register allocator --------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the RAGreedy function pass for register allocation in
// optimized builds.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "regalloc"
#include "AllocationOrder.h"
#include "LiveIntervalUnion.h"
#include "LiveRangeEdit.h"
#include "RegAllocBase.h"
#include "Spiller.h"
#include "SpillPlacement.h"
#include "SplitKit.h"
#include "VirtRegMap.h"
#include "VirtRegRewriter.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Function.h"
#include "llvm/PassAnalysisSupport.h"
#include "llvm/CodeGen/CalcSpillWeights.h"
#include "llvm/CodeGen/EdgeBundles.h"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "llvm/CodeGen/LiveStackAnalysis.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/CodeGen/MachineLoopRanges.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/RegAllocRegistry.h"
#include "llvm/CodeGen/RegisterCoalescer.h"
#include "llvm/Target/TargetOptions.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Support/Timer.h"

using namespace llvm;

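// Register the allocator so it can be selected on the command line with
// -regalloc=greedy.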
static RegisterRegAlloc greedyRegAlloc("greedy", "greedy register allocator",
                                       createGreedyRegisterAllocator);

namespace {
class RAGreedy : public MachineFunctionPass, public RegAllocBase {
  // context
  MachineFunction *MF;
  BitVector ReservedRegs;

  // analyses
  SlotIndexes *Indexes;
  LiveStacks *LS;
  MachineDominatorTree *DomTree;
  MachineLoopInfo *Loops;
  MachineLoopRanges *LoopRanges;
  EdgeBundles *Bundles;
  SpillPlacement *SpillPlacer;

  // state
  std::auto_ptr<Spiller> SpillerInstance;
  std::auto_ptr<SplitAnalysis> SA;

  // splitting state.

  /// All basic blocks where the current register is live.
  SmallVector<SpillPlacement::BlockConstraint, 8> SpillConstraints;

  /// Additional information about basic blocks where the current variable is
  /// live. Such a block will look like one of these templates:
  ///
  ///  1. |   o---x   | Internal to block. Variable is only live in this block.
  ///  2. |---x       | Live-in, kill.
  ///  3. |       o---| Def, live-out.
  ///  4. |---x   o---| Live-in, kill, def, live-out.
  ///  5. |---o---o---| Live-through with uses or defs.
  ///  6. |-----------| Live-through without uses. Transparent.
  ///
  struct BlockInfo {
    MachineBasicBlock *MBB;
    SlotIndex FirstUse;   ///< First instr using current reg.
    SlotIndex LastUse;    ///< Last instr using current reg.
    SlotIndex Kill;       ///< Interval end point inside block.
    SlotIndex Def;        ///< Interval start point inside block.
    bool Uses;            ///< Current reg has uses or defs in block.
    bool LiveThrough;     ///< Live in whole block (Templ 5. or 6. above).
    bool LiveIn;          ///< Current reg is live in.
    bool LiveOut;         ///< Current reg is live out.

    // Per-interference pattern scratch data.
    bool OverlapEntry;    ///< Interference overlaps entering interval.
    bool OverlapExit;     ///< Interference overlaps exiting interval.
  };

  /// Basic blocks where var is live. This array is parallel to
  /// SpillConstraints.
  SmallVector<BlockInfo, 8> LiveBlocks;

public:
  RAGreedy();

  /// Return the pass name.
  virtual const char* getPassName() const {
    return "Greedy Register Allocator";
  }

  /// RAGreedy analysis usage.
  virtual void getAnalysisUsage(AnalysisUsage &AU) const;

  virtual void releaseMemory();

  virtual Spiller &spiller() { return *SpillerInstance; }

  virtual float getPriority(LiveInterval *LI);

  virtual unsigned selectOrSplit(LiveInterval&,
                                 SmallVectorImpl<LiveInterval*>&);

  /// Perform register allocation.
  virtual bool runOnMachineFunction(MachineFunction &mf);

  static char ID;

private:
  bool checkUncachedInterference(LiveInterval&, unsigned);
  LiveInterval *getSingleInterference(LiveInterval&, unsigned);
  bool reassignVReg(LiveInterval &InterferingVReg, unsigned OldPhysReg);
  bool reassignInterferences(LiveInterval &VirtReg, unsigned PhysReg);
  float calcInterferenceWeight(LiveInterval&, unsigned);
  void calcLiveBlockInfo(LiveInterval&);
  float calcInterferenceInfo(LiveInterval&, unsigned);
  float calcGlobalSplitCost(const BitVector&);
  void splitAroundRegion(LiveInterval&, unsigned, const BitVector&,
                         SmallVectorImpl<LiveInterval*>&);

  unsigned tryReassign(LiveInterval&, AllocationOrder&);
  unsigned tryRegionSplit(LiveInterval&, AllocationOrder&,
                          SmallVectorImpl<LiveInterval*>&);
  unsigned trySplit(LiveInterval&, AllocationOrder&,
                    SmallVectorImpl<LiveInterval*>&);
  unsigned trySpillInterferences(LiveInterval&, AllocationOrder&,
                                 SmallVectorImpl<LiveInterval*>&);
};
} // end anonymous namespace

char RAGreedy::ID = 0;

FunctionPass* llvm::createGreedyRegisterAllocator() {
  return new RAGreedy();
}

RAGreedy::RAGreedy(): MachineFunctionPass(ID) {
  initializeSlotIndexesPass(*PassRegistry::getPassRegistry());
  initializeLiveIntervalsPass(*PassRegistry::getPassRegistry());
  initializeSlotIndexesPass(*PassRegistry::getPassRegistry());
  initializeStrongPHIEliminationPass(*PassRegistry::getPassRegistry());
  initializeRegisterCoalescerAnalysisGroup(*PassRegistry::getPassRegistry());
  initializeCalculateSpillWeightsPass(*PassRegistry::getPassRegistry());
  initializeLiveStacksPass(*PassRegistry::getPassRegistry());
  initializeMachineDominatorTreePass(*PassRegistry::getPassRegistry());
  initializeMachineLoopInfoPass(*PassRegistry::getPassRegistry());
  initializeMachineLoopRangesPass(*PassRegistry::getPassRegistry());
  initializeVirtRegMapPass(*PassRegistry::getPassRegistry());
  initializeEdgeBundlesPass(*PassRegistry::getPassRegistry());
  initializeSpillPlacementPass(*PassRegistry::getPassRegistry());
}

void RAGreedy::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesCFG();
  AU.addRequired<AliasAnalysis>();
  AU.addPreserved<AliasAnalysis>();
  AU.addRequired<LiveIntervals>();
  AU.addRequired<SlotIndexes>();
  AU.addPreserved<SlotIndexes>();
  if (StrongPHIElim)
    AU.addRequiredID(StrongPHIEliminationID);
  AU.addRequiredTransitive<RegisterCoalescer>();
  AU.addRequired<CalculateSpillWeights>();
  AU.addRequired<LiveStacks>();
  AU.addPreserved<LiveStacks>();
  AU.addRequired<MachineDominatorTree>();
  AU.addPreserved<MachineDominatorTree>();
  AU.addRequired<MachineLoopInfo>();
  AU.addPreserved<MachineLoopInfo>();
  AU.addRequired<MachineLoopRanges>();
  AU.addPreserved<MachineLoopRanges>();
  AU.addRequired<VirtRegMap>();
  AU.addPreserved<VirtRegMap>();
  AU.addRequired<EdgeBundles>();
  AU.addRequired<SpillPlacement>();
  MachineFunctionPass::getAnalysisUsage(AU);
}

void RAGreedy::releaseMemory() {
  SpillerInstance.reset(0);
  RegAllocBase::releaseMemory();
}

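/// getPriority - Compute the allocation priority for LI. Intervals with higher
/// priority are allocated first; the priority is derived from the spill weight
/// and doubled for hinted registers.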
float RAGreedy::getPriority(LiveInterval *LI) {
  float Priority = LI->weight;

  // Prioritize hinted registers so they are allocated first.
  std::pair<unsigned, unsigned> Hint =
    MF->getRegInfo().getRegAllocationHint(LI->reg);
  if (Hint.first || Hint.second) {
    // The hint can be target specific, a virtual register, or a physreg.
    Priority *= 2;

    // Prefer physreg hints above anything else.
    if (Hint.first == 0 && TargetRegisterInfo::isPhysicalRegister(Hint.second))
      Priority *= 2;
  }
  return Priority;
}


//===----------------------------------------------------------------------===//
//                           Register Reassignment
//===----------------------------------------------------------------------===//

// Check interference without using the cache.
bool RAGreedy::checkUncachedInterference(LiveInterval &VirtReg,
                                         unsigned PhysReg) {
  for (const unsigned *AliasI = TRI->getOverlaps(PhysReg); *AliasI; ++AliasI) {
    LiveIntervalUnion::Query subQ(&VirtReg, &PhysReg2LiveUnion[*AliasI]);
    if (subQ.checkInterference())
      return true;
  }
  return false;
}

/// getSingleInterference - Return the single interfering virtual register
/// assigned to PhysReg. Return 0 if more than one virtual register is
/// interfering.
LiveInterval *RAGreedy::getSingleInterference(LiveInterval &VirtReg,
                                              unsigned PhysReg) {
  // Check physreg and aliases.
  LiveInterval *Interference = 0;
  for (const unsigned *AliasI = TRI->getOverlaps(PhysReg); *AliasI; ++AliasI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AliasI);
    if (Q.checkInterference()) {
      if (Interference)
        return 0;
      Q.collectInterferingVRegs(1);
      if (!Q.seenAllInterferences())
        return 0;
      Interference = Q.interferingVRegs().front();
    }
  }
  return Interference;
}

// Attempt to reassign this virtual register to a different physical register.
//
// FIXME: we are not yet caching these "second-level" interferences discovered
// in the sub-queries. These interferences can change with each call to
// selectOrSplit. However, we could implement a "may-interfere" cache that
// could be conservatively dirtied when we reassign or split.
//
// FIXME: This may result in a lot of alias queries. We could summarize alias
// live intervals in their parent register's live union, but it's messy.
bool RAGreedy::reassignVReg(LiveInterval &InterferingVReg,
                            unsigned WantedPhysReg) {
  assert(TargetRegisterInfo::isVirtualRegister(InterferingVReg.reg) &&
         "Can only reassign virtual registers");
  assert(TRI->regsOverlap(WantedPhysReg, VRM->getPhys(InterferingVReg.reg)) &&
         "inconsistent phys reg assignment");

  AllocationOrder Order(InterferingVReg.reg, *VRM, ReservedRegs);
  while (unsigned PhysReg = Order.next()) {
    // Don't reassign to a WantedPhysReg alias.
    if (TRI->regsOverlap(PhysReg, WantedPhysReg))
      continue;

    if (checkUncachedInterference(InterferingVReg, PhysReg))
      continue;

    // Reassign the interfering virtual reg to this physical reg.
    unsigned OldAssign = VRM->getPhys(InterferingVReg.reg);
    DEBUG(dbgs() << "reassigning: " << InterferingVReg << " from " <<
          TRI->getName(OldAssign) << " to " << TRI->getName(PhysReg) << '\n');
    PhysReg2LiveUnion[OldAssign].extract(InterferingVReg);
    VRM->clearVirt(InterferingVReg.reg);
    VRM->assignVirt2Phys(InterferingVReg.reg, PhysReg);
    PhysReg2LiveUnion[PhysReg].unify(InterferingVReg);

    return true;
  }
  return false;
}

/// reassignInterferences - Reassign all interferences to different physical
/// registers such that VirtReg can be assigned to PhysReg.
/// Currently this only works with a single interference.
/// @param VirtReg Currently unassigned virtual register.
/// @param PhysReg Physical register to be cleared.
/// @return True on success, false if nothing was changed.
bool RAGreedy::reassignInterferences(LiveInterval &VirtReg, unsigned PhysReg) {
  LiveInterval *InterferingVReg = getSingleInterference(VirtReg, PhysReg);
  if (!InterferingVReg)
    return false;
  if (TargetRegisterInfo::isPhysicalRegister(InterferingVReg->reg))
    return false;
  return reassignVReg(*InterferingVReg, PhysReg);
}

/// tryReassign - Try to reassign interferences to different physregs.
/// @param VirtReg Currently unassigned virtual register.
/// @param Order   Physregs to try.
/// @return Physreg to assign VirtReg, or 0.
unsigned RAGreedy::tryReassign(LiveInterval &VirtReg, AllocationOrder &Order) {
  NamedRegionTimer T("Reassign", TimerGroupName, TimePassesIsEnabled);
  Order.rewind();
  while (unsigned PhysReg = Order.next())
    if (reassignInterferences(VirtReg, PhysReg))
      return PhysReg;
  return 0;
}


//===----------------------------------------------------------------------===//
//                              Region Splitting
//===----------------------------------------------------------------------===//

/// calcLiveBlockInfo - Fill the LiveBlocks array with information about blocks
/// where VirtReg is live.
/// The SpillConstraints array is minimally initialized with MBB->getNumber().
void RAGreedy::calcLiveBlockInfo(LiveInterval &VirtReg) {
  LiveBlocks.clear();
  SpillConstraints.clear();

  assert(!VirtReg.empty() && "Cannot allocate an empty interval");
  LiveInterval::const_iterator LVI = VirtReg.begin();
  LiveInterval::const_iterator LVE = VirtReg.end();

  SmallVectorImpl<SlotIndex>::const_iterator UseI, UseE;
  UseI = SA->UseSlots.begin();
  UseE = SA->UseSlots.end();

  // Loop over basic blocks where VirtReg is live.
  MachineFunction::iterator MFI = Indexes->getMBBFromIndex(LVI->start);
  for (;;) {
    // Block constraints depend on the interference pattern.
    // Just allocate them here, don't compute anything.
    SpillPlacement::BlockConstraint BC;
    BC.Number = MFI->getNumber();
    SpillConstraints.push_back(BC);

    BlockInfo BI;
    BI.MBB = MFI;
    SlotIndex Start, Stop;
    tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);

    // LVI is the first live segment overlapping MBB.
    BI.LiveIn = LVI->start <= Start;
    if (!BI.LiveIn)
      BI.Def = LVI->start;

    // Find the first and last uses in the block.
    BI.Uses = SA->hasUses(MFI);
    if (BI.Uses && UseI != UseE) {
      BI.FirstUse = *UseI;
      assert(BI.FirstUse >= Start);
      do ++UseI;
      while (UseI != UseE && *UseI < Stop);
      BI.LastUse = UseI[-1];
      assert(BI.LastUse < Stop);
    }

    // Look for gaps in the live range.
    bool hasGap = false;
    BI.LiveOut = true;
    while (LVI->end < Stop) {
      SlotIndex LastStop = LVI->end;
      if (++LVI == LVE || LVI->start >= Stop) {
        BI.Kill = LastStop;
        BI.LiveOut = false;
        break;
      }
      if (LastStop < LVI->start) {
        hasGap = true;
        BI.Kill = LastStop;
        BI.Def = LVI->start;
      }
    }

    // Don't set LiveThrough when the block has a gap.
    BI.LiveThrough = !hasGap && BI.LiveIn && BI.LiveOut;
    LiveBlocks.push_back(BI);

    // LVI is now at LVE or LVI->end >= Stop.
    if (LVI == LVE)
      break;

    // Live segment ends exactly at Stop. Move to the next segment.
    if (LVI->end == Stop && ++LVI == LVE)
      break;

    // Pick the next basic block.
    if (LVI->start < Stop)
      ++MFI;
    else
      MFI = Indexes->getMBBFromIndex(LVI->start);
  }
}

/// calcInterferenceInfo - Compute per-block outgoing and ingoing constraints
/// when considering interference from PhysReg. Also compute an optimistic local
/// cost of this interference pattern.
///
/// The final cost of a split is the local cost + global cost of preferences
/// broken by SpillPlacement.
///
float RAGreedy::calcInterferenceInfo(LiveInterval &VirtReg, unsigned PhysReg) {
  // Reset interference dependent info.
  for (unsigned i = 0, e = LiveBlocks.size(); i != e; ++i) {
    BlockInfo &BI = LiveBlocks[i];
    SpillPlacement::BlockConstraint &BC = SpillConstraints[i];
    BC.Entry = (BI.Uses && BI.LiveIn) ?
      SpillPlacement::PrefReg : SpillPlacement::DontCare;
    BC.Exit = (BI.Uses && BI.LiveOut) ?
      SpillPlacement::PrefReg : SpillPlacement::DontCare;
    BI.OverlapEntry = BI.OverlapExit = false;
  }

  // Add interference info from each PhysReg alias.
  for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
    if (!query(VirtReg, *AI).checkInterference())
      continue;
    DEBUG(PhysReg2LiveUnion[*AI].print(dbgs(), TRI));
    LiveIntervalUnion::SegmentIter IntI =
      PhysReg2LiveUnion[*AI].find(VirtReg.beginIndex());
    if (!IntI.valid())
      continue;

    for (unsigned i = 0, e = LiveBlocks.size(); i != e; ++i) {
      BlockInfo &BI = LiveBlocks[i];
      SpillPlacement::BlockConstraint &BC = SpillConstraints[i];
      SlotIndex Start, Stop;
      tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);

      // Skip interference-free blocks.
      if (IntI.start() >= Stop)
        continue;

      // Handle transparent blocks with interference separately.
      // Transparent blocks never incur any fixed cost.
      if (BI.LiveThrough && !BI.Uses) {
        // Check if interference is live-in - force spill.
        if (BC.Entry != SpillPlacement::MustSpill) {
          BC.Entry = SpillPlacement::PrefSpill;
          IntI.advanceTo(Start);
          if (IntI.valid() && IntI.start() <= Start)
            BC.Entry = SpillPlacement::MustSpill;
        }

        // Check if interference is live-out - force spill.
        if (BC.Exit != SpillPlacement::MustSpill) {
          BC.Exit = SpillPlacement::PrefSpill;
          IntI.advanceTo(Stop);
          if (IntI.valid() && IntI.start() < Stop)
            BC.Exit = SpillPlacement::MustSpill;
        }

        // Nothing more to do for this transparent block.
        if (!IntI.valid())
          break;
        continue;
      }

      // Now we only have blocks with uses left.
      // Check if the interference overlaps the uses.
      assert(BI.Uses && "Non-transparent block without any uses");

      // Check interference on entry.
      if (BI.LiveIn && BC.Entry != SpillPlacement::MustSpill) {
        IntI.advanceTo(Start);
        if (!IntI.valid())
          break;

        // Interference is live-in - force spill.
        if (IntI.start() <= Start)
          BC.Entry = SpillPlacement::MustSpill;
        // Not live in, but before the first use.
        else if (IntI.start() < BI.FirstUse)
          BC.Entry = SpillPlacement::PrefSpill;
      }

      // Does interference overlap the uses in the entry segment
      // [FirstUse;Kill)?
      if (BI.LiveIn && !BI.OverlapEntry) {
        IntI.advanceTo(BI.FirstUse);
        if (!IntI.valid())
          break;
        // A live-through interval has no kill.
        // Check [FirstUse;LastUse) instead.
        if (IntI.start() < (BI.LiveThrough ? BI.LastUse : BI.Kill))
          BI.OverlapEntry = true;
      }

      // Does interference overlap the uses in the exit segment [Def;LastUse)?
      if (BI.LiveOut && !BI.LiveThrough && !BI.OverlapExit) {
        IntI.advanceTo(BI.Def);
        if (!IntI.valid())
          break;
        if (IntI.start() < BI.LastUse)
          BI.OverlapExit = true;
      }

      // Check interference on exit.
      if (BI.LiveOut && BC.Exit != SpillPlacement::MustSpill) {
        // Check interference between LastUse and Stop.
        if (BC.Exit != SpillPlacement::PrefSpill) {
          IntI.advanceTo(BI.LastUse);
          if (!IntI.valid())
            break;
          if (IntI.start() < Stop)
            BC.Exit = SpillPlacement::PrefSpill;
        }
        // Is the interference live-out?
        IntI.advanceTo(Stop);
        if (!IntI.valid())
          break;
        if (IntI.start() < Stop)
          BC.Exit = SpillPlacement::MustSpill;
      }
    }
  }

  // Accumulate a local cost of this interference pattern.
  float LocalCost = 0;
  for (unsigned i = 0, e = LiveBlocks.size(); i != e; ++i) {
    BlockInfo &BI = LiveBlocks[i];
    if (!BI.Uses)
      continue;
    SpillPlacement::BlockConstraint &BC = SpillConstraints[i];
    unsigned Inserts = 0;

    // Do we need spill code for the entry segment?
    if (BI.LiveIn)
      Inserts += BI.OverlapEntry || BC.Entry != SpillPlacement::PrefReg;

    // For the exit segment?
    if (BI.LiveOut)
      Inserts += BI.OverlapExit || BC.Exit != SpillPlacement::PrefReg;

    // The local cost of spill code in this block is the block frequency times
    // the number of spill instructions inserted.
    if (Inserts)
      LocalCost += Inserts * SpillPlacer->getBlockFrequency(BI.MBB);
  }
  DEBUG(dbgs() << "Local cost of " << PrintReg(PhysReg, TRI) << " = "
               << LocalCost << '\n');
  return LocalCost;
}

/// calcGlobalSplitCost - Return the global split cost of following the split
/// pattern in LiveBundles. This cost should be added to the local cost of the
/// interference pattern in SpillConstraints.
///
float RAGreedy::calcGlobalSplitCost(const BitVector &LiveBundles) {
  float GlobalCost = 0;
  for (unsigned i = 0, e = LiveBlocks.size(); i != e; ++i) {
    SpillPlacement::BlockConstraint &BC = SpillConstraints[i];
    unsigned Inserts = 0;
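    // getBundle(N, 0) is the edge bundle entering block N; getBundle(N, 1) is
    // the bundle leaving it.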
    // Broken entry preference?
    Inserts += LiveBundles[Bundles->getBundle(BC.Number, 0)] !=
               (BC.Entry == SpillPlacement::PrefReg);
    // Broken exit preference?
    Inserts += LiveBundles[Bundles->getBundle(BC.Number, 1)] !=
               (BC.Exit == SpillPlacement::PrefReg);
    if (Inserts)
      GlobalCost += Inserts * SpillPlacer->getBlockFrequency(LiveBlocks[i].MBB);
  }
  DEBUG(dbgs() << "Global cost = " << GlobalCost << '\n');
  return GlobalCost;
}

/// splitAroundRegion - Split VirtReg around the region determined by
/// LiveBundles. Make an effort to avoid interference from PhysReg.
///
/// The 'register' interval is going to contain as many uses as possible while
/// avoiding interference. The 'stack' interval is the complement constructed by
/// SplitEditor. It will contain the rest.
///
void RAGreedy::splitAroundRegion(LiveInterval &VirtReg, unsigned PhysReg,
                                 const BitVector &LiveBundles,
                                 SmallVectorImpl<LiveInterval*> &NewVRegs) {
  DEBUG({
    dbgs() << "Splitting around region for " << PrintReg(PhysReg, TRI)
           << " with bundles";
    for (int i = LiveBundles.find_first(); i>=0; i = LiveBundles.find_next(i))
      dbgs() << " EB#" << i;
    dbgs() << ".\n";
  });

  // First compute interference ranges in the live blocks.
  typedef std::pair<SlotIndex, SlotIndex> IndexPair;
  SmallVector<IndexPair, 8> InterferenceRanges;
  InterferenceRanges.resize(LiveBlocks.size());
  for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
    if (!query(VirtReg, *AI).checkInterference())
      continue;
    LiveIntervalUnion::SegmentIter IntI =
      PhysReg2LiveUnion[*AI].find(VirtReg.beginIndex());
    if (!IntI.valid())
      continue;
    for (unsigned i = 0, e = LiveBlocks.size(); i != e; ++i) {
      BlockInfo &BI = LiveBlocks[i];
      if (!BI.Uses)
        continue;
      IndexPair &IP = InterferenceRanges[i];
      SlotIndex Start, Stop;
      tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);
      // Skip interference-free blocks.
      if (IntI.start() >= Stop)
        continue;

      // First interference in block.
      if (BI.LiveIn) {
        IntI.advanceTo(Start);
        if (!IntI.valid())
          break;
        if (!IP.first.isValid() || IntI.start() < IP.first)
          IP.first = IntI.start();
      }

      // Last interference in block.
      if (BI.LiveOut) {
        IntI.advanceTo(Stop);
        if (!IntI.valid() || IntI.start() >= Stop)
          --IntI;
        if (!IP.second.isValid() || IntI.stop() > IP.second)
          IP.second = IntI.stop();
      }
    }
  }

  SmallVector<LiveInterval*, 4> SpillRegs;
  LiveRangeEdit LREdit(VirtReg, NewVRegs, SpillRegs);
  SplitEditor SE(*SA, *LIS, *VRM, *DomTree, LREdit);

  // Create the main cross-block interval.
  SE.openIntv();

  // First add all defs that are live out of a block.
  for (unsigned i = 0, e = LiveBlocks.size(); i != e; ++i) {
    BlockInfo &BI = LiveBlocks[i];
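    // A set bit in LiveBundles means the register should be live (in a
    // physreg) across that edge bundle, so RegIn/RegOut tell whether this
    // block's entry and exit are inside the register region.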
    bool RegIn = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 1)];

    // Should the register be live out?
    if (!BI.LiveOut || !RegOut)
      continue;

    IndexPair &IP = InterferenceRanges[i];
    SlotIndex Start, Stop;
    tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);

    DEBUG(dbgs() << "BB#" << BI.MBB->getNumber() << " -> EB#"
                 << Bundles->getBundle(BI.MBB->getNumber(), 1));

    // Check interference leaving the block.
    if (!IP.second.isValid() || IP.second < Start) {
      // Block is interference-free.
      DEBUG(dbgs() << ", no interference");
      if (!BI.Uses) {
        assert(BI.LiveThrough && "No uses, but not live through block?");
        // Block is live-through without interference.
        DEBUG(dbgs() << ", no uses"
                     << (RegIn ? ", live-through.\n" : ", stack in.\n"));
        if (!RegIn)
          SE.enterIntvAtEnd(*BI.MBB);
        continue;
      }
      if (!BI.LiveThrough) {
        DEBUG(dbgs() << ", not live-through.\n");
        SE.enterIntvBefore(BI.Def);
        SE.useIntv(BI.Def, Stop);
        continue;
      }
      if (!RegIn) {
        // Block is live-through, but entry bundle is on the stack.
        // Reload just before the first use.
        DEBUG(dbgs() << ", not live-in, enter before first use.\n");
        SE.enterIntvBefore(BI.FirstUse);
        SE.useIntv(BI.FirstUse, Stop);
        continue;
      }
      DEBUG(dbgs() << ", live-through.\n");
      continue;
    }

    // Block has interference.
    DEBUG(dbgs() << ", interference to " << IP.second);
    if (!BI.Uses) {
      // No uses in block, avoid interference by reloading as late as possible.
      DEBUG(dbgs() << ", no uses.\n");
      SE.enterIntvAtEnd(*BI.MBB);
      continue;
    }
    if (IP.second < BI.LastUse) {
      // There are interference-free uses at the end of the block.
      // Find the first use that can get the live-out register.
      SlotIndex Use = *std::lower_bound(SA->UseSlots.begin(), SA->UseSlots.end(),
                                        IP.second);
      DEBUG(dbgs() << ", free use at " << Use << ".\n");
      assert(Use > IP.second && Use <= BI.LastUse);
      SE.enterIntvBefore(Use);
      SE.useIntv(Use, Stop);
      continue;
    }

    // Interference is after the last use.
    DEBUG(dbgs() << " after last use.\n");
    SE.enterIntvAtEnd(*BI.MBB);
  }

  // Now all defs leading to live bundles are handled, do everything else.
  for (unsigned i = 0, e = LiveBlocks.size(); i != e; ++i) {
    BlockInfo &BI = LiveBlocks[i];
    bool RegIn = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 1)];

    // Is the register live-in?
    if (!BI.LiveIn || !RegIn)
      continue;

    // We have an incoming register. Check for interference.
    IndexPair &IP = InterferenceRanges[i];
    SlotIndex Start, Stop;
    tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);

    DEBUG(dbgs() << "EB#" << Bundles->getBundle(BI.MBB->getNumber(), 0)
                 << " -> BB#" << BI.MBB->getNumber());

    // Check interference entering the block.
    if (!IP.first.isValid() || IP.first > Stop) {
      // Block is interference-free.
      DEBUG(dbgs() << ", no interference");
      if (!BI.Uses) {
        assert(BI.LiveThrough && "No uses, but not live through block?");
        // Block is live-through without interference.
        if (RegOut) {
          DEBUG(dbgs() << ", no uses, live-through.\n");
          SE.useIntv(Start, Stop);
        } else {
          DEBUG(dbgs() << ", no uses, stack-out.\n");
          SE.leaveIntvAtTop(*BI.MBB);
        }
        continue;
      }
      if (!BI.LiveThrough) {
        DEBUG(dbgs() << ", killed in block.\n");
        SE.useIntv(Start, BI.Kill);
        SE.leaveIntvAfter(BI.Kill);
        continue;
      }
      if (!RegOut) {
        // Block is live-through, but exit bundle is on the stack.
        // Spill immediately after the last use.
        DEBUG(dbgs() << ", uses, stack-out.\n");
        SE.useIntv(Start, BI.LastUse);
        SE.leaveIntvAfter(BI.LastUse);
        continue;
      }
      // Register is live-through.
      DEBUG(dbgs() << ", uses, live-through.\n");
      SE.useIntv(Start, Stop);
      continue;
    }

    // Block has interference.
    DEBUG(dbgs() << ", interference from " << IP.first);
    if (!BI.Uses) {
      // No uses in block, avoid interference by spilling as soon as possible.
      DEBUG(dbgs() << ", no uses.\n");
      SE.leaveIntvAtTop(*BI.MBB);
      continue;
    }
    if (IP.first > BI.FirstUse) {
      // There are interference-free uses at the beginning of the block.
      // Find the last use that can get the register.
      SlotIndex Use = std::lower_bound(SA->UseSlots.begin(), SA->UseSlots.end(),
                                       IP.first)[-1];
      DEBUG(dbgs() << ", free use at " << Use << ".\n");
      assert(Use >= BI.FirstUse && Use < IP.first);
      SE.useIntv(Start, Use);
      SE.leaveIntvAfter(Use);
      continue;
    }

    // Interference is before the first use.
    DEBUG(dbgs() << " before first use.\n");
    SE.leaveIntvAtTop(*BI.MBB);
  }

  SE.closeIntv();

  // FIXME: Should we be more aggressive about splitting the stack region into
  // per-block segments? The current approach allows the stack region to
  // separate into connected components. Some components may be allocatable.
  SE.finish();

  if (VerifyEnabled)
    MF->verify(this, "After splitting live range around region");
}

unsigned RAGreedy::tryRegionSplit(LiveInterval &VirtReg, AllocationOrder &Order,
                                  SmallVectorImpl<LiveInterval*> &NewVRegs) {
  calcLiveBlockInfo(VirtReg);
  BitVector LiveBundles, BestBundles;
  float BestCost = 0;
  unsigned BestReg = 0;
  Order.rewind();
  while (unsigned PhysReg = Order.next()) {
    float Cost = calcInterferenceInfo(VirtReg, PhysReg);
    if (BestReg && Cost >= BestCost)
      continue;

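    // Ask the spill placement solver which edge bundles the register should be
    // live in, given the per-block constraints computed above.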
    SpillPlacer->placeSpills(SpillConstraints, LiveBundles);
    // No live bundles, defer to splitSingleBlocks().
    if (!LiveBundles.any())
      continue;

    Cost += calcGlobalSplitCost(LiveBundles);
    if (!BestReg || Cost < BestCost) {
      BestReg = PhysReg;
      BestCost = Cost;
      BestBundles.swap(LiveBundles);
    }
  }

  if (!BestReg)
    return 0;

  splitAroundRegion(VirtReg, BestReg, BestBundles, NewVRegs);
  return 0;
}


//===----------------------------------------------------------------------===//
//                            Live Range Splitting
//===----------------------------------------------------------------------===//

/// trySplit - Try to split VirtReg or one of its interferences, making it
/// assignable.
/// @return Physreg when VirtReg may be assigned and/or new NewVRegs.
unsigned RAGreedy::trySplit(LiveInterval &VirtReg, AllocationOrder &Order,
                            SmallVectorImpl<LiveInterval*> &NewVRegs) {
  NamedRegionTimer T("Splitter", TimerGroupName, TimePassesIsEnabled);
  SA->analyze(&VirtReg);

  // Don't attempt splitting on local intervals for now. TBD.
  if (LIS->intervalIsInOneMBB(VirtReg))
    return 0;

  // First try to split around a region spanning multiple blocks.
  unsigned PhysReg = tryRegionSplit(VirtReg, Order, NewVRegs);
  if (PhysReg || !NewVRegs.empty())
    return PhysReg;

  // Then isolate blocks with multiple uses.
  SplitAnalysis::BlockPtrSet Blocks;
  if (SA->getMultiUseBlocks(Blocks)) {
    SmallVector<LiveInterval*, 4> SpillRegs;
    LiveRangeEdit LREdit(VirtReg, NewVRegs, SpillRegs);
    SplitEditor(*SA, *LIS, *VRM, *DomTree, LREdit).splitSingleBlocks(Blocks);
  }

  // Don't assign any physregs.
  return 0;
}


//===----------------------------------------------------------------------===//
//                                 Spilling
//===----------------------------------------------------------------------===//

/// calcInterferenceWeight - Calculate the combined spill weight of
/// interferences when assigning VirtReg to PhysReg.
float RAGreedy::calcInterferenceWeight(LiveInterval &VirtReg, unsigned PhysReg){
  float Sum = 0;
  for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AI);
    Q.collectInterferingVRegs();
    if (Q.seenUnspillableVReg())
      return HUGE_VALF;
    for (unsigned i = 0, e = Q.interferingVRegs().size(); i != e; ++i)
      Sum += Q.interferingVRegs()[i]->weight;
  }
  return Sum;
}

/// trySpillInterferences - Try to spill interfering registers instead of the
/// current one. Only do it if the accumulated spill weight is smaller than the
/// current spill weight.
unsigned RAGreedy::trySpillInterferences(LiveInterval &VirtReg,
                                         AllocationOrder &Order,
                                     SmallVectorImpl<LiveInterval*> &NewVRegs) {
  NamedRegionTimer T("Spill Interference", TimerGroupName, TimePassesIsEnabled);
  unsigned BestPhys = 0;
  float BestWeight = 0;

  Order.rewind();
  while (unsigned PhysReg = Order.next()) {
    float Weight = calcInterferenceWeight(VirtReg, PhysReg);
    if (Weight == HUGE_VALF || Weight >= VirtReg.weight)
      continue;
    if (!BestPhys || Weight < BestWeight)
      BestPhys = PhysReg, BestWeight = Weight;
  }

  // No candidates found.
  if (!BestPhys)
    return 0;

  // Collect all interfering registers.
  SmallVector<LiveInterval*, 8> Spills;
  for (const unsigned *AI = TRI->getOverlaps(BestPhys); *AI; ++AI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AI);
    Spills.append(Q.interferingVRegs().begin(), Q.interferingVRegs().end());
    for (unsigned i = 0, e = Q.interferingVRegs().size(); i != e; ++i) {
      LiveInterval *VReg = Q.interferingVRegs()[i];
      PhysReg2LiveUnion[*AI].extract(*VReg);
      VRM->clearVirt(VReg->reg);
    }
  }

  // Spill them all.
  DEBUG(dbgs() << "spilling " << Spills.size() << " interferences with weight "
               << BestWeight << '\n');
  for (unsigned i = 0, e = Spills.size(); i != e; ++i)
    spiller().spill(Spills[i], NewVRegs, Spills);
  return BestPhys;
}


//===----------------------------------------------------------------------===//
//                            Main Entry Point
//===----------------------------------------------------------------------===//

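/// selectOrSplit - Try the allocation strategies in order of increasing
/// aggressiveness: assign a free register, reassign a single interference,
/// split the live range, spill cheaper interferences, and finally spill
/// VirtReg itself.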
unsigned RAGreedy::selectOrSplit(LiveInterval &VirtReg,
                                 SmallVectorImpl<LiveInterval*> &NewVRegs) {
  // First try assigning a free register.
  AllocationOrder Order(VirtReg.reg, *VRM, ReservedRegs);
  while (unsigned PhysReg = Order.next()) {
    if (!checkPhysRegInterference(VirtReg, PhysReg))
      return PhysReg;
  }

  // Try to reassign interferences.
  if (unsigned PhysReg = tryReassign(VirtReg, Order))
    return PhysReg;

  assert(NewVRegs.empty() && "Cannot append to existing NewVRegs");

  // Try splitting VirtReg or interferences.
  unsigned PhysReg = trySplit(VirtReg, Order, NewVRegs);
  if (PhysReg || !NewVRegs.empty())
    return PhysReg;

  // Try to spill another interfering reg with less spill weight.
  PhysReg = trySpillInterferences(VirtReg, Order, NewVRegs);
  if (PhysReg)
    return PhysReg;

  // Finally spill VirtReg itself.
  NamedRegionTimer T("Spiller", TimerGroupName, TimePassesIsEnabled);
  SmallVector<LiveInterval*, 1> pendingSpills;
  spiller().spill(&VirtReg, NewVRegs, pendingSpills);

  // The live virtual register requesting allocation was spilled, so tell
  // the caller not to allocate anything during this round.
  return 0;
}

bool RAGreedy::runOnMachineFunction(MachineFunction &mf) {
  DEBUG(dbgs() << "********** GREEDY REGISTER ALLOCATION **********\n"
               << "********** Function: "
               << ((Value*)mf.getFunction())->getName() << '\n');

  MF = &mf;
  if (VerifyEnabled)
    MF->verify(this, "Before greedy register allocator");

  RegAllocBase::init(getAnalysis<VirtRegMap>(), getAnalysis<LiveIntervals>());
  Indexes = &getAnalysis<SlotIndexes>();
  DomTree = &getAnalysis<MachineDominatorTree>();
  ReservedRegs = TRI->getReservedRegs(*MF);
  SpillerInstance.reset(createInlineSpiller(*this, *MF, *VRM));
  Loops = &getAnalysis<MachineLoopInfo>();
  LoopRanges = &getAnalysis<MachineLoopRanges>();
  Bundles = &getAnalysis<EdgeBundles>();
  SpillPlacer = &getAnalysis<SpillPlacement>();

  SA.reset(new SplitAnalysis(*MF, *LIS, *Loops));

  allocatePhysRegs();
  addMBBLiveIns(MF);

  // Run rewriter
  {
    NamedRegionTimer T("Rewriter", TimerGroupName, TimePassesIsEnabled);
    std::auto_ptr<VirtRegRewriter> rewriter(createVirtRegRewriter());
    rewriter->runOnMachineFunction(*MF, *VRM, LIS);
  }

  // The pass output is in VirtRegMap. Release all the transient data.
  releaseMemory();

  return true;
}