//===-- RegAllocGreedy.cpp - greedy register allocator --------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the RAGreedy function pass for register allocation in
// optimized builds.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "regalloc"
#include "AllocationOrder.h"
#include "LiveIntervalUnion.h"
#include "LiveRangeEdit.h"
#include "RegAllocBase.h"
#include "Spiller.h"
#include "SpillPlacement.h"
#include "SplitKit.h"
#include "VirtRegMap.h"
#include "VirtRegRewriter.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Function.h"
#include "llvm/PassAnalysisSupport.h"
#include "llvm/CodeGen/CalcSpillWeights.h"
#include "llvm/CodeGen/EdgeBundles.h"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "llvm/CodeGen/LiveStackAnalysis.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/CodeGen/MachineLoopRanges.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/RegAllocRegistry.h"
#include "llvm/CodeGen/RegisterCoalescer.h"
#include "llvm/Target/TargetOptions.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Support/Timer.h"

using namespace llvm;

static RegisterRegAlloc greedyRegAlloc("greedy", "greedy register allocator",
                                       createGreedyRegisterAllocator);

namespace {
class RAGreedy : public MachineFunctionPass, public RegAllocBase {
  // context
  MachineFunction *MF;
  BitVector ReservedRegs;

  // analyses
  SlotIndexes *Indexes;
  LiveStacks *LS;
  MachineDominatorTree *DomTree;
  MachineLoopInfo *Loops;
  MachineLoopRanges *LoopRanges;
  EdgeBundles *Bundles;
  SpillPlacement *SpillPlacer;

  // state
  std::auto_ptr<Spiller> SpillerInstance;
  std::auto_ptr<SplitAnalysis> SA;

  // splitting state.

  /// All basic blocks where the current register is live.
  SmallVector<SpillPlacement::BlockConstraint, 8> SpillConstraints;

  /// Additional information about basic blocks where the current variable is
  /// live. Such a block will look like one of these templates:
  ///
  ///  1. |   o---x   | Internal to block. Variable is only live in this block.
  ///  2. |---x       | Live-in, kill.
  ///  3. |       o---| Def, live-out.
  ///  4. |---x   o---| Live-in, kill, def, live-out.
  ///  5. |---o---o---| Live-through with uses or defs.
  ///  6. |-----------| Live-through without uses. Transparent.
  ///
  struct BlockInfo {
    MachineBasicBlock *MBB;
    SlotIndex FirstUse;   ///< First instr using current reg.
    SlotIndex LastUse;    ///< Last instr using current reg.
    SlotIndex Kill;       ///< Interval end point inside block.
    SlotIndex Def;        ///< Interval start point inside block.
    bool Uses;            ///< Current reg has uses or defs in block.
    bool LiveThrough;     ///< Live in whole block (Templ 5. or 6. above).
    bool LiveIn;          ///< Current reg is live in.
    bool LiveOut;         ///< Current reg is live out.

    // Per-interference pattern scratch data.
    bool OverlapEntry;    ///< Interference overlaps entering interval.
    bool OverlapExit;     ///< Interference overlaps exiting interval.
  };

  /// Basic blocks where var is live. This array is parallel to
  /// SpillConstraints.
  SmallVector<BlockInfo, 8> LiveBlocks;

public:
  RAGreedy();

  /// Return the pass name.
  virtual const char* getPassName() const {
    return "Greedy Register Allocator";
  }

  /// RAGreedy analysis usage.
  virtual void getAnalysisUsage(AnalysisUsage &AU) const;

  virtual void releaseMemory();

  virtual Spiller &spiller() { return *SpillerInstance; }

  virtual float getPriority(LiveInterval *LI);

  virtual unsigned selectOrSplit(LiveInterval&,
                                 SmallVectorImpl<LiveInterval*>&);

  /// Perform register allocation.
  virtual bool runOnMachineFunction(MachineFunction &mf);

  static char ID;

private:
  bool checkUncachedInterference(LiveInterval&, unsigned);
  LiveInterval *getSingleInterference(LiveInterval&, unsigned);
  bool reassignVReg(LiveInterval &InterferingVReg, unsigned OldPhysReg);
  bool reassignInterferences(LiveInterval &VirtReg, unsigned PhysReg);
  float calcInterferenceWeight(LiveInterval&, unsigned);
  void calcLiveBlockInfo(LiveInterval&);
  float calcInterferenceInfo(LiveInterval&, unsigned);
  float calcGlobalSplitCost(const BitVector&);
  void splitAroundRegion(LiveInterval&, unsigned, const BitVector&,
                         SmallVectorImpl<LiveInterval*>&);

  unsigned tryReassign(LiveInterval&, AllocationOrder&);
  unsigned tryRegionSplit(LiveInterval&, AllocationOrder&,
                          SmallVectorImpl<LiveInterval*>&);
  unsigned trySplit(LiveInterval&, AllocationOrder&,
                    SmallVectorImpl<LiveInterval*>&);
  unsigned trySpillInterferences(LiveInterval&, AllocationOrder&,
                                 SmallVectorImpl<LiveInterval*>&);
};
} // end anonymous namespace

char RAGreedy::ID = 0;

FunctionPass* llvm::createGreedyRegisterAllocator() {
  return new RAGreedy();
}

RAGreedy::RAGreedy(): MachineFunctionPass(ID) {
  initializeLiveIntervalsPass(*PassRegistry::getPassRegistry());
  initializeSlotIndexesPass(*PassRegistry::getPassRegistry());
  initializeStrongPHIEliminationPass(*PassRegistry::getPassRegistry());
  initializeRegisterCoalescerAnalysisGroup(*PassRegistry::getPassRegistry());
  initializeCalculateSpillWeightsPass(*PassRegistry::getPassRegistry());
  initializeLiveStacksPass(*PassRegistry::getPassRegistry());
  initializeMachineDominatorTreePass(*PassRegistry::getPassRegistry());
  initializeMachineLoopInfoPass(*PassRegistry::getPassRegistry());
  initializeMachineLoopRangesPass(*PassRegistry::getPassRegistry());
  initializeVirtRegMapPass(*PassRegistry::getPassRegistry());
  initializeEdgeBundlesPass(*PassRegistry::getPassRegistry());
  initializeSpillPlacementPass(*PassRegistry::getPassRegistry());
}

void RAGreedy::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesCFG();
  AU.addRequired<AliasAnalysis>();
  AU.addPreserved<AliasAnalysis>();
  AU.addRequired<LiveIntervals>();
  AU.addRequired<SlotIndexes>();
  AU.addPreserved<SlotIndexes>();
  if (StrongPHIElim)
    AU.addRequiredID(StrongPHIEliminationID);
  AU.addRequiredTransitive<RegisterCoalescer>();
  AU.addRequired<CalculateSpillWeights>();
  AU.addRequired<LiveStacks>();
  AU.addPreserved<LiveStacks>();
  AU.addRequired<MachineDominatorTree>();
  AU.addPreserved<MachineDominatorTree>();
  AU.addRequired<MachineLoopInfo>();
  AU.addPreserved<MachineLoopInfo>();
  AU.addRequired<MachineLoopRanges>();
  AU.addPreserved<MachineLoopRanges>();
  AU.addRequired<VirtRegMap>();
  AU.addPreserved<VirtRegMap>();
  AU.addRequired<EdgeBundles>();
  AU.addRequired<SpillPlacement>();
  MachineFunctionPass::getAnalysisUsage(AU);
}

void RAGreedy::releaseMemory() {
  SpillerInstance.reset(0);
  RegAllocBase::releaseMemory();
}

float RAGreedy::getPriority(LiveInterval *LI) {
  float Priority = LI->weight;

  // Prioritize hinted registers so they are allocated first.
  std::pair<unsigned, unsigned> Hint;
  if (Hint.first || Hint.second) {
    // The hint can be target specific, a virtual register, or a physreg.
    Priority *= 2;

    // Prefer physreg hints above anything else.
    if (Hint.first == 0 && TargetRegisterInfo::isPhysicalRegister(Hint.second))
      Priority *= 2;
  }
  return Priority;
}


//===----------------------------------------------------------------------===//
//                         Register Reassignment
//===----------------------------------------------------------------------===//

// Check interference without using the cache.
bool RAGreedy::checkUncachedInterference(LiveInterval &VirtReg,
                                         unsigned PhysReg) {
  for (const unsigned *AliasI = TRI->getOverlaps(PhysReg); *AliasI; ++AliasI) {
    LiveIntervalUnion::Query subQ(&VirtReg, &PhysReg2LiveUnion[*AliasI]);
    if (subQ.checkInterference())
      return true;
  }
  return false;
}

/// getSingleInterference - Return the single interfering virtual register
/// assigned to PhysReg. Return 0 if more than one virtual register is
/// interfering.
LiveInterval *RAGreedy::getSingleInterference(LiveInterval &VirtReg,
                                              unsigned PhysReg) {
  // Check physreg and aliases.
  LiveInterval *Interference = 0;
  for (const unsigned *AliasI = TRI->getOverlaps(PhysReg); *AliasI; ++AliasI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AliasI);
    if (Q.checkInterference()) {
      if (Interference)
        return 0;
      Q.collectInterferingVRegs(1);
      if (!Q.seenAllInterferences())
        return 0;
      Interference = Q.interferingVRegs().front();
    }
  }
  return Interference;
}

// Attempt to reassign this virtual register to a different physical register.
//
// FIXME: we are not yet caching these "second-level" interferences discovered
// in the sub-queries. These interferences can change with each call to
// selectOrSplit. However, we could implement a "may-interfere" cache that
// could be conservatively dirtied when we reassign or split.
//
// FIXME: This may result in a lot of alias queries. We could summarize alias
// live intervals in their parent register's live union, but it's messy.
bool RAGreedy::reassignVReg(LiveInterval &InterferingVReg,
                            unsigned WantedPhysReg) {
  assert(TargetRegisterInfo::isVirtualRegister(InterferingVReg.reg) &&
         "Can only reassign virtual registers");
  assert(TRI->regsOverlap(WantedPhysReg, VRM->getPhys(InterferingVReg.reg)) &&
         "inconsistent phys reg assignment");

  AllocationOrder Order(InterferingVReg.reg, *VRM, ReservedRegs);
  while (unsigned PhysReg = Order.next()) {
    // Don't reassign to a WantedPhysReg alias.
    if (TRI->regsOverlap(PhysReg, WantedPhysReg))
      continue;

    if (checkUncachedInterference(InterferingVReg, PhysReg))
      continue;

    // Reassign the interfering virtual reg to this physical reg.
    unsigned OldAssign = VRM->getPhys(InterferingVReg.reg);
    DEBUG(dbgs() << "reassigning: " << InterferingVReg << " from " <<
          TRI->getName(OldAssign) << " to " << TRI->getName(PhysReg) << '\n');
    PhysReg2LiveUnion[OldAssign].extract(InterferingVReg);
    VRM->clearVirt(InterferingVReg.reg);
    VRM->assignVirt2Phys(InterferingVReg.reg, PhysReg);
    PhysReg2LiveUnion[PhysReg].unify(InterferingVReg);

    return true;
  }
  return false;
}

/// reassignInterferences - Reassign all interferences to different physical
/// registers such that VirtReg can be assigned to PhysReg.
/// Currently this only works with a single interference.
/// @param VirtReg Currently unassigned virtual register.
/// @param PhysReg Physical register to be cleared.
/// @return True on success, false if nothing was changed.
bool RAGreedy::reassignInterferences(LiveInterval &VirtReg, unsigned PhysReg) {
  LiveInterval *InterferingVReg = getSingleInterference(VirtReg, PhysReg);
  if (!InterferingVReg)
    return false;
  if (TargetRegisterInfo::isPhysicalRegister(InterferingVReg->reg))
    return false;
  return reassignVReg(*InterferingVReg, PhysReg);
}

/// tryReassign - Try to reassign interferences to different physregs.
/// @param VirtReg Currently unassigned virtual register.
/// @param Order   Physregs to try.
/// @return Physreg to assign VirtReg, or 0.
unsigned RAGreedy::tryReassign(LiveInterval &VirtReg, AllocationOrder &Order) {
  NamedRegionTimer T("Reassign", TimerGroupName, TimePassesIsEnabled);
  Order.rewind();
  while (unsigned PhysReg = Order.next())
    if (reassignInterferences(VirtReg, PhysReg))
      return PhysReg;
  return 0;
}


//===----------------------------------------------------------------------===//
//                              Region Splitting
//===----------------------------------------------------------------------===//

/// calcLiveBlockInfo - Fill the LiveBlocks array with information about blocks
/// where VirtReg is live.
/// The SpillConstraints array is minimally initialized with MBB->getNumber().
void RAGreedy::calcLiveBlockInfo(LiveInterval &VirtReg) {
  LiveBlocks.clear();
  SpillConstraints.clear();

  assert(!VirtReg.empty() && "Cannot allocate an empty interval");
  LiveInterval::const_iterator LVI = VirtReg.begin();
  LiveInterval::const_iterator LVE = VirtReg.end();

  SmallVectorImpl<SlotIndex>::const_iterator UseI, UseE;
  UseI = SA->UseSlots.begin();
  UseE = SA->UseSlots.end();

  // Loop over basic blocks where VirtReg is live.
  MachineFunction::iterator MFI = Indexes->getMBBFromIndex(LVI->start);
  for (;;) {
    // Block constraints depend on the interference pattern.
    // Just allocate them here, don't compute anything.
    SpillPlacement::BlockConstraint BC;
    BC.Number = MFI->getNumber();
    SpillConstraints.push_back(BC);

    BlockInfo BI;
    BI.MBB = MFI;
    SlotIndex Start, Stop;
    tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);

    // LVI is the first live segment overlapping MBB.
    BI.LiveIn = LVI->start <= Start;
    if (!BI.LiveIn)
      BI.Def = LVI->start;

    // Find the first and last uses in the block.
    BI.Uses = SA->hasUses(MFI);
    if (BI.Uses && UseI != UseE) {
      BI.FirstUse = *UseI;
      assert(BI.FirstUse >= Start);
      do ++UseI;
      while (UseI != UseE && *UseI < Stop);
      BI.LastUse = UseI[-1];
      assert(BI.LastUse < Stop);
    }

    // Look for gaps in the live range.
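    // A gap shows up as a segment ending (kill) followed by a later segment
    // starting again (def) inside the same block.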
    bool hasGap = false;
    BI.LiveOut = true;
    while (LVI->end < Stop) {
      SlotIndex LastStop = LVI->end;
      if (++LVI == LVE || LVI->start >= Stop) {
        BI.Kill = LastStop;
        BI.LiveOut = false;
        break;
      }
      if (LastStop < LVI->start) {
        hasGap = true;
        BI.Kill = LastStop;
        BI.Def = LVI->start;
      }
    }

    // Don't set LiveThrough when the block has a gap.
    BI.LiveThrough = !hasGap && BI.LiveIn && BI.LiveOut;
    LiveBlocks.push_back(BI);

    // LVI is now at LVE or LVI->end >= Stop.
    if (LVI == LVE)
      break;

    // Live segment ends exactly at Stop. Move to the next segment.
    if (LVI->end == Stop && ++LVI == LVE)
      break;

    // Pick the next basic block.
    if (LVI->start < Stop)
      ++MFI;
    else
      MFI = Indexes->getMBBFromIndex(LVI->start);
  }
}

/// calcInterferenceInfo - Compute per-block outgoing and ingoing constraints
/// when considering interference from PhysReg. Also compute an optimistic local
/// cost of this interference pattern.
///
/// The final cost of a split is the local cost + global cost of preferences
/// broken by SpillPlacement.
///
float RAGreedy::calcInterferenceInfo(LiveInterval &VirtReg, unsigned PhysReg) {
  // Reset interference dependent info.
  for (unsigned i = 0, e = LiveBlocks.size(); i != e; ++i) {
    BlockInfo &BI = LiveBlocks[i];
    SpillPlacement::BlockConstraint &BC = SpillConstraints[i];
    BC.Entry = (BI.Uses && BI.LiveIn) ?
      SpillPlacement::PrefReg : SpillPlacement::DontCare;
    BC.Exit = (BI.Uses && BI.LiveOut) ?
      SpillPlacement::PrefReg : SpillPlacement::DontCare;
    BI.OverlapEntry = BI.OverlapExit = false;
  }

  // Add interference info from each PhysReg alias.
  for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
    if (!query(VirtReg, *AI).checkInterference())
      continue;
    DEBUG(PhysReg2LiveUnion[*AI].print(dbgs(), TRI));
    LiveIntervalUnion::SegmentIter IntI =
      PhysReg2LiveUnion[*AI].find(VirtReg.beginIndex());
    if (!IntI.valid())
      continue;

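    // Walk the live blocks in parallel with this alias's interference
    // segments, tightening each block's entry and exit constraints.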
    for (unsigned i = 0, e = LiveBlocks.size(); i != e; ++i) {
      BlockInfo &BI = LiveBlocks[i];
      SpillPlacement::BlockConstraint &BC = SpillConstraints[i];
      SlotIndex Start, Stop;
      tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);

      // Skip interference-free blocks.
      if (IntI.start() >= Stop)
        continue;

      // Handle transparent blocks with interference separately.
      // Transparent blocks never incur any fixed cost.
      if (BI.LiveThrough && !BI.Uses) {
        // Check if interference is live-in - force spill.
        if (BC.Entry != SpillPlacement::MustSpill) {
          BC.Entry = SpillPlacement::PrefSpill;
          IntI.advanceTo(Start);
          if (IntI.valid() && IntI.start() <= Start)
            BC.Entry = SpillPlacement::MustSpill;
        }

        // Check if interference is live-out - force spill.
        if (BC.Exit != SpillPlacement::MustSpill) {
          BC.Exit = SpillPlacement::PrefSpill;
          IntI.advanceTo(Stop);
          if (IntI.valid() && IntI.start() < Stop)
            BC.Exit = SpillPlacement::MustSpill;
        }

        // Nothing more to do for this transparent block.
        if (!IntI.valid())
          break;
        continue;
      }

      // Now we only have blocks with uses left.
      // Check if the interference overlaps the uses.
      assert(BI.Uses && "Non-transparent block without any uses");

      // Check interference on entry.
      if (BI.LiveIn && BC.Entry != SpillPlacement::MustSpill) {
        IntI.advanceTo(Start);
        if (!IntI.valid())
          break;

        // Interference is live-in - force spill.
        if (IntI.start() <= Start)
          BC.Entry = SpillPlacement::MustSpill;
        // Not live in, but before the first use.
        else if (IntI.start() < BI.FirstUse)
          BC.Entry = SpillPlacement::PrefSpill;
      }

      // Does interference overlap the uses in the entry segment
      // [FirstUse;Kill)?
      if (BI.LiveIn && !BI.OverlapEntry) {
        IntI.advanceTo(BI.FirstUse);
        if (!IntI.valid())
          break;
        // A live-through interval has no kill.
        // Check [FirstUse;LastUse) instead.
        if (IntI.start() < (BI.LiveThrough ? BI.LastUse : BI.Kill))
          BI.OverlapEntry = true;
      }

      // Does interference overlap the uses in the exit segment [Def;LastUse)?
      if (BI.LiveOut && !BI.LiveThrough && !BI.OverlapExit) {
        IntI.advanceTo(BI.Def);
        if (!IntI.valid())
          break;
        if (IntI.start() < BI.LastUse)
          BI.OverlapExit = true;
      }

      // Check interference on exit.
      if (BI.LiveOut && BC.Exit != SpillPlacement::MustSpill) {
        // Check interference between LastUse and Stop.
        if (BC.Exit != SpillPlacement::PrefSpill) {
          IntI.advanceTo(BI.LastUse);
          if (!IntI.valid())
            break;
          if (IntI.start() < Stop)
            BC.Exit = SpillPlacement::PrefSpill;
        }
        // Is the interference live-out?
        IntI.advanceTo(Stop);
        if (!IntI.valid())
          break;
        if (IntI.start() < Stop)
          BC.Exit = SpillPlacement::MustSpill;
      }
    }
  }

  // Accumulate a local cost of this interference pattern.
  float LocalCost = 0;
  for (unsigned i = 0, e = LiveBlocks.size(); i != e; ++i) {
    BlockInfo &BI = LiveBlocks[i];
    if (!BI.Uses)
      continue;
    SpillPlacement::BlockConstraint &BC = SpillConstraints[i];
    unsigned Inserts = 0;

    // Do we need spill code for the entry segment?
    if (BI.LiveIn)
      Inserts += BI.OverlapEntry || BC.Entry != SpillPlacement::PrefReg;

    // For the exit segment?
    if (BI.LiveOut)
      Inserts += BI.OverlapExit || BC.Exit != SpillPlacement::PrefReg;

    // The local cost of spill code in this block is the block frequency times
    // the number of spill instructions inserted.
    if (Inserts)
      LocalCost += Inserts * SpillPlacer->getBlockFrequency(BI.MBB);
  }
  DEBUG(dbgs() << "Local cost of " << PrintReg(PhysReg, TRI) << " = "
               << LocalCost << '\n');
  return LocalCost;
}

/// calcGlobalSplitCost - Return the global split cost of following the split
/// pattern in LiveBundles. This cost should be added to the local cost of the
/// interference pattern in SpillConstraints.
///
float RAGreedy::calcGlobalSplitCost(const BitVector &LiveBundles) {
  float GlobalCost = 0;
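  // Each block is charged its frequency once for every end (entry or exit)
  // where the spill placement disagrees with the block's register preference.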
  for (unsigned i = 0, e = LiveBlocks.size(); i != e; ++i) {
    SpillPlacement::BlockConstraint &BC = SpillConstraints[i];
    unsigned Inserts = 0;
    // Broken entry preference?
    Inserts += LiveBundles[Bundles->getBundle(BC.Number, 0)] !=
               (BC.Entry == SpillPlacement::PrefReg);
    // Broken exit preference?
    Inserts += LiveBundles[Bundles->getBundle(BC.Number, 1)] !=
               (BC.Exit == SpillPlacement::PrefReg);
    if (Inserts)
      GlobalCost += Inserts * SpillPlacer->getBlockFrequency(LiveBlocks[i].MBB);
  }
  DEBUG(dbgs() << "Global cost = " << GlobalCost << '\n');
  return GlobalCost;
}

/// splitAroundRegion - Split VirtReg around the region determined by
/// LiveBundles. Make an effort to avoid interference from PhysReg.
///
/// The 'register' interval is going to contain as many uses as possible while
/// avoiding interference. The 'stack' interval is the complement constructed by
/// SplitEditor. It will contain the rest.
///
void RAGreedy::splitAroundRegion(LiveInterval &VirtReg, unsigned PhysReg,
                                 const BitVector &LiveBundles,
                                 SmallVectorImpl<LiveInterval*> &NewVRegs) {
  DEBUG({
    dbgs() << "Splitting around region for " << PrintReg(PhysReg, TRI)
           << " with bundles";
    for (int i = LiveBundles.find_first(); i>=0; i = LiveBundles.find_next(i))
      dbgs() << " EB#" << i;
    dbgs() << ".\n";
  });

  // First compute interference ranges in the live blocks.
  typedef std::pair<SlotIndex, SlotIndex> IndexPair;
  SmallVector<IndexPair, 8> InterferenceRanges;
  InterferenceRanges.resize(LiveBlocks.size());
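  // For each block with uses, record the first and last interfering slots from
  // any alias of PhysReg.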
  for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
    if (!query(VirtReg, *AI).checkInterference())
      continue;
    LiveIntervalUnion::SegmentIter IntI =
      PhysReg2LiveUnion[*AI].find(VirtReg.beginIndex());
    if (!IntI.valid())
      continue;
    for (unsigned i = 0, e = LiveBlocks.size(); i != e; ++i) {
      BlockInfo &BI = LiveBlocks[i];
      if (!BI.Uses)
        continue;
      IndexPair &IP = InterferenceRanges[i];
      SlotIndex Start, Stop;
      tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);
      // Skip interference-free blocks.
      if (IntI.start() >= Stop)
        continue;

      // First interference in block.
      if (BI.LiveIn) {
        IntI.advanceTo(Start);
        if (!IntI.valid())
          break;
        if (!IP.first.isValid() || IntI.start() < IP.first)
          IP.first = IntI.start();
      }

      // Last interference in block.
      if (BI.LiveOut) {
        IntI.advanceTo(Stop);
        if (!IntI.valid() || IntI.start() >= Stop)
          --IntI;
        if (!IP.second.isValid() || IntI.stop() > IP.second)
          IP.second = IntI.stop();
      }
    }
  }

  SmallVector<LiveInterval*, 4> SpillRegs;
  LiveRangeEdit LREdit(VirtReg, NewVRegs, SpillRegs);
  SplitEditor SE(*SA, *LIS, *VRM, *DomTree, LREdit);

  // Create the main cross-block interval.
  SE.openIntv();

  // First add all defs that are live out of a block.
  for (unsigned i = 0, e = LiveBlocks.size(); i != e; ++i) {
    BlockInfo &BI = LiveBlocks[i];
    bool RegIn  = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 1)];

    // Should the register be live out?
    if (!BI.LiveOut || !RegOut)
      continue;

    IndexPair &IP = InterferenceRanges[i];
    SlotIndex Start, Stop;
    tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);

    DEBUG(dbgs() << "BB#" << BI.MBB->getNumber() << " -> EB#"
                 << Bundles->getBundle(BI.MBB->getNumber(), 1));

    // Check interference leaving the block.
    if (!IP.second.isValid() || IP.second < Start) {
      // Block is interference-free.
      DEBUG(dbgs() << ", no interference");
      if (!BI.Uses) {
        assert(BI.LiveThrough && "No uses, but not live through block?");
        // Block is live-through without interference.
        DEBUG(dbgs() << ", no uses"
              << (RegIn ? ", live-through.\n" : ", stack in.\n"));
        if (!RegIn)
          SE.enterIntvAtEnd(*BI.MBB);
        continue;
      }
      if (!BI.LiveThrough) {
        DEBUG(dbgs() << ", not live-through.\n");
        SE.enterIntvBefore(BI.Def);
        SE.useIntv(BI.Def, Stop);
        continue;
      }
      if (!RegIn) {
        // Block is live-through, but entry bundle is on the stack.
        // Reload just before the first use.
        DEBUG(dbgs() << ", not live-in, enter before first use.\n");
        SE.enterIntvBefore(BI.FirstUse);
        SE.useIntv(BI.FirstUse, Stop);
        continue;
      }
      DEBUG(dbgs() << ", live-through.\n");
      continue;
    }

    // Block has interference.
    DEBUG(dbgs() << ", interference to " << IP.second);
    if (!BI.Uses) {
      // No uses in block, avoid interference by reloading as late as possible.
      DEBUG(dbgs() << ", no uses.\n");
      SE.enterIntvAtEnd(*BI.MBB);
      continue;
    }
    if (IP.second < BI.LastUse) {
      // There are interference-free uses at the end of the block.
      // Find the first use that can get the live-out register.
      SmallVectorImpl<SlotIndex>::const_iterator UI =
        std::lower_bound(SA->UseSlots.begin(), SA->UseSlots.end(), IP.second);
      assert(UI != SA->UseSlots.end() && "Couldn't find last use");
      SlotIndex Use = *UI;
      DEBUG(dbgs() << ", free use at " << Use << ".\n");
      assert(Use <= BI.LastUse && "Couldn't find last use");
      SE.enterIntvBefore(Use);
      SE.useIntv(Use, Stop);
      continue;
    }

    // Interference is after the last use.
    DEBUG(dbgs() << " after last use.\n");
    SE.enterIntvAtEnd(*BI.MBB);
  }

  // Now all defs leading to live bundles are handled, do everything else.
  for (unsigned i = 0, e = LiveBlocks.size(); i != e; ++i) {
    BlockInfo &BI = LiveBlocks[i];
    bool RegIn  = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 0)];
    bool RegOut = LiveBundles[Bundles->getBundle(BI.MBB->getNumber(), 1)];

    // Is the register live-in?
    if (!BI.LiveIn || !RegIn)
      continue;

    // We have an incoming register. Check for interference.
    IndexPair &IP = InterferenceRanges[i];
    SlotIndex Start, Stop;
    tie(Start, Stop) = Indexes->getMBBRange(BI.MBB);

    DEBUG(dbgs() << "EB#" << Bundles->getBundle(BI.MBB->getNumber(), 0)
                 << " -> BB#" << BI.MBB->getNumber());

    // Check interference entering the block.
    if (!IP.first.isValid() || IP.first > Stop) {
      // Block is interference-free.
      DEBUG(dbgs() << ", no interference");
      if (!BI.Uses) {
        assert(BI.LiveThrough && "No uses, but not live through block?");
        // Block is live-through without interference.
        if (RegOut) {
          DEBUG(dbgs() << ", no uses, live-through.\n");
          SE.useIntv(Start, Stop);
        } else {
          DEBUG(dbgs() << ", no uses, stack-out.\n");
          SE.leaveIntvAtTop(*BI.MBB);
        }
        continue;
      }
      if (!BI.LiveThrough) {
        DEBUG(dbgs() << ", killed in block.\n");
        SE.useIntv(Start, BI.Kill.getBoundaryIndex());
        SE.leaveIntvAfter(BI.Kill);
        continue;
      }
      if (!RegOut) {
        // Block is live-through, but exit bundle is on the stack.
        // Spill immediately after the last use.
        DEBUG(dbgs() << ", uses, stack-out.\n");
        SE.useIntv(Start, BI.LastUse.getBoundaryIndex());
        SE.leaveIntvAfter(BI.LastUse);
        continue;
      }
      // Register is live-through.
      DEBUG(dbgs() << ", uses, live-through.\n");
      SE.useIntv(Start, Stop);
      continue;
    }

    // Block has interference.
    DEBUG(dbgs() << ", interference from " << IP.first);
    if (!BI.Uses) {
      // No uses in block, avoid interference by spilling as soon as possible.
      DEBUG(dbgs() << ", no uses.\n");
      SE.leaveIntvAtTop(*BI.MBB);
      continue;
    }
    if (IP.first > BI.FirstUse) {
      // There are interference-free uses at the beginning of the block.
      // Find the last use that can get the register.
      SmallVectorImpl<SlotIndex>::const_iterator UI =
        std::lower_bound(SA->UseSlots.begin(), SA->UseSlots.end(), IP.first);
      assert(UI != SA->UseSlots.begin() && "Couldn't find first use");
      SlotIndex Use = (--UI)->getBoundaryIndex();
      DEBUG(dbgs() << ", free use at " << *UI << ".\n");
      assert(Use >= BI.FirstUse && Use < IP.first);
      SE.useIntv(Start, Use);
      SE.leaveIntvAfter(Use);
      continue;
    }

    // Interference is before the first use.
    DEBUG(dbgs() << " before first use.\n");
    SE.leaveIntvAtTop(*BI.MBB);
  }

  SE.closeIntv();

  // FIXME: Should we be more aggressive about splitting the stack region into
  // per-block segments? The current approach allows the stack region to
  // separate into connected components. Some components may be allocatable.
  SE.finish();

  if (VerifyEnabled)
    MF->verify(this, "After splitting live range around region");
}

unsigned RAGreedy::tryRegionSplit(LiveInterval &VirtReg, AllocationOrder &Order,
                                  SmallVectorImpl<LiveInterval*> &NewVRegs) {
  calcLiveBlockInfo(VirtReg);
  BitVector LiveBundles, BestBundles;
  float BestCost = 0;
  unsigned BestReg = 0;
  Order.rewind();
  while (unsigned PhysReg = Order.next()) {
    float Cost = calcInterferenceInfo(VirtReg, PhysReg);
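    // The local cost alone already matches or exceeds the best total cost seen
    // so far, so this candidate cannot win.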
    if (BestReg && Cost >= BestCost)
      continue;

    SpillPlacer->placeSpills(SpillConstraints, LiveBundles);
    // No live bundles, defer to splitSingleBlocks().
    if (!LiveBundles.any())
      continue;

    Cost += calcGlobalSplitCost(LiveBundles);
    if (!BestReg || Cost < BestCost) {
      BestReg = PhysReg;
      BestCost = Cost;
      BestBundles.swap(LiveBundles);
    }
  }

  if (!BestReg)
    return 0;

  splitAroundRegion(VirtReg, BestReg, BestBundles, NewVRegs);
  return 0;
}


//===----------------------------------------------------------------------===//
//                            Live Range Splitting
//===----------------------------------------------------------------------===//

/// trySplit - Try to split VirtReg or one of its interferences, making it
/// assignable.
/// @return Physreg when VirtReg may be assigned and/or new NewVRegs.
unsigned RAGreedy::trySplit(LiveInterval &VirtReg, AllocationOrder &Order,
                            SmallVectorImpl<LiveInterval*>&NewVRegs) {
  NamedRegionTimer T("Splitter", TimerGroupName, TimePassesIsEnabled);
  SA->analyze(&VirtReg);

  // Don't attempt splitting on local intervals for now. TBD.
  if (LIS->intervalIsInOneMBB(VirtReg))
    return 0;

  // First try to split around a region spanning multiple blocks.
  unsigned PhysReg = tryRegionSplit(VirtReg, Order, NewVRegs);
  if (PhysReg || !NewVRegs.empty())
    return PhysReg;

  // Then isolate blocks with multiple uses.
  SplitAnalysis::BlockPtrSet Blocks;
  if (SA->getMultiUseBlocks(Blocks)) {
    SmallVector<LiveInterval*, 4> SpillRegs;
    LiveRangeEdit LREdit(VirtReg, NewVRegs, SpillRegs);
    SplitEditor(*SA, *LIS, *VRM, *DomTree, LREdit).splitSingleBlocks(Blocks);
  }

  // Don't assign any physregs.
  return 0;
}


//===----------------------------------------------------------------------===//
//                                 Spilling
//===----------------------------------------------------------------------===//

/// calcInterferenceWeight - Calculate the combined spill weight of
/// interferences when assigning VirtReg to PhysReg.
float RAGreedy::calcInterferenceWeight(LiveInterval &VirtReg, unsigned PhysReg){
  float Sum = 0;
  for (const unsigned *AI = TRI->getOverlaps(PhysReg); *AI; ++AI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AI);
    Q.collectInterferingVRegs();
    if (Q.seenUnspillableVReg())
      return HUGE_VALF;
    for (unsigned i = 0, e = Q.interferingVRegs().size(); i != e; ++i)
      Sum += Q.interferingVRegs()[i]->weight;
  }
  return Sum;
}

/// trySpillInterferences - Try to spill interfering registers instead of the
/// current one. Only do it if the accumulated spill weight is smaller than the
/// current spill weight.
unsigned RAGreedy::trySpillInterferences(LiveInterval &VirtReg,
                                         AllocationOrder &Order,
                                     SmallVectorImpl<LiveInterval*> &NewVRegs) {
  NamedRegionTimer T("Spill Interference", TimerGroupName, TimePassesIsEnabled);
  unsigned BestPhys = 0;
  float BestWeight = 0;

  Order.rewind();
  while (unsigned PhysReg = Order.next()) {
    float Weight = calcInterferenceWeight(VirtReg, PhysReg);
    if (Weight == HUGE_VALF || Weight >= VirtReg.weight)
      continue;
    if (!BestPhys || Weight < BestWeight)
      BestPhys = PhysReg, BestWeight = Weight;
  }

  // No candidates found.
  if (!BestPhys)
    return 0;

  // Collect all interfering registers.
  SmallVector<LiveInterval*, 8> Spills;
  for (const unsigned *AI = TRI->getOverlaps(BestPhys); *AI; ++AI) {
    LiveIntervalUnion::Query &Q = query(VirtReg, *AI);
    Spills.append(Q.interferingVRegs().begin(), Q.interferingVRegs().end());
    for (unsigned i = 0, e = Q.interferingVRegs().size(); i != e; ++i) {
      LiveInterval *VReg = Q.interferingVRegs()[i];
      PhysReg2LiveUnion[*AI].extract(*VReg);
      VRM->clearVirt(VReg->reg);
    }
  }

  // Spill them all.
  DEBUG(dbgs() << "spilling " << Spills.size() << " interferences with weight "
               << BestWeight << '\n');
  for (unsigned i = 0, e = Spills.size(); i != e; ++i)
    spiller().spill(Spills[i], NewVRegs, Spills);
  return BestPhys;
}


//===----------------------------------------------------------------------===//
//                            Main Entry Point
//===----------------------------------------------------------------------===//

unsigned RAGreedy::selectOrSplit(LiveInterval &VirtReg,
                                 SmallVectorImpl<LiveInterval*> &NewVRegs) {
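  // Allocation stages in order of increasing cost: plain assignment,
  // reassigning interferences, live range splitting, spilling interferences,
  // and finally spilling VirtReg itself.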
  // First try assigning a free register.
  AllocationOrder Order(VirtReg.reg, *VRM, ReservedRegs);
  while (unsigned PhysReg = Order.next()) {
    if (!checkPhysRegInterference(VirtReg, PhysReg))
      return PhysReg;
  }

  // Try to reassign interferences.
  if (unsigned PhysReg = tryReassign(VirtReg, Order))
    return PhysReg;

  assert(NewVRegs.empty() && "Cannot append to existing NewVRegs");

  // Try splitting VirtReg or interferences.
  unsigned PhysReg = trySplit(VirtReg, Order, NewVRegs);
  if (PhysReg || !NewVRegs.empty())
    return PhysReg;

  // Try to spill another interfering reg with less spill weight.
  PhysReg = trySpillInterferences(VirtReg, Order, NewVRegs);
  if (PhysReg)
    return PhysReg;

  // Finally spill VirtReg itself.
  NamedRegionTimer T("Spiller", TimerGroupName, TimePassesIsEnabled);
  SmallVector<LiveInterval*, 1> pendingSpills;
  spiller().spill(&VirtReg, NewVRegs, pendingSpills);

  // The live virtual register requesting allocation was spilled, so tell
  // the caller not to allocate anything during this round.
  return 0;
}

bool RAGreedy::runOnMachineFunction(MachineFunction &mf) {
  DEBUG(dbgs() << "********** GREEDY REGISTER ALLOCATION **********\n"
               << "********** Function: "
               << ((Value*)mf.getFunction())->getName() << '\n');

  MF = &mf;
  if (VerifyEnabled)
    MF->verify(this, "Before greedy register allocator");

  RegAllocBase::init(getAnalysis<VirtRegMap>(), getAnalysis<LiveIntervals>());
  Indexes = &getAnalysis<SlotIndexes>();
  DomTree = &getAnalysis<MachineDominatorTree>();
  ReservedRegs = TRI->getReservedRegs(*MF);
  SpillerInstance.reset(createInlineSpiller(*this, *MF, *VRM));
  Loops = &getAnalysis<MachineLoopInfo>();
  LoopRanges = &getAnalysis<MachineLoopRanges>();
  Bundles = &getAnalysis<EdgeBundles>();
  SpillPlacer = &getAnalysis<SpillPlacement>();

  SA.reset(new SplitAnalysis(*MF, *LIS, *Loops));

  allocatePhysRegs();
  addMBBLiveIns(MF);

  // Run rewriter
  {
    NamedRegionTimer T("Rewriter", TimerGroupName, TimePassesIsEnabled);
    std::auto_ptr<VirtRegRewriter> rewriter(createVirtRegRewriter());
    rewriter->runOnMachineFunction(*MF, *VRM, LIS);
  }

  // The pass output is in VirtRegMap. Release all the transient data.
  releaseMemory();

  return true;
}