//===-- PreAllocSplitting.cpp - Pre-allocation Interval Splitting Pass ---===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the machine instruction level pre-register allocation
// live interval splitting pass. It finds live interval barriers, i.e.
// instructions which will kill all physical registers in certain register
// classes, and splits all live intervals which cross the barrier.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "pre-alloc-split"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "llvm/CodeGen/LiveStackAnalysis.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/RegisterCoalescer.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetOptions.h"
#include "llvm/Target/TargetRegisterInfo.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DepthFirstIterator.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/Statistic.h"
using namespace llvm;

static cl::opt<int> PreSplitLimit("pre-split-limit", cl::init(-1), cl::Hidden);
static cl::opt<int> DeadSplitLimit("dead-split-limit", cl::init(-1),
                                   cl::Hidden);

STATISTIC(NumSplits, "Number of intervals split");
STATISTIC(NumRemats, "Number of intervals split by rematerialization");
STATISTIC(NumFolds, "Number of intervals split with spill folding");
STATISTIC(NumRenumbers, "Number of intervals renumbered into new registers");
STATISTIC(NumDeadSpills, "Number of dead spills removed");

namespace {
  class VISIBILITY_HIDDEN PreAllocSplitting : public MachineFunctionPass {
    MachineFunction *CurrMF;
    const TargetMachine *TM;
    const TargetInstrInfo *TII;
    const TargetRegisterInfo *TRI;
    MachineFrameInfo *MFI;
    MachineRegisterInfo *MRI;
    LiveIntervals *LIs;
    LiveStacks *LSs;

    // Barrier - Current barrier being processed.
    MachineInstr *Barrier;

    // BarrierMBB - Basic block where the barrier resides.
    MachineBasicBlock *BarrierMBB;

    // BarrierIdx - Current barrier index.
    unsigned BarrierIdx;

    // CurrLI - Current live interval being split.
    LiveInterval *CurrLI;

    // CurrSLI - Current stack slot live interval.
    LiveInterval *CurrSLI;

    // CurrSValNo - Current val# for the stack slot live interval.
    VNInfo *CurrSValNo;

    // IntervalSSMap - A map from live interval to spill slots.
    DenseMap<unsigned, int> IntervalSSMap;

    // Def2SpillMap - A map from a def instruction index to spill index.
    DenseMap<unsigned, unsigned> Def2SpillMap;

  public:
    static char ID;
    PreAllocSplitting() : MachineFunctionPass(&ID) {}

    virtual bool runOnMachineFunction(MachineFunction &MF);

    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.addRequired<LiveIntervals>();
      AU.addPreserved<LiveIntervals>();
      AU.addRequired<LiveStacks>();
      AU.addPreserved<LiveStacks>();
      AU.addPreserved<RegisterCoalescer>();
      if (StrongPHIElim)
        AU.addPreservedID(StrongPHIEliminationID);
      else
        AU.addPreservedID(PHIEliminationID);
      AU.addRequired<MachineDominatorTree>();
      AU.addRequired<MachineLoopInfo>();
      AU.addPreserved<MachineDominatorTree>();
      AU.addPreserved<MachineLoopInfo>();
      MachineFunctionPass::getAnalysisUsage(AU);
    }

    virtual void releaseMemory() {
      IntervalSSMap.clear();
      Def2SpillMap.clear();
    }

    virtual const char *getPassName() const {
      return "Pre-Register Allocation Live Interval Splitting";
    }

    /// print - Implement the dump method.
    virtual void print(std::ostream &O, const Module* M = 0) const {
      LIs->print(O, M);
    }

    void print(std::ostream *O, const Module* M = 0) const {
      if (O) print(*O, M);
    }

  private:
    MachineBasicBlock::iterator
      findNextEmptySlot(MachineBasicBlock*, MachineInstr*,
                        unsigned&);

    MachineBasicBlock::iterator
      findSpillPoint(MachineBasicBlock*, MachineInstr*, MachineInstr*,
                     SmallPtrSet<MachineInstr*, 4>&, unsigned&);

    MachineBasicBlock::iterator
      findRestorePoint(MachineBasicBlock*, MachineInstr*, unsigned,
                       SmallPtrSet<MachineInstr*, 4>&, unsigned&);

    int CreateSpillStackSlot(unsigned, const TargetRegisterClass *);

    bool IsAvailableInStack(MachineBasicBlock*, unsigned, unsigned, unsigned,
                            unsigned&, int&) const;

    void UpdateSpillSlotInterval(VNInfo*, unsigned, unsigned);

    bool SplitRegLiveInterval(LiveInterval*);

    bool SplitRegLiveIntervals(const TargetRegisterClass **,
                               SmallPtrSet<LiveInterval*, 8>&);

    bool createsNewJoin(LiveRange* LR, MachineBasicBlock* DefMBB,
                        MachineBasicBlock* BarrierMBB);
    bool Rematerialize(unsigned vreg, VNInfo* ValNo,
                       MachineInstr* DefMI,
                       MachineBasicBlock::iterator RestorePt,
                       unsigned RestoreIdx,
                       SmallPtrSet<MachineInstr*, 4>& RefsInMBB);
    MachineInstr* FoldSpill(unsigned vreg, const TargetRegisterClass* RC,
                            MachineInstr* DefMI,
                            MachineInstr* Barrier,
                            MachineBasicBlock* MBB,
                            int& SS,
                            SmallPtrSet<MachineInstr*, 4>& RefsInMBB);
    MachineInstr* FoldRestore(unsigned vreg,
                              const TargetRegisterClass* RC,
                              MachineInstr* Barrier,
                              MachineBasicBlock* MBB,
                              int SS,
                              SmallPtrSet<MachineInstr*, 4>& RefsInMBB);
    void RenumberValno(VNInfo* VN);
    void ReconstructLiveInterval(LiveInterval* LI);
    bool removeDeadSpills(SmallPtrSet<LiveInterval*, 8>& split);
    unsigned getNumberOfNonSpills(SmallPtrSet<MachineInstr*, 4>& MIs,
                                  unsigned Reg, int FrameIndex, bool& TwoAddr);
    VNInfo* PerformPHIConstruction(MachineBasicBlock::iterator Use,
                                   MachineBasicBlock* MBB, LiveInterval* LI,
                                   SmallPtrSet<MachineInstr*, 4>& Visited,
            DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> >& Defs,
            DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> >& Uses,
                                   DenseMap<MachineInstr*, VNInfo*>& NewVNs,
                                 DenseMap<MachineBasicBlock*, VNInfo*>& LiveOut,
                                   DenseMap<MachineBasicBlock*, VNInfo*>& Phis,
                                   bool IsTopLevel, bool IsIntraBlock);
    VNInfo* PerformPHIConstructionFallBack(MachineBasicBlock::iterator Use,
                                   MachineBasicBlock* MBB, LiveInterval* LI,
                                   SmallPtrSet<MachineInstr*, 4>& Visited,
            DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> >& Defs,
            DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> >& Uses,
                                   DenseMap<MachineInstr*, VNInfo*>& NewVNs,
                                 DenseMap<MachineBasicBlock*, VNInfo*>& LiveOut,
                                   DenseMap<MachineBasicBlock*, VNInfo*>& Phis,
                                   bool IsTopLevel, bool IsIntraBlock);
  };
} // end anonymous namespace

char PreAllocSplitting::ID = 0;

static RegisterPass<PreAllocSplitting>
X("pre-alloc-splitting", "Pre-Register Allocation Live Interval Splitting");

const PassInfo *const llvm::PreAllocSplittingID = &X;

/// findNextEmptySlot - Find a gap after the given machine instruction in the
/// instruction index map. If there isn't one, return end().
MachineBasicBlock::iterator
PreAllocSplitting::findNextEmptySlot(MachineBasicBlock *MBB, MachineInstr *MI,
                                     unsigned &SpotIndex) {
  MachineBasicBlock::iterator MII = MI;
  if (++MII != MBB->end()) {
    unsigned Index = LIs->findGapBeforeInstr(LIs->getInstructionIndex(MII));
    if (Index) {
      SpotIndex = Index;
      return MII;
    }
  }
  return MBB->end();
}

/// findSpillPoint - Find a gap as far away from the given MI as possible that
/// is suitable for spilling the current live interval. The index must be
/// before any defs and uses of the live interval register in the mbb. Return
/// begin() if none is found.
MachineBasicBlock::iterator
PreAllocSplitting::findSpillPoint(MachineBasicBlock *MBB, MachineInstr *MI,
                                  MachineInstr *DefMI,
                                  SmallPtrSet<MachineInstr*, 4> &RefsInMBB,
                                  unsigned &SpillIndex) {
  MachineBasicBlock::iterator Pt = MBB->begin();

  // Go top down if RefsInMBB is empty.
  if (RefsInMBB.empty() && !DefMI) {
    MachineBasicBlock::iterator MII = MBB->begin();
    MachineBasicBlock::iterator EndPt = MI;

    if (MII == EndPt) return Pt;

    do {
      ++MII;
      unsigned Index = LIs->getInstructionIndex(MII);
      unsigned Gap = LIs->findGapBeforeInstr(Index);

      // We can't insert the spill between the barrier (a call) and its
      // corresponding call frame setup/teardown.
      if (prior(MII)->getOpcode() == TRI->getCallFrameSetupOpcode()) {
        bool reachedBarrier = false;
        do {
          if (MII == EndPt) {
            reachedBarrier = true;
            break;
          }
          ++MII;
        } while (MII->getOpcode() != TRI->getCallFrameDestroyOpcode());

        if (reachedBarrier) break;
      } else if (Gap) {
        Pt = MII;
        SpillIndex = Gap;
        break;
      }
    } while (MII != EndPt);
  } else {
    MachineBasicBlock::iterator MII = MI;
    MachineBasicBlock::iterator EndPt = DefMI
      ? MachineBasicBlock::iterator(DefMI) : MBB->begin();

    while (MII != EndPt && !RefsInMBB.count(MII)) {
      unsigned Index = LIs->getInstructionIndex(MII);

      // We can't insert the spill between the barrier (a call) and its
      // corresponding call frame setup.
      if (prior(MII)->getOpcode() == TRI->getCallFrameSetupOpcode()) {
        --MII;
        continue;
      } else if (MII->getOpcode() == TRI->getCallFrameDestroyOpcode()) {
        bool reachedBarrier = false;
        while (MII->getOpcode() != TRI->getCallFrameSetupOpcode()) {
          --MII;
          if (MII == EndPt) {
            reachedBarrier = true;
            break;
          }
        }

        if (reachedBarrier) break;
        else continue;
      } else if (LIs->hasGapBeforeInstr(Index)) {
        Pt = MII;
        SpillIndex = LIs->findGapBeforeInstr(Index, true);
      }
      --MII;
    }
  }

  return Pt;
}

/// findRestorePoint - Find a gap in the instruction index map that's suitable
/// for restoring the current live interval value. The index must be before any
/// uses of the live interval register in the mbb. Return end() if none is
/// found.
MachineBasicBlock::iterator
PreAllocSplitting::findRestorePoint(MachineBasicBlock *MBB, MachineInstr *MI,
                                    unsigned LastIdx,
                                    SmallPtrSet<MachineInstr*, 4> &RefsInMBB,
                                    unsigned &RestoreIndex) {
  // FIXME: Allow spill to be inserted to the beginning of the mbb. Update mbb
  // begin index accordingly.
  MachineBasicBlock::iterator Pt = MBB->end();
  unsigned EndIdx = LIs->getMBBEndIdx(MBB);

  // Go bottom up if RefsInMBB is empty and the end of the mbb isn't beyond
  // the last index in the live range.
  if (RefsInMBB.empty() && LastIdx >= EndIdx) {
    MachineBasicBlock::iterator MII = MBB->getFirstTerminator();
    MachineBasicBlock::iterator EndPt = MI;

    if (MII == EndPt) return Pt;

    --MII;
    do {
      unsigned Index = LIs->getInstructionIndex(MII);
      unsigned Gap = LIs->findGapBeforeInstr(Index);

      // We can't insert a restore between the barrier (a call) and its
      // corresponding call frame teardown.
      if (MII->getOpcode() == TRI->getCallFrameDestroyOpcode()) {
        bool reachedBarrier = false;
        while (MII->getOpcode() != TRI->getCallFrameSetupOpcode()) {
          --MII;
          if (MII == EndPt) {
            reachedBarrier = true;
            break;
          }
        }

        if (reachedBarrier) break;
        else continue;
      } else if (Gap) {
        Pt = MII;
        RestoreIndex = Gap;
        break;
      }

      --MII;
    } while (MII != EndPt);
  } else {
    MachineBasicBlock::iterator MII = MI;
    ++MII;

    // FIXME: Limit the number of instructions to examine to reduce
    // compile time?
    while (MII != MBB->getFirstTerminator()) {
      unsigned Index = LIs->getInstructionIndex(MII);
      if (Index > LastIdx)
        break;
      unsigned Gap = LIs->findGapBeforeInstr(Index);

      // We can't insert a restore between the barrier (a call) and its
      // corresponding call frame teardown.
      if (MII->getOpcode() == TRI->getCallFrameDestroyOpcode()) {
        ++MII;
        continue;
      } else if (prior(MII)->getOpcode() == TRI->getCallFrameSetupOpcode()) {
        bool reachedBarrier = false;
        do {
          if (MII == MBB->getFirstTerminator() || RefsInMBB.count(MII)) {
            reachedBarrier = true;
            break;
          }

          ++MII;
        } while (MII->getOpcode() != TRI->getCallFrameDestroyOpcode());

        if (reachedBarrier) break;
      } else if (Gap) {
        Pt = MII;
        RestoreIndex = Gap;
      }

      if (RefsInMBB.count(MII))
        break;
      ++MII;
    }
  }

  return Pt;
}

/// CreateSpillStackSlot - Create a stack slot for the live interval being
/// split. If the live interval was previously split, just reuse the same
/// slot.
int PreAllocSplitting::CreateSpillStackSlot(unsigned Reg,
                                            const TargetRegisterClass *RC) {
  int SS;
  DenseMap<unsigned, int>::iterator I = IntervalSSMap.find(Reg);
  if (I != IntervalSSMap.end()) {
    SS = I->second;
  } else {
    SS = MFI->CreateStackObject(RC->getSize(), RC->getAlignment());
    IntervalSSMap[Reg] = SS;
  }

  // Create live interval for stack slot.
  CurrSLI = &LSs->getOrCreateInterval(SS);
  if (CurrSLI->hasAtLeastOneValue())
    CurrSValNo = CurrSLI->getValNumInfo(0);
  else
    CurrSValNo = CurrSLI->getNextValue(~0U, 0, LSs->getVNInfoAllocator());
  return SS;
}

/// IsAvailableInStack - Return true if register is available in a split stack
/// slot at the specified index.
bool
PreAllocSplitting::IsAvailableInStack(MachineBasicBlock *DefMBB,
                                      unsigned Reg, unsigned DefIndex,
                                      unsigned RestoreIndex,
                                      unsigned &SpillIndex, int &SS) const {
  if (!DefMBB)
    return false;

  DenseMap<unsigned, int>::const_iterator I = IntervalSSMap.find(Reg);
  if (I == IntervalSSMap.end())
    return false;
  DenseMap<unsigned, unsigned>::const_iterator II = Def2SpillMap.find(DefIndex);
  if (II == Def2SpillMap.end())
    return false;

  // If last spill of def is in the same mbb as barrier mbb (where restore will
  // be), make sure it's not below the intended restore index.
  // FIXME: Undo the previous spill?
  assert(LIs->getMBBFromIndex(II->second) == DefMBB);
  if (DefMBB == BarrierMBB && II->second >= RestoreIndex)
    return false;

  SS = I->second;
  SpillIndex = II->second;
  return true;
}

/// UpdateSpillSlotInterval - Given the specified val# of the register live
/// interval being split, and the spill and restore indices, update the live
/// interval of the spill stack slot.
void
PreAllocSplitting::UpdateSpillSlotInterval(VNInfo *ValNo, unsigned SpillIndex,
                                           unsigned RestoreIndex) {
  assert(LIs->getMBBFromIndex(RestoreIndex) == BarrierMBB &&
         "Expect restore in the barrier mbb");

  MachineBasicBlock *MBB = LIs->getMBBFromIndex(SpillIndex);
  if (MBB == BarrierMBB) {
    // Intra-block spill + restore. We are done.
    LiveRange SLR(SpillIndex, RestoreIndex, CurrSValNo);
    CurrSLI->addRange(SLR);
    return;
  }

  SmallPtrSet<MachineBasicBlock*, 4> Processed;
  unsigned EndIdx = LIs->getMBBEndIdx(MBB);
  LiveRange SLR(SpillIndex, EndIdx+1, CurrSValNo);
  CurrSLI->addRange(SLR);
  Processed.insert(MBB);

  // Start from the spill mbb, figure out the extent of the spill slot's
  // live interval.
  SmallVector<MachineBasicBlock*, 4> WorkList;
  const LiveRange *LR = CurrLI->getLiveRangeContaining(SpillIndex);
  if (LR->end > EndIdx)
    // If live range extends beyond end of mbb, add successors to work list.
    for (MachineBasicBlock::succ_iterator SI = MBB->succ_begin(),
           SE = MBB->succ_end(); SI != SE; ++SI)
      WorkList.push_back(*SI);

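  // Worklist flood fill: visit successor blocks where the split value is
  // still live. A block either contains the restore (the stack slot interval
  // stops there), is live-through (extend to the block end and keep walking
  // its successors), or holds the last use (stop at the live range's end).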
  while (!WorkList.empty()) {
    MachineBasicBlock *MBB = WorkList.back();
    WorkList.pop_back();
    if (Processed.count(MBB))
      continue;
    unsigned Idx = LIs->getMBBStartIdx(MBB);
    LR = CurrLI->getLiveRangeContaining(Idx);
    if (LR && LR->valno == ValNo) {
      EndIdx = LIs->getMBBEndIdx(MBB);
      if (Idx <= RestoreIndex && RestoreIndex < EndIdx) {
        // Spill slot live interval stops at the restore.
        LiveRange SLR(Idx, RestoreIndex, CurrSValNo);
        CurrSLI->addRange(SLR);
      } else if (LR->end > EndIdx) {
        // Live range extends beyond end of mbb, process successors.
        LiveRange SLR(Idx, EndIdx+1, CurrSValNo);
        CurrSLI->addRange(SLR);
        for (MachineBasicBlock::succ_iterator SI = MBB->succ_begin(),
               SE = MBB->succ_end(); SI != SE; ++SI)
          WorkList.push_back(*SI);
      } else {
        LiveRange SLR(Idx, LR->end, CurrSValNo);
        CurrSLI->addRange(SLR);
      }
      Processed.insert(MBB);
    }
  }
}

/// PerformPHIConstruction - From properly set up use and def lists, use a PHI
/// construction algorithm to compute the ranges and valnos for an interval.
VNInfo*
PreAllocSplitting::PerformPHIConstruction(MachineBasicBlock::iterator UseI,
                                 MachineBasicBlock* MBB, LiveInterval* LI,
                                 SmallPtrSet<MachineInstr*, 4>& Visited,
            DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> >& Defs,
            DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> >& Uses,
                                 DenseMap<MachineInstr*, VNInfo*>& NewVNs,
                                 DenseMap<MachineBasicBlock*, VNInfo*>& LiveOut,
                                 DenseMap<MachineBasicBlock*, VNInfo*>& Phis,
                                 bool IsTopLevel, bool IsIntraBlock) {
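  // Results are memoized at two granularities: NewVNs maps an individual
  // instruction to the value number reaching it (used for intra-block
  // queries), while LiveOut caches the value number that is live out of a
  // whole block (used when a predecessor is queried via its end() iterator).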
  // Return memoized result if it's available.
  if (IsTopLevel && Visited.count(UseI) && NewVNs.count(UseI))
    return NewVNs[UseI];
  else if (!IsTopLevel && IsIntraBlock && NewVNs.count(UseI))
    return NewVNs[UseI];
  else if (!IsIntraBlock && LiveOut.count(MBB))
    return LiveOut[MBB];

  // Check if our block contains any uses or defs.
  bool ContainsDefs = Defs.count(MBB);
  bool ContainsUses = Uses.count(MBB);

  VNInfo* RetVNI = 0;

  // Enumerate the cases of use/def containing blocks.
  if (!ContainsDefs && !ContainsUses) {
    return PerformPHIConstructionFallBack(UseI, MBB, LI, Visited, Defs, Uses,
                                          NewVNs, LiveOut, Phis,
                                          IsTopLevel, IsIntraBlock);
  } else if (ContainsDefs && !ContainsUses) {
    SmallPtrSet<MachineInstr*, 2>& BlockDefs = Defs[MBB];

    // Search for the def in this block. If we don't find it before the
    // instruction we care about, go to the fallback case. Note that this
    // should never happen: this cannot be intrablock, so use should
    // always be an end() iterator.
    assert(UseI == MBB->end() && "No use marked in intrablock");

    MachineBasicBlock::iterator Walker = UseI;
    --Walker;
    while (Walker != MBB->begin()) {
      if (BlockDefs.count(Walker))
        break;
      --Walker;
    }

    // Once we've found it, extend its VNInfo to our instruction.
    unsigned DefIndex = LIs->getInstructionIndex(Walker);
    DefIndex = LiveIntervals::getDefIndex(DefIndex);
    unsigned EndIndex = LIs->getMBBEndIdx(MBB);

    RetVNI = NewVNs[Walker];
    LI->addRange(LiveRange(DefIndex, EndIndex+1, RetVNI));
  } else if (!ContainsDefs && ContainsUses) {
    SmallPtrSet<MachineInstr*, 2>& BlockUses = Uses[MBB];

    // Search for the use in this block that precedes the instruction we care
    // about, going to the fallback case if we don't find it.
    if (UseI == MBB->begin())
      return PerformPHIConstructionFallBack(UseI, MBB, LI, Visited, Defs,
                                            Uses, NewVNs, LiveOut, Phis,
                                            IsTopLevel, IsIntraBlock);

    MachineBasicBlock::iterator Walker = UseI;
    --Walker;
    bool found = false;
    while (Walker != MBB->begin()) {
      if (BlockUses.count(Walker)) {
        found = true;
        break;
      }
      --Walker;
    }

    // Must check begin() too.
    if (!found) {
      if (BlockUses.count(Walker))
        found = true;
      else
        return PerformPHIConstructionFallBack(UseI, MBB, LI, Visited, Defs,
                                              Uses, NewVNs, LiveOut, Phis,
                                              IsTopLevel, IsIntraBlock);
    }

    unsigned UseIndex = LIs->getInstructionIndex(Walker);
    UseIndex = LiveIntervals::getUseIndex(UseIndex);
    unsigned EndIndex = 0;
    if (IsIntraBlock) {
      EndIndex = LIs->getInstructionIndex(UseI);
      EndIndex = LiveIntervals::getUseIndex(EndIndex);
    } else
      EndIndex = LIs->getMBBEndIdx(MBB);

    // Now, recursively phi construct the VNInfo for the use we found,
    // and then extend it to include the instruction we care about.
    RetVNI = PerformPHIConstruction(Walker, MBB, LI, Visited, Defs, Uses,
                                    NewVNs, LiveOut, Phis, false, true);

    LI->addRange(LiveRange(UseIndex, EndIndex+1, RetVNI));

    // FIXME: Need to set kills properly for inter-block stuff.
    if (LI->isKill(RetVNI, UseIndex)) LI->removeKill(RetVNI, UseIndex);
    if (IsIntraBlock)
      LI->addKill(RetVNI, EndIndex);
  } else if (ContainsDefs && ContainsUses) {
    SmallPtrSet<MachineInstr*, 2>& BlockDefs = Defs[MBB];
    SmallPtrSet<MachineInstr*, 2>& BlockUses = Uses[MBB];

    // This case is basically a merging of the two preceding cases, with the
    // special note that checking for defs must take precedence over checking
    // for uses, because of two-address instructions.

    if (UseI == MBB->begin())
      return PerformPHIConstructionFallBack(UseI, MBB, LI, Visited, Defs, Uses,
                                            NewVNs, LiveOut, Phis,
                                            IsTopLevel, IsIntraBlock);

    MachineBasicBlock::iterator Walker = UseI;
    --Walker;
    bool foundDef = false;
    bool foundUse = false;
    while (Walker != MBB->begin()) {
      if (BlockDefs.count(Walker)) {
        foundDef = true;
        break;
      } else if (BlockUses.count(Walker)) {
        foundUse = true;
        break;
      }
      --Walker;
    }

    // Must check begin() too.
    if (!foundDef && !foundUse) {
      if (BlockDefs.count(Walker))
        foundDef = true;
      else if (BlockUses.count(Walker))
        foundUse = true;
      else
        return PerformPHIConstructionFallBack(UseI, MBB, LI, Visited, Defs,
                                              Uses, NewVNs, LiveOut, Phis,
                                              IsTopLevel, IsIntraBlock);
    }

    unsigned StartIndex = LIs->getInstructionIndex(Walker);
    StartIndex = foundDef ? LiveIntervals::getDefIndex(StartIndex) :
                            LiveIntervals::getUseIndex(StartIndex);
    unsigned EndIndex = 0;
    if (IsIntraBlock) {
      EndIndex = LIs->getInstructionIndex(UseI);
      EndIndex = LiveIntervals::getUseIndex(EndIndex);
    } else
      EndIndex = LIs->getMBBEndIdx(MBB);

    if (foundDef)
      RetVNI = NewVNs[Walker];
    else
      RetVNI = PerformPHIConstruction(Walker, MBB, LI, Visited, Defs, Uses,
                                      NewVNs, LiveOut, Phis, false, true);

    LI->addRange(LiveRange(StartIndex, EndIndex+1, RetVNI));

    if (foundUse && LI->isKill(RetVNI, StartIndex))
      LI->removeKill(RetVNI, StartIndex);
    if (IsIntraBlock) {
      LI->addKill(RetVNI, EndIndex);
    }
  }

  // Memoize results so we don't have to recompute them.
  if (!IsIntraBlock) LiveOut[MBB] = RetVNI;
  else {
    if (!NewVNs.count(UseI))
      NewVNs[UseI] = RetVNI;
    Visited.insert(UseI);
  }

  return RetVNI;
}

/// PerformPHIConstructionFallBack - PerformPHIConstruction fall back path.
///
VNInfo*
PreAllocSplitting::PerformPHIConstructionFallBack(MachineBasicBlock::iterator UseI,
                                 MachineBasicBlock* MBB, LiveInterval* LI,
                                 SmallPtrSet<MachineInstr*, 4>& Visited,
            DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> >& Defs,
            DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> >& Uses,
                                 DenseMap<MachineInstr*, VNInfo*>& NewVNs,
                                 DenseMap<MachineBasicBlock*, VNInfo*>& LiveOut,
                                 DenseMap<MachineBasicBlock*, VNInfo*>& Phis,
                                 bool IsTopLevel, bool IsIntraBlock) {
  // NOTE: Because this is the fallback case from other cases, we do NOT
  // assume that we are not intrablock here.
  if (Phis.count(MBB)) return Phis[MBB];

  unsigned StartIndex = LIs->getMBBStartIdx(MBB);
  VNInfo *RetVNI = Phis[MBB] = LI->getNextValue(~0U, /*FIXME*/ 0,
                                                LIs->getVNInfoAllocator());
  if (!IsIntraBlock) LiveOut[MBB] = RetVNI;

  // If there are no uses or defs between our starting point and the
  // beginning of the block, then recursively perform phi construction
  // on our predecessors.
  DenseMap<MachineBasicBlock*, VNInfo*> IncomingVNs;
  for (MachineBasicBlock::pred_iterator PI = MBB->pred_begin(),
         PE = MBB->pred_end(); PI != PE; ++PI) {
    VNInfo* Incoming = PerformPHIConstruction((*PI)->end(), *PI, LI,
                                              Visited, Defs, Uses, NewVNs,
                                              LiveOut, Phis, false, false);
    if (Incoming != 0)
      IncomingVNs[*PI] = Incoming;
  }

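  // With a single predecessor and no PHI kill, the provisional value created
  // above is redundant: merge it into the lone incoming value and patch every
  // table (LiveOut, NewVNs, Phis) that may already refer to the old value.
  // Otherwise a real PHI join of the incoming values is recorded below.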
  if (MBB->pred_size() == 1 && !RetVNI->hasPHIKill) {
    VNInfo* OldVN = RetVNI;
    VNInfo* NewVN = IncomingVNs.begin()->second;
    VNInfo* MergedVN = LI->MergeValueNumberInto(OldVN, NewVN);
    if (MergedVN == OldVN) std::swap(OldVN, NewVN);

    for (DenseMap<MachineBasicBlock*, VNInfo*>::iterator LOI = LiveOut.begin(),
         LOE = LiveOut.end(); LOI != LOE; ++LOI)
      if (LOI->second == OldVN)
        LOI->second = MergedVN;
    for (DenseMap<MachineInstr*, VNInfo*>::iterator NVI = NewVNs.begin(),
         NVE = NewVNs.end(); NVI != NVE; ++NVI)
      if (NVI->second == OldVN)
        NVI->second = MergedVN;
    for (DenseMap<MachineBasicBlock*, VNInfo*>::iterator PI = Phis.begin(),
         PE = Phis.end(); PI != PE; ++PI)
      if (PI->second == OldVN)
        PI->second = MergedVN;
    RetVNI = MergedVN;
  } else {
    // Otherwise, merge the incoming VNInfos with a phi join. Create a new
    // VNInfo to represent the joined value.
    for (DenseMap<MachineBasicBlock*, VNInfo*>::iterator I =
         IncomingVNs.begin(), E = IncomingVNs.end(); I != E; ++I) {
      I->second->hasPHIKill = true;
      unsigned KillIndex = LIs->getMBBEndIdx(I->first);
      if (!LiveInterval::isKill(I->second, KillIndex))
        LI->addKill(I->second, KillIndex);
    }
  }

  unsigned EndIndex = 0;
  if (IsIntraBlock) {
    EndIndex = LIs->getInstructionIndex(UseI);
    EndIndex = LiveIntervals::getUseIndex(EndIndex);
  } else
    EndIndex = LIs->getMBBEndIdx(MBB);
  LI->addRange(LiveRange(StartIndex, EndIndex+1, RetVNI));
  if (IsIntraBlock)
    LI->addKill(RetVNI, EndIndex);

  // Memoize results so we don't have to recompute them.
  if (!IsIntraBlock)
    LiveOut[MBB] = RetVNI;
  else {
    if (!NewVNs.count(UseI))
      NewVNs[UseI] = RetVNI;
    Visited.insert(UseI);
  }

  return RetVNI;
}

/// ReconstructLiveInterval - Recompute a live interval from scratch.
void PreAllocSplitting::ReconstructLiveInterval(LiveInterval* LI) {
  BumpPtrAllocator& Alloc = LIs->getVNInfoAllocator();

  // Clear the old ranges and valnos.
  LI->clear();

  // Cache the uses and defs of the register.
  typedef DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> > RegMap;
  RegMap Defs, Uses;

  // Keep track of the new VNs we're creating.
  DenseMap<MachineInstr*, VNInfo*> NewVNs;
  SmallPtrSet<VNInfo*, 2> PhiVNs;

  // Cache defs, and create a new VNInfo for each def.
  for (MachineRegisterInfo::def_iterator DI = MRI->def_begin(LI->reg),
       DE = MRI->def_end(); DI != DE; ++DI) {
    Defs[(*DI).getParent()].insert(&*DI);

    unsigned DefIdx = LIs->getInstructionIndex(&*DI);
    DefIdx = LiveIntervals::getDefIndex(DefIdx);

    VNInfo* NewVN = LI->getNextValue(DefIdx, 0, Alloc);

    // If the def is a move, set the copy field.
    unsigned SrcReg, DstReg, SrcSubIdx, DstSubIdx;
    if (TII->isMoveInstr(*DI, SrcReg, DstReg, SrcSubIdx, DstSubIdx))
      if (DstReg == LI->reg)
        NewVN->copy = &*DI;

    NewVNs[&*DI] = NewVN;
  }

  // Cache uses as a separate pass from actually processing them.
  for (MachineRegisterInfo::use_iterator UI = MRI->use_begin(LI->reg),
       UE = MRI->use_end(); UI != UE; ++UI)
    Uses[(*UI).getParent()].insert(&*UI);

  // Now, actually process every use and use a phi construction algorithm
  // to walk from it to its reaching definitions, building VNInfos along
  // the way.
  DenseMap<MachineBasicBlock*, VNInfo*> LiveOut;
  DenseMap<MachineBasicBlock*, VNInfo*> Phis;
  SmallPtrSet<MachineInstr*, 4> Visited;
  for (MachineRegisterInfo::use_iterator UI = MRI->use_begin(LI->reg),
       UE = MRI->use_end(); UI != UE; ++UI) {
    PerformPHIConstruction(&*UI, UI->getParent(), LI, Visited, Defs,
                           Uses, NewVNs, LiveOut, Phis, true, true);
  }

  // Add ranges for dead defs.
  for (MachineRegisterInfo::def_iterator DI = MRI->def_begin(LI->reg),
       DE = MRI->def_end(); DI != DE; ++DI) {
    unsigned DefIdx = LIs->getInstructionIndex(&*DI);
    DefIdx = LiveIntervals::getDefIndex(DefIdx);

    if (LI->liveAt(DefIdx)) continue;

    VNInfo* DeadVN = NewVNs[&*DI];
    LI->addRange(LiveRange(DefIdx, DefIdx+1, DeadVN));
    LI->addKill(DeadVN, DefIdx);
  }
}

/// RenumberValno - Split the given valno out into a new vreg, allowing it to
/// be allocated to a different register. This function creates a new vreg,
/// copies the valno and its live ranges over to the new vreg's interval,
/// removes them from the old interval, and rewrites all uses and defs of
/// the original reg to the new vreg within those ranges.
void PreAllocSplitting::RenumberValno(VNInfo* VN) {
  SmallVector<VNInfo*, 4> Stack;
  SmallVector<VNInfo*, 4> VNsToCopy;
  Stack.push_back(VN);

  // Walk through and copy the valno we care about, and any other valnos
  // that are two-address redefinitions of the one we care about. These
  // will need to be rewritten as well. We also check for safety of the
  // renumbering here, by making sure that none of the valnos involved has
  // phi kills.
  while (!Stack.empty()) {
    VNInfo* OldVN = Stack.back();
    Stack.pop_back();

    // Bail out if we ever encounter a valno that has a PHI kill. We can't
    // renumber these.
    if (OldVN->hasPHIKill) return;

    VNsToCopy.push_back(OldVN);

    // Locate two-address redefinitions.
    for (SmallVector<unsigned, 4>::iterator KI = OldVN->kills.begin(),
         KE = OldVN->kills.end(); KI != KE; ++KI) {
      MachineInstr* MI = LIs->getInstructionFromIndex(*KI);
      unsigned DefIdx = MI->findRegisterDefOperandIdx(CurrLI->reg);
      if (DefIdx == ~0U) continue;
      if (MI->isRegReDefinedByTwoAddr(DefIdx)) {
        VNInfo* NextVN =
          CurrLI->findDefinedVNInfo(LiveIntervals::getDefIndex(*KI));
        if (NextVN == OldVN) continue;
        Stack.push_back(NextVN);
      }
    }
  }

  // Create the new vreg.
  unsigned NewVReg = MRI->createVirtualRegister(MRI->getRegClass(CurrLI->reg));

  // Create the new live interval.
  LiveInterval& NewLI = LIs->getOrCreateInterval(NewVReg);

  for (SmallVector<VNInfo*, 4>::iterator OI = VNsToCopy.begin(), OE =
       VNsToCopy.end(); OI != OE; ++OI) {
    VNInfo* OldVN = *OI;

    // Copy the valno over.
    VNInfo* NewVN = NewLI.getNextValue(OldVN->def, OldVN->copy,
                                       LIs->getVNInfoAllocator());
    NewLI.copyValNumInfo(NewVN, OldVN);
    NewLI.MergeValueInAsValue(*CurrLI, OldVN, NewVN);

    // Remove the valno from the old interval.
    CurrLI->removeValNo(OldVN);
  }

  // Rewrite defs and uses. This is done in two stages to avoid invalidating
  // the reg_iterator.
  SmallVector<std::pair<MachineInstr*, unsigned>, 8> OpsToChange;

  for (MachineRegisterInfo::reg_iterator I = MRI->reg_begin(CurrLI->reg),
       E = MRI->reg_end(); I != E; ++I) {
    MachineOperand& MO = I.getOperand();
    unsigned InstrIdx = LIs->getInstructionIndex(&*I);

    if ((MO.isUse() && NewLI.liveAt(LiveIntervals::getUseIndex(InstrIdx))) ||
        (MO.isDef() && NewLI.liveAt(LiveIntervals::getDefIndex(InstrIdx))))
      OpsToChange.push_back(std::make_pair(&*I, I.getOperandNo()));
  }

  for (SmallVector<std::pair<MachineInstr*, unsigned>, 8>::iterator I =
       OpsToChange.begin(), E = OpsToChange.end(); I != E; ++I) {
    MachineInstr* Inst = I->first;
    unsigned OpIdx = I->second;
    MachineOperand& MO = Inst->getOperand(OpIdx);
    MO.setReg(NewVReg);
  }

  // The renumbered vreg shares a stack slot with the old register.
  if (IntervalSSMap.count(CurrLI->reg))
    IntervalSSMap[NewVReg] = IntervalSSMap[CurrLI->reg];

  NumRenumbers++;
}

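/// Rematerialize - Attempt to rematerialize the definition of the split value
/// just before the restore point instead of spilling and reloading it, then
/// rebuild and renumber the affected live interval. Returns true on success.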
bool PreAllocSplitting::Rematerialize(unsigned vreg, VNInfo* ValNo,
                                      MachineInstr* DefMI,
                                      MachineBasicBlock::iterator RestorePt,
                                      unsigned RestoreIdx,
                                      SmallPtrSet<MachineInstr*, 4>& RefsInMBB) {
  MachineBasicBlock& MBB = *RestorePt->getParent();

  MachineBasicBlock::iterator KillPt = BarrierMBB->end();
  unsigned KillIdx = 0;
  if (ValNo->def == ~0U || DefMI->getParent() == BarrierMBB)
    KillPt = findSpillPoint(BarrierMBB, Barrier, NULL, RefsInMBB, KillIdx);
  else
    KillPt = findNextEmptySlot(DefMI->getParent(), DefMI, KillIdx);

  if (KillPt == DefMI->getParent()->end())
    return false;

  TII->reMaterialize(MBB, RestorePt, vreg, DefMI);
  LIs->InsertMachineInstrInMaps(prior(RestorePt), RestoreIdx);

  ReconstructLiveInterval(CurrLI);
  unsigned RematIdx = LIs->getInstructionIndex(prior(RestorePt));
  RematIdx = LiveIntervals::getDefIndex(RematIdx);
  RenumberValno(CurrLI->findDefinedVNInfo(RematIdx));

  ++NumSplits;
  ++NumRemats;
  return true;
}

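/// FoldSpill - Try to fold the spill of vreg into a suitable instruction
/// between DefMI and the barrier, storing directly to a stack slot (reused or
/// newly created, returned in SS). Returns the folded instruction, or null if
/// no fold was possible.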
MachineInstr* PreAllocSplitting::FoldSpill(unsigned vreg,
                                           const TargetRegisterClass* RC,
                                           MachineInstr* DefMI,
                                           MachineInstr* Barrier,
                                           MachineBasicBlock* MBB,
                                           int& SS,
                                           SmallPtrSet<MachineInstr*, 4>& RefsInMBB) {
  MachineBasicBlock::iterator Pt = MBB->begin();

  // If the vreg has no references in this block, there is nothing to fold
  // the spill into.
  if (RefsInMBB.empty())
    return 0;

  MachineBasicBlock::iterator FoldPt = Barrier;
  while (&*FoldPt != DefMI && FoldPt != MBB->begin() &&
         !RefsInMBB.count(FoldPt))
    --FoldPt;

  int OpIdx = FoldPt->findRegisterDefOperandIdx(vreg, false);
  if (OpIdx == -1)
    return 0;

  SmallVector<unsigned, 1> Ops;
  Ops.push_back(OpIdx);

  if (!TII->canFoldMemoryOperand(FoldPt, Ops))
    return 0;

  DenseMap<unsigned, int>::iterator I = IntervalSSMap.find(vreg);
  if (I != IntervalSSMap.end()) {
    SS = I->second;
  } else {
    SS = MFI->CreateStackObject(RC->getSize(), RC->getAlignment());
  }

  MachineInstr* FMI = TII->foldMemoryOperand(*MBB->getParent(),
                                             FoldPt, Ops, SS);

  if (FMI) {
    LIs->ReplaceMachineInstrInMaps(FoldPt, FMI);
    FMI = MBB->insert(MBB->erase(FoldPt), FMI);
    ++NumFolds;

    IntervalSSMap[vreg] = SS;
    CurrSLI = &LSs->getOrCreateInterval(SS);
    if (CurrSLI->hasAtLeastOneValue())
      CurrSValNo = CurrSLI->getValNumInfo(0);
    else
      CurrSValNo = CurrSLI->getNextValue(~0U, 0, LSs->getVNInfoAllocator());
  }

  return FMI;
}

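/// FoldRestore - Try to fold the restore of vreg into a use of the value
/// after the barrier, loading directly from stack slot SS. Returns the folded
/// instruction, or null if no fold was possible.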
MachineInstr* PreAllocSplitting::FoldRestore(unsigned vreg,
                                             const TargetRegisterClass* RC,
                                             MachineInstr* Barrier,
                                             MachineBasicBlock* MBB,
                                             int SS,
                                             SmallPtrSet<MachineInstr*, 4>& RefsInMBB) {
  // If the vreg has no references in this block, there is nothing to fold
  // the restore into.
  if (RefsInMBB.empty())
    return 0;

  // Can't fold a restore between a call stack setup and teardown.
  MachineBasicBlock::iterator FoldPt = Barrier;
  while (FoldPt != MBB->getFirstTerminator() && !RefsInMBB.count(FoldPt)) {
    ++FoldPt;

    if (FoldPt->getOpcode() == TRI->getCallFrameSetupOpcode()) {
      while (FoldPt != MBB->getFirstTerminator() &&
             FoldPt->getOpcode() != TRI->getCallFrameDestroyOpcode()) {
        if (RefsInMBB.count(FoldPt))
          return 0;

        ++FoldPt;
      }

      ++FoldPt;
    }
  }

  if (FoldPt == MBB->getFirstTerminator())
    return 0;

  int OpIdx = FoldPt->findRegisterUseOperandIdx(vreg, true);
  if (OpIdx == -1)
    return 0;

  SmallVector<unsigned, 1> Ops;
  Ops.push_back(OpIdx);

  if (!TII->canFoldMemoryOperand(FoldPt, Ops))
    return 0;

  MachineInstr* FMI = TII->foldMemoryOperand(*MBB->getParent(),
                                             FoldPt, Ops, SS);

  if (FMI) {
    LIs->ReplaceMachineInstrInMaps(FoldPt, FMI);
    FMI = MBB->insert(MBB->erase(FoldPt), FMI);
    ++NumFolds;
  }

  return FMI;
}

/// SplitRegLiveInterval - Split (spill and restore) the given live interval
/// so it would not cross the barrier that's being processed. Shrink wrap
/// (minimize) the live interval to the last uses.
bool PreAllocSplitting::SplitRegLiveInterval(LiveInterval *LI) {
  CurrLI = LI;

  // Find live range where current interval crosses the barrier.
  LiveInterval::iterator LR =
    CurrLI->FindLiveRangeContaining(LIs->getUseIndex(BarrierIdx));
  VNInfo *ValNo = LR->valno;

  if (ValNo->def == ~1U) {
    // Defined by a dead def? How can this be?
    assert(0 && "Val# is defined by a dead def?");
    abort();
  }

  MachineInstr *DefMI = (ValNo->def != ~0U)
    ? LIs->getInstructionFromIndex(ValNo->def) : NULL;

  // If this would create a new join point, do not split.
  if (DefMI && createsNewJoin(LR, DefMI->getParent(), Barrier->getParent()))
    return false;

  // Find all references in the barrier mbb.
  SmallPtrSet<MachineInstr*, 4> RefsInMBB;
  for (MachineRegisterInfo::reg_iterator I = MRI->reg_begin(CurrLI->reg),
       E = MRI->reg_end(); I != E; ++I) {
    MachineInstr *RefMI = &*I;
    if (RefMI->getParent() == BarrierMBB)
      RefsInMBB.insert(RefMI);
  }

  // Find a point to restore the value after the barrier.
  unsigned RestoreIndex = 0;
  MachineBasicBlock::iterator RestorePt =
    findRestorePoint(BarrierMBB, Barrier, LR->end, RefsInMBB, RestoreIndex);
  if (RestorePt == BarrierMBB->end())
    return false;

  if (DefMI && LIs->isReMaterializable(*LI, ValNo, DefMI))
    if (Rematerialize(LI->reg, ValNo, DefMI, RestorePt,
                      RestoreIndex, RefsInMBB))
      return true;

  // Add a spill either before the barrier or after the definition.
  MachineBasicBlock *DefMBB = DefMI ? DefMI->getParent() : NULL;
  const TargetRegisterClass *RC = MRI->getRegClass(CurrLI->reg);
  unsigned SpillIndex = 0;
  MachineInstr *SpillMI = NULL;
  int SS = -1;
  if (ValNo->def == ~0U) {
    // If it's defined by a phi, we must split just before the barrier.
    if ((SpillMI = FoldSpill(LI->reg, RC, 0, Barrier,
                             BarrierMBB, SS, RefsInMBB))) {
      SpillIndex = LIs->getInstructionIndex(SpillMI);
    } else {
      MachineBasicBlock::iterator SpillPt =
        findSpillPoint(BarrierMBB, Barrier, NULL, RefsInMBB, SpillIndex);
      if (SpillPt == BarrierMBB->begin())
        return false; // No gap to insert spill.
      // Add spill.

      SS = CreateSpillStackSlot(CurrLI->reg, RC);
      TII->storeRegToStackSlot(*BarrierMBB, SpillPt, CurrLI->reg, true, SS, RC);
      SpillMI = prior(SpillPt);
      LIs->InsertMachineInstrInMaps(SpillMI, SpillIndex);
    }
  } else if (!IsAvailableInStack(DefMBB, CurrLI->reg, ValNo->def,
                                 RestoreIndex, SpillIndex, SS)) {
    // If it's already split, just restore the value. There is no need to spill
    // the def again.
    if (!DefMI)
      return false; // Def is dead. Do nothing.

    if ((SpillMI = FoldSpill(LI->reg, RC, DefMI, Barrier,
                             BarrierMBB, SS, RefsInMBB))) {
      SpillIndex = LIs->getInstructionIndex(SpillMI);
    } else {
      // Check if it's possible to insert a spill after the def MI.
      MachineBasicBlock::iterator SpillPt;
      if (DefMBB == BarrierMBB) {
        // Add spill after the def and the last use before the barrier.
        SpillPt = findSpillPoint(BarrierMBB, Barrier, DefMI,
                                 RefsInMBB, SpillIndex);
        if (SpillPt == DefMBB->begin())
          return false; // No gap to insert spill.
      } else {
        SpillPt = findNextEmptySlot(DefMBB, DefMI, SpillIndex);
        if (SpillPt == DefMBB->end())
          return false; // No gap to insert spill.
      }
      // Add spill. The store instruction kills the register if def is before
      // the barrier in the barrier block.
      SS = CreateSpillStackSlot(CurrLI->reg, RC);
      TII->storeRegToStackSlot(*DefMBB, SpillPt, CurrLI->reg,
                               DefMBB == BarrierMBB, SS, RC);
      SpillMI = prior(SpillPt);
      LIs->InsertMachineInstrInMaps(SpillMI, SpillIndex);
    }
  }

  // Remember def instruction index to spill index mapping.
  if (DefMI && SpillMI)
    Def2SpillMap[ValNo->def] = SpillIndex;

  // Add restore.
  bool FoldedRestore = false;
  if (MachineInstr* LMI = FoldRestore(CurrLI->reg, RC, Barrier,
                                      BarrierMBB, SS, RefsInMBB)) {
    RestorePt = LMI;
    FoldedRestore = true;
  } else {
    TII->loadRegFromStackSlot(*BarrierMBB, RestorePt, CurrLI->reg, SS, RC);
    MachineInstr *LoadMI = prior(RestorePt);
    LIs->InsertMachineInstrInMaps(LoadMI, RestoreIndex);
  }

  // Update spill stack slot live interval.
  UpdateSpillSlotInterval(ValNo, LIs->getUseIndex(SpillIndex)+1,
                          LIs->getDefIndex(RestoreIndex));

  ReconstructLiveInterval(CurrLI);

  if (!FoldedRestore) {
    unsigned RestoreIdx = LIs->getInstructionIndex(prior(RestorePt));
    RestoreIdx = LiveIntervals::getDefIndex(RestoreIdx);
    RenumberValno(CurrLI->findDefinedVNInfo(RestoreIdx));
  }

  ++NumSplits;
  return true;
}

1197/// SplitRegLiveIntervals - Split all register live intervals that cross the
1198/// barrier that's being processed.
1199bool
Owen Anderson956ec272009-01-23 00:23:32 +00001200PreAllocSplitting::SplitRegLiveIntervals(const TargetRegisterClass **RCs,
1201 SmallPtrSet<LiveInterval*, 8>& Split) {
Evan Chengf5cd4f02008-10-23 20:43:13 +00001202 // First find all the virtual registers whose live intervals are intercepted
1203 // by the current barrier.
1204 SmallVector<LiveInterval*, 8> Intervals;
1205 for (const TargetRegisterClass **RC = RCs; *RC; ++RC) {
Evan Cheng4350eb82009-02-06 17:17:30 +00001206 // FIXME: If it's not safe to move any instruction that defines the barrier
1207 // register class, then it means there are some special dependencies which
1208 // codegen is not modelling. Ignore these barriers for now.
1209 if (!TII->isSafeToMoveRegClassDefs(*RC))
Evan Cheng23066282008-10-27 07:14:50 +00001210 continue;
Evan Chengf5cd4f02008-10-23 20:43:13 +00001211 std::vector<unsigned> &VRs = MRI->getRegClassVirtRegs(*RC);
1212 for (unsigned i = 0, e = VRs.size(); i != e; ++i) {
1213 unsigned Reg = VRs[i];
1214 if (!LIs->hasInterval(Reg))
1215 continue;
1216 LiveInterval *LI = &LIs->getInterval(Reg);
1217 if (LI->liveAt(BarrierIdx) && !Barrier->readsRegister(Reg))
1218 // Virtual register live interval is intercepted by the barrier. We
1219 // should split and shrink wrap its interval if possible.
1220 Intervals.push_back(LI);
1221 }
1222 }
1223
1224 // Process the affected live intervals.
1225 bool Change = false;
1226 while (!Intervals.empty()) {
Evan Chengae7fa5b2008-10-28 01:48:24 +00001227 if (PreSplitLimit != -1 && (int)NumSplits == PreSplitLimit)
1228 break;
Evan Chengf5cd4f02008-10-23 20:43:13 +00001231 LiveInterval *LI = Intervals.back();
1232 Intervals.pop_back();
Owen Anderson956ec272009-01-23 00:23:32 +00001233 bool result = SplitRegLiveInterval(LI);
1234 if (result) Split.insert(LI);
1235 Change |= result;
Evan Chengf5cd4f02008-10-23 20:43:13 +00001236 }
1237
1238 return Change;
1239}
1240
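/// getNumberOfNonSpills - Given the set of instructions that use the value
/// being considered, count how many of them are not stores of Reg into the
/// given stack slot. Also record in FeedsTwoAddr whether any of these uses
/// redefines Reg through a two-address operand.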
Owen Anderson45e68552009-01-29 05:28:55 +00001241unsigned PreAllocSplitting::getNumberOfNonSpills(
Owen Anderson32ca8652009-01-24 10:07:43 +00001242 SmallPtrSet<MachineInstr*, 4>& MIs,
Owen Anderson45e68552009-01-29 05:28:55 +00001243 unsigned Reg, int FrameIndex,
1244 bool& FeedsTwoAddr) {
1245 unsigned NonSpills = 0;
Owen Anderson32ca8652009-01-24 10:07:43 +00001246 for (SmallPtrSet<MachineInstr*, 4>::iterator UI = MIs.begin(), UE = MIs.end();
Owen Anderson45e68552009-01-29 05:28:55 +00001247 UI != UE; ++UI) {
Owen Anderson32ca8652009-01-24 10:07:43 +00001248 int StoreFrameIndex;
1249 unsigned StoreVReg = TII->isStoreToStackSlot(*UI, StoreFrameIndex);
Owen Anderson45e68552009-01-29 05:28:55 +00001250 if (StoreVReg != Reg || StoreFrameIndex != FrameIndex)
1251 NonSpills++;
1252
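    // Note whether this use also redefines Reg through a two-address
    // operand; the caller will not try to fold the reload in that case.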
1253 int DefIdx = (*UI)->findRegisterDefOperandIdx(Reg);
1254 if (DefIdx != -1 && (*UI)->isRegReDefinedByTwoAddr(DefIdx))
1255 FeedsTwoAddr = true;
Owen Anderson32ca8652009-01-24 10:07:43 +00001256 }
1257
Owen Anderson45e68552009-01-29 05:28:55 +00001258 return NonSpills;
Owen Anderson32ca8652009-01-24 10:07:43 +00001259}
1260
Owen Anderson956ec272009-01-23 00:23:32 +00001261/// removeDeadSpills - After splitting, filter through all the intervals we've
1262/// split, and see if any of the spills are unnecessary. If so, remove them.
1263bool PreAllocSplitting::removeDeadSpills(SmallPtrSet<LiveInterval*, 8>& split) {
1264 bool changed = false;
1265
Owen Anderson4bfc2092009-01-29 05:41:02 +00001266 // Walk over all of the live intervals that were touched by the splitter,
1267 // and see if we can do any DCE and/or folding.
Owen Anderson956ec272009-01-23 00:23:32 +00001268 for (SmallPtrSet<LiveInterval*, 8>::iterator LI = split.begin(),
1269 LE = split.end(); LI != LE; ++LI) {
Owen Anderson9ce499a2009-01-23 03:28:53 +00001270 DenseMap<VNInfo*, SmallPtrSet<MachineInstr*, 4> > VNUseCount;
Owen Anderson956ec272009-01-23 00:23:32 +00001271
Owen Anderson4bfc2092009-01-29 05:41:02 +00001272    // First, collect all the uses of the vreg, and group them by their
1273 // reaching definition (VNInfo).
Owen Anderson956ec272009-01-23 00:23:32 +00001274 for (MachineRegisterInfo::use_iterator UI = MRI->use_begin((*LI)->reg),
1275 UE = MRI->use_end(); UI != UE; ++UI) {
1276 unsigned index = LIs->getInstructionIndex(&*UI);
1277 index = LiveIntervals::getUseIndex(index);
1278
1279 const LiveRange* LR = (*LI)->getLiveRangeContaining(index);
Owen Anderson9ce499a2009-01-23 03:28:53 +00001280 VNUseCount[LR->valno].insert(&*UI);
Owen Anderson956ec272009-01-23 00:23:32 +00001281 }
1282
Owen Anderson4bfc2092009-01-29 05:41:02 +00001283 // Now, take the definitions (VNInfo's) one at a time and try to DCE
1284 // and/or fold them away.
Owen Anderson956ec272009-01-23 00:23:32 +00001285 for (LiveInterval::vni_iterator VI = (*LI)->vni_begin(),
1286 VE = (*LI)->vni_end(); VI != VE; ++VI) {
Owen Anderson45e68552009-01-29 05:28:55 +00001287
1288 if (DeadSplitLimit != -1 && (int)NumDeadSpills == DeadSplitLimit)
1289 return changed;
1290
Owen Anderson956ec272009-01-23 00:23:32 +00001291 VNInfo* CurrVN = *VI;
Owen Anderson4bfc2092009-01-29 05:41:02 +00001292
1293 // We don't currently try to handle definitions with PHI kills, because
1294 // it would involve processing more than one VNInfo at once.
Owen Anderson956ec272009-01-23 00:23:32 +00001295 if (CurrVN->hasPHIKill) continue;
Owen Anderson956ec272009-01-23 00:23:32 +00001296
Owen Anderson4bfc2092009-01-29 05:41:02 +00001297 // We also don't try to handle the results of PHI joins, since there's
1298 // no defining instruction to analyze.
Owen Anderson956ec272009-01-23 00:23:32 +00001299 unsigned DefIdx = CurrVN->def;
1300 if (DefIdx == ~0U || DefIdx == ~1U) continue;
Owen Anderson9ce499a2009-01-23 03:28:53 +00001301
Owen Anderson4bfc2092009-01-29 05:41:02 +00001302 // We're only interested in eliminating cruft introduced by the splitter,
1303      // which is of the form load-use or load-use-store. First, check that the
1304 // definition is a load, and remember what stack slot we loaded it from.
Owen Anderson956ec272009-01-23 00:23:32 +00001305 MachineInstr* DefMI = LIs->getInstructionFromIndex(DefIdx);
1306 int FrameIndex;
1307 if (!TII->isLoadFromStackSlot(DefMI, FrameIndex)) continue;
1308
Owen Anderson4bfc2092009-01-29 05:41:02 +00001309 // If the definition has no uses at all, just DCE it.
Owen Anderson9ce499a2009-01-23 03:28:53 +00001310 if (VNUseCount[CurrVN].size() == 0) {
1311 LIs->RemoveMachineInstrFromMaps(DefMI);
1312 (*LI)->removeValNo(CurrVN);
1313 DefMI->eraseFromParent();
Owen Andersonc0f3a032009-01-29 08:22:06 +00001314 VNUseCount.erase(CurrVN);
Owen Anderson9ce499a2009-01-23 03:28:53 +00001315 NumDeadSpills++;
1316 changed = true;
Owen Anderson32ca8652009-01-24 10:07:43 +00001317 continue;
Owen Anderson9ce499a2009-01-23 03:28:53 +00001318 }
Owen Anderson32ca8652009-01-24 10:07:43 +00001319
Owen Anderson4bfc2092009-01-29 05:41:02 +00001320 // Second, get the number of non-store uses of the definition, as well as
1321 // a flag indicating whether it feeds into a later two-address definition.
Owen Anderson45e68552009-01-29 05:28:55 +00001322 bool FeedsTwoAddr = false;
1323 unsigned NonSpillCount = getNumberOfNonSpills(VNUseCount[CurrVN],
1324 (*LI)->reg, FrameIndex,
1325 FeedsTwoAddr);
1326
Owen Anderson4bfc2092009-01-29 05:41:02 +00001327 // If there's one non-store use and it doesn't feed a two-addr, then
1328 // this is a load-use-store case that we can try to fold.
Owen Anderson45e68552009-01-29 05:28:55 +00001329 if (NonSpillCount == 1 && !FeedsTwoAddr) {
Owen Anderson4bfc2092009-01-29 05:41:02 +00001330 // Start by finding the non-store use MachineInstr.
Owen Anderson45e68552009-01-29 05:28:55 +00001331 SmallPtrSet<MachineInstr*, 4>::iterator UI = VNUseCount[CurrVN].begin();
1332 int StoreFrameIndex;
1333 unsigned StoreVReg = TII->isStoreToStackSlot(*UI, StoreFrameIndex);
1334 while (UI != VNUseCount[CurrVN].end() &&
1335 (StoreVReg == (*LI)->reg && StoreFrameIndex == FrameIndex)) {
1336 ++UI;
1337 if (UI != VNUseCount[CurrVN].end())
1338 StoreVReg = TII->isStoreToStackSlot(*UI, StoreFrameIndex);
1339 }
Owen Anderson45e68552009-01-29 05:28:55 +00001340 if (UI == VNUseCount[CurrVN].end()) continue;
1341
1342 MachineInstr* use = *UI;
1343
Owen Anderson4bfc2092009-01-29 05:41:02 +00001344 // Attempt to fold it away!
Owen Anderson45e68552009-01-29 05:28:55 +00001345 int OpIdx = use->findRegisterUseOperandIdx((*LI)->reg, false);
1346 if (OpIdx == -1) continue;
Owen Anderson45e68552009-01-29 05:28:55 +00001347 SmallVector<unsigned, 1> Ops;
1348 Ops.push_back(OpIdx);
Owen Anderson45e68552009-01-29 05:28:55 +00001349 if (!TII->canFoldMemoryOperand(use, Ops)) continue;
1350
1351 MachineInstr* NewMI =
1352 TII->foldMemoryOperand(*use->getParent()->getParent(),
1353 use, Ops, FrameIndex);
1354
1355 if (!NewMI) continue;
1356
Owen Anderson4bfc2092009-01-29 05:41:02 +00001357 // Update relevant analyses.
Owen Anderson45e68552009-01-29 05:28:55 +00001358 LIs->RemoveMachineInstrFromMaps(DefMI);
1359 LIs->ReplaceMachineInstrInMaps(use, NewMI);
1360 (*LI)->removeValNo(CurrVN);
1361
1362 DefMI->eraseFromParent();
1363 MachineBasicBlock* MBB = use->getParent();
1364 NewMI = MBB->insert(MBB->erase(use), NewMI);
1365 VNUseCount[CurrVN].erase(use);
1366
Owen Anderson4bfc2092009-01-29 05:41:02 +00001367        // The remaining uses of this value are its now-dead spill stores. Delete
1368        // them, removing them from every other value's use set as well.
Owen Anderson45e68552009-01-29 05:28:55 +00001369 for (SmallPtrSet<MachineInstr*, 4>::iterator II =
1370 VNUseCount[CurrVN].begin(), IE = VNUseCount[CurrVN].end();
1371 II != IE; ++II) {
Owen Anderson4bfc2092009-01-29 05:41:02 +00001372 for (DenseMap<VNInfo*, SmallPtrSet<MachineInstr*, 4> >::iterator
Owen Andersonc0f3a032009-01-29 08:22:06 +00001373 VNI = VNUseCount.begin(), VNE = VNUseCount.end(); VNI != VNE;
1374 ++VNI)
1375 if (VNI->first != CurrVN)
1376 VNI->second.erase(*II);
Owen Anderson45e68552009-01-29 05:28:55 +00001377 LIs->RemoveMachineInstrFromMaps(*II);
1378 (*II)->eraseFromParent();
1379 }
Owen Andersonc0f3a032009-01-29 08:22:06 +00001380
1381 VNUseCount.erase(CurrVN);
Owen Anderson45e68552009-01-29 05:28:55 +00001382
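        // Any other value's use set that referred to the folded instruction
        // should now refer to its replacement.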
1383 for (DenseMap<VNInfo*, SmallPtrSet<MachineInstr*, 4> >::iterator
1384 VI = VNUseCount.begin(), VE = VNUseCount.end(); VI != VE; ++VI)
1385 if (VI->second.erase(use))
1386 VI->second.insert(NewMI);
1387
1388 NumDeadSpills++;
1389 changed = true;
1390 continue;
1391 }
1392
Owen Anderson4bfc2092009-01-29 05:41:02 +00001393      // If any non-store uses remain (more than one, or one that feeds a
1394      // two-address def), we can't profitably fold this, so bail.
Owen Anderson45e68552009-01-29 05:28:55 +00001395 if (NonSpillCount) continue;
Owen Anderson32ca8652009-01-24 10:07:43 +00001396
Owen Anderson4bfc2092009-01-29 05:41:02 +00001397 // Otherwise, this is a load-store case, so DCE them.
Owen Anderson32ca8652009-01-24 10:07:43 +00001398 for (SmallPtrSet<MachineInstr*, 4>::iterator UI =
1399 VNUseCount[CurrVN].begin(), UE = VNUseCount[CurrVN].end();
1400           UI != UE; ++UI) {
1401 LIs->RemoveMachineInstrFromMaps(*UI);
1402 (*UI)->eraseFromParent();
1403 }
1404
Owen Andersonc0f3a032009-01-29 08:22:06 +00001405 VNUseCount.erase(CurrVN);
1406
Owen Anderson32ca8652009-01-24 10:07:43 +00001407 LIs->RemoveMachineInstrFromMaps(DefMI);
1408 (*LI)->removeValNo(CurrVN);
1409 DefMI->eraseFromParent();
1410 NumDeadSpills++;
1411 changed = true;
Owen Anderson956ec272009-01-23 00:23:32 +00001412 }
1413 }
1414
1415 return changed;
1416}
1417
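/// createsNewJoin - Return true if splitting the interval containing LR at
/// the barrier would create a new value join (phi). This is detected by
/// walking the CFG from the barrier block: if the portion of LR that lives
/// out of the barrier block can flow back into the barrier block, or into a
/// block whose immediate-dominator chain reaches the def block before the
/// barrier block, a new join would be needed.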
Owen Andersonf1f75b12008-11-04 22:22:41 +00001418bool PreAllocSplitting::createsNewJoin(LiveRange* LR,
1419 MachineBasicBlock* DefMBB,
1420 MachineBasicBlock* BarrierMBB) {
1421 if (DefMBB == BarrierMBB)
1422 return false;
1423
1424 if (LR->valno->hasPHIKill)
1425 return false;
1426
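  // If the live range ends before the end of the barrier block, nothing is
  // live out of the barrier, so no new join can be created.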
1427 unsigned MBBEnd = LIs->getMBBEndIdx(BarrierMBB);
1428 if (LR->end < MBBEnd)
1429 return false;
1430
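  // A def and barrier in different loop nests is treated as creating a join.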
1431 MachineLoopInfo& MLI = getAnalysis<MachineLoopInfo>();
1432 if (MLI.getLoopFor(DefMBB) != MLI.getLoopFor(BarrierMBB))
1433 return true;
1434
1435 MachineDominatorTree& MDT = getAnalysis<MachineDominatorTree>();
1436 SmallPtrSet<MachineBasicBlock*, 4> Visited;
1437 typedef std::pair<MachineBasicBlock*,
1438 MachineBasicBlock::succ_iterator> ItPair;
1439 SmallVector<ItPair, 4> Stack;
1440 Stack.push_back(std::make_pair(BarrierMBB, BarrierMBB->succ_begin()));
1441
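  // Iterative DFS over the blocks reachable from the barrier block, keeping
  // each block's successor iterator on an explicit stack so traversal can
  // resume where it left off.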
1442 while (!Stack.empty()) {
1443 ItPair P = Stack.back();
1444 Stack.pop_back();
1445
1446 MachineBasicBlock* PredMBB = P.first;
1447 MachineBasicBlock::succ_iterator S = P.second;
1448
1449 if (S == PredMBB->succ_end())
1450 continue;
1451 else if (Visited.count(*S)) {
1452 Stack.push_back(std::make_pair(PredMBB, ++S));
1453 continue;
1454 } else
Owen Andersonb214c692008-11-05 00:32:13 +00001455 Stack.push_back(std::make_pair(PredMBB, S+1));
Owen Andersonf1f75b12008-11-04 22:22:41 +00001456
1457 MachineBasicBlock* MBB = *S;
1458 Visited.insert(MBB);
1459
1460 if (MBB == BarrierMBB)
1461 return true;
1462
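    // Walk up this block's immediate dominator chain; reaching the def
    // block before the barrier block means a new join would be created.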
1463 MachineDomTreeNode* DefMDTN = MDT.getNode(DefMBB);
1464 MachineDomTreeNode* BarrierMDTN = MDT.getNode(BarrierMBB);
1465 MachineDomTreeNode* MDTN = MDT.getNode(MBB)->getIDom();
1466 while (MDTN) {
1467 if (MDTN == DefMDTN)
1468 return true;
1469 else if (MDTN == BarrierMDTN)
1470 break;
1471 MDTN = MDTN->getIDom();
1472 }
1473
1474 MBBEnd = LIs->getMBBEndIdx(MBB);
1475 if (LR->end > MBBEnd)
1476 Stack.push_back(std::make_pair(MBB, MBB->succ_begin()));
1477 }
1478
1479 return false;
1480}
1481
1482
Evan Cheng09e8ca82008-10-20 21:44:59 +00001483bool PreAllocSplitting::runOnMachineFunction(MachineFunction &MF) {
Evan Chengd0e32c52008-10-29 05:06:14 +00001484 CurrMF = &MF;
1485 TM = &MF.getTarget();
Owen Anderson3ef45492009-01-29 22:13:06 +00001486 TRI = TM->getRegisterInfo();
Evan Chengd0e32c52008-10-29 05:06:14 +00001487 TII = TM->getInstrInfo();
1488 MFI = MF.getFrameInfo();
1489 MRI = &MF.getRegInfo();
1490 LIs = &getAnalysis<LiveIntervals>();
1491 LSs = &getAnalysis<LiveStacks>();
Evan Chengf5cd4f02008-10-23 20:43:13 +00001492
1493 bool MadeChange = false;
1494
1495 // Make sure blocks are numbered in order.
1496 MF.RenumberBlocks();
1497
Evan Cheng54898932008-10-29 08:39:34 +00001498 MachineBasicBlock *Entry = MF.begin();
1499 SmallPtrSet<MachineBasicBlock*,16> Visited;
1500
Owen Anderson956ec272009-01-23 00:23:32 +00001501 SmallPtrSet<LiveInterval*, 8> Split;
1502
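  // Walk the function in depth-first order. For each instruction whose
  // descriptor reports register class barriers, record its index and split
  // the virtual register live intervals that cross it.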
Evan Cheng54898932008-10-29 08:39:34 +00001503 for (df_ext_iterator<MachineBasicBlock*, SmallPtrSet<MachineBasicBlock*,16> >
1504 DFI = df_ext_begin(Entry, Visited), E = df_ext_end(Entry, Visited);
1505 DFI != E; ++DFI) {
1506 BarrierMBB = *DFI;
1507 for (MachineBasicBlock::iterator I = BarrierMBB->begin(),
1508 E = BarrierMBB->end(); I != E; ++I) {
1509 Barrier = &*I;
1510 const TargetRegisterClass **BarrierRCs =
1511 Barrier->getDesc().getRegClassBarriers();
1512 if (!BarrierRCs)
1513 continue;
1514 BarrierIdx = LIs->getInstructionIndex(Barrier);
Owen Anderson956ec272009-01-23 00:23:32 +00001515 MadeChange |= SplitRegLiveIntervals(BarrierRCs, Split);
Evan Cheng54898932008-10-29 08:39:34 +00001516 }
1517 }
Evan Chengf5cd4f02008-10-23 20:43:13 +00001518
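  // Finally, try to clean up any spills and restores that splitting has
  // made unnecessary.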
Owen Anderson956ec272009-01-23 00:23:32 +00001519 MadeChange |= removeDeadSpills(Split);
1520
Evan Chengf5cd4f02008-10-23 20:43:13 +00001521 return MadeChange;
Evan Cheng09e8ca82008-10-20 21:44:59 +00001522}