blob: 9ac799cf36098d426af2d14d90fa8deed4f5cf6b [file] [log] [blame]
Evan Cheng09e8ca82008-10-20 21:44:59 +00001//===-- PreAllocSplitting.cpp - Pre-allocation Interval Splitting Pass. ---===//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file implements the machine instruction level pre-register allocation
11// live interval splitting pass. It finds live interval barriers, i.e.
12// instructions which will kill all physical registers in certain register
13// classes, and split all live intervals which cross the barrier.
14//
15//===----------------------------------------------------------------------===//
16
17#define DEBUG_TYPE "pre-alloc-split"
18#include "llvm/CodeGen/LiveIntervalAnalysis.h"
Evan Chengf5cd4f02008-10-23 20:43:13 +000019#include "llvm/CodeGen/MachineFrameInfo.h"
Evan Cheng09e8ca82008-10-20 21:44:59 +000020#include "llvm/CodeGen/MachineFunctionPass.h"
21#include "llvm/CodeGen/MachineLoopInfo.h"
22#include "llvm/CodeGen/MachineRegisterInfo.h"
23#include "llvm/CodeGen/Passes.h"
24#include "llvm/CodeGen/RegisterCoalescer.h"
Evan Chengf5cd4f02008-10-23 20:43:13 +000025#include "llvm/Target/TargetInstrInfo.h"
Evan Cheng09e8ca82008-10-20 21:44:59 +000026#include "llvm/Target/TargetMachine.h"
27#include "llvm/Target/TargetOptions.h"
28#include "llvm/Target/TargetRegisterInfo.h"
29#include "llvm/Support/CommandLine.h"
30#include "llvm/Support/Debug.h"
Evan Cheng09e8ca82008-10-20 21:44:59 +000031#include "llvm/ADT/SmallPtrSet.h"
Evan Chengf5cd4f02008-10-23 20:43:13 +000032#include "llvm/ADT/Statistic.h"
33#include <map>
Evan Cheng09e8ca82008-10-20 21:44:59 +000034using namespace llvm;
35
// NumSplit - Counts the live intervals split by this pass (shown by -stats).
STATISTIC(NumSplit , "Number of intervals split");
37
Evan Cheng09e8ca82008-10-20 21:44:59 +000038namespace {
39 class VISIBILITY_HIDDEN PreAllocSplitting : public MachineFunctionPass {
Evan Chengf5cd4f02008-10-23 20:43:13 +000040 MachineFunction *CurMF;
41 const TargetMachine *TM;
42 const TargetInstrInfo *TII;
43 MachineFrameInfo *MFI;
44 MachineRegisterInfo *MRI;
45 LiveIntervals *LIs;
Evan Cheng09e8ca82008-10-20 21:44:59 +000046
Evan Chengf5cd4f02008-10-23 20:43:13 +000047 // Barrier - Current barrier being processed.
48 MachineInstr *Barrier;
49
50 // BarrierMBB - Basic block where the barrier resides in.
51 MachineBasicBlock *BarrierMBB;
52
53 // Barrier - Current barrier index.
54 unsigned BarrierIdx;
55
56 // CurrLI - Current live interval being split.
57 LiveInterval *CurrLI;
58
59 // LIValNoSSMap - A map from live interval and val# pairs to spill slots.
60 // This records what live interval's val# has been split and what spill
61 // slot was used.
62 std::map<std::pair<unsigned, unsigned>, int> LIValNoSSMap;
63
Evan Cheng09e8ca82008-10-20 21:44:59 +000064 public:
65 static char ID;
66 PreAllocSplitting() : MachineFunctionPass(&ID) {}
67
68 virtual bool runOnMachineFunction(MachineFunction &MF);
69
70 virtual void getAnalysisUsage(AnalysisUsage &AU) const {
71 AU.addRequired<LiveIntervals>();
72 AU.addPreserved<LiveIntervals>();
Evan Cheng09e8ca82008-10-20 21:44:59 +000073 AU.addPreserved<RegisterCoalescer>();
74 if (StrongPHIElim)
75 AU.addPreservedID(StrongPHIEliminationID);
76 else
77 AU.addPreservedID(PHIEliminationID);
Evan Cheng09e8ca82008-10-20 21:44:59 +000078 MachineFunctionPass::getAnalysisUsage(AU);
79 }
80
81 virtual void releaseMemory() {
Evan Chengf5cd4f02008-10-23 20:43:13 +000082 LIValNoSSMap.clear();
Evan Cheng09e8ca82008-10-20 21:44:59 +000083 }
84
85 virtual const char *getPassName() const {
86 return "Pre-Register Allocaton Live Interval Splitting";
87 }
Evan Chengf5cd4f02008-10-23 20:43:13 +000088
89 /// print - Implement the dump method.
90 virtual void print(std::ostream &O, const Module* M = 0) const {
91 LIs->print(O, M);
92 }
93
94 void print(std::ostream *O, const Module* M = 0) const {
95 if (O) print(*O, M);
96 }
97
98 private:
99 MachineBasicBlock::iterator
100 findNextEmptySlot(MachineBasicBlock*, MachineInstr*,
101 unsigned&);
102
103 MachineBasicBlock::iterator
104 findSpillPoint(MachineBasicBlock*, MachineInstr*,
105 SmallPtrSet<MachineInstr*, 4>&, unsigned&);
106
107 MachineBasicBlock::iterator
108 findRestorePoint(MachineBasicBlock*, MachineInstr*,
109 SmallPtrSet<MachineInstr*, 4>&, unsigned&);
110
111 void RecordSplit(unsigned, unsigned, unsigned, int);
112
113 bool isAlreadySplit(unsigned, unsigned, int&);
114
115 void UpdateIntervalForSplit(VNInfo*, unsigned, unsigned);
116
117 bool ShrinkWrapToLastUse(MachineBasicBlock*,
118 std::vector<MachineOperand*>&);
119
120 void ShrinkWrapLiveInterval(VNInfo*, MachineBasicBlock*,
121 MachineBasicBlock*, SmallPtrSet<MachineBasicBlock*, 8>&,
122 DenseMap<unsigned, std::vector<MachineOperand*> >&);
123
124 bool SplitRegLiveInterval(LiveInterval*);
125
126 bool SplitRegLiveIntervals(const TargetRegisterClass **);
Evan Cheng09e8ca82008-10-20 21:44:59 +0000127 };
128} // end anonymous namespace
129
char PreAllocSplitting::ID = 0;

// Register the pass so it is selectable via -pre-alloc-splitting.
static RegisterPass<PreAllocSplitting>
X("pre-alloc-splitting", "Pre-Register Allocation Live Interval Splitting");

// Exported handle other code uses to schedule/identify this pass.
const PassInfo *const llvm::PreAllocSplittingID = &X;
136
Evan Chengf5cd4f02008-10-23 20:43:13 +0000137
138/// findNextEmptySlot - Find a gap after the given machine instruction in the
139/// instruction index map. If there isn't one, return end().
140MachineBasicBlock::iterator
141PreAllocSplitting::findNextEmptySlot(MachineBasicBlock *MBB, MachineInstr *MI,
142 unsigned &SpotIndex) {
143 MachineBasicBlock::iterator MII = MI;
144 if (++MII != MBB->end()) {
145 unsigned Index = LIs->findGapBeforeInstr(LIs->getInstructionIndex(MII));
146 if (Index) {
147 SpotIndex = Index;
148 return MII;
149 }
150 }
151 return MBB->end();
152}
153
154/// findSpillPoint - Find a gap as far away from the given MI that's suitable
155/// for spilling the current live interval. The index must be before any
156/// defs and uses of the live interval register in the mbb. Return begin() if
157/// none is found.
158MachineBasicBlock::iterator
159PreAllocSplitting::findSpillPoint(MachineBasicBlock *MBB, MachineInstr *MI,
160 SmallPtrSet<MachineInstr*, 4> &RefsInMBB,
161 unsigned &SpillIndex) {
162 MachineBasicBlock::iterator Pt = MBB->begin();
163
164 // Go top down if RefsInMBB is empty.
165 if (RefsInMBB.empty()) {
166 MachineBasicBlock::iterator MII = MBB->begin();
167 MachineBasicBlock::iterator EndPt = MI;
168 do {
169 ++MII;
170 unsigned Index = LIs->getInstructionIndex(MII);
171 unsigned Gap = LIs->findGapBeforeInstr(Index);
172 if (Gap) {
173 Pt = MII;
174 SpillIndex = Gap;
175 break;
176 }
177 } while (MII != EndPt);
178 } else {
179 MachineBasicBlock::iterator MII = MI;
180 while (MII != MBB->begin() && !RefsInMBB.count(MII)) {
181 unsigned Index = LIs->getInstructionIndex(MII);
182 if (LIs->hasGapBeforeInstr(Index)) {
183 Pt = MII;
184 SpillIndex = LIs->findGapBeforeInstr(Index, true);
185 }
186 --MII;
187 }
188 }
189
190 return Pt;
191}
192
193/// findRestorePoint - Find a gap in the instruction index map that's suitable
194/// for restoring the current live interval value. The index must be before any
195/// uses of the live interval register in the mbb. Return end() if none is
196/// found.
197MachineBasicBlock::iterator
198PreAllocSplitting::findRestorePoint(MachineBasicBlock *MBB, MachineInstr *MI,
199 SmallPtrSet<MachineInstr*, 4> &RefsInMBB,
200 unsigned &RestoreIndex) {
201 MachineBasicBlock::iterator Pt = MBB->end();
202
203 // Go bottom up if RefsInMBB is empty.
204 if (RefsInMBB.empty()) {
205 MachineBasicBlock::iterator MII = MBB->end();
206 MachineBasicBlock::iterator EndPt = MI;
207 do {
208 --MII;
209 unsigned Index = LIs->getInstructionIndex(MII);
210 unsigned Gap = LIs->hasGapBeforeInstr(Index);
211 if (Gap) {
212 Pt = MII;
213 RestoreIndex = Gap;
214 break;
215 }
216 } while (MII != EndPt);
217 } else {
218 MachineBasicBlock::iterator MII = MI;
219 MII = ++MII;
220 while (MII != MBB->end()) {
221 unsigned Index = LIs->getInstructionIndex(MII);
222 unsigned Gap = LIs->findGapBeforeInstr(Index);
223 if (Gap) {
224 Pt = MII;
225 RestoreIndex = Gap;
226 }
227 if (RefsInMBB.count(MII))
228 break;
229 ++MII;
230 }
231 }
232
233 return Pt;
234}
235
236/// RecordSplit - Given a register live interval is split, remember the spill
237/// slot where the val#s are in.
238void PreAllocSplitting::RecordSplit(unsigned Reg, unsigned SpillIndex,
239 unsigned RestoreIndex, int SS) {
240 LiveInterval::iterator LR =
241 CurrLI->FindLiveRangeContaining(LIs->getUseIndex(SpillIndex));
242 LIValNoSSMap.insert(std::make_pair(std::make_pair(CurrLI->reg, LR->valno->id),
243 SS));
244 LR = CurrLI->FindLiveRangeContaining(LIs->getDefIndex(RestoreIndex));
245 LIValNoSSMap.insert(std::make_pair(std::make_pair(CurrLI->reg, LR->valno->id),
246 SS));
247}
248
249/// isAlreadySplit - Return if a given val# of a register live interval is already
250/// split. Also return by reference the spill stock where the value is.
251bool PreAllocSplitting::isAlreadySplit(unsigned Reg, unsigned ValNoId, int &SS){
252 std::map<std::pair<unsigned, unsigned>, int>::iterator I =
253 LIValNoSSMap.find(std::make_pair(Reg, ValNoId));
254 if (I == LIValNoSSMap.end())
255 return false;
256 SS = I->second;
257 return true;
258}
259
/// UpdateIntervalForSplit - Given the specified val# of the current live
/// interval is being split, and the split and rejoin indices, update the live
/// interval accordingly: replace the old val# by two new val#s, one covering
/// the ranges reached by the original definition (up to SplitIndex) and one
/// covering the ranges reached by the restore (from JoinIndex on).
void
PreAllocSplitting::UpdateIntervalForSplit(VNInfo *ValNo, unsigned SplitIndex,
                                          unsigned JoinIndex) {
  // Ranges (start, end) and kill indices that belong to the original
  // definition ("Before") and to the restore ("After").
  SmallVector<std::pair<unsigned,unsigned>, 4> Before;
  SmallVector<std::pair<unsigned,unsigned>, 4> After;
  SmallVector<unsigned, 4> BeforeKills;
  SmallVector<unsigned, 4> AfterKills;
  SmallPtrSet<const LiveRange*, 4> Processed;

  // First, let's figure out which parts of the live interval is now defined
  // by the restore, which are defined by the original definition.
  const LiveRange *LR = CurrLI->getLiveRangeContaining(JoinIndex);
  After.push_back(std::make_pair(JoinIndex, LR->end));
  assert(LR->contains(SplitIndex));
  Before.push_back(std::make_pair(LR->start, SplitIndex));
  BeforeKills.push_back(SplitIndex);
  Processed.insert(LR);

  // Flood-fill forward from the block containing the end of the range that
  // holds the restore: every unprocessed range of this val# that starts live
  // into a reachable block is attributed to the restore ("After").
  SmallVector<MachineBasicBlock*, 4> WorkList;
  MachineBasicBlock *MBB = LIs->getMBBFromIndex(LR->end-1);
  for (MachineBasicBlock::succ_iterator SI = MBB->succ_begin(),
         SE = MBB->succ_end(); SI != SE; ++SI)
    WorkList.push_back(*SI);

  while (!WorkList.empty()) {
    MBB = WorkList.back();
    WorkList.pop_back();
    unsigned Idx = LIs->getMBBStartIdx(MBB);
    LR = CurrLI->getLiveRangeContaining(Idx);
    if (LR && LR->valno == ValNo && !Processed.count(LR)) {
      After.push_back(std::make_pair(LR->start, LR->end));
      if (CurrLI->isKill(ValNo, LR->end))
        AfterKills.push_back(LR->end);
      Idx = LIs->getMBBEndIdx(MBB);
      // If the range extends beyond this block, keep following successors —
      // both of this block and of the block where the range actually ends.
      if (LR->end > Idx) {
        for (MachineBasicBlock::succ_iterator SI = MBB->succ_begin(),
               SE = MBB->succ_end(); SI != SE; ++SI)
          WorkList.push_back(*SI);
        if (LR->end > Idx+1) {
          MBB = LIs->getMBBFromIndex(LR->end-1);
          for (MachineBasicBlock::succ_iterator SI = MBB->succ_begin(),
                 SE = MBB->succ_end(); SI != SE; ++SI)
            WorkList.push_back(*SI);
        }
      }
      Processed.insert(LR);
    }
  }

  // Every remaining range of this val# was not reached from the restore, so
  // it belongs to the original definition ("Before").
  for (LiveInterval::iterator I = CurrLI->begin(), E = CurrLI->end();
       I != E; ++I) {
    LiveRange *LR = I;
    if (LR->valno == ValNo && !Processed.count(LR)) {
      Before.push_back(std::make_pair(LR->start, LR->end));
      if (CurrLI->isKill(ValNo, LR->end))
        BeforeKills.push_back(LR->end);
    }
  }

  // Now create new val#s to represent the live ranges defined by the old def
  // those defined by the restore. Capture the old val#'s fields before
  // removeValNo() destroys it.
  unsigned AfterDef = ValNo->def;
  MachineInstr *AfterCopy = ValNo->copy;
  bool HasPHIKill = ValNo->hasPHIKill;
  CurrLI->removeValNo(ValNo);
  VNInfo *BValNo = CurrLI->getNextValue(AfterDef, AfterCopy,
                                        LIs->getVNInfoAllocator());
  VNInfo *AValNo = CurrLI->getNextValue(JoinIndex,0, LIs->getVNInfoAllocator());
  AValNo->hasPHIKill = HasPHIKill;
  CurrLI->addKills(AValNo, AfterKills);
  CurrLI->addKills(BValNo, BeforeKills);

  // Re-add the partitioned ranges under their new val#s.
  for (unsigned i = 0, e = Before.size(); i != e; ++i) {
    unsigned Start = Before[i].first;
    unsigned End   = Before[i].second;
    CurrLI->addRange(LiveRange(Start, End, BValNo));
  }
  for (unsigned i = 0, e = After.size(); i != e; ++i) {
    unsigned Start = After[i].first;
    unsigned End   = After[i].second;
    CurrLI->addRange(LiveRange(Start, End, AValNo));
  }
}
346
/// ShrinkWrapToLastUse - There are uses of the current live interval in the
/// given block, shrink wrap the live interval to the last use (i.e. remove
/// from last use to the end of the mbb). In case mbb is the where the barrier
/// is, remove from the last use to the barrier. Returns true if the last use
/// was found and marked as the new kill.
bool
PreAllocSplitting::ShrinkWrapToLastUse(MachineBasicBlock *MBB,
                                       std::vector<MachineOperand*> &Uses) {
  MachineOperand *LastMO = 0;
  MachineInstr *LastMI = 0;
  if (MBB != BarrierMBB && Uses.size() == 1) {
    // Single use, no need to traverse the block. We can't assume this for the
    // barrier bb though since the use is probably below the barrier.
    LastMO = Uses[0];
    LastMI = LastMO->getParent();
  } else {
    // Walk the block bottom-up (starting just above the barrier when this is
    // the barrier block) to find the last instruction that uses the register.
    // NOTE(review): if the barrier is the first instruction of its block,
    // the pre-loop --MII steps before begin() — confirm callers exclude
    // that case.
    SmallPtrSet<MachineInstr*, 4> UseMIs;
    for (unsigned i = 0, e = Uses.size(); i != e; ++i)
      UseMIs.insert(Uses[i]->getParent());
    MachineBasicBlock::iterator MII;
    if (MBB == BarrierMBB) {
      MII = Barrier;
      --MII;
    } else
      MII = MBB->end();
    // NOTE(review): the loop condition stops before MBB->begin(), so the
    // first instruction of the block is never examined — verify intentional.
    for (MachineBasicBlock::iterator MEE = MBB->begin(); MII != MEE; --MII) {
      MachineInstr *UseMI = &*MII;
      if (!UseMIs.count(UseMI))
        continue;
      // Locate the operand on this instruction that references the register.
      for (unsigned i = 0, e = UseMI->getNumOperands(); i != e; ++i) {
        MachineOperand &MO = UseMI->getOperand(i);
        if (MO.isReg() && MO.getReg() == CurrLI->reg) {
          LastMO = &MO;
          break;
        }
      }
      LastMI = UseMI;
      break;
    }
  }

  // Cut off live range from last use (or beginning of the mbb if there
  // are no uses in it) to the end of the mbb.
  unsigned RangeStart, RangeEnd = LIs->getMBBEndIdx(MBB)+1;
  if (LastMI) {
    RangeStart = LIs->getUseIndex(LIs->getInstructionIndex(LastMI))+1;
    assert(!LastMO->isKill() && "Last use already terminates the interval?");
    LastMO->setIsKill();
  } else {
    assert(MBB == BarrierMBB);
    RangeStart = LIs->getMBBStartIdx(MBB);
  }
  // In the barrier block, only trim up to the barrier itself.
  if (MBB == BarrierMBB)
    RangeEnd = LIs->getUseIndex(BarrierIdx);
  CurrLI->removeRange(RangeStart, RangeEnd);

  // Return true if the last use becomes a new kill.
  return LastMI;
}
405
/// ShrinkWrapLiveInterval - Recursively traverse the predecessor
/// chain to find the new 'kills' and shrink wrap the live interval to the
/// new kill indices.
void
PreAllocSplitting::ShrinkWrapLiveInterval(VNInfo *ValNo,
                              MachineBasicBlock *MBB, MachineBasicBlock *DefMBB,
                              SmallPtrSet<MachineBasicBlock*, 8> &Visited,
                        DenseMap<unsigned, std::vector<MachineOperand*> > &Uses) {
  // Visit each block at most once.
  if (!Visited.insert(MBB))
    return;

  DenseMap<unsigned, std::vector<MachineOperand*> >::iterator UMII =
    Uses.find(MBB->getNumber());
  if (UMII != Uses.end()) {
    // At least one use in this mbb, lets look for the kill.
    if (ShrinkWrapToLastUse(MBB, UMII->second))
      // Found a kill, shrink wrapping of this path ends here.
      return;
  } else {
    // Remove entire live range of the bb out of the live interval.
    CurrLI->removeRange(LIs->getMBBStartIdx(MBB), LIs->getMBBEndIdx(MBB));
    // This path is known-unfinished: deliberately abort rather than emit
    // wrong code.
    abort(); // FIXME
  }

  if (MBB == DefMBB)
    // Reached the def mbb, stop traversing this path further.
    return;

  // Traverse the paths up the predecessor chains further.
  for (MachineBasicBlock::pred_iterator PI = MBB->pred_begin(),
         PE = MBB->pred_end(); PI != PE; ++PI) {
    MachineBasicBlock *Pred = *PI;
    if (Pred == MBB)
      continue;
    if (Pred == DefMBB && ValNo->hasPHIKill)
      // Pred is the def bb and the def reaches other val#s, we must
      // allow the value to be live out of the bb.
      continue;
    ShrinkWrapLiveInterval(ValNo, Pred, DefMBB, Visited, Uses);
  }

  return;
}
449
/// SplitRegLiveInterval - Split (spill and restore) the given live interval
/// so it would not cross the barrier that's being processed. Shrink wrap
/// (minimize) the live interval to the last uses. Returns true if the
/// interval was actually split.
bool PreAllocSplitting::SplitRegLiveInterval(LiveInterval *LI) {
  CurrLI = LI;

  // Find live range where current interval cross the barrier.
  LiveInterval::iterator LR =
    CurrLI->FindLiveRangeContaining(LIs->getUseIndex(BarrierIdx));
  VNInfo *ValNo = LR->valno;

  // ~1U appears to be the sentinel for a dead def — TODO confirm against
  // VNInfo's documentation.
  if (ValNo->def == ~1U) {
    // Defined by a dead def? How can this be?
    assert(0 && "Val# is defined by a dead def?");
    abort();
  }

  // Find all references in the barrier mbb.
  SmallPtrSet<MachineInstr*, 4> RefsInMBB;
  for (MachineRegisterInfo::reg_iterator I = MRI->reg_begin(CurrLI->reg),
         E = MRI->reg_end(); I != E; ++I) {
    MachineInstr *RefMI = &*I;
    if (RefMI->getParent() == BarrierMBB)
      RefsInMBB.insert(RefMI);
  }

  // Find a point to restore the value after the barrier.
  unsigned RestoreIndex;
  MachineBasicBlock::iterator RestorePt =
    findRestorePoint(BarrierMBB, Barrier, RefsInMBB, RestoreIndex);
  if (RestorePt == BarrierMBB->end())
    return false;

  // Add a spill either before the barrier or after the definition.
  MachineBasicBlock *DefMBB = NULL;
  const TargetRegisterClass *RC = MRI->getRegClass(CurrLI->reg);
  int SS;
  unsigned SpillIndex = 0;
  if (isAlreadySplit(CurrLI->reg, ValNo->id, SS)) {
    // If it's already split, just restore the value. There is no need to spill
    // the def again. (Unimplemented: deliberately aborts.)
    abort(); // FIXME
  } else if (ValNo->def == ~0U) {
    // If it's defined by a phi, we must split just before the barrier.
    MachineBasicBlock::iterator SpillPt =
      findSpillPoint(BarrierMBB, Barrier, RefsInMBB, SpillIndex);
    if (SpillPt == BarrierMBB->begin())
      return false; // No gap to insert spill.
    // Add spill. The store kills the register since the value is restored
    // after the barrier.
    SS = MFI->CreateStackObject(RC->getSize(), RC->getAlignment());
    TII->storeRegToStackSlot(*BarrierMBB, SpillPt, CurrLI->reg, true, SS, RC);
    MachineInstr *StoreMI = prior(SpillPt);
    LIs->InsertMachineInstrInMaps(StoreMI, SpillIndex);
  } else {
    // Check if it's possible to insert a spill after the def MI.
    MachineInstr *DefMI = LIs->getInstructionFromIndex(ValNo->def);
    DefMBB = DefMI->getParent();
    MachineBasicBlock::iterator SpillPt =
      findNextEmptySlot(DefMBB, DefMI, SpillIndex);
    if (SpillPt == DefMBB->end())
      return false; // No gap to insert spill.
    SS = MFI->CreateStackObject(RC->getSize(), RC->getAlignment());

    // Add spill. The store instruction does *not* kill the register.
    TII->storeRegToStackSlot(*DefMBB, SpillPt, CurrLI->reg, false, SS, RC);
    MachineInstr *StoreMI = prior(SpillPt);
    LIs->InsertMachineInstrInMaps(StoreMI, SpillIndex);
  }

  // Add restore.
  // FIXME: Create live interval for stack slot.
  TII->loadRegFromStackSlot(*BarrierMBB, RestorePt, CurrLI->reg, SS, RC);
  MachineInstr *LoadMI = prior(RestorePt);
  LIs->InsertMachineInstrInMaps(LoadMI, RestoreIndex);

  // If live interval is spilled in the same block as the barrier, just
  // create a hole in the interval.
  if (!DefMBB ||
      LIs->getInstructionFromIndex(SpillIndex)->getParent() == BarrierMBB) {
    UpdateIntervalForSplit(ValNo, LIs->getUseIndex(SpillIndex)+1,
                           LIs->getDefIndex(RestoreIndex));

    // Record val# values are in the specific spill slot.
    RecordSplit(CurrLI->reg, SpillIndex, RestoreIndex, SS);

    ++NumSplit;
    return true;
  }

  // Shrink wrap the live interval by walking up the CFG and find the
  // new kills.
  // Now let's find all the uses of the val#, bucketed by basic block number.
  DenseMap<unsigned, std::vector<MachineOperand*> > Uses;
  for (MachineRegisterInfo::use_iterator UI = MRI->use_begin(CurrLI->reg),
         UE = MRI->use_end(); UI != UE; ++UI) {
    MachineOperand &UseMO = UI.getOperand();
    MachineInstr *UseMI = UseMO.getParent();
    unsigned UseIdx = LIs->getInstructionIndex(UseMI);
    LiveInterval::iterator ULR = CurrLI->FindLiveRangeContaining(UseIdx);
    if (ULR->valno != ValNo)
      continue;
    MachineBasicBlock *UseMBB = UseMI->getParent();
    unsigned MBBId = UseMBB->getNumber();
    DenseMap<unsigned, std::vector<MachineOperand*> >::iterator UMII =
      Uses.find(MBBId);
    if (UMII != Uses.end())
      UMII->second.push_back(&UseMO);
    else {
      std::vector<MachineOperand*> Ops;
      Ops.push_back(&UseMO);
      Uses.insert(std::make_pair(MBBId, Ops));
    }
  }

  // Walk up the predecessor chains.
  SmallPtrSet<MachineBasicBlock*, 8> Visited;
  ShrinkWrapLiveInterval(ValNo, BarrierMBB, DefMBB, Visited, Uses);

  // Remove live range from barrier to the restore. FIXME: Find a better
  // point to re-start the live interval.
  UpdateIntervalForSplit(ValNo, LIs->getUseIndex(BarrierIdx)+1,
                         LIs->getDefIndex(RestoreIndex));
  // Record val# values are in the specific spill slot.
  RecordSplit(CurrLI->reg, BarrierIdx, RestoreIndex, SS);

  ++NumSplit;
  return true;
}
578
579/// SplitRegLiveIntervals - Split all register live intervals that cross the
580/// barrier that's being processed.
581bool
582PreAllocSplitting::SplitRegLiveIntervals(const TargetRegisterClass **RCs) {
583 // First find all the virtual registers whose live intervals are intercepted
584 // by the current barrier.
585 SmallVector<LiveInterval*, 8> Intervals;
586 for (const TargetRegisterClass **RC = RCs; *RC; ++RC) {
587 std::vector<unsigned> &VRs = MRI->getRegClassVirtRegs(*RC);
588 for (unsigned i = 0, e = VRs.size(); i != e; ++i) {
589 unsigned Reg = VRs[i];
590 if (!LIs->hasInterval(Reg))
591 continue;
592 LiveInterval *LI = &LIs->getInterval(Reg);
593 if (LI->liveAt(BarrierIdx) && !Barrier->readsRegister(Reg))
594 // Virtual register live interval is intercepted by the barrier. We
595 // should split and shrink wrap its interval if possible.
596 Intervals.push_back(LI);
597 }
598 }
599
600 // Process the affected live intervals.
601 bool Change = false;
602 while (!Intervals.empty()) {
603 LiveInterval *LI = Intervals.back();
604 Intervals.pop_back();
605 Change |= SplitRegLiveInterval(LI);
606 }
607
608 return Change;
609}
610
Evan Cheng09e8ca82008-10-20 21:44:59 +0000611bool PreAllocSplitting::runOnMachineFunction(MachineFunction &MF) {
Evan Chengf5cd4f02008-10-23 20:43:13 +0000612 CurMF = &MF;
613 TM = &MF.getTarget();
614 TII = TM->getInstrInfo();
615 MFI = MF.getFrameInfo();
616 MRI = &MF.getRegInfo();
617 LIs = &getAnalysis<LiveIntervals>();
618
619 bool MadeChange = false;
620
621 // Make sure blocks are numbered in order.
622 MF.RenumberBlocks();
623
624 for (MachineFunction::reverse_iterator I = MF.rbegin(), E = MF.rend();
625 I != E; ++I) {
626 BarrierMBB = &*I;
627 for (MachineBasicBlock::reverse_iterator II = BarrierMBB->rbegin(),
628 EE = BarrierMBB->rend(); II != EE; ++II) {
629 Barrier = &*II;
630 const TargetRegisterClass **BarrierRCs =
631 Barrier->getDesc().getRegClassBarriers();
632 if (!BarrierRCs)
633 continue;
634 BarrierIdx = LIs->getInstructionIndex(Barrier);
635 MadeChange |= SplitRegLiveIntervals(BarrierRCs);
636 }
637 }
638
639 return MadeChange;
Evan Cheng09e8ca82008-10-20 21:44:59 +0000640}