//===-- PreAllocSplitting.cpp - Pre-allocation Interval Splitting Pass. ---===//
3 // The LLVM Compiler Infrastructure
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 // This file implements the machine instruction level pre-register allocation
11 // live interval splitting pass. It finds live interval barriers, i.e.
12 // instructions which will kill all physical registers in certain register
13 // classes, and split all live intervals which cross the barrier.
15 //===----------------------------------------------------------------------===//
17 #define DEBUG_TYPE "pre-alloc-split"
18 #include "llvm/CodeGen/LiveIntervalAnalysis.h"
19 #include "llvm/CodeGen/MachineFrameInfo.h"
20 #include "llvm/CodeGen/MachineFunctionPass.h"
21 #include "llvm/CodeGen/MachineLoopInfo.h"
22 #include "llvm/CodeGen/MachineRegisterInfo.h"
23 #include "llvm/CodeGen/Passes.h"
24 #include "llvm/CodeGen/RegisterCoalescer.h"
25 #include "llvm/Target/TargetInstrInfo.h"
26 #include "llvm/Target/TargetMachine.h"
27 #include "llvm/Target/TargetOptions.h"
28 #include "llvm/Target/TargetRegisterInfo.h"
29 #include "llvm/Support/CommandLine.h"
30 #include "llvm/Support/Debug.h"
31 #include "llvm/ADT/SmallPtrSet.h"
32 #include "llvm/ADT/Statistic.h"
36 STATISTIC(NumSplit , "Number of intervals split");
39 class VISIBILITY_HIDDEN PreAllocSplitting : public MachineFunctionPass {
40 MachineFunction *CurMF;
41 const TargetMachine *TM;
42 const TargetInstrInfo *TII;
43 MachineFrameInfo *MFI;
44 MachineRegisterInfo *MRI;
47 // Barrier - Current barrier being processed.
48 MachineInstr *Barrier;
50 // BarrierMBB - Basic block where the barrier resides in.
51 MachineBasicBlock *BarrierMBB;
53 // Barrier - Current barrier index.
56 // CurrLI - Current live interval being split.
59 // LIValNoSSMap - A map from live interval and val# pairs to spill slots.
60 // This records what live interval's val# has been split and what spill
62 std::map<std::pair<unsigned, unsigned>, int> LIValNoSSMap;
64 // RestoreMIs - All the restores inserted due to live interval splitting.
65 SmallPtrSet<MachineInstr*, 8> RestoreMIs;
69 PreAllocSplitting() : MachineFunctionPass(&ID) {}
71 virtual bool runOnMachineFunction(MachineFunction &MF);
73 virtual void getAnalysisUsage(AnalysisUsage &AU) const {
74 AU.addRequired<LiveIntervals>();
75 AU.addPreserved<LiveIntervals>();
76 AU.addPreserved<RegisterCoalescer>();
78 AU.addPreservedID(StrongPHIEliminationID);
80 AU.addPreservedID(PHIEliminationID);
81 MachineFunctionPass::getAnalysisUsage(AU);
84 virtual void releaseMemory() {
89 virtual const char *getPassName() const {
90 return "Pre-Register Allocaton Live Interval Splitting";
93 /// print - Implement the dump method.
94 virtual void print(std::ostream &O, const Module* M = 0) const {
98 void print(std::ostream *O, const Module* M = 0) const {
103 MachineBasicBlock::iterator
104 findNextEmptySlot(MachineBasicBlock*, MachineInstr*,
107 MachineBasicBlock::iterator
108 findSpillPoint(MachineBasicBlock*, MachineInstr*,
109 SmallPtrSet<MachineInstr*, 4>&, unsigned&);
111 MachineBasicBlock::iterator
112 findRestorePoint(MachineBasicBlock*, MachineInstr*,
113 SmallPtrSet<MachineInstr*, 4>&, unsigned&);
115 void RecordSplit(unsigned, unsigned, unsigned, int);
117 bool isAlreadySplit(unsigned, unsigned, int&);
119 void UpdateIntervalForSplit(VNInfo*, unsigned, unsigned);
121 bool ShrinkWrapToLastUse(MachineBasicBlock*,
122 SmallVector<MachineOperand*, 4>&,
123 SmallPtrSet<MachineInstr*, 4>&);
125 void ShrinkWrapLiveInterval(VNInfo*, MachineBasicBlock*,
126 MachineBasicBlock*, SmallPtrSet<MachineBasicBlock*, 8>&,
127 DenseMap<MachineBasicBlock*, SmallVector<MachineOperand*, 4> >&,
128 DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 4> >&,
129 SmallVector<MachineBasicBlock*, 4>&);
131 bool SplitRegLiveInterval(LiveInterval*);
133 bool SplitRegLiveIntervals(const TargetRegisterClass **);
135 } // end anonymous namespace
// Static pass identifier; its address uniquely identifies this pass.
char PreAllocSplitting::ID = 0;

// Register the pass under the "pre-alloc-splitting" command-line name.
static RegisterPass<PreAllocSplitting>
X("pre-alloc-splitting", "Pre-Register Allocation Live Interval Splitting");

// Exported handle other code uses to refer to this pass.
const PassInfo *const llvm::PreAllocSplittingID = &X;
/// findNextEmptySlot - Find a gap after the given machine instruction in the
/// instruction index map. If there isn't one, return end(). On success the
/// index of the gap is returned by reference through SpotIndex.
MachineBasicBlock::iterator
PreAllocSplitting::findNextEmptySlot(MachineBasicBlock *MBB, MachineInstr *MI,
                                     unsigned &SpotIndex) {
  // Only a gap before the instruction *after* MI is usable.
  MachineBasicBlock::iterator MII = MI;
  if (++MII != MBB->end()) {
    unsigned Index = LIs->findGapBeforeInstr(LIs->getInstructionIndex(MII));
/// findSpillPoint - Find a gap as far away from the given MI that's suitable
/// for spilling the current live interval. The index must be before any
/// defs and uses of the live interval register in the mbb. Return begin() if
/// none is found; otherwise the gap's index is returned through SpillIndex.
MachineBasicBlock::iterator
PreAllocSplitting::findSpillPoint(MachineBasicBlock *MBB, MachineInstr *MI,
                                  SmallPtrSet<MachineInstr*, 4> &RefsInMBB,
                                  unsigned &SpillIndex) {
  // Default result: block begin, meaning "no suitable spill point".
  MachineBasicBlock::iterator Pt = MBB->begin();

  // Go top down if RefsInMBB is empty.
  if (RefsInMBB.empty()) {
    MachineBasicBlock::iterator MII = MBB->begin();
    MachineBasicBlock::iterator EndPt = MI;
      unsigned Index = LIs->getInstructionIndex(MII);
      unsigned Gap = LIs->findGapBeforeInstr(Index);
    } while (MII != EndPt);
    // Otherwise walk backwards from MI, stopping at the first reference to
    // the register (RefsInMBB) or the start of the block, and take the
    // latest gap encountered.
    MachineBasicBlock::iterator MII = MI;
    while (MII != MBB->begin() && !RefsInMBB.count(MII)) {
      unsigned Index = LIs->getInstructionIndex(MII);
      if (LIs->hasGapBeforeInstr(Index)) {
        SpillIndex = LIs->findGapBeforeInstr(Index, true);
/// findRestorePoint - Find a gap in the instruction index map that's suitable
/// for restoring the current live interval value. The index must be before any
/// uses of the live interval register in the mbb. Return end() if none is
/// found; otherwise the gap's index is returned through RestoreIndex.
MachineBasicBlock::iterator
PreAllocSplitting::findRestorePoint(MachineBasicBlock *MBB, MachineInstr *MI,
                                    SmallPtrSet<MachineInstr*, 4> &RefsInMBB,
                                    unsigned &RestoreIndex) {
  // Default result: block end, meaning "no suitable restore point".
  MachineBasicBlock::iterator Pt = MBB->end();

  // Go bottom up if RefsInMBB is empty.
  if (RefsInMBB.empty()) {
    MachineBasicBlock::iterator MII = MBB->end();
    MachineBasicBlock::iterator EndPt = MI;
      unsigned Index = LIs->getInstructionIndex(MII);
      unsigned Gap = LIs->findGapBeforeInstr(Index);
    } while (MII != EndPt);
    // Otherwise scan forward from MI; the restore must be inserted before
    // the first use of the register in this block.
    MachineBasicBlock::iterator MII = MI;
    while (MII != MBB->end()) {
      unsigned Index = LIs->getInstructionIndex(MII);
      unsigned Gap = LIs->findGapBeforeInstr(Index);
      if (RefsInMBB.count(MII))
/// RecordSplit - Given a register live interval is split, remember the spill
/// slot where the val#s are in. Both the val# live before the spill point and
/// the val# defined by the restore are mapped to the same spill slot SS.
void PreAllocSplitting::RecordSplit(unsigned Reg, unsigned SpillIndex,
                                    unsigned RestoreIndex, int SS) {
  const LiveRange *LR = NULL;
    // Record the val# that was live at the spill point.
    LR = CurrLI->getLiveRangeContaining(LIs->getUseIndex(SpillIndex));
    LIValNoSSMap.insert(std::make_pair(std::make_pair(CurrLI->reg,
                                                      LR->valno->id), SS));
  // Record the val# defined by the restore.
  LR = CurrLI->getLiveRangeContaining(LIs->getDefIndex(RestoreIndex));
  LIValNoSSMap.insert(std::make_pair(std::make_pair(CurrLI->reg,
                                                    LR->valno->id), SS));
/// isAlreadySplit - Return if a given val# of a register live interval is
/// already split. Also return by reference the spill slot where the value is.
bool PreAllocSplitting::isAlreadySplit(unsigned Reg, unsigned ValNoId, int &SS){
  // Look up the (register, val# id) pair recorded by RecordSplit.
  std::map<std::pair<unsigned, unsigned>, int>::iterator I =
    LIValNoSSMap.find(std::make_pair(Reg, ValNoId));
  if (I == LIValNoSSMap.end())
/// UpdateIntervalForSplit - Given the specified val# of the current live
/// interval is being split, and the split and rejoin indices, update the live
/// interval accordingly. The ranges of the old val# are partitioned into a
/// "Before" set (still defined by the original def) and an "After" set
/// (now defined by the restore), and the val# is replaced by two new ones.
PreAllocSplitting::UpdateIntervalForSplit(VNInfo *ValNo, unsigned SplitIndex,
                                          unsigned JoinIndex) {
  // Ranges (and their kill indices) kept by the original definition vs.
  // ranges taken over by the restore.
  SmallVector<std::pair<unsigned,unsigned>, 4> Before;
  SmallVector<std::pair<unsigned,unsigned>, 4> After;
  SmallVector<unsigned, 4> BeforeKills;
  SmallVector<unsigned, 4> AfterKills;
  SmallPtrSet<const LiveRange*, 4> Processed;

  // First, let's figure out which parts of the live interval is now defined
  // by the restore, which are defined by the original definition.
  const LiveRange *LR = CurrLI->getLiveRangeContaining(JoinIndex);
  After.push_back(std::make_pair(JoinIndex, LR->end));
  if (CurrLI->isKill(ValNo, LR->end))
    AfterKills.push_back(LR->end);

  assert(LR->contains(SplitIndex));
  if (SplitIndex > LR->start) {
    // The portion of this range before the split stays with the old def.
    Before.push_back(std::make_pair(LR->start, SplitIndex));
    BeforeKills.push_back(SplitIndex);
  Processed.insert(LR);

  // Flood-fill the successor blocks: every range of this val# reachable
  // from the restore point belongs to the "After" set.
  SmallVector<MachineBasicBlock*, 4> WorkList;
  MachineBasicBlock *MBB = LIs->getMBBFromIndex(LR->end-1);
  for (MachineBasicBlock::succ_iterator SI = MBB->succ_begin(),
         SE = MBB->succ_end(); SI != SE; ++SI)
    WorkList.push_back(*SI);

  while (!WorkList.empty()) {
    MBB = WorkList.back();
    unsigned Idx = LIs->getMBBStartIdx(MBB);
    LR = CurrLI->getLiveRangeContaining(Idx);
    if (LR && LR->valno == ValNo && !Processed.count(LR)) {
      After.push_back(std::make_pair(LR->start, LR->end));
      if (CurrLI->isKill(ValNo, LR->end))
        AfterKills.push_back(LR->end);
      Idx = LIs->getMBBEndIdx(MBB);
        for (MachineBasicBlock::succ_iterator SI = MBB->succ_begin(),
               SE = MBB->succ_end(); SI != SE; ++SI)
          WorkList.push_back(*SI);
        // The range extends beyond this block; continue from the block
        // containing its end.
        if (LR->end > Idx+1) {
          MBB = LIs->getMBBFromIndex(LR->end-1);
          for (MachineBasicBlock::succ_iterator SI = MBB->succ_begin(),
                 SE = MBB->succ_end(); SI != SE; ++SI)
            WorkList.push_back(*SI);
      Processed.insert(LR);

  // Every remaining (unprocessed) range of this val# stays with the old def.
  for (LiveInterval::iterator I = CurrLI->begin(), E = CurrLI->end();
    if (LR->valno == ValNo && !Processed.count(LR)) {
      Before.push_back(std::make_pair(LR->start, LR->end));
      if (CurrLI->isKill(ValNo, LR->end))
        BeforeKills.push_back(LR->end);

  // Now create new val#s to represent the live ranges defined by the old def
  // those defined by the restore.
  unsigned AfterDef = ValNo->def;
  MachineInstr *AfterCopy = ValNo->copy;
  bool HasPHIKill = ValNo->hasPHIKill;
  CurrLI->removeValNo(ValNo);
  VNInfo *BValNo = (Before.empty())
    : CurrLI->getNextValue(AfterDef, AfterCopy, LIs->getVNInfoAllocator());
  CurrLI->addKills(BValNo, BeforeKills);

  // The "After" val# is defined at the restore (JoinIndex), not by a copy.
  VNInfo *AValNo = (After.empty())
    : CurrLI->getNextValue(JoinIndex,0, LIs->getVNInfoAllocator());
  AValNo->hasPHIKill = HasPHIKill;
  CurrLI->addKills(AValNo, AfterKills);

  // Re-add the partitioned ranges under their new val#s.
  for (unsigned i = 0, e = Before.size(); i != e; ++i) {
    unsigned Start = Before[i].first;
    unsigned End = Before[i].second;
    CurrLI->addRange(LiveRange(Start, End, BValNo));
  for (unsigned i = 0, e = After.size(); i != e; ++i) {
    unsigned Start = After[i].first;
    unsigned End = After[i].second;
    CurrLI->addRange(LiveRange(Start, End, AValNo));
/// ShrinkWrapToLastUse - There are uses of the current live interval in the
/// given block, shrink wrap the live interval to the last use (i.e. remove
/// from last use to the end of the mbb). In case mbb is the where the barrier
/// is, remove from the last use to the barrier.
PreAllocSplitting::ShrinkWrapToLastUse(MachineBasicBlock *MBB,
                       SmallVector<MachineOperand*, 4> &Uses,
                       SmallPtrSet<MachineInstr*, 4> &UseMIs) {
  MachineOperand *LastMO = 0;
  MachineInstr *LastMI = 0;
  if (MBB != BarrierMBB && Uses.size() == 1) {
    // Single use, no need to traverse the block. We can't assume this for the
    // barrier bb though since the use is probably below the barrier.
    LastMI = LastMO->getParent();
    // Scan the block bottom-up for the last instruction that uses the
    // current register.
    MachineBasicBlock::iterator MEE = MBB->begin();
    MachineBasicBlock::iterator MII;
    if (MBB == BarrierMBB)
    while (--MII != MEE) {
      MachineInstr *UseMI = &*MII;
      if (!UseMIs.count(UseMI))
      for (unsigned i = 0, e = UseMI->getNumOperands(); i != e; ++i) {
        MachineOperand &MO = UseMI->getOperand(i);
        if (MO.isReg() && MO.getReg() == CurrLI->reg) {

  // Cut off live range from last use (or beginning of the mbb if there
  // are no uses in it) to the end of the mbb.
  unsigned RangeStart, RangeEnd = LIs->getMBBEndIdx(MBB)+1;
    RangeStart = LIs->getUseIndex(LIs->getInstructionIndex(LastMI))+1;
    assert(!LastMO->isKill() && "Last use already terminates the interval?");
    // No use found: only possible in the barrier block; trim from its start.
    assert(MBB == BarrierMBB);
    RangeStart = LIs->getMBBStartIdx(MBB);
  if (MBB == BarrierMBB)
    RangeEnd = LIs->getUseIndex(BarrierIdx)+1;
  CurrLI->removeRange(RangeStart, RangeEnd);

  // Return true if the last use becomes a new kill.
/// ShrinkWrapLiveInterval - Recursively traverse the predecessor
/// chain to find the new 'kills' and shrink wrap the live interval to the
/// new kill indices.
PreAllocSplitting::ShrinkWrapLiveInterval(VNInfo *ValNo,
              MachineBasicBlock *MBB, MachineBasicBlock *DefMBB,
              SmallPtrSet<MachineBasicBlock*, 8> &Visited,
              DenseMap<MachineBasicBlock*, SmallVector<MachineOperand*, 4> > &Uses,
              DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 4> > &UseMIs,
              SmallVector<MachineBasicBlock*, 4> &UseMBBs) {
  // Avoid revisiting blocks; the CFG may contain cycles.
  if (!Visited.insert(MBB))

  DenseMap<MachineBasicBlock*, SmallVector<MachineOperand*, 4> >::iterator
    UMII = Uses.find(MBB);
  if (UMII != Uses.end()) {
    // At least one use in this mbb, lets look for the kill.
    DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 4> >::iterator
      UMII2 = UseMIs.find(MBB);
    if (ShrinkWrapToLastUse(MBB, UMII->second, UMII2->second))
      // Found a kill, shrink wrapping of this path ends here.
  } else if (MBB == DefMBB) {
    assert(LIValNoSSMap.find(std::make_pair(CurrLI->reg, ValNo->id)) !=
           LIValNoSSMap.end() && "Why wasn't def spilled?");
    // There are no uses after the def.
    MachineInstr *DefMI = LIs->getInstructionFromIndex(ValNo->def);
    assert(RestoreMIs.count(DefMI) && "Not defined by a join?");
    if (UseMBBs.empty()) {
      // The only use must be below barrier in the barrier block. It's safe to
      LIs->RemoveMachineInstrFromMaps(DefMI);
      DefMI->eraseFromParent();
      CurrLI->removeRange(ValNo->def, LIs->getMBBEndIdx(MBB)+1);
    // No use and not the def block: remove the entire live range of the bb
    // out of the live interval.
    CurrLI->removeRange(LIs->getMBBStartIdx(MBB), LIs->getMBBEndIdx(MBB)+1);

    // Reached the def mbb, stop traversing this path further.

  // Traverse the paths up the predecessor chains further.
  for (MachineBasicBlock::pred_iterator PI = MBB->pred_begin(),
         PE = MBB->pred_end(); PI != PE; ++PI) {
    MachineBasicBlock *Pred = *PI;
    if (Pred == DefMBB && ValNo->hasPHIKill)
      // Pred is the def bb and the def reaches other val#s, we must
      // allow the value to be live out of the bb.
    ShrinkWrapLiveInterval(ValNo, Pred, DefMBB, Visited, Uses, UseMIs, UseMBBs);
/// SplitRegLiveInterval - Split (spill and restore) the given live interval
/// so it would not cross the barrier that's being processed. Shrink wrap
/// (minimize) the live interval to the last uses. Returns true if the
/// interval was changed.
bool PreAllocSplitting::SplitRegLiveInterval(LiveInterval *LI) {
  // Find live range where current interval cross the barrier.
  LiveInterval::iterator LR =
    CurrLI->FindLiveRangeContaining(LIs->getUseIndex(BarrierIdx));
  VNInfo *ValNo = LR->valno;

  if (ValNo->def == ~1U) {
    // Defined by a dead def? How can this be?
    assert(0 && "Val# is defined by a dead def?");

  // FIXME: For now, if definition is rematerializable, do not split.
  MachineInstr *DefMI = (ValNo->def != ~0U)
    ? LIs->getInstructionFromIndex(ValNo->def) : NULL;
  if (DefMI && LIs->isReMaterializable(*LI, ValNo, DefMI))

  // Find all references in the barrier mbb.
  SmallPtrSet<MachineInstr*, 4> RefsInMBB;
  for (MachineRegisterInfo::reg_iterator I = MRI->reg_begin(CurrLI->reg),
         E = MRI->reg_end(); I != E; ++I) {
    MachineInstr *RefMI = &*I;
    if (RefMI->getParent() == BarrierMBB)
      RefsInMBB.insert(RefMI);

  // Find a point to restore the value after the barrier.
  unsigned RestoreIndex;
  MachineBasicBlock::iterator RestorePt =
    findRestorePoint(BarrierMBB, Barrier, RefsInMBB, RestoreIndex);
  if (RestorePt == BarrierMBB->end())

  // Add a spill either before the barrier or after the definition.
  MachineBasicBlock *DefMBB = DefMI ? DefMI->getParent() : NULL;
  const TargetRegisterClass *RC = MRI->getRegClass(CurrLI->reg);
  unsigned SpillIndex = 0;
  MachineInstr *SpillMI = NULL;
  if (isAlreadySplit(CurrLI->reg, ValNo->id, SS)) {
    // If it's already split, just restore the value. There is no need to spill
  } else if (ValNo->def == ~0U) {
    // If it's defined by a phi, we must split just before the barrier.
    MachineBasicBlock::iterator SpillPt =
      findSpillPoint(BarrierMBB, Barrier, RefsInMBB, SpillIndex);
    if (SpillPt == BarrierMBB->begin())
      return false; // No gap to insert spill.
    SS = MFI->CreateStackObject(RC->getSize(), RC->getAlignment());
    TII->storeRegToStackSlot(*BarrierMBB, SpillPt, CurrLI->reg, true, SS, RC);
    SpillMI = prior(SpillPt);
    LIs->InsertMachineInstrInMaps(SpillMI, SpillIndex);
    // Check if it's possible to insert a spill after the def MI.
    MachineBasicBlock::iterator SpillPt =
      findNextEmptySlot(DefMBB, DefMI, SpillIndex);
    if (SpillPt == DefMBB->end())
      return false; // No gap to insert spill.
    SS = MFI->CreateStackObject(RC->getSize(), RC->getAlignment());
    // Add spill. The store instruction kills the register if def is before the
    // barrier in the barrier block.
    TII->storeRegToStackSlot(*DefMBB, SpillPt, CurrLI->reg,
                             DefMBB == BarrierMBB, SS, RC);
    SpillMI = prior(SpillPt);
    LIs->InsertMachineInstrInMaps(SpillMI, SpillIndex);

  // Insert the restore from the spill slot after the barrier.
  // FIXME: Create live interval for stack slot.
  TII->loadRegFromStackSlot(*BarrierMBB, RestorePt, CurrLI->reg, SS, RC);
  MachineInstr *LoadMI = prior(RestorePt);
  LIs->InsertMachineInstrInMaps(LoadMI, RestoreIndex);
  RestoreMIs.insert(LoadMI);

  // If live interval is spilled in the same block as the barrier, just
  // create a hole in the interval.
      (SpillIndex && SpillMI->getParent() == BarrierMBB)) {
    UpdateIntervalForSplit(ValNo, LIs->getUseIndex(SpillIndex)+1,
                           LIs->getDefIndex(RestoreIndex));
    // Record val# values are in the specific spill slot.
    RecordSplit(CurrLI->reg, SpillIndex, RestoreIndex, SS);

  // Shrink wrap the live interval by walking up the CFG and find the
  // Now let's find all the uses of the val#.
  DenseMap<MachineBasicBlock*, SmallVector<MachineOperand*, 4> > Uses;
  DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 4> > UseMIs;
  SmallPtrSet<MachineBasicBlock*, 4> Seen;
  SmallVector<MachineBasicBlock*, 4> UseMBBs;
  for (MachineRegisterInfo::use_iterator UI = MRI->use_begin(CurrLI->reg),
         UE = MRI->use_end(); UI != UE; ++UI) {
    // Only collect uses belonging to the val# being split.
    MachineOperand &UseMO = UI.getOperand();
    MachineInstr *UseMI = UseMO.getParent();
    unsigned UseIdx = LIs->getInstructionIndex(UseMI);
    LiveInterval::iterator ULR = CurrLI->FindLiveRangeContaining(UseIdx);
    if (ULR->valno != ValNo)
    MachineBasicBlock *UseMBB = UseMI->getParent();
    // Remember which other mbb's use this val#.
    if (Seen.insert(UseMBB) && UseMBB != BarrierMBB)
      UseMBBs.push_back(UseMBB);
    DenseMap<MachineBasicBlock*, SmallVector<MachineOperand*, 4> >::iterator
      UMII = Uses.find(UseMBB);
    if (UMII != Uses.end()) {
      DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 4> >::iterator
        UMII2 = UseMIs.find(UseMBB);
      UMII->second.push_back(&UseMO);
      UMII2->second.insert(UseMI);
      // First use seen in this block: create the per-block entries.
      SmallVector<MachineOperand*, 4> Ops;
      Ops.push_back(&UseMO);
      Uses.insert(std::make_pair(UseMBB, Ops));
      SmallPtrSet<MachineInstr*, 4> MIs;
      UseMIs.insert(std::make_pair(UseMBB, MIs));

  // Walk up the predecessor chains.
  SmallPtrSet<MachineBasicBlock*, 8> Visited;
  ShrinkWrapLiveInterval(ValNo, BarrierMBB, DefMBB, Visited,
                         Uses, UseMIs, UseMBBs);

  // Remove live range from barrier to the restore. FIXME: Find a better
  // point to re-start the live interval.
  UpdateIntervalForSplit(ValNo, LIs->getUseIndex(BarrierIdx)+1,
                         LIs->getDefIndex(RestoreIndex));
  // Record val# values are in the specific spill slot.
  RecordSplit(CurrLI->reg, SpillIndex, RestoreIndex, SS);
/// SplitRegLiveIntervals - Split all register live intervals that cross the
/// barrier that's being processed. Returns true if any interval was changed.
PreAllocSplitting::SplitRegLiveIntervals(const TargetRegisterClass **RCs) {
  // First find all the virtual registers whose live intervals are intercepted
  // by the current barrier.
  SmallVector<LiveInterval*, 8> Intervals;
  for (const TargetRegisterClass **RC = RCs; *RC; ++RC) {
    std::vector<unsigned> &VRs = MRI->getRegClassVirtRegs(*RC);
    for (unsigned i = 0, e = VRs.size(); i != e; ++i) {
      unsigned Reg = VRs[i];
      if (!LIs->hasInterval(Reg))
      LiveInterval *LI = &LIs->getInterval(Reg);
      if (LI->liveAt(BarrierIdx) && !Barrier->readsRegister(Reg))
        // Virtual register live interval is intercepted by the barrier. We
        // should split and shrink wrap its interval if possible.
        Intervals.push_back(LI);

  // Process the affected live intervals.
  while (!Intervals.empty()) {
    LiveInterval *LI = Intervals.back();
    Intervals.pop_back();
    Change |= SplitRegLiveInterval(LI);
665 bool PreAllocSplitting::runOnMachineFunction(MachineFunction &MF) {
667 TM = &MF.getTarget();
668 TII = TM->getInstrInfo();
669 MFI = MF.getFrameInfo();
670 MRI = &MF.getRegInfo();
671 LIs = &getAnalysis<LiveIntervals>();
673 bool MadeChange = false;
675 // Make sure blocks are numbered in order.
678 for (MachineFunction::reverse_iterator I = MF.rbegin(), E = MF.rend();
681 for (MachineBasicBlock::reverse_iterator II = BarrierMBB->rbegin(),
682 EE = BarrierMBB->rend(); II != EE; ++II) {
684 const TargetRegisterClass **BarrierRCs =
685 Barrier->getDesc().getRegClassBarriers();
688 BarrierIdx = LIs->getInstructionIndex(Barrier);
689 MadeChange |= SplitRegLiveIntervals(BarrierRCs);