//===-- PreAllocSplitting.cpp - Pre-allocation Interval Splitting Pass ---===//
// The LLVM Compiler Infrastructure
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//===----------------------------------------------------------------------===//
// This file implements the machine instruction level pre-register allocation
// live interval splitting pass. It finds live interval barriers, i.e.
// instructions which will kill all physical registers in certain register
// classes, and splits all live intervals which cross the barrier.
//===----------------------------------------------------------------------===//
#define DEBUG_TYPE "pre-alloc-split"
#include "VirtRegMap.h"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "llvm/CodeGen/LiveStackAnalysis.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/RegisterCoalescer.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetOptions.h"
#include "llvm/Target/TargetRegisterInfo.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DepthFirstIterator.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/Statistic.h"
static cl::opt<int> PreSplitLimit("pre-split-limit", cl::init(-1), cl::Hidden);
static cl::opt<int> DeadSplitLimit("dead-split-limit", cl::init(-1), cl::Hidden);
static cl::opt<int> RestoreFoldLimit("restore-fold-limit", cl::init(-1), cl::Hidden);
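// Hidden debugging knobs: a non-negative value caps how many interval splits,
// dead spill removals, or restore folds the pass will perform (see the checks
// against NumSplits, NumDeadSpills, and NumRestoreFolds below), which is
// useful when bisecting a miscompile down to a single transformation.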
STATISTIC(NumSplits, "Number of intervals split");
STATISTIC(NumRemats, "Number of intervals split by rematerialization");
STATISTIC(NumFolds, "Number of intervals split with spill folding");
STATISTIC(NumRestoreFolds, "Number of intervals split with restore folding");
STATISTIC(NumRenumbers, "Number of intervals renumbered into new registers");
STATISTIC(NumDeadSpills, "Number of dead spills removed");
class VISIBILITY_HIDDEN PreAllocSplitting : public MachineFunctionPass {
  MachineFunction *CurrMF;
  const TargetMachine *TM;
  const TargetInstrInfo *TII;
  const TargetRegisterInfo* TRI;
  MachineFrameInfo *MFI;
  MachineRegisterInfo *MRI;

  // Barrier - Current barrier being processed.
  MachineInstr *Barrier;

  // BarrierMBB - Basic block where the barrier resides.
  MachineBasicBlock *BarrierMBB;

  // BarrierIdx - Current barrier index.

  // CurrLI - Current live interval being split.

  // CurrSLI - Current stack slot live interval.
  LiveInterval *CurrSLI;

  // CurrSValNo - Current val# for the stack slot live interval.

  // IntervalSSMap - A map from live interval to spill slots.
  DenseMap<unsigned, int> IntervalSSMap;

  // Def2SpillMap - A map from a def instruction index to spill index.
  DenseMap<unsigned, unsigned> Def2SpillMap;
  PreAllocSplitting() : MachineFunctionPass(&ID) {}

  virtual bool runOnMachineFunction(MachineFunction &MF);

  virtual void getAnalysisUsage(AnalysisUsage &AU) const {
    AU.addRequired<LiveIntervals>();
    AU.addPreserved<LiveIntervals>();
    AU.addRequired<LiveStacks>();
    AU.addPreserved<LiveStacks>();
    AU.addPreserved<RegisterCoalescer>();
    AU.addPreservedID(StrongPHIEliminationID);
    AU.addPreservedID(PHIEliminationID);
    AU.addRequired<MachineDominatorTree>();
    AU.addRequired<MachineLoopInfo>();
    AU.addRequired<VirtRegMap>();
    AU.addPreserved<MachineDominatorTree>();
    AU.addPreserved<MachineLoopInfo>();
    AU.addPreserved<VirtRegMap>();
    MachineFunctionPass::getAnalysisUsage(AU);

  virtual void releaseMemory() {
    IntervalSSMap.clear();
    Def2SpillMap.clear();

  virtual const char *getPassName() const {
    return "Pre-Register Allocation Live Interval Splitting";

  /// print - Implement the dump method.
  virtual void print(std::ostream &O, const Module* M = 0) const {

  void print(std::ostream *O, const Module* M = 0) const {
  MachineBasicBlock::iterator
    findNextEmptySlot(MachineBasicBlock*, MachineInstr*,

  MachineBasicBlock::iterator
    findSpillPoint(MachineBasicBlock*, MachineInstr*, MachineInstr*,
                   SmallPtrSet<MachineInstr*, 4>&, unsigned&);

  MachineBasicBlock::iterator
    findRestorePoint(MachineBasicBlock*, MachineInstr*, unsigned,
                     SmallPtrSet<MachineInstr*, 4>&, unsigned&);

  int CreateSpillStackSlot(unsigned, const TargetRegisterClass *);

  bool IsAvailableInStack(MachineBasicBlock*, unsigned, unsigned, unsigned,
                          unsigned&, int&) const;

  void UpdateSpillSlotInterval(VNInfo*, unsigned, unsigned);

  bool SplitRegLiveInterval(LiveInterval*);

  bool SplitRegLiveIntervals(const TargetRegisterClass **,
                             SmallPtrSet<LiveInterval*, 8>&);

  bool createsNewJoin(LiveRange* LR, MachineBasicBlock* DefMBB,
                      MachineBasicBlock* BarrierMBB);
  bool Rematerialize(unsigned vreg, VNInfo* ValNo,
                     MachineBasicBlock::iterator RestorePt,
                     SmallPtrSet<MachineInstr*, 4>& RefsInMBB);
  MachineInstr* FoldSpill(unsigned vreg, const TargetRegisterClass* RC,
                          MachineInstr* Barrier,
                          MachineBasicBlock* MBB,
                          SmallPtrSet<MachineInstr*, 4>& RefsInMBB);
  MachineInstr* FoldRestore(unsigned vreg,
                            const TargetRegisterClass* RC,
                            MachineInstr* Barrier,
                            MachineBasicBlock* MBB,
                            SmallPtrSet<MachineInstr*, 4>& RefsInMBB);
  void RenumberValno(VNInfo* VN);
  void ReconstructLiveInterval(LiveInterval* LI);
  bool removeDeadSpills(SmallPtrSet<LiveInterval*, 8>& split);
  unsigned getNumberOfNonSpills(SmallPtrSet<MachineInstr*, 4>& MIs,
                                unsigned Reg, int FrameIndex, bool& TwoAddr);
  VNInfo* PerformPHIConstruction(MachineBasicBlock::iterator Use,
                    MachineBasicBlock* MBB, LiveInterval* LI,
                    SmallPtrSet<MachineInstr*, 4>& Visited,
                    DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> >& Defs,
                    DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> >& Uses,
                    DenseMap<MachineInstr*, VNInfo*>& NewVNs,
                    DenseMap<MachineBasicBlock*, VNInfo*>& LiveOut,
                    DenseMap<MachineBasicBlock*, VNInfo*>& Phis,
                    bool IsTopLevel, bool IsIntraBlock);
  VNInfo* PerformPHIConstructionFallBack(MachineBasicBlock::iterator Use,
                    MachineBasicBlock* MBB, LiveInterval* LI,
                    SmallPtrSet<MachineInstr*, 4>& Visited,
                    DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> >& Defs,
                    DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> >& Uses,
                    DenseMap<MachineInstr*, VNInfo*>& NewVNs,
                    DenseMap<MachineBasicBlock*, VNInfo*>& LiveOut,
                    DenseMap<MachineBasicBlock*, VNInfo*>& Phis,
                    bool IsTopLevel, bool IsIntraBlock);
} // end anonymous namespace

char PreAllocSplitting::ID = 0;

static RegisterPass<PreAllocSplitting>
X("pre-alloc-splitting", "Pre-Register Allocation Live Interval Splitting");

const PassInfo *const llvm::PreAllocSplittingID = &X;
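// Exposing the PassInfo through llvm::PreAllocSplittingID lets other parts of
// the code generator refer to this pass (for example, to declare that they
// preserve it) without having access to the class in the anonymous namespace.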
/// findNextEmptySlot - Find a gap after the given machine instruction in the
/// instruction index map. If there isn't one, return end().
MachineBasicBlock::iterator
PreAllocSplitting::findNextEmptySlot(MachineBasicBlock *MBB, MachineInstr *MI,
                                     unsigned &SpotIndex) {
  MachineBasicBlock::iterator MII = MI;
  if (++MII != MBB->end()) {
    unsigned Index = LIs->findGapBeforeInstr(LIs->getInstructionIndex(MII));
/// findSpillPoint - Find a gap, as far away from the given MI as possible,
/// that's suitable for spilling the current live interval. The index must be
/// before any defs and uses of the live interval register in the mbb. Return
/// begin() if none is found.
MachineBasicBlock::iterator
PreAllocSplitting::findSpillPoint(MachineBasicBlock *MBB, MachineInstr *MI,
                                  SmallPtrSet<MachineInstr*, 4> &RefsInMBB,
                                  unsigned &SpillIndex) {
  MachineBasicBlock::iterator Pt = MBB->begin();

  // Go top down if RefsInMBB is empty.
  if (RefsInMBB.empty() && !DefMI) {
    MachineBasicBlock::iterator MII = MBB->begin();
    MachineBasicBlock::iterator EndPt = MI;

    if (MII == EndPt) return Pt;

      unsigned Index = LIs->getInstructionIndex(MII);
      unsigned Gap = LIs->findGapBeforeInstr(Index);

      // We can't insert the spill between the barrier (a call), and its
      // corresponding call frame setup/teardown.
      if (prior(MII)->getOpcode() == TRI->getCallFrameSetupOpcode()) {
        bool reachedBarrier = false;
            reachedBarrier = true;
        } while (MII->getOpcode() != TRI->getCallFrameDestroyOpcode());

        if (reachedBarrier) break;
    } while (MII != EndPt);
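  // Otherwise walk backwards from the barrier toward the def (or the start of
  // the block), stopping at the first reference to the register and skipping
  // over call frame setup/teardown sequences, looking for a usable gap in the
  // instruction index map to place the spill.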
  MachineBasicBlock::iterator MII = MI;
  MachineBasicBlock::iterator EndPt = DefMI
    ? MachineBasicBlock::iterator(DefMI) : MBB->begin();

  while (MII != EndPt && !RefsInMBB.count(MII)) {
    unsigned Index = LIs->getInstructionIndex(MII);

    // We can't insert the spill between the barrier (a call), and its
    // corresponding call frame setup.
    if (prior(MII)->getOpcode() == TRI->getCallFrameSetupOpcode()) {
    } if (MII->getOpcode() == TRI->getCallFrameDestroyOpcode()) {
      bool reachedBarrier = false;
      while (MII->getOpcode() != TRI->getCallFrameSetupOpcode()) {
          reachedBarrier = true;

      if (reachedBarrier) break;
    } else if (LIs->hasGapBeforeInstr(Index)) {
      SpillIndex = LIs->findGapBeforeInstr(Index, true);
/// findRestorePoint - Find a gap in the instruction index map that's suitable
/// for restoring the current live interval value. The index must be before any
/// uses of the live interval register in the mbb. Return end() if none is
/// found.
MachineBasicBlock::iterator
PreAllocSplitting::findRestorePoint(MachineBasicBlock *MBB, MachineInstr *MI,
                                    SmallPtrSet<MachineInstr*, 4> &RefsInMBB,
                                    unsigned &RestoreIndex) {
  // FIXME: Allow spill to be inserted at the beginning of the mbb. Update mbb
  // begin index accordingly.
  MachineBasicBlock::iterator Pt = MBB->end();
  unsigned EndIdx = LIs->getMBBEndIdx(MBB);

  // Go bottom up if RefsInMBB is empty and the end of the mbb isn't beyond
  // the last index in the live range.
  if (RefsInMBB.empty() && LastIdx >= EndIdx) {
    MachineBasicBlock::iterator MII = MBB->getFirstTerminator();
    MachineBasicBlock::iterator EndPt = MI;

    if (MII == EndPt) return Pt;

      unsigned Index = LIs->getInstructionIndex(MII);
      unsigned Gap = LIs->findGapBeforeInstr(Index);

      // We can't insert a restore between the barrier (a call) and its
      // corresponding call frame teardown.
      if (MII->getOpcode() == TRI->getCallFrameDestroyOpcode()) {
        bool reachedBarrier = false;
        while (MII->getOpcode() != TRI->getCallFrameSetupOpcode()) {
            reachedBarrier = true;

        if (reachedBarrier) break;
    } while (MII != EndPt);
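  // Otherwise scan forward from the barrier toward the first terminator,
  // again skipping over call frame sequences, and stop at the first usable
  // gap that precedes any reference to the register in this block.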
  MachineBasicBlock::iterator MII = MI;

  // FIXME: Limit the number of instructions to examine to reduce
  // compile time.
  while (MII != MBB->getFirstTerminator()) {
    unsigned Index = LIs->getInstructionIndex(MII);
    unsigned Gap = LIs->findGapBeforeInstr(Index);

    // We can't insert a restore between the barrier (a call) and its
    // corresponding call frame teardown.
    if (MII->getOpcode() == TRI->getCallFrameDestroyOpcode()) {
    } else if (prior(MII)->getOpcode() == TRI->getCallFrameSetupOpcode()) {
      bool reachedBarrier = false;
        if (MII == MBB->getFirstTerminator() || RefsInMBB.count(MII)) {
          reachedBarrier = true;
      } while (MII->getOpcode() != TRI->getCallFrameDestroyOpcode());

      if (reachedBarrier) break;

    if (RefsInMBB.count(MII))
/// CreateSpillStackSlot - Create a stack slot for the live interval being
/// split. If the live interval was previously split, just reuse the same
/// slot.
int PreAllocSplitting::CreateSpillStackSlot(unsigned Reg,
                                            const TargetRegisterClass *RC) {
  DenseMap<unsigned, int>::iterator I = IntervalSSMap.find(Reg);
  if (I != IntervalSSMap.end()) {
    SS = MFI->CreateStackObject(RC->getSize(), RC->getAlignment());
    IntervalSSMap[Reg] = SS;

  // Create live interval for stack slot.
  CurrSLI = &LSs->getOrCreateInterval(SS);
  if (CurrSLI->hasAtLeastOneValue())
    CurrSValNo = CurrSLI->getValNumInfo(0);
    CurrSValNo = CurrSLI->getNextValue(~0U, 0, LSs->getVNInfoAllocator());
/// IsAvailableInStack - Return true if register is available in a split stack
/// slot at the specified index.
PreAllocSplitting::IsAvailableInStack(MachineBasicBlock *DefMBB,
                                      unsigned Reg, unsigned DefIndex,
                                      unsigned RestoreIndex, unsigned &SpillIndex,
  DenseMap<unsigned, int>::iterator I = IntervalSSMap.find(Reg);
  if (I == IntervalSSMap.end())

  DenseMap<unsigned, unsigned>::iterator II = Def2SpillMap.find(DefIndex);
  if (II == Def2SpillMap.end())

  // If last spill of def is in the same mbb as barrier mbb (where restore will
  // be), make sure it's not below the intended restore index.
  // FIXME: Undo the previous spill?
  assert(LIs->getMBBFromIndex(II->second) == DefMBB);
  if (DefMBB == BarrierMBB && II->second >= RestoreIndex)

  SpillIndex = II->second;
/// UpdateSpillSlotInterval - Given the specified val# of the register live
/// interval being split, and the spill and restore indices, update the live
/// interval of the spill stack slot.
PreAllocSplitting::UpdateSpillSlotInterval(VNInfo *ValNo, unsigned SpillIndex,
                                           unsigned RestoreIndex) {
  assert(LIs->getMBBFromIndex(RestoreIndex) == BarrierMBB &&
         "Expect restore in the barrier mbb");

  MachineBasicBlock *MBB = LIs->getMBBFromIndex(SpillIndex);
  if (MBB == BarrierMBB) {
    // Intra-block spill + restore. We are done.
    LiveRange SLR(SpillIndex, RestoreIndex, CurrSValNo);
    CurrSLI->addRange(SLR);
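  // Otherwise the spill is in a different block than the restore: mark the
  // stack slot live from the spill to the end of its block, then walk
  // successor blocks where the original value is still live, extending the
  // slot's interval until the block containing the restore is reached.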
  SmallPtrSet<MachineBasicBlock*, 4> Processed;
  unsigned EndIdx = LIs->getMBBEndIdx(MBB);
  LiveRange SLR(SpillIndex, EndIdx+1, CurrSValNo);
  CurrSLI->addRange(SLR);
  Processed.insert(MBB);

  // Start from the spill mbb, figure out the extent of the spill slot's
  // live interval.
  SmallVector<MachineBasicBlock*, 4> WorkList;
  const LiveRange *LR = CurrLI->getLiveRangeContaining(SpillIndex);
  if (LR->end > EndIdx)
    // If the live range extends beyond the end of the mbb, add successors to
    // the work list.
    for (MachineBasicBlock::succ_iterator SI = MBB->succ_begin(),
           SE = MBB->succ_end(); SI != SE; ++SI)
      WorkList.push_back(*SI);

  while (!WorkList.empty()) {
    MachineBasicBlock *MBB = WorkList.back();
    if (Processed.count(MBB))

    unsigned Idx = LIs->getMBBStartIdx(MBB);
    LR = CurrLI->getLiveRangeContaining(Idx);
    if (LR && LR->valno == ValNo) {
      EndIdx = LIs->getMBBEndIdx(MBB);
      if (Idx <= RestoreIndex && RestoreIndex < EndIdx) {
        // Spill slot live interval stops at the restore.
        LiveRange SLR(Idx, RestoreIndex, CurrSValNo);
        CurrSLI->addRange(SLR);
      } else if (LR->end > EndIdx) {
        // Live range extends beyond end of mbb, process successors.
        LiveRange SLR(Idx, EndIdx+1, CurrSValNo);
        CurrSLI->addRange(SLR);
        for (MachineBasicBlock::succ_iterator SI = MBB->succ_begin(),
               SE = MBB->succ_end(); SI != SE; ++SI)
          WorkList.push_back(*SI);
        LiveRange SLR(Idx, LR->end, CurrSValNo);
        CurrSLI->addRange(SLR);
      Processed.insert(MBB);
/// PerformPHIConstruction - From properly set up use and def lists, use a PHI
/// construction algorithm to compute the ranges and valnos for an interval.
PreAllocSplitting::PerformPHIConstruction(MachineBasicBlock::iterator UseI,
                    MachineBasicBlock* MBB, LiveInterval* LI,
                    SmallPtrSet<MachineInstr*, 4>& Visited,
                    DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> >& Defs,
                    DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> >& Uses,
                    DenseMap<MachineInstr*, VNInfo*>& NewVNs,
                    DenseMap<MachineBasicBlock*, VNInfo*>& LiveOut,
                    DenseMap<MachineBasicBlock*, VNInfo*>& Phis,
                    bool IsTopLevel, bool IsIntraBlock) {
  // Return memoized result if it's available.
  if (IsTopLevel && Visited.count(UseI) && NewVNs.count(UseI))
  else if (!IsTopLevel && IsIntraBlock && NewVNs.count(UseI))
  else if (!IsIntraBlock && LiveOut.count(MBB))
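  // The maps consulted above act as memo tables for the recursion: NewVNs
  // caches the value number computed for a particular instruction, LiveOut
  // caches the value live out of a block, and Phis caches the value number
  // created at a block's join point.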
  // Check if our block contains any uses or defs.
  bool ContainsDefs = Defs.count(MBB);
  bool ContainsUses = Uses.count(MBB);

  // Enumerate the cases of use/def containing blocks.
  if (!ContainsDefs && !ContainsUses) {
    return PerformPHIConstructionFallBack(UseI, MBB, LI, Visited, Defs, Uses,
                                          NewVNs, LiveOut, Phis,
                                          IsTopLevel, IsIntraBlock);
  } else if (ContainsDefs && !ContainsUses) {
    SmallPtrSet<MachineInstr*, 2>& BlockDefs = Defs[MBB];

    // Search for the def in this block. If we don't find it before the
    // instruction we care about, go to the fallback case. Note that this
    // should never happen: this cannot be intrablock, so use should
    // always be an end() iterator.
    assert(UseI == MBB->end() && "No use marked in intrablock");

    MachineBasicBlock::iterator Walker = UseI;
    while (Walker != MBB->begin()) {
      if (BlockDefs.count(Walker))

    // Once we've found it, extend its VNInfo to our instruction.
    unsigned DefIndex = LIs->getInstructionIndex(Walker);
    DefIndex = LiveIntervals::getDefIndex(DefIndex);
    unsigned EndIndex = LIs->getMBBEndIdx(MBB);

    RetVNI = NewVNs[Walker];
    LI->addRange(LiveRange(DefIndex, EndIndex+1, RetVNI));
  } else if (!ContainsDefs && ContainsUses) {
    SmallPtrSet<MachineInstr*, 2>& BlockUses = Uses[MBB];

    // Search for the use in this block that precedes the instruction we care
    // about, going to the fallback case if we don't find it.
    if (UseI == MBB->begin())
      return PerformPHIConstructionFallBack(UseI, MBB, LI, Visited, Defs,
                                            Uses, NewVNs, LiveOut, Phis,
                                            IsTopLevel, IsIntraBlock);

    MachineBasicBlock::iterator Walker = UseI;
    while (Walker != MBB->begin()) {
      if (BlockUses.count(Walker)) {

    // Must check begin() too.
      if (BlockUses.count(Walker))
        return PerformPHIConstructionFallBack(UseI, MBB, LI, Visited, Defs,
                                              Uses, NewVNs, LiveOut, Phis,
                                              IsTopLevel, IsIntraBlock);

    unsigned UseIndex = LIs->getInstructionIndex(Walker);
    UseIndex = LiveIntervals::getUseIndex(UseIndex);
    unsigned EndIndex = 0;
      EndIndex = LIs->getInstructionIndex(UseI);
      EndIndex = LiveIntervals::getUseIndex(EndIndex);
      EndIndex = LIs->getMBBEndIdx(MBB);

    // Now, recursively phi construct the VNInfo for the use we found,
    // and then extend it to include the instruction we care about.
    RetVNI = PerformPHIConstruction(Walker, MBB, LI, Visited, Defs, Uses,
                                    NewVNs, LiveOut, Phis, false, true);

    LI->addRange(LiveRange(UseIndex, EndIndex+1, RetVNI));

    // FIXME: Need to set kills properly for inter-block stuff.
    if (LI->isKill(RetVNI, UseIndex)) LI->removeKill(RetVNI, UseIndex);
    LI->addKill(RetVNI, EndIndex);
  } else if (ContainsDefs && ContainsUses) {
    SmallPtrSet<MachineInstr*, 2>& BlockDefs = Defs[MBB];
    SmallPtrSet<MachineInstr*, 2>& BlockUses = Uses[MBB];

    // This case is basically a merging of the two preceding cases, with the
    // special note that checking for defs must take precedence over checking
    // for uses, because of two-address instructions.
    if (UseI == MBB->begin())
      return PerformPHIConstructionFallBack(UseI, MBB, LI, Visited, Defs, Uses,
                                            NewVNs, LiveOut, Phis,
                                            IsTopLevel, IsIntraBlock);

    MachineBasicBlock::iterator Walker = UseI;
    bool foundDef = false;
    bool foundUse = false;
    while (Walker != MBB->begin()) {
      if (BlockDefs.count(Walker)) {
      } else if (BlockUses.count(Walker)) {

    // Must check begin() too.
    if (!foundDef && !foundUse) {
      if (BlockDefs.count(Walker))
      else if (BlockUses.count(Walker))
        return PerformPHIConstructionFallBack(UseI, MBB, LI, Visited, Defs,
                                              Uses, NewVNs, LiveOut, Phis,
                                              IsTopLevel, IsIntraBlock);

    unsigned StartIndex = LIs->getInstructionIndex(Walker);
    StartIndex = foundDef ? LiveIntervals::getDefIndex(StartIndex) :
                            LiveIntervals::getUseIndex(StartIndex);
    unsigned EndIndex = 0;
      EndIndex = LIs->getInstructionIndex(UseI);
      EndIndex = LiveIntervals::getUseIndex(EndIndex);
      EndIndex = LIs->getMBBEndIdx(MBB);

      RetVNI = NewVNs[Walker];
      RetVNI = PerformPHIConstruction(Walker, MBB, LI, Visited, Defs, Uses,
                                      NewVNs, LiveOut, Phis, false, true);

    LI->addRange(LiveRange(StartIndex, EndIndex+1, RetVNI));

    if (foundUse && LI->isKill(RetVNI, StartIndex))
      LI->removeKill(RetVNI, StartIndex);
    LI->addKill(RetVNI, EndIndex);

  // Memoize results so we don't have to recompute them.
  if (!IsIntraBlock) LiveOut[MBB] = RetVNI;

  if (!NewVNs.count(UseI))
    NewVNs[UseI] = RetVNI;
  Visited.insert(UseI);
/// PerformPHIConstructionFallBack - PerformPHIConstruction fall back path.
PreAllocSplitting::PerformPHIConstructionFallBack(MachineBasicBlock::iterator UseI,
                    MachineBasicBlock* MBB, LiveInterval* LI,
                    SmallPtrSet<MachineInstr*, 4>& Visited,
                    DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> >& Defs,
                    DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> >& Uses,
                    DenseMap<MachineInstr*, VNInfo*>& NewVNs,
                    DenseMap<MachineBasicBlock*, VNInfo*>& LiveOut,
                    DenseMap<MachineBasicBlock*, VNInfo*>& Phis,
                    bool IsTopLevel, bool IsIntraBlock) {
  // NOTE: Because this is the fallback case from other cases, we do NOT
  // assume that we are not intrablock here.
  if (Phis.count(MBB)) return Phis[MBB];

  unsigned StartIndex = LIs->getMBBStartIdx(MBB);
  VNInfo *RetVNI = Phis[MBB] = LI->getNextValue(~0U, /*FIXME*/ 0,
                                                LIs->getVNInfoAllocator());
  if (!IsIntraBlock) LiveOut[MBB] = RetVNI;
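  // The phi value is created and recorded in Phis (and LiveOut) *before*
  // recursing into the predecessors below; the early return on Phis.count(MBB)
  // above is what terminates the recursion on CFG cycles, since a back edge
  // simply finds and reuses the value created here.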
  // If there are no uses or defs between our starting point and the
  // beginning of the block, then recursively perform phi construction
  // on our predecessors.
  DenseMap<MachineBasicBlock*, VNInfo*> IncomingVNs;
  for (MachineBasicBlock::pred_iterator PI = MBB->pred_begin(),
         PE = MBB->pred_end(); PI != PE; ++PI) {
    VNInfo* Incoming = PerformPHIConstruction((*PI)->end(), *PI, LI,
                                              Visited, Defs, Uses, NewVNs,
                                              LiveOut, Phis, false, false);
      IncomingVNs[*PI] = Incoming;
  if (MBB->pred_size() == 1 && !RetVNI->hasPHIKill) {
    VNInfo* OldVN = RetVNI;
    VNInfo* NewVN = IncomingVNs.begin()->second;
    VNInfo* MergedVN = LI->MergeValueNumberInto(OldVN, NewVN);
    if (MergedVN == OldVN) std::swap(OldVN, NewVN);

    for (DenseMap<MachineBasicBlock*, VNInfo*>::iterator LOI = LiveOut.begin(),
           LOE = LiveOut.end(); LOI != LOE; ++LOI)
      if (LOI->second == OldVN)
        LOI->second = MergedVN;
    for (DenseMap<MachineInstr*, VNInfo*>::iterator NVI = NewVNs.begin(),
           NVE = NewVNs.end(); NVI != NVE; ++NVI)
      if (NVI->second == OldVN)
        NVI->second = MergedVN;
    for (DenseMap<MachineBasicBlock*, VNInfo*>::iterator PI = Phis.begin(),
           PE = Phis.end(); PI != PE; ++PI)
      if (PI->second == OldVN)
        PI->second = MergedVN;

    // Otherwise, merge the incoming VNInfos with a phi join. Create a new
    // VNInfo to represent the joined value.
    for (DenseMap<MachineBasicBlock*, VNInfo*>::iterator I =
           IncomingVNs.begin(), E = IncomingVNs.end(); I != E; ++I) {
      I->second->hasPHIKill = true;
      unsigned KillIndex = LIs->getMBBEndIdx(I->first);
      if (!LiveInterval::isKill(I->second, KillIndex))
        LI->addKill(I->second, KillIndex);

  unsigned EndIndex = 0;
    EndIndex = LIs->getInstructionIndex(UseI);
    EndIndex = LiveIntervals::getUseIndex(EndIndex);
    EndIndex = LIs->getMBBEndIdx(MBB);
  LI->addRange(LiveRange(StartIndex, EndIndex+1, RetVNI));
    LI->addKill(RetVNI, EndIndex);

  // Memoize results so we don't have to recompute them.
    LiveOut[MBB] = RetVNI;
  if (!NewVNs.count(UseI))
    NewVNs[UseI] = RetVNI;
  Visited.insert(UseI);
/// ReconstructLiveInterval - Recompute a live interval from scratch.
void PreAllocSplitting::ReconstructLiveInterval(LiveInterval* LI) {
  BumpPtrAllocator& Alloc = LIs->getVNInfoAllocator();

  // Clear the old ranges and valnos.

  // Cache the uses and defs of the register.
  typedef DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> > RegMap;

  // Keep track of the new VNs we're creating.
  DenseMap<MachineInstr*, VNInfo*> NewVNs;
  SmallPtrSet<VNInfo*, 2> PhiVNs;

  // Cache defs, and create a new VNInfo for each def.
  for (MachineRegisterInfo::def_iterator DI = MRI->def_begin(LI->reg),
         DE = MRI->def_end(); DI != DE; ++DI) {
    Defs[(*DI).getParent()].insert(&*DI);

    unsigned DefIdx = LIs->getInstructionIndex(&*DI);
    DefIdx = LiveIntervals::getDefIndex(DefIdx);

    VNInfo* NewVN = LI->getNextValue(DefIdx, 0, Alloc);

    // If the def is a move, set the copy field.
    unsigned SrcReg, DstReg, SrcSubIdx, DstSubIdx;
    if (TII->isMoveInstr(*DI, SrcReg, DstReg, SrcSubIdx, DstSubIdx))
      if (DstReg == LI->reg)

    NewVNs[&*DI] = NewVN;

  // Cache uses as a separate pass from actually processing them.
  for (MachineRegisterInfo::use_iterator UI = MRI->use_begin(LI->reg),
         UE = MRI->use_end(); UI != UE; ++UI)
    Uses[(*UI).getParent()].insert(&*UI);

  // Now, actually process every use and use a phi construction algorithm
  // to walk from it to its reaching definitions, building VNInfos along
  // the way.
  DenseMap<MachineBasicBlock*, VNInfo*> LiveOut;
  DenseMap<MachineBasicBlock*, VNInfo*> Phis;
  SmallPtrSet<MachineInstr*, 4> Visited;
  for (MachineRegisterInfo::use_iterator UI = MRI->use_begin(LI->reg),
         UE = MRI->use_end(); UI != UE; ++UI) {
    PerformPHIConstruction(&*UI, UI->getParent(), LI, Visited, Defs,
                           Uses, NewVNs, LiveOut, Phis, true, true);

  // Add ranges for dead defs.
  for (MachineRegisterInfo::def_iterator DI = MRI->def_begin(LI->reg),
         DE = MRI->def_end(); DI != DE; ++DI) {
    unsigned DefIdx = LIs->getInstructionIndex(&*DI);
    DefIdx = LiveIntervals::getDefIndex(DefIdx);

    if (LI->liveAt(DefIdx)) continue;

    VNInfo* DeadVN = NewVNs[&*DI];
    LI->addRange(LiveRange(DefIdx, DefIdx+1, DeadVN));
    LI->addKill(DeadVN, DefIdx);
/// RenumberValno - Split the given valno out into a new vreg, allowing it to
/// be allocated to a different register. This function creates a new vreg,
/// copies the valno and its live ranges over to the new vreg's interval,
/// removes them from the old interval, and rewrites all uses and defs of
/// the original reg to the new vreg within those ranges.
void PreAllocSplitting::RenumberValno(VNInfo* VN) {
  SmallVector<VNInfo*, 4> Stack;
  SmallVector<VNInfo*, 4> VNsToCopy;

  // Walk through and copy the valno we care about, and any other valnos
  // that are two-address redefinitions of the one we care about. These
  // will need to be rewritten as well. We also check for safety of the
  // renumbering here, by making sure that none of the valnos involved has
  // phi kills.
  while (!Stack.empty()) {
    VNInfo* OldVN = Stack.back();

    // Bail out if we ever encounter a valno that has a PHI kill. We can't
    // renumber these.
    if (OldVN->hasPHIKill) return;

    VNsToCopy.push_back(OldVN);

    // Locate two-address redefinitions.
    for (SmallVector<unsigned, 4>::iterator KI = OldVN->kills.begin(),
           KE = OldVN->kills.end(); KI != KE; ++KI) {
      MachineInstr* MI = LIs->getInstructionFromIndex(*KI);
      unsigned DefIdx = MI->findRegisterDefOperandIdx(CurrLI->reg);
      if (DefIdx == ~0U) continue;
      if (MI->isRegReDefinedByTwoAddr(DefIdx)) {
          CurrLI->findDefinedVNInfo(LiveIntervals::getDefIndex(*KI));
        if (NextVN == OldVN) continue;
        Stack.push_back(NextVN);
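  // Once the worklist is drained, VNsToCopy holds the original valno plus
  // every valno reachable from it through a chain of two-address
  // redefinitions; all of them are moved to the new vreg together so that
  // tied operands keep referring to a single register.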
  // Create the new vreg.
  unsigned NewVReg = MRI->createVirtualRegister(MRI->getRegClass(CurrLI->reg));

  // Create the new live interval.
  LiveInterval& NewLI = LIs->getOrCreateInterval(NewVReg);

  for (SmallVector<VNInfo*, 4>::iterator OI = VNsToCopy.begin(), OE =
         VNsToCopy.end(); OI != OE; ++OI) {

    // Copy the valno over.
    VNInfo* NewVN = NewLI.getNextValue(OldVN->def, OldVN->copy,
                                       LIs->getVNInfoAllocator());
    NewLI.copyValNumInfo(NewVN, OldVN);
    NewLI.MergeValueInAsValue(*CurrLI, OldVN, NewVN);

    // Remove the valno from the old interval.
    CurrLI->removeValNo(OldVN);

  // Rewrite defs and uses. This is done in two stages to avoid invalidating
  // the reg_iterator.
  SmallVector<std::pair<MachineInstr*, unsigned>, 8> OpsToChange;

  for (MachineRegisterInfo::reg_iterator I = MRI->reg_begin(CurrLI->reg),
         E = MRI->reg_end(); I != E; ++I) {
    MachineOperand& MO = I.getOperand();
    unsigned InstrIdx = LIs->getInstructionIndex(&*I);

    if ((MO.isUse() && NewLI.liveAt(LiveIntervals::getUseIndex(InstrIdx))) ||
        (MO.isDef() && NewLI.liveAt(LiveIntervals::getDefIndex(InstrIdx))))
      OpsToChange.push_back(std::make_pair(&*I, I.getOperandNo()));

  for (SmallVector<std::pair<MachineInstr*, unsigned>, 8>::iterator I =
         OpsToChange.begin(), E = OpsToChange.end(); I != E; ++I) {
    MachineInstr* Inst = I->first;
    unsigned OpIdx = I->second;
    MachineOperand& MO = Inst->getOperand(OpIdx);

  // Grow the VirtRegMap, since we've created a new vreg.

  // The renumbered vreg shares a stack slot with the old register.
  if (IntervalSSMap.count(CurrLI->reg))
    IntervalSSMap[NewVReg] = IntervalSSMap[CurrLI->reg];
bool PreAllocSplitting::Rematerialize(unsigned vreg, VNInfo* ValNo,
                                      MachineBasicBlock::iterator RestorePt,
                                      SmallPtrSet<MachineInstr*, 4>& RefsInMBB) {
  MachineBasicBlock& MBB = *RestorePt->getParent();

  MachineBasicBlock::iterator KillPt = BarrierMBB->end();
  unsigned KillIdx = 0;
  if (ValNo->def == ~0U || DefMI->getParent() == BarrierMBB)
    KillPt = findSpillPoint(BarrierMBB, Barrier, NULL, RefsInMBB, KillIdx);
    KillPt = findNextEmptySlot(DefMI->getParent(), DefMI, KillIdx);

  if (KillPt == DefMI->getParent()->end())

  TII->reMaterialize(MBB, RestorePt, vreg, DefMI);
  LIs->InsertMachineInstrInMaps(prior(RestorePt), RestoreIdx);

  ReconstructLiveInterval(CurrLI);
  unsigned RematIdx = LIs->getInstructionIndex(prior(RestorePt));
  RematIdx = LiveIntervals::getDefIndex(RematIdx);
  RenumberValno(CurrLI->findDefinedVNInfo(RematIdx));
MachineInstr* PreAllocSplitting::FoldSpill(unsigned vreg,
                                           const TargetRegisterClass* RC,
                                           MachineInstr* Barrier,
                                           MachineBasicBlock* MBB,
                                           SmallPtrSet<MachineInstr*, 4>& RefsInMBB) {
  MachineBasicBlock::iterator Pt = MBB->begin();

  // Go top down if RefsInMBB is empty.
  if (RefsInMBB.empty())

  MachineBasicBlock::iterator FoldPt = Barrier;
  while (&*FoldPt != DefMI && FoldPt != MBB->begin() &&
         !RefsInMBB.count(FoldPt))

  int OpIdx = FoldPt->findRegisterDefOperandIdx(vreg, false);

  SmallVector<unsigned, 1> Ops;
  Ops.push_back(OpIdx);

  if (!TII->canFoldMemoryOperand(FoldPt, Ops))

  DenseMap<unsigned, int>::iterator I = IntervalSSMap.find(vreg);
  if (I != IntervalSSMap.end()) {
    SS = MFI->CreateStackObject(RC->getSize(), RC->getAlignment());

  MachineInstr* FMI = TII->foldMemoryOperand(*MBB->getParent(),
    LIs->ReplaceMachineInstrInMaps(FoldPt, FMI);
    FMI = MBB->insert(MBB->erase(FoldPt), FMI);

    IntervalSSMap[vreg] = SS;
    CurrSLI = &LSs->getOrCreateInterval(SS);
    if (CurrSLI->hasAtLeastOneValue())
      CurrSValNo = CurrSLI->getValNumInfo(0);
      CurrSValNo = CurrSLI->getNextValue(~0U, 0, LSs->getVNInfoAllocator());
MachineInstr* PreAllocSplitting::FoldRestore(unsigned vreg,
                                             const TargetRegisterClass* RC,
                                             MachineInstr* Barrier,
                                             MachineBasicBlock* MBB,
                                             SmallPtrSet<MachineInstr*, 4>& RefsInMBB) {
  if ((int)RestoreFoldLimit != -1 && RestoreFoldLimit == (int)NumRestoreFolds)

  // Go top down if RefsInMBB is empty.
  if (RefsInMBB.empty())

  // Can't fold a restore between a call stack setup and teardown.
  MachineBasicBlock::iterator FoldPt = Barrier;

  // Advance from barrier to call frame teardown.
  while (FoldPt != MBB->getFirstTerminator() &&
         FoldPt->getOpcode() != TRI->getCallFrameDestroyOpcode()) {
    if (RefsInMBB.count(FoldPt))

  if (FoldPt == MBB->getFirstTerminator())

  // Now find the restore point.
  while (FoldPt != MBB->getFirstTerminator() && !RefsInMBB.count(FoldPt)) {
    if (FoldPt->getOpcode() == TRI->getCallFrameSetupOpcode()) {
      while (FoldPt != MBB->getFirstTerminator() &&
             FoldPt->getOpcode() != TRI->getCallFrameDestroyOpcode()) {
        if (RefsInMBB.count(FoldPt))

      if (FoldPt == MBB->getFirstTerminator())

  if (FoldPt == MBB->getFirstTerminator())

  int OpIdx = FoldPt->findRegisterUseOperandIdx(vreg, true);

  SmallVector<unsigned, 1> Ops;
  Ops.push_back(OpIdx);

  if (!TII->canFoldMemoryOperand(FoldPt, Ops))

  MachineInstr* FMI = TII->foldMemoryOperand(*MBB->getParent(),
    LIs->ReplaceMachineInstrInMaps(FoldPt, FMI);
    FMI = MBB->insert(MBB->erase(FoldPt), FMI);
/// SplitRegLiveInterval - Split (spill and restore) the given live interval
/// so it would not cross the barrier that's being processed. Shrink wrap
/// (minimize) the live interval to the last uses.
bool PreAllocSplitting::SplitRegLiveInterval(LiveInterval *LI) {
  // Find the live range where the current interval crosses the barrier.
  LiveInterval::iterator LR =
    CurrLI->FindLiveRangeContaining(LIs->getUseIndex(BarrierIdx));
  VNInfo *ValNo = LR->valno;

  if (ValNo->def == ~1U) {
    // Defined by a dead def? How can this be?
    assert(0 && "Val# is defined by a dead def?");

  MachineInstr *DefMI = (ValNo->def != ~0U)
    ? LIs->getInstructionFromIndex(ValNo->def) : NULL;

  // If this would create a new join point, do not split.
  if (DefMI && createsNewJoin(LR, DefMI->getParent(), Barrier->getParent()))

  // Find all references in the barrier mbb.
  SmallPtrSet<MachineInstr*, 4> RefsInMBB;
  for (MachineRegisterInfo::reg_iterator I = MRI->reg_begin(CurrLI->reg),
         E = MRI->reg_end(); I != E; ++I) {
    MachineInstr *RefMI = &*I;
    if (RefMI->getParent() == BarrierMBB)
      RefsInMBB.insert(RefMI);

  // Find a point to restore the value after the barrier.
  unsigned RestoreIndex = 0;
  MachineBasicBlock::iterator RestorePt =
    findRestorePoint(BarrierMBB, Barrier, LR->end, RefsInMBB, RestoreIndex);
  if (RestorePt == BarrierMBB->end())

  if (DefMI && LIs->isReMaterializable(*LI, ValNo, DefMI))
    if (Rematerialize(LI->reg, ValNo, DefMI, RestorePt,
                      RestoreIndex, RefsInMBB))

  // Add a spill either before the barrier or after the definition.
  MachineBasicBlock *DefMBB = DefMI ? DefMI->getParent() : NULL;
  const TargetRegisterClass *RC = MRI->getRegClass(CurrLI->reg);
  unsigned SpillIndex = 0;
  MachineInstr *SpillMI = NULL;
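  // Three cases follow: if the value is defined by a phi there is no single
  // def instruction, so spill (or fold a store) just before the barrier; if an
  // earlier split already left this def's value in a stack slot, skip the
  // spill entirely; otherwise spill (or fold a store) right after the def.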
  if (ValNo->def == ~0U) {
    // If it's defined by a phi, we must split just before the barrier.
    if ((SpillMI = FoldSpill(LI->reg, RC, 0, Barrier,
                             BarrierMBB, SS, RefsInMBB))) {
      SpillIndex = LIs->getInstructionIndex(SpillMI);

      MachineBasicBlock::iterator SpillPt =
        findSpillPoint(BarrierMBB, Barrier, NULL, RefsInMBB, SpillIndex);
      if (SpillPt == BarrierMBB->begin())
        return false; // No gap to insert spill.

      SS = CreateSpillStackSlot(CurrLI->reg, RC);
      TII->storeRegToStackSlot(*BarrierMBB, SpillPt, CurrLI->reg, true, SS, RC);
      SpillMI = prior(SpillPt);
      LIs->InsertMachineInstrInMaps(SpillMI, SpillIndex);
  } else if (!IsAvailableInStack(DefMBB, CurrLI->reg, ValNo->def,
                                 RestoreIndex, SpillIndex, SS)) {
    // If it's already split, just restore the value. There is no need to spill
    // the def again.
      return false; // Def is dead. Do nothing.

    if ((SpillMI = FoldSpill(LI->reg, RC, DefMI, Barrier,
                             BarrierMBB, SS, RefsInMBB))) {
      SpillIndex = LIs->getInstructionIndex(SpillMI);

      // Check if it's possible to insert a spill after the def MI.
      MachineBasicBlock::iterator SpillPt;
      if (DefMBB == BarrierMBB) {
        // Add spill after the def and the last use before the barrier.
        SpillPt = findSpillPoint(BarrierMBB, Barrier, DefMI,
                                 RefsInMBB, SpillIndex);
        if (SpillPt == DefMBB->begin())
          return false; // No gap to insert spill.
        SpillPt = findNextEmptySlot(DefMBB, DefMI, SpillIndex);
        if (SpillPt == DefMBB->end())
          return false; // No gap to insert spill.

      // Add spill. The store instruction kills the register if def is before
      // the barrier in the barrier block.
      SS = CreateSpillStackSlot(CurrLI->reg, RC);
      TII->storeRegToStackSlot(*DefMBB, SpillPt, CurrLI->reg,
                               DefMBB == BarrierMBB, SS, RC);
      SpillMI = prior(SpillPt);
      LIs->InsertMachineInstrInMaps(SpillMI, SpillIndex);

  // Remember def instruction index to spill index mapping.
  if (DefMI && SpillMI)
    Def2SpillMap[ValNo->def] = SpillIndex;
  bool FoldedRestore = false;
  if (MachineInstr* LMI = FoldRestore(CurrLI->reg, RC, Barrier,
                                      BarrierMBB, SS, RefsInMBB)) {
    RestoreIndex = LIs->getInstructionIndex(RestorePt);
    FoldedRestore = true;
    TII->loadRegFromStackSlot(*BarrierMBB, RestorePt, CurrLI->reg, SS, RC);
    MachineInstr *LoadMI = prior(RestorePt);
    LIs->InsertMachineInstrInMaps(LoadMI, RestoreIndex);

  // Update spill stack slot live interval.
  UpdateSpillSlotInterval(ValNo, LIs->getUseIndex(SpillIndex)+1,
                          LIs->getDefIndex(RestoreIndex));

  ReconstructLiveInterval(CurrLI);

  if (!FoldedRestore) {
    unsigned RestoreIdx = LIs->getInstructionIndex(prior(RestorePt));
    RestoreIdx = LiveIntervals::getDefIndex(RestoreIdx);
    RenumberValno(CurrLI->findDefinedVNInfo(RestoreIdx));
/// SplitRegLiveIntervals - Split all register live intervals that cross the
/// barrier that's being processed.
PreAllocSplitting::SplitRegLiveIntervals(const TargetRegisterClass **RCs,
                                         SmallPtrSet<LiveInterval*, 8>& Split) {
  // First find all the virtual registers whose live intervals are intercepted
  // by the current barrier.
  SmallVector<LiveInterval*, 8> Intervals;
  for (const TargetRegisterClass **RC = RCs; *RC; ++RC) {
    // FIXME: If it's not safe to move any instruction that defines the barrier
    // register class, then it means there are some special dependencies which
    // codegen is not modelling. Ignore these barriers for now.
    if (!TII->isSafeToMoveRegClassDefs(*RC))

    std::vector<unsigned> &VRs = MRI->getRegClassVirtRegs(*RC);
    for (unsigned i = 0, e = VRs.size(); i != e; ++i) {
      unsigned Reg = VRs[i];
      if (!LIs->hasInterval(Reg))

      LiveInterval *LI = &LIs->getInterval(Reg);
      if (LI->liveAt(BarrierIdx) && !Barrier->readsRegister(Reg))
        // Virtual register live interval is intercepted by the barrier. We
        // should split and shrink wrap its interval if possible.
        Intervals.push_back(LI);

  // Process the affected live intervals.
  bool Change = false;
  while (!Intervals.empty()) {
    if (PreSplitLimit != -1 && (int)NumSplits == PreSplitLimit)
    else if (NumSplits == 4)
    LiveInterval *LI = Intervals.back();
    Intervals.pop_back();
    bool result = SplitRegLiveInterval(LI);
    if (result) Split.insert(LI);
unsigned PreAllocSplitting::getNumberOfNonSpills(
                                  SmallPtrSet<MachineInstr*, 4>& MIs,
                                  unsigned Reg, int FrameIndex,
                                  bool& FeedsTwoAddr) {
  unsigned NonSpills = 0;
  for (SmallPtrSet<MachineInstr*, 4>::iterator UI = MIs.begin(), UE = MIs.end();
    int StoreFrameIndex;
    unsigned StoreVReg = TII->isStoreToStackSlot(*UI, StoreFrameIndex);
    if (StoreVReg != Reg || StoreFrameIndex != FrameIndex)

    int DefIdx = (*UI)->findRegisterDefOperandIdx(Reg);
    if (DefIdx != -1 && (*UI)->isRegReDefinedByTwoAddr(DefIdx))
      FeedsTwoAddr = true;
/// removeDeadSpills - After doing splitting, filter through all intervals we've
/// split, and see if any of the spills are unnecessary. If so, remove them.
bool PreAllocSplitting::removeDeadSpills(SmallPtrSet<LiveInterval*, 8>& split) {
  bool changed = false;

  // Walk over all of the live intervals that were touched by the splitter,
  // and see if we can do any DCE and/or folding.
  for (SmallPtrSet<LiveInterval*, 8>::iterator LI = split.begin(),
         LE = split.end(); LI != LE; ++LI) {
    DenseMap<VNInfo*, SmallPtrSet<MachineInstr*, 4> > VNUseCount;

    // First, collect all the uses of the vreg, and sort them by their
    // reaching definition (VNInfo).
    for (MachineRegisterInfo::use_iterator UI = MRI->use_begin((*LI)->reg),
           UE = MRI->use_end(); UI != UE; ++UI) {
      unsigned index = LIs->getInstructionIndex(&*UI);
      index = LiveIntervals::getUseIndex(index);

      const LiveRange* LR = (*LI)->getLiveRangeContaining(index);
      VNUseCount[LR->valno].insert(&*UI);
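    // VNUseCount now maps each value number of this interval to the set of
    // instructions using that particular value, so the loop below can reason
    // about each reload independently: a reload whose value has no uses, or
    // only spill-store uses, is a candidate for removal.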
    // Now, take the definitions (VNInfo's) one at a time and try to DCE
    // and/or fold them away.
    for (LiveInterval::vni_iterator VI = (*LI)->vni_begin(),
           VE = (*LI)->vni_end(); VI != VE; ++VI) {

      if (DeadSplitLimit != -1 && (int)NumDeadSpills == DeadSplitLimit)

      VNInfo* CurrVN = *VI;

      // We don't currently try to handle definitions with PHI kills, because
      // it would involve processing more than one VNInfo at once.
      if (CurrVN->hasPHIKill) continue;

      // We also don't try to handle the results of PHI joins, since there's
      // no defining instruction to analyze.
      unsigned DefIdx = CurrVN->def;
      if (DefIdx == ~0U || DefIdx == ~1U) continue;

      // We're only interested in eliminating cruft introduced by the splitter,
      // which is of the form load-use or load-use-store. First, check that the
      // definition is a load, and remember what stack slot we loaded it from.
      MachineInstr* DefMI = LIs->getInstructionFromIndex(DefIdx);
      if (!TII->isLoadFromStackSlot(DefMI, FrameIndex)) continue;

      // If the definition has no uses at all, just DCE it.
      if (VNUseCount[CurrVN].size() == 0) {
        LIs->RemoveMachineInstrFromMaps(DefMI);
        (*LI)->removeValNo(CurrVN);
        DefMI->eraseFromParent();
        VNUseCount.erase(CurrVN);

      // Second, get the number of non-store uses of the definition, as well as
      // a flag indicating whether it feeds into a later two-address definition.
      bool FeedsTwoAddr = false;
      unsigned NonSpillCount = getNumberOfNonSpills(VNUseCount[CurrVN],
                                                    (*LI)->reg, FrameIndex,

      // If there's one non-store use and it doesn't feed a two-addr, then
      // this is a load-use-store case that we can try to fold.
      if (NonSpillCount == 1 && !FeedsTwoAddr) {
        // Start by finding the non-store use MachineInstr.
        SmallPtrSet<MachineInstr*, 4>::iterator UI = VNUseCount[CurrVN].begin();
        int StoreFrameIndex;
        unsigned StoreVReg = TII->isStoreToStackSlot(*UI, StoreFrameIndex);
        while (UI != VNUseCount[CurrVN].end() &&
               (StoreVReg == (*LI)->reg && StoreFrameIndex == FrameIndex)) {
          if (UI != VNUseCount[CurrVN].end())
            StoreVReg = TII->isStoreToStackSlot(*UI, StoreFrameIndex);

        if (UI == VNUseCount[CurrVN].end()) continue;

        MachineInstr* use = *UI;

        // Attempt to fold it away!
        int OpIdx = use->findRegisterUseOperandIdx((*LI)->reg, false);
        if (OpIdx == -1) continue;
        SmallVector<unsigned, 1> Ops;
        Ops.push_back(OpIdx);
        if (!TII->canFoldMemoryOperand(use, Ops)) continue;

        MachineInstr* NewMI =
          TII->foldMemoryOperand(*use->getParent()->getParent(),
                                 use, Ops, FrameIndex);

        if (!NewMI) continue;

        // Update relevant analyses.
        LIs->RemoveMachineInstrFromMaps(DefMI);
        LIs->ReplaceMachineInstrInMaps(use, NewMI);
        (*LI)->removeValNo(CurrVN);

        DefMI->eraseFromParent();
        MachineBasicBlock* MBB = use->getParent();
        NewMI = MBB->insert(MBB->erase(use), NewMI);
        VNUseCount[CurrVN].erase(use);

        // Remove deleted instructions. Note that we need to remove them from
        // the VNInfo->use map as well, just to be safe.
        for (SmallPtrSet<MachineInstr*, 4>::iterator II =
               VNUseCount[CurrVN].begin(), IE = VNUseCount[CurrVN].end();
          for (DenseMap<VNInfo*, SmallPtrSet<MachineInstr*, 4> >::iterator
                 VNI = VNUseCount.begin(), VNE = VNUseCount.end(); VNI != VNE;
            if (VNI->first != CurrVN)
              VNI->second.erase(*II);
          LIs->RemoveMachineInstrFromMaps(*II);
          (*II)->eraseFromParent();

        VNUseCount.erase(CurrVN);

        for (DenseMap<VNInfo*, SmallPtrSet<MachineInstr*, 4> >::iterator
               VI = VNUseCount.begin(), VE = VNUseCount.end(); VI != VE; ++VI)
          if (VI->second.erase(use))
            VI->second.insert(NewMI);

      // If there's more than one non-store instruction, we can't profitably
      // fold it, so bail.
      if (NonSpillCount) continue;

      // Otherwise, this is a load-store case, so DCE them.
      for (SmallPtrSet<MachineInstr*, 4>::iterator UI =
             VNUseCount[CurrVN].begin(), UE = VNUseCount[CurrVN].end();
        LIs->RemoveMachineInstrFromMaps(*UI);
        (*UI)->eraseFromParent();

      VNUseCount.erase(CurrVN);

      LIs->RemoveMachineInstrFromMaps(DefMI);
      (*LI)->removeValNo(CurrVN);
      DefMI->eraseFromParent();
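/// createsNewJoin - Conservatively determine whether splitting the value
/// defined in DefMBB around the barrier would create a new join point for the
/// value downstream; SplitRegLiveInterval refuses to split when this returns
/// true.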
bool PreAllocSplitting::createsNewJoin(LiveRange* LR,
                                       MachineBasicBlock* DefMBB,
                                       MachineBasicBlock* BarrierMBB) {
  if (DefMBB == BarrierMBB)

  if (LR->valno->hasPHIKill)

  unsigned MBBEnd = LIs->getMBBEndIdx(BarrierMBB);
  if (LR->end < MBBEnd)

  MachineLoopInfo& MLI = getAnalysis<MachineLoopInfo>();
  if (MLI.getLoopFor(DefMBB) != MLI.getLoopFor(BarrierMBB))

  MachineDominatorTree& MDT = getAnalysis<MachineDominatorTree>();
  SmallPtrSet<MachineBasicBlock*, 4> Visited;
  typedef std::pair<MachineBasicBlock*,
                    MachineBasicBlock::succ_iterator> ItPair;
  SmallVector<ItPair, 4> Stack;
  Stack.push_back(std::make_pair(BarrierMBB, BarrierMBB->succ_begin()));

  while (!Stack.empty()) {
    ItPair P = Stack.back();

    MachineBasicBlock* PredMBB = P.first;
    MachineBasicBlock::succ_iterator S = P.second;

    if (S == PredMBB->succ_end())
    else if (Visited.count(*S)) {
      Stack.push_back(std::make_pair(PredMBB, ++S));
      Stack.push_back(std::make_pair(PredMBB, S+1));

    MachineBasicBlock* MBB = *S;
    Visited.insert(MBB);

    if (MBB == BarrierMBB)

    MachineDomTreeNode* DefMDTN = MDT.getNode(DefMBB);
    MachineDomTreeNode* BarrierMDTN = MDT.getNode(BarrierMBB);
    MachineDomTreeNode* MDTN = MDT.getNode(MBB)->getIDom();
      if (MDTN == DefMDTN)
      else if (MDTN == BarrierMDTN)
      MDTN = MDTN->getIDom();

    MBBEnd = LIs->getMBBEndIdx(MBB);
    if (LR->end > MBBEnd)
      Stack.push_back(std::make_pair(MBB, MBB->succ_begin()));
bool PreAllocSplitting::runOnMachineFunction(MachineFunction &MF) {
  TM = &MF.getTarget();
  TRI = TM->getRegisterInfo();
  TII = TM->getInstrInfo();
  MFI = MF.getFrameInfo();
  MRI = &MF.getRegInfo();
  LIs = &getAnalysis<LiveIntervals>();
  LSs = &getAnalysis<LiveStacks>();
  VRM = &getAnalysis<VirtRegMap>();

  bool MadeChange = false;

  // Make sure blocks are numbered in order.
  MF.RenumberBlocks();

  MachineBasicBlock *Entry = MF.begin();
  SmallPtrSet<MachineBasicBlock*,16> Visited;

  SmallPtrSet<LiveInterval*, 8> Split;
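  // Driver loop: visit the blocks in depth-first order and, for every
  // instruction whose descriptor reports register class barriers, split each
  // virtual register interval of those classes that is live across the
  // barrier. Intervals that were changed are collected in Split so the dead
  // spill cleanup below can revisit them.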
  for (df_ext_iterator<MachineBasicBlock*, SmallPtrSet<MachineBasicBlock*,16> >
         DFI = df_ext_begin(Entry, Visited), E = df_ext_end(Entry, Visited);
    for (MachineBasicBlock::iterator I = BarrierMBB->begin(),
           E = BarrierMBB->end(); I != E; ++I) {
      const TargetRegisterClass **BarrierRCs =
        Barrier->getDesc().getRegClassBarriers();
      BarrierIdx = LIs->getInstructionIndex(Barrier);
      MadeChange |= SplitRegLiveIntervals(BarrierRCs, Split);

  MadeChange |= removeDeadSpills(Split);