STATISTIC(numAborts , "Number of times interval joining aborted");
char SimpleRegisterCoalescing::ID = 0;
-namespace {
- static cl::opt<bool>
- EnableJoining("join-liveintervals",
- cl::desc("Coalesce copies (default=true)"),
- cl::init(true));
+static cl::opt<bool>
+EnableJoining("join-liveintervals",
+ cl::desc("Coalesce copies (default=true)"),
+ cl::init(true));
- static cl::opt<bool>
- NewHeuristic("new-coalescer-heuristic",
- cl::desc("Use new coalescer heuristic"),
- cl::init(false));
+static cl::opt<bool>
+NewHeuristic("new-coalescer-heuristic",
+ cl::desc("Use new coalescer heuristic"),
+ cl::init(false));
- RegisterPass<SimpleRegisterCoalescing>
- X("simple-register-coalescing", "Simple Register Coalescing");
+static RegisterPass<SimpleRegisterCoalescing>
+X("simple-register-coalescing", "Simple Register Coalescing");
- // Declare that we implement the RegisterCoalescer interface
- RegisterAnalysisGroup<RegisterCoalescer, true/*The Default*/> V(X);
-}
+// Declare that we implement the RegisterCoalescer interface
+static RegisterAnalysisGroup<RegisterCoalescer, true/*The Default*/> V(X);
const PassInfo *llvm::SimpleRegisterCoalescingID = X.getPassInfo();
// BValNo is a value number in B that is defined by a copy from A. 'B3' in
// the example above.
LiveInterval::iterator BLR = IntB.FindLiveRangeContaining(CopyIdx);
+ if (BLR == IntB.end()) // Should never happen!
+ return false;
VNInfo *BValNo = BLR->valno;
// Get the location that B is defined at. Two options: either this value has
// AValNo is the value number in A that defines the copy, A3 in the example.
LiveInterval::iterator ALR = IntA.FindLiveRangeContaining(CopyIdx-1);
+ if (ALR == IntA.end()) // Should never happen!
+ return false;
VNInfo *AValNo = ALR->valno;
// If AValNo is defined as a copy from IntB, we can potentially process this.
// Get the LiveRange in IntB that this value number starts with.
LiveInterval::iterator ValLR = IntB.FindLiveRangeContaining(AValNo->def-1);
+ if (ValLR == IntB.end()) // Should never happen!
+ return false;
// Make sure that the end of the live range is inside the same block as
// CopyMI.
// BValNo is a value number in B that is defined by a copy from A. 'B3' in
// the example above.
LiveInterval::iterator BLR = IntB.FindLiveRangeContaining(CopyIdx);
+ if (BLR == IntB.end()) // Should never happen!
+ return false;
VNInfo *BValNo = BLR->valno;
// Get the location that B is defined at. Two options: either this value has
// AValNo is the value number in A that defines the copy, A3 in the example.
LiveInterval::iterator ALR = IntA.FindLiveRangeContaining(CopyIdx-1);
+ if (ALR == IntA.end()) // Should never happen!
+ return false;
VNInfo *AValNo = ALR->valno;
// If other defs can reach uses of this def, then it's not safe to perform
// the optimization.
MachineInstr *UseMI = &*UI;
unsigned UseIdx = li_->getInstructionIndex(UseMI);
LiveInterval::iterator ULR = IntA.FindLiveRangeContaining(UseIdx);
+ if (ULR == IntA.end())
+ continue;
if (ULR->valno == AValNo && JoinedCopies.count(UseMI))
return false;
}
continue;
unsigned UseIdx = li_->getInstructionIndex(UseMI);
LiveInterval::iterator ULR = IntA.FindLiveRangeContaining(UseIdx);
- if (ULR->valno != AValNo)
+ if (ULR == IntA.end() || ULR->valno != AValNo)
continue;
UseMO.setReg(NewReg);
if (UseMI == CopyMI)
// remove that val# as well. However this live range is being
// extended to the end of the existing live range defined by the copy.
unsigned DefIdx = li_->getDefIndex(UseIdx);
- LiveInterval::iterator DLR = IntB.FindLiveRangeContaining(DefIdx);
+ const LiveRange *DLR = IntB.getLiveRangeContaining(DefIdx);
BHasPHIKill |= DLR->valno->hasPHIKill;
assert(DLR->valno->def == DefIdx);
BDeadValNos.push_back(DLR->valno);
/// isBackEdgeCopy - Returns true if CopyMI is a back edge copy.
///
bool SimpleRegisterCoalescing::isBackEdgeCopy(MachineInstr *CopyMI,
- unsigned DstReg) {
+ unsigned DstReg) const {
MachineBasicBlock *MBB = CopyMI->getParent();
const MachineLoop *L = loopInfo->getLoopFor(MBB);
if (!L)
MachineOperand &O = I.getOperand();
MachineInstr *UseMI = &*I;
++I;
+ unsigned OldSubIdx = O.getSubReg();
if (DstIsPhys) {
- unsigned UseSubIdx = O.getSubReg();
unsigned UseDstReg = DstReg;
- if (UseSubIdx)
- UseDstReg = tri_->getSubReg(DstReg, UseSubIdx);
+ if (OldSubIdx)
+ UseDstReg = tri_->getSubReg(DstReg, OldSubIdx);
O.setReg(UseDstReg);
O.setSubReg(0);
} else {
- unsigned OldSubIdx = O.getSubReg();
// Sub-register indexes go from small to large. e.g.
- // RAX: 0 -> AL, 1 -> AH, 2 -> AX, 3 -> EAX
- // EAX: 0 -> AL, 1 -> AH, 2 -> AX
+ // RAX: 1 -> AL, 2 -> AX, 3 -> EAX
+ // EAX: 1 -> AL, 2 -> AX
// So RAX's sub-register 2 is AX, RAX's sub-register 3 is EAX, whose
// sub-register 2 is also AX.
if (SubIdx && OldSubIdx && SubIdx != OldSubIdx)
}
}
+/// RemoveDeadImpDef - Remove implicit_def instructions which are "re-defining"
+/// registers due to insert_subreg coalescing. e.g.
+/// r1024 = op
+/// r1025 = implicit_def
+/// r1025 = insert_subreg r1025, r1024
+/// = op r1025
+/// =>
+/// r1025 = op
+/// r1025 = implicit_def
+/// r1025 = insert_subreg r1025, r1025
+/// = op r1025
+void
+SimpleRegisterCoalescing::RemoveDeadImpDef(unsigned Reg, LiveInterval &LI) {
+ // Walk all operands referencing Reg; advance the iterator before possibly
+ // erasing the instruction it points at.
+ for (MachineRegisterInfo::reg_iterator I = mri_->reg_begin(Reg),
+ E = mri_->reg_end(); I != E; ) {
+ MachineOperand &O = I.getOperand();
+ MachineInstr *DefMI = &*I;
+ ++I;
+ // Only defs by IMPLICIT_DEF are candidates for removal.
+ if (!O.isDef())
+ continue;
+ if (DefMI->getOpcode() != TargetInstrInfo::IMPLICIT_DEF)
+ continue;
+ // Only delete the implicit_def if the register is already live across it;
+ // then the implicit_def is redundant (it "re-defines" a live value).
+ if (!LI.liveBeforeAndAt(li_->getInstructionIndex(DefMI)))
+ continue;
+ li_->RemoveMachineInstrFromMaps(DefMI);
+ DefMI->eraseFromParent();
+ }
+}
+
/// RemoveUnnecessaryKills - Remove kill markers that are no longer accurate
/// due to live range lengthening as the result of coalescing.
void SimpleRegisterCoalescing::RemoveUnnecessaryKills(unsigned Reg,
unsigned UseIdx = li_->getUseIndex(li_->getInstructionIndex(UseMI));
if (JoinedCopies.count(UseMI))
continue;
- LiveInterval::const_iterator UI = LI.FindLiveRangeContaining(UseIdx);
- assert(UI != LI.end());
+ const LiveRange *UI = LI.getLiveRangeContaining(UseIdx);
if (!LI.isKill(UI->valno, UseIdx+1))
UseMO.setIsKill(false);
}
/// removeIntervalIfEmpty - Check if the live interval of a physical register
/// is empty, if so remove it and also remove the empty intervals of its
-/// sub-registers.
-static void removeIntervalIfEmpty(LiveInterval &li, LiveIntervals *li_,
+/// sub-registers. Return true if live interval is removed.
+static bool removeIntervalIfEmpty(LiveInterval &li, LiveIntervals *li_,
const TargetRegisterInfo *tri_) {
if (li.empty()) {
- li_->removeInterval(li.reg);
if (TargetRegisterInfo::isPhysicalRegister(li.reg))
for (const unsigned* SR = tri_->getSubRegisters(li.reg); *SR; ++SR) {
if (!li_->hasInterval(*SR))
if (sli.empty())
li_->removeInterval(*SR);
}
+ li_->removeInterval(li.reg);
+ return true;
}
+ return false;
}
/// ShortenDeadCopyLiveRange - Shorten a live range defined by a dead copy.
-///
-void SimpleRegisterCoalescing::ShortenDeadCopyLiveRange(LiveInterval &li,
+/// Return true if live interval is removed.
+bool SimpleRegisterCoalescing::ShortenDeadCopyLiveRange(LiveInterval &li,
MachineInstr *CopyMI) {
unsigned CopyIdx = li_->getInstructionIndex(CopyMI);
LiveInterval::iterator MLR =
li.FindLiveRangeContaining(li_->getDefIndex(CopyIdx));
if (MLR == li.end())
- return; // Already removed by ShortenDeadCopySrcLiveRange.
+ return false; // Already removed by ShortenDeadCopySrcLiveRange.
unsigned RemoveStart = MLR->start;
unsigned RemoveEnd = MLR->end;
// Remove the liverange that's defined by this.
if (RemoveEnd == li_->getDefIndex(CopyIdx)+1) {
removeRange(li, RemoveStart, RemoveEnd, li_, tri_);
- removeIntervalIfEmpty(li, li_, tri_);
+ return removeIntervalIfEmpty(li, li_, tri_);
}
+ return false;
}
/// PropagateDeadness - Propagate the dead marker to the instruction which
}
}
-/// ShortenDeadCopyLiveRange - Shorten a live range as it's artificially
-/// extended by a dead copy. Mark the last use (if any) of the val# as kill
-/// as ends the live range there. If there isn't another use, then this
-/// live range is dead.
-void
+/// isSameOrFallThroughBB - Return true if MBB == SuccMBB or MBB simply
+/// falls through to SuccMBB.
+static bool isSameOrFallThroughBB(MachineBasicBlock *MBB,
+ MachineBasicBlock *SuccMBB,
+ const TargetInstrInfo *tii_) {
+ if (MBB == SuccMBB)
+ return true;
+ // MBB falls through iff AnalyzeBranch succeeds and finds no branch
+ // terminators (neither a true nor a false destination block), and SuccMBB
+ // is actually a CFG successor of MBB.
+ MachineBasicBlock *TBB = 0, *FBB = 0;
+ std::vector<MachineOperand> Cond;
+ return !tii_->AnalyzeBranch(*MBB, TBB, FBB, Cond) && !TBB && !FBB &&
+ MBB->isSuccessor(SuccMBB);
+}
+
+/// ShortenDeadCopySrcLiveRange - Shorten a live range as it's artificially
+/// extended by a dead copy. Mark the last use (if any) of the val# as kill and
+/// end the live range there. If there isn't another use, then this live range
+/// is dead. Return true if live interval is removed.
+bool
SimpleRegisterCoalescing::ShortenDeadCopySrcLiveRange(LiveInterval &li,
MachineInstr *CopyMI) {
unsigned CopyIdx = li_->getInstructionIndex(CopyMI);
// first instruction index starts at > 0 value.
assert(TargetRegisterInfo::isPhysicalRegister(li.reg));
// Live-in to the function but dead. Remove it from entry live-in set.
- mf_->begin()->removeLiveIn(li.reg);
- LiveInterval::iterator LR = li.FindLiveRangeContaining(CopyIdx);
+ if (mf_->begin()->isLiveIn(li.reg))
+ mf_->begin()->removeLiveIn(li.reg);
+ const LiveRange *LR = li.getLiveRangeContaining(CopyIdx);
removeRange(li, LR->start, LR->end, li_, tri_);
- removeIntervalIfEmpty(li, li_, tri_);
- return;
+ return removeIntervalIfEmpty(li, li_, tri_);
}
LiveInterval::iterator LR = li.FindLiveRangeContaining(CopyIdx-1);
if (LR == li.end())
// Livein but defined by a phi.
- return;
+ return false;
unsigned RemoveStart = LR->start;
unsigned RemoveEnd = li_->getDefIndex(CopyIdx)+1;
if (LR->end > RemoveEnd)
// More uses past this copy? Nothing to do.
- return;
+ return false;
+ MachineBasicBlock *CopyMBB = CopyMI->getParent();
+ unsigned MBBStart = li_->getMBBStartIdx(CopyMBB);
unsigned LastUseIdx;
- MachineOperand *LastUse =
- lastRegisterUse(LR->start, CopyIdx-1, li.reg, LastUseIdx);
+ MachineOperand *LastUse = lastRegisterUse(LR->start, CopyIdx-1, li.reg,
+ LastUseIdx);
if (LastUse) {
+ MachineInstr *LastUseMI = LastUse->getParent();
+ if (!isSameOrFallThroughBB(LastUseMI->getParent(), CopyMBB, tii_)) {
+ // r1024 = op
+ // ...
+ // BB1:
+ // = r1024
+ //
+ // BB2:
+ // r1025<dead> = r1024<kill>
+ if (MBBStart < LR->end)
+ removeRange(li, MBBStart, LR->end, li_, tri_);
+ return false;
+ }
+
// There are uses before the copy, just shorten the live range to the end
// of last use.
LastUse->setIsKill();
- MachineInstr *LastUseMI = LastUse->getParent();
removeRange(li, li_->getDefIndex(LastUseIdx), LR->end, li_, tri_);
unsigned SrcReg, DstReg;
if (tii_->isMoveInstr(*LastUseMI, SrcReg, DstReg) &&
int DeadIdx = LastUseMI->findRegisterDefOperandIdx(li.reg, false, tri_);
LastUseMI->getOperand(DeadIdx).setIsDead();
}
- return;
+ return false;
}
// Is it livein?
- MachineBasicBlock *CopyMBB = CopyMI->getParent();
- unsigned MBBStart = li_->getMBBStartIdx(CopyMBB);
if (LR->start <= MBBStart && LR->end > MBBStart) {
if (LR->start == 0) {
assert(TargetRegisterInfo::isPhysicalRegister(li.reg));
PropagateDeadness(li, CopyMI, RemoveStart, li_, tri_);
removeRange(li, RemoveStart, LR->end, li_, tri_);
- removeIntervalIfEmpty(li, li_, tri_);
+ return removeIntervalIfEmpty(li, li_, tri_);
+}
+
+/// CanCoalesceWithImpDef - Returns true if the specified copy instruction
+/// from an implicit def to another register can be coalesced away.
+bool SimpleRegisterCoalescing::CanCoalesceWithImpDef(MachineInstr *CopyMI,
+ LiveInterval &li,
+ LiveInterval &ImpLi) const{
+ // The copy must kill the implicit_def'd register.
+ if (!CopyMI->killsRegister(ImpLi.reg))
+ return false;
+ // The val# in 'li' covering the copy must be defined by this very copy and
+ // must not reach a PHI join point.
+ unsigned CopyIdx = li_->getDefIndex(li_->getInstructionIndex(CopyMI));
+ LiveInterval::iterator LR = li.FindLiveRangeContaining(CopyIdx);
+ if (LR == li.end())
+ return false;
+ if (LR->valno->hasPHIKill)
+ return false;
+ if (LR->valno->def != CopyIdx)
+ return false;
+ // Make sure all of val# uses are copies.
+ for (MachineRegisterInfo::use_iterator UI = mri_->use_begin(li.reg),
+ UE = mri_->use_end(); UI != UE;) {
+ MachineInstr *UseMI = &*UI;
+ ++UI;
+ if (JoinedCopies.count(UseMI))
+ continue;
+ // Skip uses belonging to other value numbers of 'li'.
+ unsigned UseIdx = li_->getUseIndex(li_->getInstructionIndex(UseMI));
+ LiveInterval::iterator ULR = li.FindLiveRangeContaining(UseIdx);
+ if (ULR == li.end() || ULR->valno != LR->valno)
+ continue;
+ // If the use is not a copy, then it's not safe to coalesce the move.
+ // An INSERT_SUBREG that merely re-inserts into li.reg is tolerated.
+ unsigned SrcReg, DstReg;
+ if (!tii_->isMoveInstr(*UseMI, SrcReg, DstReg)) {
+ if (UseMI->getOpcode() == TargetInstrInfo::INSERT_SUBREG &&
+ UseMI->getOperand(1).getReg() == li.reg)
+ continue;
+ return false;
+ }
+ }
+ return true;
+}
+
+
+/// RemoveCopiesFromValNo - The specified value# is defined by an implicit
+/// def and it is being removed. Turn all copies from this value# into
+/// identity copies so they will be removed.
+void SimpleRegisterCoalescing::RemoveCopiesFromValNo(LiveInterval &li,
+ VNInfo *VNI) {
+ MachineInstr *ImpDef = NULL;
+ MachineOperand *LastUse = NULL;
+ // Track the latest non-copy use of this val# past its def.
+ unsigned LastUseIdx = li_->getUseIndex(VNI->def);
+ for (MachineRegisterInfo::reg_iterator RI = mri_->reg_begin(li.reg),
+ RE = mri_->reg_end(); RI != RE;) {
+ MachineOperand *MO = &RI.getOperand();
+ MachineInstr *MI = &*RI;
+ ++RI;
+ if (MO->isDef()) {
+ // Remember the (single) implicit_def defining this register.
+ if (MI->getOpcode() == TargetInstrInfo::IMPLICIT_DEF) {
+ assert(!ImpDef && "Multiple implicit_def defining same register?");
+ ImpDef = MI;
+ }
+ continue;
+ }
+ if (JoinedCopies.count(MI))
+ continue;
+ // Only consider uses that belong to the val# being removed.
+ unsigned UseIdx = li_->getUseIndex(li_->getInstructionIndex(MI));
+ LiveInterval::iterator ULR = li.FindLiveRangeContaining(UseIdx);
+ if (ULR == li.end() || ULR->valno != VNI)
+ continue;
+ // If the use is a copy, turn it into an identity copy.
+ unsigned SrcReg, DstReg;
+ if (tii_->isMoveInstr(*MI, SrcReg, DstReg) && SrcReg == li.reg) {
+ // Each use MI may have multiple uses of this register. Change them all.
+ for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
+ MachineOperand &MO = MI->getOperand(i);
+ if (MO.isReg() && MO.getReg() == li.reg)
+ MO.setReg(DstReg);
+ }
+ JoinedCopies.insert(MI);
+ } else if (UseIdx > LastUseIdx) {
+ LastUseIdx = UseIdx;
+ LastUse = MO;
+ }
+ }
+ if (LastUse)
+ LastUse->setIsKill();
+ else {
+ // Remove dead implicit_def.
+ // NOTE(review): ImpDef is dereferenced unconditionally here — relies on
+ // the caller's precondition that VNI is defined by an implicit_def, so an
+ // IMPLICIT_DEF was seen in the loop above. Verify this invariant holds.
+ li_->RemoveMachineInstrFromMaps(ImpDef);
+ ImpDef->eraseFromParent();
+ }
+}
+
+/// getMatchingSuperReg - Return a super-register of the specified register
+/// Reg so its sub-register of index SubIdx is Reg, and the super-register is
+/// a member of the given register class. Returns 0 if no such super-register
+/// exists.
+static unsigned getMatchingSuperReg(unsigned Reg, unsigned SubIdx,
+ const TargetRegisterClass *RC,
+ const TargetRegisterInfo* TRI) {
+ // Note: the loop condition is a declaration — iteration stops when the
+ // null-terminated super-register list yields 0.
+ for (const unsigned *SRs = TRI->getSuperRegisters(Reg);
+ unsigned SR = *SRs; ++SRs)
+ if (Reg == TRI->getSubReg(SR, SubIdx) && RC->contains(SR))
+ return SR;
+ return 0;
}
/// JoinCopy - Attempt to join intervals corresponding to SrcReg/DstReg,
unsigned SrcReg;
unsigned DstReg;
bool isExtSubReg = CopyMI->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG;
+ bool isInsSubReg = CopyMI->getOpcode() == TargetInstrInfo::INSERT_SUBREG;
unsigned SubIdx = 0;
if (isExtSubReg) {
DstReg = CopyMI->getOperand(0).getReg();
SrcReg = CopyMI->getOperand(1).getReg();
+ } else if (isInsSubReg) {
+ if (CopyMI->getOperand(2).getSubReg()) {
+ DOUT << "\tSource of insert_subreg is already coalesced "
+ << "to another register.\n";
+ return false; // Not coalescable.
+ }
+ DstReg = CopyMI->getOperand(0).getReg();
+ SrcReg = CopyMI->getOperand(2).getReg();
} else if (!tii_->isMoveInstr(*CopyMI, SrcReg, DstReg)) {
assert(0 && "Unrecognized copy instruction!");
return false;
}
unsigned RealDstReg = 0;
- if (isExtSubReg) {
- SubIdx = CopyMI->getOperand(2).getImm();
- if (SrcIsPhys) {
+ unsigned RealSrcReg = 0;
+ if (isExtSubReg || isInsSubReg) {
+ SubIdx = CopyMI->getOperand(isExtSubReg ? 2 : 3).getImm();
+ if (SrcIsPhys && isExtSubReg) {
// r1024 = EXTRACT_SUBREG EAX, 0 then r1024 is really going to be
// coalesced with AX.
- SrcReg = tri_->getSubReg(SrcReg, SubIdx);
+ unsigned DstSubIdx = CopyMI->getOperand(0).getSubReg();
+ if (DstSubIdx) {
+ // r1024<2> = EXTRACT_SUBREG EAX, 2. Then r1024 has already been
+ // coalesced to a larger register so the subreg indices cancel out.
+ if (DstSubIdx != SubIdx) {
+ DOUT << "\t Sub-register indices mismatch.\n";
+ return false; // Not coalescable.
+ }
+ } else
+ SrcReg = tri_->getSubReg(SrcReg, SubIdx);
+ SubIdx = 0;
+ } else if (DstIsPhys && isInsSubReg) {
+ // EAX = INSERT_SUBREG EAX, r1024, 0
+ unsigned SrcSubIdx = CopyMI->getOperand(2).getSubReg();
+ if (SrcSubIdx) {
+ // EAX = INSERT_SUBREG EAX, r1024<2>, 2 Then r1024 has already been
+ // coalesced to a larger register so the subreg indices cancel out.
+ if (SrcSubIdx != SubIdx) {
+ DOUT << "\t Sub-register indices mismatch.\n";
+ return false; // Not coalescable.
+ }
+ } else
+ DstReg = tri_->getSubReg(DstReg, SubIdx);
SubIdx = 0;
- } else if (DstIsPhys) {
+ } else if ((DstIsPhys && isExtSubReg) || (SrcIsPhys && isInsSubReg)) {
// If this is a extract_subreg where dst is a physical register, e.g.
// cl = EXTRACT_SUBREG reg1024, 1
// then create and update the actual physical register allocated to RHS.
- const TargetRegisterClass *RC = mri_->getRegClass(SrcReg);
- for (const unsigned *SRs = tri_->getSuperRegisters(DstReg);
- unsigned SR = *SRs; ++SRs) {
- if (DstReg == tri_->getSubReg(SR, SubIdx) &&
- RC->contains(SR)) {
- RealDstReg = SR;
- break;
- }
+ // Ditto for
+ // reg1024 = INSERT_SUBREG r1024, cl, 1
+ if (CopyMI->getOperand(1).getSubReg()) {
+ DOUT << "\tSrc of extract_ / insert_subreg already coalesced with reg"
+ << " of a super-class.\n";
+ return false; // Not coalescable.
+ }
+ const TargetRegisterClass *RC =
+ mri_->getRegClass(isExtSubReg ? SrcReg : DstReg);
+ if (isExtSubReg) {
+ RealDstReg = getMatchingSuperReg(DstReg, SubIdx, RC, tri_);
+ assert(RealDstReg && "Invalid extra_subreg instruction!");
+ } else {
+ RealSrcReg = getMatchingSuperReg(SrcReg, SubIdx, RC, tri_);
+ assert(RealSrcReg && "Invalid extra_subreg instruction!");
}
- assert(RealDstReg && "Invalid extra_subreg instruction!");
// For this type of EXTRACT_SUBREG, conservatively
// check if the live interval of the source register interfere with the
// actual super physical register we are trying to coalesce with.
- LiveInterval &RHS = li_->getInterval(SrcReg);
- if (li_->hasInterval(RealDstReg) &&
- RHS.overlaps(li_->getInterval(RealDstReg))) {
+ unsigned PhysReg = isExtSubReg ? RealDstReg : RealSrcReg;
+ LiveInterval &RHS = li_->getInterval(isExtSubReg ? SrcReg : DstReg);
+ if (li_->hasInterval(PhysReg) &&
+ RHS.overlaps(li_->getInterval(PhysReg))) {
DOUT << "Interfere with register ";
- DEBUG(li_->getInterval(RealDstReg).print(DOUT, tri_));
+ DEBUG(li_->getInterval(PhysReg).print(DOUT, tri_));
return false; // Not coalescable
}
- for (const unsigned* SR = tri_->getSubRegisters(RealDstReg); *SR; ++SR)
+ for (const unsigned* SR = tri_->getSubRegisters(PhysReg); *SR; ++SR)
if (li_->hasInterval(*SR) && RHS.overlaps(li_->getInterval(*SR))) {
DOUT << "Interfere with sub-register ";
DEBUG(li_->getInterval(*SR).print(DOUT, tri_));
}
SubIdx = 0;
} else {
- unsigned SrcSize= li_->getInterval(SrcReg).getSize() / InstrSlots::NUM;
- unsigned DstSize= li_->getInterval(DstReg).getSize() / InstrSlots::NUM;
- const TargetRegisterClass *RC = mri_->getRegClass(DstReg);
- unsigned Threshold = allocatableRCRegs_[RC].count();
- // Be conservative. If both sides are virtual registers, do not coalesce
- // if this will cause a high use density interval to target a smaller set
- // of registers.
- if (DstSize > Threshold || SrcSize > Threshold) {
- LiveVariables::VarInfo &svi = lv_->getVarInfo(SrcReg);
- LiveVariables::VarInfo &dvi = lv_->getVarInfo(DstReg);
- if ((float)dvi.NumUses / DstSize < (float)svi.NumUses / SrcSize) {
- Again = true; // May be possible to coalesce later.
- return false;
+ unsigned OldSubIdx = isExtSubReg ? CopyMI->getOperand(0).getSubReg()
+ : CopyMI->getOperand(2).getSubReg();
+ if (OldSubIdx) {
+ if (OldSubIdx == SubIdx && !differingRegisterClasses(SrcReg, DstReg))
+ // r1024<2> = EXTRACT_SUBREG r1025, 2. Then r1024 has already been
+ // coalesced to a larger register so the subreg indices cancel out.
+ // Also check if the other larger register is of the same register
+ // class as the would be resulting register.
+ SubIdx = 0;
+ else {
+ DOUT << "\t Sub-register indices mismatch.\n";
+ return false; // Not coalescable.
+ }
+ }
+ if (SubIdx) {
+ unsigned LargeReg = isExtSubReg ? SrcReg : DstReg;
+ unsigned SmallReg = isExtSubReg ? DstReg : SrcReg;
+ unsigned LargeRegSize =
+ li_->getInterval(LargeReg).getSize() / InstrSlots::NUM;
+ unsigned SmallRegSize =
+ li_->getInterval(SmallReg).getSize() / InstrSlots::NUM;
+ const TargetRegisterClass *RC = mri_->getRegClass(SmallReg);
+ unsigned Threshold = allocatableRCRegs_[RC].count();
+ // Be conservative. If both sides are virtual registers, do not coalesce
+ // if this will cause a high use density interval to target a smaller
+ // set of registers.
+ if (SmallRegSize > Threshold || LargeRegSize > Threshold) {
+ LiveVariables::VarInfo &svi = lv_->getVarInfo(LargeReg);
+ LiveVariables::VarInfo &dvi = lv_->getVarInfo(SmallReg);
+ if ((float)dvi.NumUses / SmallRegSize <
+ (float)svi.NumUses / LargeRegSize) {
+ Again = true; // May be possible to coalesce later.
+ return false;
+ }
}
}
}
DOUT << ": ";
// Check if it is necessary to propagate "isDead" property.
- MachineOperand *mopd = CopyMI->findRegisterDefOperand(DstReg, false);
- bool isDead = mopd->isDead();
-
- // We need to be careful about coalescing a source physical register with a
- // virtual register. Once the coalescing is done, it cannot be broken and
- // these are not spillable! If the destination interval uses are far away,
- // think twice about coalescing them!
- if (!isDead && (SrcIsPhys || DstIsPhys) && !isExtSubReg) {
- LiveInterval &JoinVInt = SrcIsPhys ? DstInt : SrcInt;
- unsigned JoinVReg = SrcIsPhys ? DstReg : SrcReg;
- unsigned JoinPReg = SrcIsPhys ? SrcReg : DstReg;
- const TargetRegisterClass *RC = mri_->getRegClass(JoinVReg);
- unsigned Threshold = allocatableRCRegs_[RC].count() * 2;
- if (TheCopy.isBackEdge)
- Threshold *= 2; // Favors back edge copies.
-
- // If the virtual register live interval is long but it has low use desity,
- // do not join them, instead mark the physical register as its allocation
- // preference.
- unsigned Length = JoinVInt.getSize() / InstrSlots::NUM;
- LiveVariables::VarInfo &vi = lv_->getVarInfo(JoinVReg);
- if (Length > Threshold &&
- (((float)vi.NumUses / Length) < (1.0 / Threshold))) {
- JoinVInt.preference = JoinPReg;
- ++numAborts;
- DOUT << "\tMay tie down a physical register, abort!\n";
- Again = true; // May be possible to coalesce later.
- return false;
+ if (!isExtSubReg && !isInsSubReg) {
+ MachineOperand *mopd = CopyMI->findRegisterDefOperand(DstReg, false);
+ bool isDead = mopd->isDead();
+
+ // We need to be careful about coalescing a source physical register with a
+ // virtual register. Once the coalescing is done, it cannot be broken and
+ // these are not spillable! If the destination interval uses are far away,
+ // think twice about coalescing them!
+ if (!isDead && (SrcIsPhys || DstIsPhys)) {
+ LiveInterval &JoinVInt = SrcIsPhys ? DstInt : SrcInt;
+ unsigned JoinVReg = SrcIsPhys ? DstReg : SrcReg;
+ unsigned JoinPReg = SrcIsPhys ? SrcReg : DstReg;
+ const TargetRegisterClass *RC = mri_->getRegClass(JoinVReg);
+ unsigned Threshold = allocatableRCRegs_[RC].count() * 2;
+ if (TheCopy.isBackEdge)
+ Threshold *= 2; // Favors back edge copies.
+
+ // If the virtual register live interval is long but it has low use density,
+ // do not join them, instead mark the physical register as its allocation
+ // preference.
+ unsigned Length = JoinVInt.getSize() / InstrSlots::NUM;
+ LiveVariables::VarInfo &vi = lv_->getVarInfo(JoinVReg);
+ if (Length > Threshold &&
+ (((float)vi.NumUses / Length) < (1.0 / Threshold))) {
+ JoinVInt.preference = JoinPReg;
+ ++numAborts;
+ DOUT << "\tMay tie down a physical register, abort!\n";
+ Again = true; // May be possible to coalesce later.
+ return false;
+ }
}
}
bool Swapped = false;
// If SrcInt is implicitly defined, it's safe to coalesce.
bool isEmpty = SrcInt.empty();
- if (isEmpty && DstInt.getNumValNums() != 1) {
+ if (isEmpty && !CanCoalesceWithImpDef(CopyMI, DstInt, SrcInt)) {
// Only coalesce an empty interval (defined by implicit_def) with
- // another interval that's defined by a single copy.
+ // another interval which has a valno defined by the CopyMI and the CopyMI
+ // is a kill of the implicit def.
DOUT << "Not profitable!\n";
return false;
}
// Coalescing failed.
// If we can eliminate the copy without merging the live ranges, do so now.
- if (!isExtSubReg &&
+ if (!isExtSubReg && !isInsSubReg &&
(AdjustCopiesBackFrom(SrcInt, DstInt, CopyMI) ||
RemoveCopyByCommutingDef(SrcInt, DstInt, CopyMI))) {
JoinedCopies.insert(CopyMI);
// If this is a extract_subreg where dst is a physical register, e.g.
// cl = EXTRACT_SUBREG reg1024, 1
// then create and update the actual physical register allocated to RHS.
- if (RealDstReg) {
- LiveInterval &RealDstInt = li_->getOrCreateInterval(RealDstReg);
+ if (RealDstReg || RealSrcReg) {
+ LiveInterval &RealInt =
+ li_->getOrCreateInterval(RealDstReg ? RealDstReg : RealSrcReg);
SmallSet<const VNInfo*, 4> CopiedValNos;
for (LiveInterval::Ranges::const_iterator I = ResSrcInt->ranges.begin(),
E = ResSrcInt->ranges.end(); I != E; ++I) {
- LiveInterval::const_iterator DstLR =
- ResDstInt->FindLiveRangeContaining(I->start);
- assert(DstLR != ResDstInt->end() && "Invalid joined interval!");
+ const LiveRange *DstLR = ResDstInt->getLiveRangeContaining(I->start);
+ assert(DstLR && "Invalid joined interval!");
const VNInfo *DstValNo = DstLR->valno;
if (CopiedValNos.insert(DstValNo)) {
- VNInfo *ValNo = RealDstInt.getNextValue(DstValNo->def, DstValNo->copy,
- li_->getVNInfoAllocator());
+ VNInfo *ValNo = RealInt.getNextValue(DstValNo->def, DstValNo->copy,
+ li_->getVNInfoAllocator());
ValNo->hasPHIKill = DstValNo->hasPHIKill;
- RealDstInt.addKills(ValNo, DstValNo->kills);
- RealDstInt.MergeValueInAsValue(*ResDstInt, DstValNo, ValNo);
+ RealInt.addKills(ValNo, DstValNo->kills);
+ RealInt.MergeValueInAsValue(*ResDstInt, DstValNo, ValNo);
}
}
- DstReg = RealDstReg;
+
+ DstReg = RealDstReg ? RealDstReg : RealSrcReg;
}
// Update the liveintervals of sub-registers.
// If this is a EXTRACT_SUBREG, make sure the result of coalescing is the
// larger super-register.
- if (isExtSubReg && !SrcIsPhys && !DstIsPhys) {
- if (!Swapped) {
+ if ((isExtSubReg || isInsSubReg) && !SrcIsPhys && !DstIsPhys) {
+ if ((isExtSubReg && !Swapped) || (isInsSubReg && Swapped)) {
ResSrcInt->Copy(*ResDstInt, li_->getVNInfoAllocator());
std::swap(SrcReg, DstReg);
std::swap(ResSrcInt, ResDstInt);
// SrcReg is guarateed to be the register whose live interval that is
// being merged.
li_->removeInterval(SrcReg);
+ if (isInsSubReg)
+ // Avoid:
+ // r1024 = op
+ // r1024 = implicit_def
+ // ...
+ // = r1024
+ RemoveDeadImpDef(DstReg, *ResDstInt);
UpdateRegDefsUses(SrcReg, DstReg, SubIdx);
if (isEmpty) {
// by the copy is being defined by an IMPLICIT_DEF which defines a zero
// length interval. Remove the val#.
unsigned CopyIdx = li_->getDefIndex(li_->getInstructionIndex(CopyMI));
- LiveInterval::iterator LR = ResDstInt->FindLiveRangeContaining(CopyIdx);
+ const LiveRange *LR = ResDstInt->getLiveRangeContaining(CopyIdx);
VNInfo *ImpVal = LR->valno;
assert(ImpVal->def == CopyIdx);
+ unsigned NextDef = LR->end;
+ RemoveCopiesFromValNo(*ResDstInt, ImpVal);
ResDstInt->removeValNo(ImpVal);
+ LR = ResDstInt->FindLiveRangeContaining(NextDef);
+ if (LR != ResDstInt->end() && LR->valno->def == NextDef) {
+ // Special case: vr1024 = implicit_def
+ // vr1024 = insert_subreg vr1024, vr1025, c
+ // The insert_subreg becomes a "copy" that defines a val# which can itself
+ // be coalesced away.
+ MachineInstr *DefMI = li_->getInstructionFromIndex(NextDef);
+ if (DefMI->getOpcode() == TargetInstrInfo::INSERT_SUBREG)
+ LR->valno->copy = DefMI;
+ }
}
DOUT << "\n\t\tJoined. Result = "; ResDstInt->print(DOUT, tri_);
return std::find(V.begin(), V.end(), Val) != V.end();
}
+/// RangeIsDefinedByCopyFromReg - Return true if the specified live range of
+/// the specified live interval is defined by a copy from the specified
+/// register.
+bool SimpleRegisterCoalescing::RangeIsDefinedByCopyFromReg(LiveInterval &li,
+ LiveRange *LR,
+ unsigned Reg) {
+ unsigned SrcReg = li_->getVNInfoSourceReg(LR->valno);
+ if (SrcReg == Reg)
+ return true;
+ // A val# def index of ~0U on a physical register that has super-registers
+ // indicates an imprecise sub-register live interval; re-derive the copy
+ // source from the defining instruction.
+ if (LR->valno->def == ~0U &&
+ TargetRegisterInfo::isPhysicalRegister(li.reg) &&
+ *tri_->getSuperRegisters(li.reg)) {
+ // It's a sub-register live interval, we may not have precise information.
+ // Re-compute it.
+ // NOTE(review): DefMI is used without a null check — assumes an
+ // instruction is always mapped at LR->start here; confirm.
+ MachineInstr *DefMI = li_->getInstructionFromIndex(LR->start);
+ unsigned SrcReg, DstReg;
+ if (tii_->isMoveInstr(*DefMI, SrcReg, DstReg) &&
+ DstReg == li.reg && SrcReg == Reg) {
+ // Cache computed info.
+ LR->valno->def = LR->start;
+ LR->valno->copy = DefMI;
+ return true;
+ }
+ }
+ return false;
+}
+
/// SimpleJoin - Attempt to joint the specified interval into this one. The
/// caller of this method must guarantee that the RHS only contains a single
/// value number and that the RHS is not defined by a copy from this
// If we haven't already recorded that this value # is safe, check it.
if (!InVector(LHSIt->valno, EliminatedLHSVals)) {
// Copy from the RHS?
- unsigned SrcReg = li_->getVNInfoSourceReg(LHSIt->valno);
- if (SrcReg != RHS.reg)
+ if (!RangeIsDefinedByCopyFromReg(LHS, LHSIt, RHS.reg))
return false; // Nope, bail out.
EliminatedLHSVals.push_back(LHSIt->valno);
} else {
// Otherwise, if this is a copy from the RHS, mark it as being merged
// in.
- if (li_->getVNInfoSourceReg(LHSIt->valno) == RHS.reg) {
+ if (RangeIsDefinedByCopyFromReg(LHS, LHSIt, RHS.reg)) {
EliminatedLHSVals.push_back(LHSIt->valno);
// We know this entire LHS live range is okay, so skip it now.
}
}
LHSValNo = Smallest;
+ } else if (EliminatedLHSVals.empty()) {
+ if (TargetRegisterInfo::isPhysicalRegister(LHS.reg) &&
+ *tri_->getSuperRegisters(LHS.reg))
+ // Imprecise sub-register information. Can't handle it.
+ return false;
+ assert(0 && "No copies from the RHS?");
} else {
- assert(!EliminatedLHSVals.empty() && "No copies from the RHS?");
LHSValNo = EliminatedLHSVals[0];
}
std::vector<CopyRec> VirtCopies;
std::vector<CopyRec> PhysCopies;
+ std::vector<CopyRec> ImpDefCopies;
unsigned LoopDepth = loopInfo->getLoopDepth(MBB);
for (MachineBasicBlock::iterator MII = MBB->begin(), E = MBB->end();
MII != E;) {
if (Inst->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG) {
DstReg = Inst->getOperand(0).getReg();
SrcReg = Inst->getOperand(1).getReg();
+ } else if (Inst->getOpcode() == TargetInstrInfo::INSERT_SUBREG) {
+ DstReg = Inst->getOperand(0).getReg();
+ SrcReg = Inst->getOperand(2).getReg();
} else if (!tii_->isMoveInstr(*Inst, SrcReg, DstReg))
continue;
if (NewHeuristic) {
JoinQueue->push(CopyRec(Inst, LoopDepth, isBackEdgeCopy(Inst, DstReg)));
} else {
- if (SrcIsPhys || DstIsPhys)
+ if (li_->hasInterval(SrcReg) && li_->getInterval(SrcReg).empty())
+ ImpDefCopies.push_back(CopyRec(Inst, 0, false));
+ else if (SrcIsPhys || DstIsPhys)
PhysCopies.push_back(CopyRec(Inst, 0, false));
else
VirtCopies.push_back(CopyRec(Inst, 0, false));
if (NewHeuristic)
return;
- // Try coalescing physical register + virtual register first.
+ // Try coalescing implicit copies first, followed by copies to / from
+ // physical registers, then finally copies from virtual registers to
+ // virtual registers.
+ for (unsigned i = 0, e = ImpDefCopies.size(); i != e; ++i) {
+ CopyRec &TheCopy = ImpDefCopies[i];
+ bool Again = false;
+ if (!JoinCopy(TheCopy, Again))
+ if (Again)
+ TryAgain.push_back(TheCopy);
+ }
for (unsigned i = 0, e = PhysCopies.size(); i != e; ++i) {
CopyRec &TheCopy = PhysCopies[i];
bool Again = false;
LiveInterval &SrcInt = li_->getInterval(SrcReg);
if (!SrcInt.empty())
return false;
+ if (!li_->hasInterval(DstReg))
+ return false;
LiveInterval &DstInt = li_->getInterval(DstReg);
- LiveInterval::iterator DstLR = DstInt.FindLiveRangeContaining(CopyIdx);
+ const LiveRange *DstLR = DstInt.getLiveRangeContaining(CopyIdx);
DstInt.removeValNo(DstLR->valno);
CopyMI->setDesc(tii_->get(TargetInstrInfo::IMPLICIT_DEF));
for (int i = CopyMI->getNumOperands() - 1, e = 0; i > e; --i)
assert(DefMI->getOpcode() == TargetInstrInfo::IMPLICIT_DEF);
li_->RemoveMachineInstrFromMaps(DefMI);
DefMI->eraseFromParent();
- ++numPeep;
}
}
++I;
I->second.print(DOUT, tri_);
DOUT << "\n";
}
-
- // Delete all coalesced copies.
- for (SmallPtrSet<MachineInstr*,32>::iterator I = JoinedCopies.begin(),
- E = JoinedCopies.end(); I != E; ++I) {
- MachineInstr *CopyMI = *I;
- unsigned SrcReg, DstReg;
- tii_->isMoveInstr(*CopyMI, SrcReg, DstReg);
- if (CopyMI->registerDefIsDead(DstReg)) {
- LiveInterval &li = li_->getInterval(DstReg);
- ShortenDeadCopySrcLiveRange(li, CopyMI);
- ShortenDeadCopyLiveRange(li, CopyMI);
- }
- li_->RemoveMachineInstrFromMaps(*I);
- (*I)->eraseFromParent();
- ++numPeep;
- }
}
// Perform a final pass over the instructions and compute spill weights
for (MachineBasicBlock::iterator mii = mbb->begin(), mie = mbb->end();
mii != mie; ) {
- // if the move will be an identity move delete it
- unsigned srcReg, dstReg;
- bool isMove = tii_->isMoveInstr(*mii, srcReg, dstReg);
- if (isMove && srcReg == dstReg) {
- if (li_->hasInterval(srcReg)) {
- LiveInterval &RegInt = li_->getInterval(srcReg);
+ MachineInstr *MI = mii;
+ unsigned SrcReg, DstReg;
+ if (JoinedCopies.count(MI)) {
+ // Delete all coalesced copies.
+ if (!tii_->isMoveInstr(*MI, SrcReg, DstReg)) {
+ assert((MI->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG ||
+ MI->getOpcode() == TargetInstrInfo::INSERT_SUBREG) &&
+ "Unrecognized copy instruction");
+ DstReg = MI->getOperand(0).getReg();
+ }
+ if (MI->registerDefIsDead(DstReg)) {
+ LiveInterval &li = li_->getInterval(DstReg);
+ if (!ShortenDeadCopySrcLiveRange(li, MI))
+ ShortenDeadCopyLiveRange(li, MI);
+ }
+ li_->RemoveMachineInstrFromMaps(MI);
+ mii = mbbi->erase(mii);
+ ++numPeep;
+ continue;
+ }
+
+ // If the move will be an identity move delete it
+ bool isMove = tii_->isMoveInstr(*mii, SrcReg, DstReg);
+ if (isMove && SrcReg == DstReg) {
+ if (li_->hasInterval(SrcReg)) {
+ LiveInterval &RegInt = li_->getInterval(SrcReg);
// If def of this move instruction is dead, remove its live range
// from the dstination register's live interval.
- if (mii->registerDefIsDead(dstReg)) {
- ShortenDeadCopySrcLiveRange(RegInt, mii);
- ShortenDeadCopyLiveRange(RegInt, mii);
+ if (mii->registerDefIsDead(DstReg)) {
+ if (!ShortenDeadCopySrcLiveRange(RegInt, mii))
+ ShortenDeadCopyLiveRange(RegInt, mii);
}
}
li_->RemoveMachineInstrFromMaps(mii);
mii = mbbi->erase(mii);
++numPeep;
- } else if (!isMove || !TurnCopyIntoImpDef(mii, mbb, dstReg, srcReg)) {
+ } else if (!isMove || !TurnCopyIntoImpDef(mii, mbb, DstReg, SrcReg)) {
SmallSet<unsigned, 4> UniqueUses;
for (unsigned i = 0, e = mii->getNumOperands(); i != e; ++i) {
const MachineOperand &mop = mii->getOperand(i);