Teach the inline spiller to attempt folding a load instruction into its single
use before rematerializing the load.

author    Jakob Stoklund Olesen <stoklund@2pi.dk>
          Sat, 18 Dec 2010 03:04:14 +0000 (03:04 +0000)
committer Jakob Stoklund Olesen <stoklund@2pi.dk>
          Sat, 18 Dec 2010 03:04:14 +0000 (03:04 +0000)

This allows us to produce:

    addps LCPI0_1(%rip), %xmm2

Instead of:

    movaps LCPI0_1(%rip), %xmm3
    addps %xmm3, %xmm2

This saves a register and an instruction. The standard spiller already knows
how to do this.
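
The new fast path in InlineSpiller::reMaterializeFor(), condensed from the
InlineSpiller.cpp hunk below, is roughly:

    // Before allocating a fresh register for a single-use remat, try to
    // fold the original load directly into that use.
    if (RM.OrigMI->getDesc().canFoldAsLoad() &&
        foldMemoryOperand(MI, Ops, RM.OrigMI)) {
      edit_->markRematerialized(RM.ParentVNI);
      return true;   // no new virtual register needed
    }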

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@122133 91177308-0d34-0410-b5e6-96231b3b80d8

lib/CodeGen/InlineSpiller.cpp
lib/CodeGen/LiveRangeEdit.h

lib/CodeGen/InlineSpiller.cpp
index 462ca6dbc94b1637468b7612e2ee6fb8cd0252e3..443b2d077ab571af092774847e22b821e54152d0 100644
@@ -85,7 +85,8 @@ private:
 
   bool coalesceStackAccess(MachineInstr *MI);
   bool foldMemoryOperand(MachineBasicBlock::iterator MI,
-                         const SmallVectorImpl<unsigned> &Ops);
+                         const SmallVectorImpl<unsigned> &Ops,
+                         MachineInstr *LoadMI = 0);
   void insertReload(LiveInterval &NewLI, MachineBasicBlock::iterator MI);
   void insertSpill(LiveInterval &NewLI, MachineBasicBlock::iterator MI);
 };
@@ -141,6 +142,14 @@ bool InlineSpiller::reMaterializeFor(MachineBasicBlock::iterator MI) {
     }
   }
 
+  // Before rematerializing into a register for a single instruction, try to
+  // fold a load into the instruction. That avoids allocating a new register.
+  if (RM.OrigMI->getDesc().canFoldAsLoad() &&
+      foldMemoryOperand(MI, Ops, RM.OrigMI)) {
+    edit_->markRematerialized(RM.ParentVNI);
+    return true;
+  }
+
   // Allocate a new register for the remat.
   LiveInterval &NewLI = edit_->create(mri_, lis_, vrm_);
   NewLI.markNotSpillable();
@@ -243,9 +252,13 @@ bool InlineSpiller::coalesceStackAccess(MachineInstr *MI) {
 }
 
 /// foldMemoryOperand - Try folding stack slot references in Ops into MI.
-/// Return true on success, and MI will be erased.
+/// @param MI     Instruction using or defining the current register.
+/// @param Ops    Operand indices from readsWritesVirtualRegister().
+/// @param LoadMI Load instruction to use instead of stack slot when non-null.
+/// @return       True on success, and MI will be erased.
 bool InlineSpiller::foldMemoryOperand(MachineBasicBlock::iterator MI,
-                                      const SmallVectorImpl<unsigned> &Ops) {
+                                      const SmallVectorImpl<unsigned> &Ops,
+                                      MachineInstr *LoadMI) {
   // TargetInstrInfo::foldMemoryOperand only expects explicit, non-tied
   // operands.
   SmallVector<unsigned, 8> FoldOps;
@@ -262,11 +275,14 @@ bool InlineSpiller::foldMemoryOperand(MachineBasicBlock::iterator MI,
       FoldOps.push_back(Idx);
   }
 
-  MachineInstr *FoldMI = tii_.foldMemoryOperand(MI, FoldOps, stackSlot_);
+  MachineInstr *FoldMI =
+                LoadMI ? tii_.foldMemoryOperand(MI, FoldOps, LoadMI)
+                       : tii_.foldMemoryOperand(MI, FoldOps, stackSlot_);
   if (!FoldMI)
     return false;
   lis_.ReplaceMachineInstrInMaps(MI, FoldMI);
-  vrm_.addSpillSlotUse(stackSlot_, FoldMI);
+  if (!LoadMI)
+    vrm_.addSpillSlotUse(stackSlot_, FoldMI);
   MI->eraseFromParent();
   DEBUG(dbgs() << "\tfolded: " << *FoldMI);
   return true;
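
For reference, the helper now chooses between the two TargetInstrInfo
foldMemoryOperand overloads shown in the hunk above, roughly as follows (the
LoadMI call site is the one added in reMaterializeFor(); the spill-slot form
is the pre-existing behavior):

    // Fold a reload from the spill slot into MI (original behavior):
    foldMemoryOperand(MI, Ops);              // folds stackSlot_ and records the slot use

    // Fold an existing load instruction into MI (new in this patch):
    foldMemoryOperand(MI, Ops, RM.OrigMI);   // no spill slot, so no addSpillSlotUse
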
lib/CodeGen/LiveRangeEdit.h
index ad248bf4002c48f2ea43f78c4920cc524cbd5871..37b58279b1bb856bb410fcda4c0b919e486a9a27 100644
@@ -117,6 +117,12 @@ public:
                             const TargetInstrInfo&,
                             const TargetRegisterInfo&);
 
+  /// markRematerialized - Explicitly mark a value as rematerialized when the
+  /// caller has performed the rematerialization manually.
+  void markRematerialized(VNInfo *ParentVNI) {
+    rematted_.insert(ParentVNI);
+  }
+
   /// didRematerialize - Return true if ParentVNI was rematerialized anywhere.
   bool didRematerialize(VNInfo *ParentVNI) const {
     return rematted_.count(ParentVNI);