//===-- llvm/CodeGen/Spiller.cpp - Spiller -------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
#define DEBUG_TYPE "spiller"

#include "Spiller.h"
#include "VirtRegMap.h"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "llvm/CodeGen/LiveStackAnalysis.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Support/Debug.h"
25 Spiller::~Spiller() {}
29 /// Utility class for spillers.
30 class SpillerBase : public Spiller {
36 MachineFrameInfo *mfi;
37 MachineRegisterInfo *mri;
38 const TargetInstrInfo *tii;
41 /// Construct a spiller base.
42 SpillerBase(MachineFunction *mf, LiveIntervals *lis, LiveStacks *ls, VirtRegMap *vrm) :
43 mf(mf), lis(lis), ls(ls), vrm(vrm)
45 mfi = mf->getFrameInfo();
46 mri = &mf->getRegInfo();
47 tii = mf->getTarget().getInstrInfo();
50 /// Insert a store of the given vreg to the given stack slot immediately
51 /// after the given instruction. Returns the base index of the inserted
52 /// instruction. The caller is responsible for adding an appropriate
53 /// LiveInterval to the LiveIntervals analysis.
54 unsigned insertStoreFor(MachineInstr *mi, unsigned ss,
56 const TargetRegisterClass *trc) {
57 MachineBasicBlock::iterator nextInstItr(mi);
60 if (!lis->hasGapAfterInstr(lis->getInstructionIndex(mi))) {
61 lis->scaleNumbering(2);
62 ls->scaleNumbering(2);
65 unsigned miIdx = lis->getInstructionIndex(mi);
67 assert(lis->hasGapAfterInstr(miIdx));
69 tii->storeRegToStackSlot(*mi->getParent(), nextInstItr, newVReg,
71 MachineBasicBlock::iterator storeInstItr(mi);
73 MachineInstr *storeInst = &*storeInstItr;
74 unsigned storeInstIdx = miIdx + LiveInterval::InstrSlots::NUM;
76 assert(lis->getInstructionFromIndex(storeInstIdx) == 0 &&
77 "Store inst index already in use.");
79 lis->InsertMachineInstrInMaps(storeInst, storeInstIdx);
84 /// Insert a load of the given veg from the given stack slot immediately
85 /// before the given instruction. Returns the base index of the inserted
86 /// instruction. The caller is responsible for adding an appropriate
87 /// LiveInterval to the LiveIntervals analysis.
88 unsigned insertLoadFor(MachineInstr *mi, unsigned ss,
90 const TargetRegisterClass *trc) {
91 MachineBasicBlock::iterator useInstItr(mi);
93 if (!lis->hasGapBeforeInstr(lis->getInstructionIndex(mi))) {
94 lis->scaleNumbering(2);
95 ls->scaleNumbering(2);
98 unsigned miIdx = lis->getInstructionIndex(mi);
100 assert(lis->hasGapBeforeInstr(miIdx));
102 tii->loadRegFromStackSlot(*mi->getParent(), useInstItr, newVReg, ss, trc);
103 MachineBasicBlock::iterator loadInstItr(mi);
105 MachineInstr *loadInst = &*loadInstItr;
106 unsigned loadInstIdx = miIdx - LiveInterval::InstrSlots::NUM;
108 assert(lis->getInstructionFromIndex(loadInstIdx) == 0 &&
109 "Load inst index already in use.");
111 lis->InsertMachineInstrInMaps(loadInst, loadInstIdx);
117 /// Add spill ranges for every use/def of the live interval, inserting loads
118 /// immediately before each use, and stores after each def. No folding is
120 std::vector<LiveInterval*> trivialSpillEverywhere(LiveInterval *li) {
121 DOUT << "Spilling everywhere " << *li << "\n";
123 assert(li->weight != HUGE_VALF &&
124 "Attempting to spill already spilled value.");
126 assert(!li->isStackSlot() &&
127 "Trying to spill a stack slot.");
129 std::vector<LiveInterval*> added;
131 const TargetRegisterClass *trc = mri->getRegClass(li->reg);
132 unsigned ss = vrm->assignVirt2StackSlot(li->reg);
134 for (MachineRegisterInfo::reg_iterator
135 regItr = mri->reg_begin(li->reg); regItr != mri->reg_end();) {
137 MachineInstr *mi = &*regItr;
140 } while (regItr != mri->reg_end() && (&*regItr == mi));
142 SmallVector<unsigned, 2> indices;
146 for (unsigned i = 0; i != mi->getNumOperands(); ++i) {
147 MachineOperand &op = mi->getOperand(i);
149 if (!op.isReg() || op.getReg() != li->reg)
152 hasUse |= mi->getOperand(i).isUse();
153 hasDef |= mi->getOperand(i).isDef();
155 indices.push_back(i);
158 unsigned newVReg = mri->createVirtualRegister(trc);
160 vrm->assignVirt2StackSlot(newVReg, ss);
162 LiveInterval *newLI = &lis->getOrCreateInterval(newVReg);
163 newLI->weight = HUGE_VALF;
165 for (unsigned i = 0; i < indices.size(); ++i) {
166 mi->getOperand(indices[i]).setReg(newVReg);
168 if (mi->getOperand(indices[i]).isUse()) {
169 mi->getOperand(indices[i]).setIsKill(true);
173 assert(hasUse || hasDef);
176 unsigned loadInstIdx = insertLoadFor(mi, ss, newVReg, trc);
177 unsigned start = lis->getDefIndex(loadInstIdx),
178 end = lis->getUseIndex(lis->getInstructionIndex(mi));
181 newLI->getNextValue(loadInstIdx, 0, lis->getVNInfoAllocator());
182 vni->kills.push_back(lis->getInstructionIndex(mi));
183 LiveRange lr(start, end, vni);
189 unsigned storeInstIdx = insertStoreFor(mi, ss, newVReg, trc);
190 unsigned start = lis->getDefIndex(lis->getInstructionIndex(mi)),
191 end = lis->getUseIndex(storeInstIdx);
194 newLI->getNextValue(storeInstIdx, 0, lis->getVNInfoAllocator());
195 vni->kills.push_back(storeInstIdx);
196 LiveRange lr(start, end, vni);
201 added.push_back(newLI);
211 /// Spills any live range using the spill-everywhere method with no attempt at
213 class TrivialSpiller : public SpillerBase {
215 TrivialSpiller(MachineFunction *mf, LiveIntervals *lis, LiveStacks *ls, VirtRegMap *vrm) :
216 SpillerBase(mf, lis, ls, vrm) {}
218 std::vector<LiveInterval*> spill(LiveInterval *li) {
219 return trivialSpillEverywhere(li);
226 llvm::Spiller* llvm::createSpiller(MachineFunction *mf, LiveIntervals *lis,
227 LiveStacks *ls, VirtRegMap *vrm) {
228 return new TrivialSpiller(mf, lis, ls, vrm);