1 //===-- DelaySlotFiller.cpp - SPARC delay slot filler ---------------------===//
3 // The LLVM Compiler Infrastructure
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 // This is a simple local pass that attempts to fill delay slots with useful
11 // instructions. If no instructions can be moved into the delay slot, then a
13 //===----------------------------------------------------------------------===//
16 #include "SparcSubtarget.h"
17 #include "llvm/ADT/SmallSet.h"
18 #include "llvm/ADT/Statistic.h"
19 #include "llvm/CodeGen/MachineFunctionPass.h"
20 #include "llvm/CodeGen/MachineInstrBuilder.h"
21 #include "llvm/CodeGen/MachineRegisterInfo.h"
22 #include "llvm/Support/CommandLine.h"
23 #include "llvm/Target/TargetInstrInfo.h"
24 #include "llvm/Target/TargetMachine.h"
25 #include "llvm/Target/TargetRegisterInfo.h"
29 #define DEBUG_TYPE "delay-slot-filler"
// Pass statistic: number of delay slots filled with a useful instruction
// (incremented elsewhere in this pass; reported under -stats).
31 STATISTIC(FilledSlots, "Number of delay slots filled");
// Debugging escape hatch: when set, delay slots are not filled with useful
// instructions (the filler falls back to NOPs) and the RESTORE-combining
// optimization is skipped — see runOnMachineBasicBlock.
// NOTE(review): the cl::init(...) tail of this option is elided in this view.
33 static cl::opt<bool> DisableDelaySlotFiller(
34 "disable-sparc-delay-filler",
36 cl::desc("Disable the Sparc delay slot filler."),
// Filler - a simple local MachineFunctionPass that walks each basic block
// and fills SPARC branch/call delay slots, either with a safe instruction
// hoisted from earlier in the block or with a NOP.
// NOTE(review): several members (e.g. the static pass ID and the TM field
// referenced by the constructor's init list) are elided in this view.
40 struct Filler : public MachineFunctionPass {
41 /// Target machine description which we query for reg. names, data
// Cached subtarget for the function currently being processed; set in
// runOnMachineFunction (and refreshed per-block in runOnMachineBasicBlock).
45 const SparcSubtarget *Subtarget;
48 Filler(TargetMachine &tm) : MachineFunctionPass(ID), TM(tm) {}
50 const char *getPassName() const override {
51 return "SPARC Delay Slot Filler";
// Fill delay slots in a single basic block; returns true if it changed MBB.
54 bool runOnMachineBasicBlock(MachineBasicBlock &MBB);
55 bool runOnMachineFunction(MachineFunction &F) override {
57 Subtarget = &F.getSubtarget<SparcSubtarget>();
59 // This pass invalidates liveness information when it reorders
60 // instructions to fill delay slot.
61 F.getRegInfo().invalidateLiveness();
// Process every block; accumulate whether anything changed.
63 for (MachineFunction::iterator FI = F.begin(), FE = F.end();
65 Changed |= runOnMachineBasicBlock(*FI);
// Record the registers a CALL defines/uses (including implicit O7) so a
// delay-slot candidate that touches them is rejected as a hazard.
69 void insertCallDefsUses(MachineBasicBlock::iterator MI,
70 SmallSet<unsigned, 32>& RegDefs,
71 SmallSet<unsigned, 32>& RegUses);
// Record the explicit (and relevant implicit) defs/uses of MI into the sets.
73 void insertDefsUses(MachineBasicBlock::iterator MI,
74 SmallSet<unsigned, 32>& RegDefs,
75 SmallSet<unsigned, 32>& RegUses);
// True if Reg (or any alias of it) is already in RegSet.
77 bool IsRegInSet(SmallSet<unsigned, 32>& RegSet,
// True if moving `candidate` into the delay slot would create a data or
// memory hazard against the slot instruction's defs/uses.
80 bool delayHasHazard(MachineBasicBlock::iterator candidate,
81 bool &sawLoad, bool &sawStore,
82 SmallSet<unsigned, 32> &RegDefs,
83 SmallSet<unsigned, 32> &RegUses);
// Scan backwards from `slot` for an instruction that can be moved into the
// delay slot; returns MBB.end() when none is found.
85 MachineBasicBlock::iterator
86 findDelayInstr(MachineBasicBlock &MBB, MachineBasicBlock::iterator slot);
// True if call I returns a struct and so must be followed by an UNIMP word
// carrying the struct size (SPARC ABI); StructSize receives that size.
88 bool needsUnimp(MachineBasicBlock::iterator I, unsigned &StructSize);
// Try to fold a trivial "restore %g0, %g0, %g0" with the preceding
// ADD/OR/SETHI into a single RESTORE that performs the move.
90 bool tryCombineRestoreWithPrevInst(MachineBasicBlock &MBB,
91 MachineBasicBlock::iterator MBBI);
95 } // end of anonymous namespace
97 /// createSparcDelaySlotFillerPass - Returns a pass that fills in delay
98 /// slots in Sparc MachineFunctions
// Factory entry point used by the SPARC target's pass pipeline setup.
// Ownership of the new Filler transfers to the pass manager.
100 FunctionPass *llvm::createSparcDelaySlotFillerPass(TargetMachine &tm) {
101 return new Filler(tm);
105 /// runOnMachineBasicBlock - Fill in delay slots for the given basic block.
106 /// We assume there is only one delay slot per delayed instruction.
108 bool Filler::runOnMachineBasicBlock(MachineBasicBlock &MBB) {
109 bool Changed = false;
110 Subtarget = &MBB.getParent()->getSubtarget<SparcSubtarget>();
111 const TargetInstrInfo *TII = Subtarget->getInstrInfo();
// Iterate manually (no ++ in the for header): splicing/bundling below moves
// instructions around, and the increment of I is handled inside the loop
// body (elided in this view).
113 for (MachineBasicBlock::iterator I = MBB.begin(); I != MBB.end(); ) {
114 MachineBasicBlock::iterator MI = I;
117 // If MI is restore, try combining it with previous inst.
118 if (!DisableDelaySlotFiller &&
119 (MI->getOpcode() == SP::RESTORErr
120 || MI->getOpcode() == SP::RESTOREri)) {
121 Changed |= tryCombineRestoreWithPrevInst(MBB, MI);
125 // TODO: If we ever want to support v7, this needs to be extended
126 // to cover all floating point operations.
// Pre-V9 subtargets: insert a NOP after an FP compare (hardware requires a
// delay before the condition codes can be consumed).
127 if (!Subtarget->isV9() &&
128 (MI->getOpcode() == SP::FCMPS || MI->getOpcode() == SP::FCMPD
129 || MI->getOpcode() == SP::FCMPQ)) {
130 BuildMI(MBB, I, MI->getDebugLoc(), TII->get(SP::NOP));
135 // If MI has no delay slot, skip.
136 if (!MI->hasDelaySlot())
// D == MBB.end() means "no useful filler found" — fall back to a NOP.
139 MachineBasicBlock::iterator D = MBB.end();
141 if (!DisableDelaySlotFiller)
142 D = findDelayInstr(MBB, MI);
// NOTE(review): the conditional choosing between the NOP path and the
// splice path (presumably `if (D == MBB.end()) ... else ...`) is elided
// in this view.
148 BuildMI(MBB, I, MI->getDebugLoc(), TII->get(SP::NOP));
// Move the chosen filler instruction from its old position to just after
// the delay-slot instruction (before I).
150 MBB.splice(I, &MBB, D);
152 unsigned structSize = 0;
// Struct-returning calls need an UNIMP word after the delay slot (ABI).
153 if (needsUnimp(MI, structSize)) {
154 MachineBasicBlock::iterator J = MI;
155 ++J; // skip the delay filler.
156 assert (J != MBB.end() && "MI needs a delay instruction.");
157 BuildMI(MBB, ++J, MI->getDebugLoc(),
158 TII->get(SP::UNIMP)).addImm(structSize);
159 // Bundle the delay filler and unimp with the instruction.
160 MIBundleBuilder(MBB, MachineBasicBlock::iterator(MI), J);
// Otherwise bundle just the delay filler with the delayed instruction so
// later passes treat the pair as a unit.
162 MIBundleBuilder(MBB, MachineBasicBlock::iterator(MI), I);
// findDelayInstr - Scan backwards from `slot` looking for an instruction
// that can safely be moved into its delay slot. Returns the candidate's
// iterator, or MBB.end() when only a NOP will do.
168 MachineBasicBlock::iterator
169 Filler::findDelayInstr(MachineBasicBlock &MBB,
170 MachineBasicBlock::iterator slot)
172 SmallSet<unsigned, 32> RegDefs;
173 SmallSet<unsigned, 32> RegUses;
174 bool sawLoad = false;
175 bool sawStore = false;
// Nothing precedes the slot instruction — nothing to hoist.
177 if (slot == MBB.begin())
// RET / TLS_CALL delay slots are left alone (the return value / TLS
// sequence constraints forbid filling them here).
180 if (slot->getOpcode() == SP::RET || slot->getOpcode() == SP::TLS_CALL)
// Special case: "restore; retl" can become "ret; restore-in-delay-slot".
183 if (slot->getOpcode() == SP::RETL) {
184 MachineBasicBlock::iterator J = slot;
// NOTE(review): the decrement of J to the preceding instruction is elided
// in this view.
187 if (J->getOpcode() == SP::RESTORErr
188 || J->getOpcode() == SP::RESTOREri) {
189 // change retl to ret.
190 slot->setDesc(Subtarget->getInstrInfo()->get(SP::RET));
195 // Call's delay filler can def some of call's uses.
// Seed the hazard sets from the slot instruction itself; calls get the
// special treatment (O7 + outgoing-arg registers), everything else the
// generic def/use walk. The isCall() test between these lines is elided.
197 insertCallDefsUses(slot, RegDefs, RegUses);
199 insertDefsUses(slot, RegDefs, RegUses);
// Walk backwards from the slot; each rejected candidate's defs/uses are
// folded into the hazard sets so earlier instructions are checked against
// everything between them and the slot.
203 MachineBasicBlock::iterator I = slot;
206 done = (I == MBB.begin());
// Debug values are transparent — skip without affecting hazard state.
212 if (I->isDebugValue())
// Never hoist instructions with side effects, inline asm, position
// markers, their own delay slot, or members of an existing bundle.
215 if (I->hasUnmodeledSideEffects() || I->isInlineAsm() || I->isPosition() ||
216 I->hasDelaySlot() || I->isBundledWithSucc())
219 if (delayHasHazard(I, sawLoad, sawStore, RegDefs, RegUses)) {
220 insertDefsUses(I, RegDefs, RegUses);
// delayHasHazard - Return true if `candidate` cannot be moved into the delay
// slot: it would violate a memory ordering constraint (load/store tracked via
// sawLoad/sawStore) or touch a register the slot instruction defines or uses.
229 bool Filler::delayHasHazard(MachineBasicBlock::iterator candidate,
232 SmallSet<unsigned, 32> &RegDefs,
233 SmallSet<unsigned, 32> &RegUses)
// IMPLICIT_DEF / KILL markers are not real instructions — treat as hazards
// (the return value on this path is elided in this view).
236 if (candidate->isImplicitDef() || candidate->isKill())
// Memory hazards: the bodies deciding based on sawLoad/sawStore (elided
// here) gate whether a load/store may be hoisted past the slot.
239 if (candidate->mayLoad()) {
245 if (candidate->mayStore()) {
// Register hazards: check every register operand of the candidate.
253 for (unsigned i = 0, e = candidate->getNumOperands(); i!= e; ++i) {
254 const MachineOperand &MO = candidate->getOperand(i);
// NOTE(review): the isReg() filter preceding this line is elided.
258 unsigned Reg = MO.getReg();
// For a def operand (branch on MO.isDef() elided):
261 // check whether Reg is defined or used before delay slot.
262 if (IsRegInSet(RegDefs, Reg) || IsRegInSet(RegUses, Reg))
// For a use operand:
266 // check whether Reg is defined before delay slot.
267 if (IsRegInSet(RegDefs, Reg))
// insertCallDefsUses - Record the registers a call instruction defines and
// uses so the delay-slot candidate search can avoid clobbering them.
275 void Filler::insertCallDefsUses(MachineBasicBlock::iterator MI,
276 SmallSet<unsigned, 32>& RegDefs,
277 SmallSet<unsigned, 32>& RegUses)
279 // Call defines o7, which is visible to the instruction in delay slot.
280 RegDefs.insert(SP::O7);
282 switch(MI->getOpcode()) {
283 default: llvm_unreachable("Unknown opcode.");
// Direct CALL: target is an immediate — no register uses to record.
284 case SP::CALL: break;
// Indirect call (CALLrr/CALLri — the case labels are elided in this view):
// operand 0 is the callee-address register, operand 1 a register or imm.
287 assert(MI->getNumOperands() >= 2);
288 const MachineOperand &Reg = MI->getOperand(0);
289 assert(Reg.isReg() && "CALL first operand is not a register.");
290 assert(Reg.isUse() && "CALL first operand is not a use.");
291 RegUses.insert(Reg.getReg());
293 const MachineOperand &RegOrImm = MI->getOperand(1);
// CALLri's immediate second operand contributes no register use.
294 if (RegOrImm.isImm())
296 assert(RegOrImm.isReg() && "CALLrr second operand is not a register.");
297 assert(RegOrImm.isUse() && "CALLrr second operand is not a use.");
298 RegUses.insert(RegOrImm.getReg());
303 // Insert Defs and Uses of MI into the sets RegDefs and RegUses.
304 void Filler::insertDefsUses(MachineBasicBlock::iterator MI,
305 SmallSet<unsigned, 32>& RegDefs,
306 SmallSet<unsigned, 32>& RegUses)
// Walk every operand; the isReg()/def-vs-use classification and the set
// insertions between these lines are elided in this view.
308 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
309 const MachineOperand &MO = MI->getOperand(i);
313 unsigned Reg = MO.getReg();
319 // Implicit register uses of retl are return values and
320 // retl does not use them.
321 if (MO.isImplicit() && MI->getOpcode() == SP::RETL)
328 // returns true if the Reg or its alias is in the RegSet.
329 bool Filler::IsRegInSet(SmallSet<unsigned, 32>& RegSet, unsigned Reg)
331 // Check Reg and all aliased Registers.
// MCRegAliasIterator with IncludeSelf=true visits Reg itself plus every
// register that overlaps it (e.g. %i0 vs the double-wide pairs).
332 for (MCRegAliasIterator AI(Reg, Subtarget->getRegisterInfo(), true);
334 if (RegSet.count(*AI))
// needsUnimp - A SPARC call that returns a struct must be followed by an
// UNIMP instruction encoding the struct's size (per the SPARC ABI). Returns
// true and sets StructSize when call I carries a nonzero struct-size operand.
// NOTE(review): the guard rejecting non-call instructions and the check that
// the operand/immediate is nonzero are elided in this view.
339 bool Filler::needsUnimp(MachineBasicBlock::iterator I, unsigned &StructSize)
// Which operand holds the struct size depends on the call flavor (direct
// vs. register-indirect).
344 unsigned structSizeOpNum = 0;
345 switch (I->getOpcode()) {
346 default: llvm_unreachable("Unknown call opcode.");
347 case SP::CALL: structSizeOpNum = 1; break;
349 case SP::CALLri: structSizeOpNum = 2; break;
// TLS calls never take the struct-return UNIMP.
350 case SP::TLS_CALL: return false;
353 const MachineOperand &MO = I->getOperand(structSizeOpNum);
356 StructSize = MO.getImm();
// combineRestoreADD - Fold an ADD that writes an %i register into the
// following trivial RESTORE, letting RESTORE perform the addition itself.
// Returns true on success (the early-return on mismatch is elided).
360 static bool combineRestoreADD(MachineBasicBlock::iterator RestoreMI,
361 MachineBasicBlock::iterator AddMI,
362 const TargetInstrInfo *TII)
364 // Before: add <op0>, <op1>, %i[0-7]
365 // restore %g0, %g0, %i[0-7]
367 // After : restore <op0>, <op1>, %o[0-7]
// Only profitable/legal when the ADD's destination is an in-register:
// after the register window rotates, %iN becomes the caller's %oN.
369 unsigned reg = AddMI->getOperand(0).getReg();
370 if (reg < SP::I0 || reg > SP::I7)
// Delete the now-redundant trivial RESTORE...
374 RestoreMI->eraseFromParent();
376 // Change ADD to RESTORE.
// ...and rewrite the ADD in place (ADDrr->RESTORErr; the ADDri->RESTOREri
// arm of this conditional is elided in this view).
377 AddMI->setDesc(TII->get((AddMI->getOpcode() == SP::ADDrr)
381 // Map the destination register.
382 AddMI->getOperand(0).setReg(reg - SP::I0 + SP::O0);
// combineRestoreOR - Like combineRestoreADD, but for an OR that is really a
// register copy (one operand is %g0 / zero). The copy is folded into the
// following trivial RESTORE. Returns true on success (early-returns elided).
387 static bool combineRestoreOR(MachineBasicBlock::iterator RestoreMI,
388 MachineBasicBlock::iterator OrMI,
389 const TargetInstrInfo *TII)
391 // Before: or <op0>, <op1>, %i[0-7]
392 // restore %g0, %g0, %i[0-7]
393 // and <op0> or <op1> is zero,
395 // After : restore <op0>, <op1>, %o[0-7]
// Destination must be an in-register (becomes the caller's %o after the
// window rotates).
397 unsigned reg = OrMI->getOperand(0).getReg();
398 if (reg < SP::I0 || reg > SP::I7)
401 // check whether it is a copy.
// ORrr is a copy only if at least one source is %g0 (hard-wired zero).
402 if (OrMI->getOpcode() == SP::ORrr
403 && OrMI->getOperand(1).getReg() != SP::G0
404 && OrMI->getOperand(2).getReg() != SP::G0)
// ORri is a copy only if the register source is %g0 or the immediate is 0.
407 if (OrMI->getOpcode() == SP::ORri
408 && OrMI->getOperand(1).getReg() != SP::G0
409 && (!OrMI->getOperand(2).isImm() || OrMI->getOperand(2).getImm() != 0))
// Delete the trivial RESTORE and rewrite the OR in place.
413 RestoreMI->eraseFromParent();
415 // Change OR to RESTORE.
// ORrr->RESTORErr (the ORri->RESTOREri arm of this conditional is elided).
416 OrMI->setDesc(TII->get((OrMI->getOpcode() == SP::ORrr)
420 // Map the destination register.
421 OrMI->getOperand(0).setReg(reg - SP::I0 + SP::O0);
// combineRestoreSETHIi - Fold a SETHI of a small immediate into the
// following trivial RESTORE. sethi places imm<<10 in the destination, so a
// 3-bit imm fits RESTOREri's 13-bit signed immediate after shifting.
// Returns true on success (early-returns on mismatch are elided).
426 static bool combineRestoreSETHIi(MachineBasicBlock::iterator RestoreMI,
427 MachineBasicBlock::iterator SetHiMI,
428 const TargetInstrInfo *TII)
430 // Before: sethi imm3, %i[0-7]
431 // restore %g0, %g0, %g0
433 // After : restore %g0, (imm3<<10), %o[0-7]
435 unsigned reg = SetHiMI->getOperand(0).getReg();
436 if (reg < SP::I0 || reg > SP::I7)
439 if (!SetHiMI->getOperand(1).isImm())
442 int64_t imm = SetHiMI->getOperand(1).getImm();
444 // Is it a 3 bit immediate?
// NOTE(review): the actual 3-bit range check is elided in this view.
448 // Make it a 13 bit immediate.
449 imm = (imm << 10) & 0x1FFF;
// Unlike the ADD/OR cases, here the RESTORE itself is rewritten (it gains
// the immediate) and the SETHI is the instruction that gets erased.
451 assert(RestoreMI->getOpcode() == SP::RESTORErr);
453 RestoreMI->setDesc(TII->get(SP::RESTOREri));
455 RestoreMI->getOperand(0).setReg(reg - SP::I0 + SP::O0);
456 RestoreMI->getOperand(1).setReg(SP::G0);
457 RestoreMI->getOperand(2).ChangeToImmediate(imm);
460 // Erase the original SETHI.
461 SetHiMI->eraseFromParent();
// tryCombineRestoreWithPrevInst - Given a trivial "restore %g0, %g0, %g0",
// try to merge it with the immediately preceding ADD/OR/SETHI so that the
// RESTORE performs that instruction's work. Returns true if combined.
466 bool Filler::tryCombineRestoreWithPrevInst(MachineBasicBlock &MBB,
467 MachineBasicBlock::iterator MBBI)
469 // No previous instruction.
470 if (MBBI == MBB.begin())
473 // assert that MBBI is a "restore %g0, %g0, %g0".
474 assert(MBBI->getOpcode() == SP::RESTORErr
475 && MBBI->getOperand(0).getReg() == SP::G0
476 && MBBI->getOperand(1).getReg() == SP::G0
477 && MBBI->getOperand(2).getReg() == SP::G0);
479 MachineBasicBlock::iterator PrevInst = std::prev(MBBI);
481 // It cannot be combined with a bundled instruction.
// (e.g. a delay-slot filler already bundled with its branch/call.)
482 if (PrevInst->isBundledWithSucc())
485 const TargetInstrInfo *TII = Subtarget->getInstrInfo();
// Dispatch on the previous instruction's opcode; the ADDrr/ORrr case
// labels are elided in this view.
487 switch (PrevInst->getOpcode()) {
490 case SP::ADDri: return combineRestoreADD(MBBI, PrevInst, TII); break;
492 case SP::ORri: return combineRestoreOR(MBBI, PrevInst, TII); break;
493 case SP::SETHIi: return combineRestoreSETHIi(MBBI, PrevInst, TII); break;
495 // It cannot combine with the previous instruction.