1 //===-- DwarfEHPrepare - Prepare exception handling for code generation ---===//
3 // The LLVM Compiler Infrastructure
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 // This pass mulches exception handling code into a form adapted to code
11 // generation. Required if using dwarf exception handling.
13 //===----------------------------------------------------------------------===//
15 #define DEBUG_TYPE "dwarfehprepare"
16 #include "llvm/Function.h"
17 #include "llvm/Instructions.h"
18 #include "llvm/IntrinsicInst.h"
19 #include "llvm/Module.h"
20 #include "llvm/Pass.h"
21 #include "llvm/ADT/Statistic.h"
22 #include "llvm/Analysis/Dominators.h"
23 #include "llvm/CodeGen/Passes.h"
24 #include "llvm/MC/MCAsmInfo.h"
25 #include "llvm/Support/CallSite.h"
26 #include "llvm/Target/TargetLowering.h"
27 #include "llvm/Transforms/Utils/BasicBlockUtils.h"
28 #include "llvm/Transforms/Utils/PromoteMemToReg.h"
31 STATISTIC(NumLandingPadsSplit, "Number of landing pads split");
32 STATISTIC(NumUnwindsLowered, "Number of unwind instructions lowered");
33 STATISTIC(NumExceptionValuesMoved, "Number of eh.exception calls moved");
34 STATISTIC(NumStackTempsIntroduced, "Number of stack temporaries introduced");
// NOTE(review): this is an elided dump — the embedded line numbers skip, so
// several member declarations referenced below (the current Function *F, the
// DominatorTree *DT, the URoR Constant/Function, and the LandingPads BBSet)
// are not visible in this view. Confirm against the full file before editing.
// The class appears to live in an anonymous namespace (see the closing
// comment at the end) — presumably the opening brace is elided above.
37 class DwarfEHPrepare : public FunctionPass {
38 const TargetMachine *TM;
39 const TargetLowering *TLI;
42 // The eh.exception intrinsic.
43 Function *ExceptionValueIntrinsic;
45 // The eh.selector intrinsic.
46 Function *SelectorIntrinsic;
48 // _Unwind_Resume_or_Rethrow call.
51 // The EH language-specific catch-all type.
52 GlobalVariable *EHCatchAllValue;
54 // _Unwind_Resume or the target equivalent.
55 Constant *RewindFunction;
57 // Dominator info is used when turning stack temporaries into registers.
59 DominanceFrontier *DF;
61 // The function we are running on.
64 // The landing pads for this function.
65 typedef SmallPtrSet<BasicBlock*, 8> BBSet;
68 // Stack temporary used to hold eh.exception values.
69 AllocaInst *ExceptionValueVar;
// Per-function transformation steps; each returns true if it changed the IR.
71 bool NormalizeLandingPads();
73 bool MoveExceptionValueCalls();
74 bool FinishStackTemporaries();
75 bool PromoteStackTemporaries();
// Helpers that materialize a read of the exception value in block BB.
77 Instruction *CreateExceptionValueCall(BasicBlock *BB);
78 Instruction *CreateValueLoad(BasicBlock *BB);
80 /// CreateReadOfExceptionValue - Return the result of the eh.exception
81 /// intrinsic by calling the intrinsic if in a landing pad, or loading it
82 /// from the exception value variable otherwise.
83 Instruction *CreateReadOfExceptionValue(BasicBlock *BB) {
84 return LandingPads.count(BB) ?
85 CreateExceptionValueCall(BB) : CreateValueLoad(BB);
88 /// CleanupSelectors - Any remaining eh.selector intrinsic calls which still
89 /// use the "llvm.eh.catch.all.value" call need to convert to using its
90 /// initializer instead.
91 bool CleanupSelectors(SmallPtrSet<IntrinsicInst*, 32> &Sels);
// True if II's last argument is the "llvm.eh.catch.all.value" global.
93 bool HasCatchAllInSelector(IntrinsicInst *);
95 /// FindAllCleanupSelectors - Find all eh.selector calls that are clean-ups.
96 void FindAllCleanupSelectors(SmallPtrSet<IntrinsicInst*, 32> &Sels,
97 SmallPtrSet<IntrinsicInst*, 32> &CatchAllSels);
99 /// FindAllURoRInvokes - Find all URoR invokes in the function.
100 void FindAllURoRInvokes(SmallPtrSet<InvokeInst*, 32> &URoRInvokes);
102 /// HandleURoRInvokes - Handle invokes of "_Unwind_Resume_or_Rethrow"
103 /// calls. The "unwind" part of these invokes jump to a landing pad within
104 /// the current function. This is a candidate to merge the selector
105 /// associated with the URoR invoke with the one from the URoR's landing
107 bool HandleURoRInvokes();
109 /// FindSelectorAndURoR - Find the eh.selector call and URoR call associated
110 /// with the eh.exception call. This recursively looks past instructions
111 /// which don't change the EH pointer value, like casts or PHI nodes.
112 bool FindSelectorAndURoR(Instruction *Inst, bool &URoRInvoke,
113 SmallPtrSet<IntrinsicInst*, 8> &SelCalls);
115 /// PromoteStoreInst - Perform Mem2Reg on a StoreInst.
116 bool PromoteStoreInst(StoreInst *SI) {
// Requires both dominator tree and dominance frontier analyses.
117 if (!SI || !DT || !DF) return false;
// Operand 1 of a store is the pointer operand; only promotable allocas
// can be rewritten into SSA registers.
119 AllocaInst *AI = dyn_cast<AllocaInst>(SI->getOperand(1));
120 if (!AI || !isAllocaPromotable(AI)) return false;
122 // Turn the alloca into a register.
123 std::vector<AllocaInst*> Allocas(1, AI);
124 PromoteMemToReg(Allocas, *DT, *DF);
128 /// PromoteEHPtrStore - Promote the storing of an EH pointer into a
129 /// register. This should get rid of the store and subsequent loads.
130 bool PromoteEHPtrStore(IntrinsicInst *II) {
131 if (!DT || !DF) return false;
133 bool Changed = false;
// NOTE(review): lines are elided here — SI's declaration and the loop's
// restart logic are not visible in this dump.
138 for (Value::use_iterator
139 I = II->use_begin(), E = II->use_end(); I != E; ++I) {
140 SI = dyn_cast<StoreInst>(*I);
144 if (!PromoteStoreInst(SI))
// Pass identification: the pass framework keys off the ADDRESS of ID.
154 static char ID; // Pass identification, replacement for typeid.
// Note: the 'fast' flag is accepted but not stored by this constructor.
155 DwarfEHPrepare(const TargetMachine *tm, bool fast) :
156 FunctionPass(ID), TM(tm), TLI(TM->getTargetLowering()),
158 ExceptionValueIntrinsic(0), SelectorIntrinsic(0),
159 URoR(0), EHCatchAllValue(0), RewindFunction(0) {}
161 virtual bool runOnFunction(Function &Fn);
163 // getAnalysisUsage - We need dominance frontiers for memory promotion.
164 virtual void getAnalysisUsage(AnalysisUsage &AU) const {
// Both analyses are required AND preserved: the pass keeps them up to
// date (see the splitBlock calls in NormalizeLandingPads).
166 AU.addRequired<DominatorTree>();
167 AU.addPreserved<DominatorTree>();
169 AU.addRequired<DominanceFrontier>();
170 AU.addPreserved<DominanceFrontier>();
173 const char *getPassName() const {
174 return "Exception handling preparation";
178 } // end anonymous namespace
// Storage for the pass-identification byte declared inside the class.
180 char DwarfEHPrepare::ID = 0;
// createDwarfEHPass - Public factory for this pass; the caller (the pass
// manager) takes ownership of the returned object. The closing brace of
// this function is elided in this dump.
182 FunctionPass *llvm::createDwarfEHPass(const TargetMachine *tm, bool fast) {
183 return new DwarfEHPrepare(tm, fast);
186 /// HasCatchAllInSelector - Return true if the intrinsic instruction has a
// NOTE(review): the doc comment's continuation line is elided in this dump;
// presumably it ends "...catch-all in it" — confirm against the full file.
188 bool DwarfEHPrepare::HasCatchAllInSelector(IntrinsicInst *II) {
// Without the catch-all global there is nothing to compare against.
189 if (!EHCatchAllValue) return false;
// The catch-all marker, when present, is the LAST argument of eh.selector.
191 unsigned ArgIdx = II->getNumArgOperands() - 1;
192 GlobalVariable *GV = dyn_cast<GlobalVariable>(II->getArgOperand(ArgIdx));
193 return GV == EHCatchAllValue;
196 /// FindAllCleanupSelectors - Find all eh.selector calls that are clean-ups.
// Partitions this function's eh.selector calls into Sels (clean-ups) and
// CatchAllSels (those already carrying the catch-all marker). NOTE(review):
// the insertion into Sels for the non-catch-all case is elided in this dump.
197 void DwarfEHPrepare::
198 FindAllCleanupSelectors(SmallPtrSet<IntrinsicInst*, 32> &Sels,
199 SmallPtrSet<IntrinsicInst*, 32> &CatchAllSels) {
// Walk every use of the eh.selector declaration in the module...
200 for (Value::use_iterator
201 I = SelectorIntrinsic->use_begin(),
202 E = SelectorIntrinsic->use_end(); I != E; ++I) {
203 IntrinsicInst *II = cast<IntrinsicInst>(*I);
// ...but only consider calls inside the function being processed.
205 if (II->getParent()->getParent() != F)
208 if (!HasCatchAllInSelector(II))
211 CatchAllSels.insert(II);
215 /// FindAllURoRInvokes - Find all URoR invokes in the function.
// Collects every InvokeInst whose callee is _Unwind_Resume_or_Rethrow.
// Plain CallInst uses are deliberately skipped by the dyn_cast.
216 void DwarfEHPrepare::
217 FindAllURoRInvokes(SmallPtrSet<InvokeInst*, 32> &URoRInvokes) {
218 for (Value::use_iterator
219 I = URoR->use_begin(),
220 E = URoR->use_end(); I != E; ++I) {
221 if (InvokeInst *II = dyn_cast<InvokeInst>(*I))
222 URoRInvokes.insert(II);
226 /// CleanupSelectors - Any remaining eh.selector intrinsic calls which still use
227 /// the "llvm.eh.catch.all.value" call need to convert to using its
228 /// initializer instead.
// Returns true if any selector argument was rewritten. NOTE(review): the
// assignment of the eh_selector declaration and the Changed update/return
// are elided in this dump.
229 bool DwarfEHPrepare::CleanupSelectors(SmallPtrSet<IntrinsicInst*, 32> &Sels) {
230 if (!EHCatchAllValue) return false;
// Lazily look up the eh.selector declaration for this module.
232 if (!SelectorIntrinsic) {
234 Intrinsic::getDeclaration(F->getParent(), Intrinsic::eh_selector);
235 if (!SelectorIntrinsic) return false;
238 bool Changed = false;
239 for (SmallPtrSet<IntrinsicInst*, 32>::iterator
240 I = Sels.begin(), E = Sels.end(); I != E; ++I) {
241 IntrinsicInst *Sel = *I;
243 // Index of the "llvm.eh.catch.all.value" variable.
244 unsigned OpIdx = Sel->getNumArgOperands() - 1;
245 GlobalVariable *GV = dyn_cast<GlobalVariable>(Sel->getArgOperand(OpIdx));
246 if (GV != EHCatchAllValue) continue;
// Replace the reference to the global with its initializer value.
247 Sel->setArgOperand(OpIdx, EHCatchAllValue->getInitializer());
254 /// FindSelectorAndURoR - Find the eh.selector call associated with the
255 /// eh.exception call. And indicate if there is a URoR "invoke" associated with
256 /// the eh.exception call. This recursively looks past instructions which don't
257 /// change the EH pointer value, like casts or PHI nodes.
// NOTE(review): the 'restart:' label targeted by the goto below, the
// URoRInvoke = true assignment, and the final return are elided in this dump.
259 DwarfEHPrepare::FindSelectorAndURoR(Instruction *Inst, bool &URoRInvoke,
260 SmallPtrSet<IntrinsicInst*, 8> &SelCalls) {
// Guards against infinite recursion through PHI cycles.
261 SmallPtrSet<PHINode*, 32> SeenPHIs;
262 bool Changed = false;
265 for (Value::use_iterator
266 I = Inst->use_begin(), E = Inst->use_end(); I != E; ++I) {
// Only instructions in the current function are relevant.
267 Instruction *II = dyn_cast<Instruction>(*I);
268 if (!II || II->getParent()->getParent() != F) continue;
270 if (IntrinsicInst *Sel = dyn_cast<IntrinsicInst>(II)) {
271 if (Sel->getIntrinsicID() == Intrinsic::eh_selector)
272 SelCalls.insert(Sel);
273 } else if (InvokeInst *Invoke = dyn_cast<InvokeInst>(II)) {
274 if (Invoke->getCalledFunction() == URoR)
// Casts don't change the EH pointer: look through them recursively.
276 } else if (CastInst *CI = dyn_cast<CastInst>(II)) {
277 Changed |= FindSelectorAndURoR(CI, URoRInvoke, SelCalls);
278 } else if (StoreInst *SI = dyn_cast<StoreInst>(II)) {
// Promoting the store rewrites this instruction's use list, so the
// iteration must be restarted from scratch.
279 if (!PromoteStoreInst(SI)) continue;
282 goto restart; // Uses may have changed, restart loop.
283 } else if (PHINode *PN = dyn_cast<PHINode>(II)) {
// insert() returns true only on first insertion, so each PHI is
// recursed into at most once.
284 if (SeenPHIs.insert(PN))
285 // Don't process a PHI node more than once.
286 Changed |= FindSelectorAndURoR(PN, URoRInvoke, SelCalls);
293 /// HandleURoRInvokes - Handle invokes of "_Unwind_Resume_or_Rethrow" calls. The
294 /// "unwind" part of these invokes jump to a landing pad within the current
295 /// function. This is a candidate to merge the selector associated with the URoR
296 /// invoke with the one from the URoR's landing pad.
// Returns true if any selectors were converted or cleaned up. NOTE(review):
// several lines are elided throughout this dump (assignments to
// EHCatchAllValue/SelectorIntrinsic, the DT null-check guarding the early
// CleanupSelectors return, loop increments, and the final return).
297 bool DwarfEHPrepare::HandleURoRInvokes() {
// Lazily resolve the catch-all sentinel global; nothing to do without it.
298 if (!EHCatchAllValue) {
300 F->getParent()->getNamedGlobal("llvm.eh.catch.all.value");
301 if (!EHCatchAllValue) return false;
// Lazily resolve the eh.selector declaration.
304 if (!SelectorIntrinsic) {
306 Intrinsic::getDeclaration(F->getParent(), Intrinsic::eh_selector);
307 if (!SelectorIntrinsic) return false;
310 SmallPtrSet<IntrinsicInst*, 32> Sels;
311 SmallPtrSet<IntrinsicInst*, 32> CatchAllSels;
312 FindAllCleanupSelectors(Sels, CatchAllSels);
315 // We require DominatorTree information.
316 return CleanupSelectors(CatchAllSels);
// Without a URoR function in the module, only the cleanup step applies.
319 URoR = F->getParent()->getFunction("_Unwind_Resume_or_Rethrow");
320 if (!URoR) return CleanupSelectors(CatchAllSels);
323 SmallPtrSet<InvokeInst*, 32> URoRInvokes;
324 FindAllURoRInvokes(URoRInvokes);
326 SmallPtrSet<IntrinsicInst*, 32> SelsToConvert;
// Fast path: a clean-up selector whose block dominates a URoR invoke's
// block must be converted to a catch-all.
328 for (SmallPtrSet<IntrinsicInst*, 32>::iterator
329 SI = Sels.begin(), SE = Sels.end(); SI != SE; ++SI) {
330 const BasicBlock *SelBB = (*SI)->getParent();
331 for (SmallPtrSet<InvokeInst*, 32>::iterator
332 UI = URoRInvokes.begin(), UE = URoRInvokes.end(); UI != UE; ++UI) {
333 const BasicBlock *URoRBB = (*UI)->getParent();
334 if (DT->dominates(SelBB, URoRBB)) {
335 SelsToConvert.insert(*SI);
341 bool Changed = false;
343 if (Sels.size() != SelsToConvert.size()) {
344 // If we haven't been able to convert all of the clean-up selectors, then
345 // loop through the slow way to see if they still need to be converted.
346 if (!ExceptionValueIntrinsic) {
347 ExceptionValueIntrinsic =
348 Intrinsic::getDeclaration(F->getParent(), Intrinsic::eh_exception);
349 if (!ExceptionValueIntrinsic)
350 return CleanupSelectors(CatchAllSels);
// Slow path: trace each eh.exception value to its selectors and any
// associated URoR invoke.
353 for (Value::use_iterator
354 I = ExceptionValueIntrinsic->use_begin(),
355 E = ExceptionValueIntrinsic->use_end(); I != E; ++I) {
356 IntrinsicInst *EHPtr = dyn_cast<IntrinsicInst>(*I);
357 if (!EHPtr || EHPtr->getParent()->getParent() != F) continue;
359 Changed |= PromoteEHPtrStore(EHPtr);
361 bool URoRInvoke = false;
362 SmallPtrSet<IntrinsicInst*, 8> SelCalls;
363 Changed |= FindSelectorAndURoR(EHPtr, URoRInvoke, SelCalls);
366 // This EH pointer is being used by an invoke of an URoR instruction and
367 // an eh.selector intrinsic call. If the eh.selector is a 'clean-up', we
368 // need to convert it to a 'catch-all'.
369 for (SmallPtrSet<IntrinsicInst*, 8>::iterator
370 SI = SelCalls.begin(), SE = SelCalls.end(); SI != SE; ++SI)
371 if (!HasCatchAllInSelector(*SI))
372 SelsToConvert.insert(*SI);
377 if (!SelsToConvert.empty()) {
378 // Convert all clean-up eh.selectors, which are associated with "invokes" of
379 // URoR calls, into catch-all eh.selectors.
382 for (SmallPtrSet<IntrinsicInst*, 8>::iterator
383 SI = SelsToConvert.begin(), SE = SelsToConvert.end();
385 IntrinsicInst *II = *SI;
387 // Use the exception object pointer and the personality function
388 // from the original selector.
// NOTE(review): the CallSite CS construction is elided in this dump.
390 IntrinsicInst::op_iterator I = CS.arg_begin();
391 IntrinsicInst::op_iterator E = CS.arg_end();
392 IntrinsicInst::op_iterator B = prior(E);
394 // Exclude last argument if it is an integer.
395 if (isa<ConstantInt>(B)) E = B;
397 // Add exception object pointer (front).
398 // Add personality function (next).
399 // Add in any filter IDs (rest).
400 SmallVector<Value*, 8> Args(I, E);
402 Args.push_back(EHCatchAllValue->getInitializer()); // Catch-all indicator.
// Build a replacement selector carrying the catch-all marker, preserving
// the original call's attributes, calling convention, and tail-call flag.
404 CallInst *NewSelector =
405 CallInst::Create(SelectorIntrinsic, Args.begin(), Args.end(),
406 "eh.sel.catch.all", II);
408 NewSelector->setTailCall(II->isTailCall());
409 NewSelector->setAttributes(II->getAttributes());
410 NewSelector->setCallingConv(II->getCallingConv());
412 II->replaceAllUsesWith(NewSelector);
413 II->eraseFromParent();
417 Changed |= CleanupSelectors(CatchAllSels);
421 /// NormalizeLandingPads - Normalize and discover landing pads, noting them
422 /// in the LandingPads set. A landing pad is normal if the only CFG edges
423 /// that end at it are unwind edges from invoke instructions. If we inlined
424 /// through an invoke we could have a normal branch from the previous
425 /// unwind block through to the landing pad for the original invoke.
426 /// Abnormal landing pads are fixed up by redirecting all unwind edges to
427 /// a new basic block which falls through to the original.
// Returns true if any landing pad was split. NOTE(review): loop increments,
// several 'continue'/'break' lines, the SwitchOK bookkeeping, and the final
// return are elided in this dump.
428 bool DwarfEHPrepare::NormalizeLandingPads() {
429 bool Changed = false;
// SjLj EH routes unwinds through a dispatch switch rather than only
// through invoke unwind edges, so it needs special-casing below.
431 const MCAsmInfo *MAI = TM->getMCAsmInfo();
432 bool usingSjLjEH = MAI->getExceptionHandlingType() == ExceptionHandling::SjLj;
434 for (Function::iterator I = F->begin(), E = F->end(); I != E; ++I) {
435 TerminatorInst *TI = I->getTerminator();
436 if (!isa<InvokeInst>(TI))
// Successor 1 of an invoke is its unwind destination.
438 BasicBlock *LPad = TI->getSuccessor(1);
439 // Skip landing pads that have already been normalized.
440 if (LandingPads.count(LPad))
443 // Check that only invoke unwind edges end at the landing pad.
444 bool OnlyUnwoundTo = true;
445 bool SwitchOK = usingSjLjEH;
446 for (pred_iterator PI = pred_begin(LPad), PE = pred_end(LPad);
448 TerminatorInst *PT = (*PI)->getTerminator();
449 // The SjLj dispatch block uses a switch instruction. This is effectively
450 // an unwind edge, so we can disregard it here. There will only ever
451 // be one dispatch, however, so if there are multiple switches, one
452 // of them truly is a normal edge, not an unwind edge.
453 if (SwitchOK && isa<SwitchInst>(PT)) {
// A normal (successor-0) edge into LPad means it is abnormal.
457 if (!isa<InvokeInst>(PT) || LPad == PT->getSuccessor(0)) {
458 OnlyUnwoundTo = false;
464 // Only unwind edges lead to the landing pad. Remember the landing pad.
465 LandingPads.insert(LPad);
469 // At least one normal edge ends at the landing pad. Redirect the unwind
470 // edges to a new basic block which falls through into this one.
472 // Create the new basic block.
473 BasicBlock *NewBB = BasicBlock::Create(F->getContext(),
474 LPad->getName() + "_unwind_edge");
476 // Insert it into the function right before the original landing pad.
477 LPad->getParent()->getBasicBlockList().insert(LPad, NewBB);
479 // Redirect unwind edges from the original landing pad to NewBB.
480 for (pred_iterator PI = pred_begin(LPad), PE = pred_end(LPad); PI != PE; ) {
// Post-increment: setSuccessor below invalidates the current edge.
481 TerminatorInst *PT = (*PI++)->getTerminator();
482 if (isa<InvokeInst>(PT) && PT->getSuccessor(1) == LPad)
483 // Unwind to the new block.
484 PT->setSuccessor(1, NewBB);
487 // If there are any PHI nodes in LPad, we need to update them so that they
488 // merge incoming values from NewBB instead.
489 for (BasicBlock::iterator II = LPad->begin(); isa<PHINode>(II); ++II) {
490 PHINode *PN = cast<PHINode>(II);
491 pred_iterator PB = pred_begin(NewBB), PE = pred_end(NewBB);
493 // Check to see if all of the values coming in via unwind edges are the
494 // same. If so, we don't need to create a new PHI node.
495 Value *InVal = PN->getIncomingValueForBlock(*PB);
496 for (pred_iterator PI = PB; PI != PE; ++PI) {
497 if (PI != PB && InVal != PN->getIncomingValueForBlock(*PI)) {
504 // Different unwind edges have different values. Create a new PHI node
506 PHINode *NewPN = PHINode::Create(PN->getType(), PN->getName()+".unwind",
508 // Add an entry for each unwind edge, using the value from the old PHI.
509 for (pred_iterator PI = PB; PI != PE; ++PI)
510 NewPN->addIncoming(PN->getIncomingValueForBlock(*PI), *PI);
512 // Now use this new PHI as the common incoming value for NewBB in PN.
516 // Revector exactly one entry in the PHI node to come from NewBB
517 // and delete all other entries that come from unwind edges. If
518 // there are both normal and unwind edges from the same predecessor,
519 // this leaves an entry for the normal edge.
520 for (pred_iterator PI = PB; PI != PE; ++PI)
521 PN->removeIncomingValue(*PI);
522 PN->addIncoming(InVal, NewBB);
525 // Add a fallthrough from NewBB to the original landing pad.
526 BranchInst::Create(LPad, NewBB);
528 // Now update DominatorTree and DominanceFrontier analysis information.
// Keeps the analyses marked addPreserved<> in getAnalysisUsage valid.
530 DT->splitBlock(NewBB);
532 DF->splitBlock(NewBB);
534 // Remember the newly constructed landing pad. The original landing pad
535 // LPad is no longer a landing pad now that all unwind edges have been
536 // revectored to NewBB.
537 LandingPads.insert(NewBB);
538 ++NumLandingPadsSplit;
545 /// LowerUnwinds - Turn unwind instructions into calls to _Unwind_Resume,
546 /// rethrowing any previously caught exception. This will crash horribly
547 /// at runtime if there is no such exception: using unwind to throw a new
548 /// exception is currently not supported.
// Returns true if anything changed. NOTE(review): the FunctionType::get
// varargs argument, the CallInst name argument, statistic increment, and
// final return are elided in this dump.
549 bool DwarfEHPrepare::LowerUnwinds() {
// Collect first, then rewrite: the rewrite erases terminators, which would
// invalidate a direct iteration over the function's blocks.
550 SmallVector<TerminatorInst*, 16> UnwindInsts;
552 for (Function::iterator I = F->begin(), E = F->end(); I != E; ++I) {
553 TerminatorInst *TI = I->getTerminator();
554 if (isa<UnwindInst>(TI))
555 UnwindInsts.push_back(TI);
558 if (UnwindInsts.empty()) return false;
560 // Find the rewind function if we didn't already.
// Lazily declare "void _Unwind_Resume(i8*)" (or the target's equivalent
// per TargetLowering's libcall table) in the module.
561 if (!RewindFunction) {
562 LLVMContext &Ctx = UnwindInsts[0]->getContext();
563 std::vector<const Type*>
564 Params(1, Type::getInt8PtrTy(Ctx));
565 FunctionType *FTy = FunctionType::get(Type::getVoidTy(Ctx),
567 const char *RewindName = TLI->getLibcallName(RTLIB::UNWIND_RESUME);
568 RewindFunction = F->getParent()->getOrInsertFunction(RewindName, FTy);
571 bool Changed = false;
573 for (SmallVectorImpl<TerminatorInst*>::iterator
574 I = UnwindInsts.begin(), E = UnwindInsts.end(); I != E; ++I) {
575 TerminatorInst *TI = *I;
577 // Replace the unwind instruction with a call to _Unwind_Resume (or the
578 // appropriate target equivalent) followed by an UnreachableInst.
580 // Create the call...
581 CallInst *CI = CallInst::Create(RewindFunction,
582 CreateReadOfExceptionValue(TI->getParent()),
584 CI->setCallingConv(TLI->getLibcallCallingConv(RTLIB::UNWIND_RESUME));
585 // ...followed by an UnreachableInst.
// _Unwind_Resume never returns, so the block needs a new terminator.
586 new UnreachableInst(TI->getContext(), TI);
588 // Nuke the unwind instruction.
589 TI->eraseFromParent();
597 /// MoveExceptionValueCalls - Ensure that eh.exception is only ever called from
598 /// landing pads by replacing calls outside of landing pads with loads from a
599 /// stack temporary. Move eh.exception calls inside landing pads to the start
600 /// of the landing pad (optional, but may make things simpler for later passes).
// Returns true if any call was replaced. NOTE(review): the early 'return
// false', the 'continue' for the leave-alone case, the Changed updates, and
// the final return are elided in this dump.
601 bool DwarfEHPrepare::MoveExceptionValueCalls() {
602 // If the eh.exception intrinsic is not declared in the module then there is
603 // nothing to do. Speed up compilation by checking for this common case.
604 if (!ExceptionValueIntrinsic &&
605 !F->getParent()->getFunction(Intrinsic::getName(Intrinsic::eh_exception)))
608 bool Changed = false;
610 for (Function::iterator BB = F->begin(), E = F->end(); BB != E; ++BB) {
// II is advanced before the body runs so that erasing CI is safe.
611 for (BasicBlock::iterator II = BB->begin(), E = BB->end(); II != E;)
612 if (IntrinsicInst *CI = dyn_cast<IntrinsicInst>(II++))
613 if (CI->getIntrinsicID() == Intrinsic::eh_exception) {
614 if (!CI->use_empty()) {
// In a landing pad this calls eh.exception at block start (and may
// just return CI itself); elsewhere it loads the stack temporary.
615 Value *ExceptionValue = CreateReadOfExceptionValue(BB);
616 if (CI == ExceptionValue) {
617 // The call was at the start of a landing pad - leave it alone.
618 assert(LandingPads.count(BB) &&
619 "Created eh.exception call outside landing pad!");
622 CI->replaceAllUsesWith(ExceptionValue);
624 CI->eraseFromParent();
625 ++NumExceptionValuesMoved;
633 /// FinishStackTemporaries - If we introduced a stack variable to hold the
634 /// exception value then initialize it in each landing pad.
// Returns true if stores were added. NOTE(review): the early return body
// and the final return are elided in this dump.
635 bool DwarfEHPrepare::FinishStackTemporaries() {
// CreateValueLoad only creates ExceptionValueVar on demand; if it was
// never created there is nothing to initialize.
636 if (!ExceptionValueVar)
640 bool Changed = false;
642 // Make sure that there is a store of the exception value at the start of
644 for (BBSet::iterator LI = LandingPads.begin(), LE = LandingPads.end();
// Store the landing pad's exception value into the shared stack slot so
// loads elsewhere (inserted by CreateValueLoad) observe it.
646 Instruction *ExceptionValue = CreateReadOfExceptionValue(*LI);
647 Instruction *Store = new StoreInst(ExceptionValue, ExceptionValueVar);
648 Store->insertAfter(ExceptionValue);
655 /// PromoteStackTemporaries - Turn any stack temporaries we introduced into
656 /// registers if possible.
// Returns whether promotion happened; the return statements are elided in
// this dump. Requires DominatorTree and DominanceFrontier (may be null if
// the analyses were unavailable — hence the checks).
657 bool DwarfEHPrepare::PromoteStackTemporaries() {
658 if (ExceptionValueVar && DT && DF && isAllocaPromotable(ExceptionValueVar)) {
659 // Turn the exception temporary into registers and phi nodes if possible.
660 std::vector<AllocaInst*> Allocas(1, ExceptionValueVar);
661 PromoteMemToReg(Allocas, *DT, *DF);
667 /// CreateExceptionValueCall - Insert a call to the eh.exception intrinsic at
668 /// the start of the basic block (unless there already is one, in which case
669 /// the existing call is returned).
// NOTE(review): the 'return CI;' for the reuse case is elided in this dump.
670 Instruction *DwarfEHPrepare::CreateExceptionValueCall(BasicBlock *BB) {
// Skip PHIs and debug intrinsics: the call must be the first "real"
// instruction of the landing pad.
671 Instruction *Start = BB->getFirstNonPHIOrDbg();
672 // Is this a call to eh.exception?
673 if (IntrinsicInst *CI = dyn_cast<IntrinsicInst>(Start))
674 if (CI->getIntrinsicID() == Intrinsic::eh_exception)
675 // Reuse the existing call.
678 // Find the eh.exception intrinsic if we didn't already.
679 if (!ExceptionValueIntrinsic)
680 ExceptionValueIntrinsic = Intrinsic::getDeclaration(F->getParent(),
681 Intrinsic::eh_exception);
// Insert a fresh call before Start, i.e. at the top of the block.
684 return CallInst::Create(ExceptionValueIntrinsic, "eh.value.call", Start);
687 /// CreateValueLoad - Insert a load of the exception value stack variable
688 /// (creating it if necessary) at the start of the basic block (unless
689 /// there already is a load, in which case the existing load is returned).
// NOTE(review): the 'return LI;' for the reuse case is elided in this dump.
690 Instruction *DwarfEHPrepare::CreateValueLoad(BasicBlock *BB) {
691 Instruction *Start = BB->getFirstNonPHIOrDbg();
692 // Is this a load of the exception temporary?
693 if (ExceptionValueVar)
694 if (LoadInst* LI = dyn_cast<LoadInst>(Start))
695 if (LI->getPointerOperand() == ExceptionValueVar)
696 // Reuse the existing load.
699 // Create the temporary if we didn't already.
// An i8* alloca placed at the top of the entry block so it dominates all
// uses; FinishStackTemporaries later stores into it from each landing pad.
700 if (!ExceptionValueVar) {
701 ExceptionValueVar = new AllocaInst(PointerType::getUnqual(
702 Type::getInt8Ty(BB->getContext())), "eh.value", F->begin()->begin());
703 ++NumStackTempsIntroduced;
// Load the current exception value at the top of BB.
707 return new LoadInst(ExceptionValueVar, "eh.value.load", Start);
// runOnFunction - Pass entry point: normalizes landing pads, lowers unwind
// instructions, canonicalizes eh.exception usage, and rewrites URoR-related
// selectors. Returns true if the function was modified. NOTE(review): this
// view is truncated — the assignment 'F = &Fn', the LandingPads.clear(),
// the DT/DF conditions around PromoteStackTemporaries, and the final
// 'return Changed;' are elided.
710 bool DwarfEHPrepare::runOnFunction(Function &Fn) {
711 bool Changed = false;
713 // Initialize internal state.
// getAnalysisIfAvailable may return null; downstream steps check for that.
714 DT = getAnalysisIfAvailable<DominatorTree>();
715 DF = getAnalysisIfAvailable<DominanceFrontier>();
716 ExceptionValueVar = 0;
719 // Ensure that only unwind edges end at landing pads (a landing pad is a
720 // basic block where an invoke unwind edge ends).
721 Changed |= NormalizeLandingPads();
723 // Turn unwind instructions into libcalls.
724 Changed |= LowerUnwinds();
726 // TODO: Move eh.selector calls to landing pads and combine them.
728 // Move eh.exception calls to landing pads.
729 Changed |= MoveExceptionValueCalls();
731 // Initialize any stack temporaries we introduced.
732 Changed |= FinishStackTemporaries();
734 // Turn any stack temporaries into registers if possible.
736 Changed |= PromoteStackTemporaries();
738 Changed |= HandleURoRInvokes();