//===- ObjCARC.cpp - ObjC ARC Optimization --------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines ObjC ARC optimizations. ARC stands for
// Automatic Reference Counting and is a system for managing reference counts
// for objects in Objective C.
//
// The optimizations performed include elimination of redundant, partially
// redundant, and inconsequential reference count operations, elimination of
// redundant weak pointer operations, pattern-matching and replacement of
// low-level operations into higher-level operations, and numerous minor
// simplifications.
//
// This file also defines a simple ARC-aware AliasAnalysis.
//
// WARNING: This file knows about certain library functions. It recognizes them
// by name, and hardwires knowledge of their semantics.
//
// WARNING: This file knows about how certain Objective-C library functions are
// used. Naive LLVM IR transformations which would otherwise be
// behavior-preserving may break these assumptions.
//
//===----------------------------------------------------------------------===//
#define DEBUG_TYPE "objc-arc"
#include "llvm/Function.h"
#include "llvm/Intrinsics.h"
#include "llvm/GlobalVariable.h"
#include "llvm/DerivedTypes.h"
#include "llvm/Module.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/Support/CallSite.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/STLExtras.h"
using namespace llvm;

// A handy option to enable/disable all optimizations in this file.
static cl::opt<bool> EnableARCOpts("enable-objc-arc-opts", cl::init(true));
//===----------------------------------------------------------------------===//
// Misc. Utilities
//===----------------------------------------------------------------------===//
namespace {
  /// MapVector - An associative container with fast insertion-order
  /// (deterministic) iteration over its elements. Plus the special
  /// blot operation.
  template<class KeyT, class ValueT>
  class MapVector {
    /// Map - Map keys to indices in Vector.
    typedef DenseMap<KeyT, size_t> MapTy;
    MapTy Map;

    /// Vector - Keys and values.
    typedef std::vector<std::pair<KeyT, ValueT> > VectorTy;
    VectorTy Vector;

  public:
    typedef typename VectorTy::iterator iterator;
    typedef typename VectorTy::const_iterator const_iterator;
    iterator begin() { return Vector.begin(); }
    iterator end() { return Vector.end(); }
    const_iterator begin() const { return Vector.begin(); }
    const_iterator end() const { return Vector.end(); }

#ifdef XDEBUG
    ~MapVector() {
      assert(Vector.size() >= Map.size()); // May differ due to blotting.
      for (typename MapTy::const_iterator I = Map.begin(), E = Map.end();
           I != E; ++I) {
        assert(I->second < Vector.size());
        assert(Vector[I->second].first == I->first);
      }
      for (typename VectorTy::const_iterator I = Vector.begin(),
           E = Vector.end(); I != E; ++I)
        assert(!I->first ||
               (Map.count(I->first) &&
                Map[I->first] == size_t(I - Vector.begin())));
    }
#endif

    ValueT &operator[](const KeyT &Arg) {
      std::pair<typename MapTy::iterator, bool> Pair =
        Map.insert(std::make_pair(Arg, size_t(0)));
      if (Pair.second) {
        size_t Num = Vector.size();
        Pair.first->second = Num;
        Vector.push_back(std::make_pair(Arg, ValueT()));
        return Vector[Num].second;
      }
      return Vector[Pair.first->second].second;
    }

    std::pair<iterator, bool>
    insert(const std::pair<KeyT, ValueT> &InsertPair) {
      std::pair<typename MapTy::iterator, bool> Pair =
        Map.insert(std::make_pair(InsertPair.first, size_t(0)));
      if (Pair.second) {
        size_t Num = Vector.size();
        Pair.first->second = Num;
        Vector.push_back(InsertPair);
        return std::make_pair(Vector.begin() + Num, true);
      }
      return std::make_pair(Vector.begin() + Pair.first->second, false);
    }

    const_iterator find(const KeyT &Key) const {
      typename MapTy::const_iterator It = Map.find(Key);
      if (It == Map.end()) return Vector.end();
      return Vector.begin() + It->second;
    }

    /// blot - This is similar to erase, but instead of removing the element
    /// from the vector, it just zeros out the key in the vector. This leaves
    /// iterators intact, but clients must be prepared for zeroed-out keys when
    /// iterating.
    void blot(const KeyT &Key) {
      typename MapTy::iterator It = Map.find(Key);
      if (It == Map.end()) return;
      Vector[It->second].first = KeyT();
      Map.erase(It);
    }

    void clear() {
      Map.clear();
      Vector.clear();
    }
  };
}
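// Illustrative usage sketch (not from the original source; Ptr1, Ptr2, and
// use() are hypothetical): blot() clears a key in place instead of erasing
// the element, so iterators stay valid and iteration order is preserved:
//
//   MapVector<const Value *, unsigned> Counts;
//   Counts[Ptr1] = 1;
//   Counts[Ptr2] = 2;
//   Counts.blot(Ptr1); // The entry remains, with a null key.
//   for (MapVector<const Value *, unsigned>::iterator
//        I = Counts.begin(), E = Counts.end(); I != E; ++I)
//     if (I->first)     // Clients must skip blotted (null) keys.
//       use(I->second);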
//===----------------------------------------------------------------------===//
// ARC Utilities.
//===----------------------------------------------------------------------===//

namespace {
  /// InstructionClass - A simple classification for instructions.
  enum InstructionClass {
    IC_Retain,                ///< objc_retain
    IC_RetainRV,              ///< objc_retainAutoreleasedReturnValue
    IC_RetainBlock,           ///< objc_retainBlock
    IC_Release,               ///< objc_release
    IC_Autorelease,           ///< objc_autorelease
    IC_AutoreleaseRV,         ///< objc_autoreleaseReturnValue
    IC_AutoreleasepoolPush,   ///< objc_autoreleasePoolPush
    IC_AutoreleasepoolPop,    ///< objc_autoreleasePoolPop
    IC_NoopCast,              ///< objc_retainedObject, etc.
    IC_FusedRetainAutorelease, ///< objc_retainAutorelease
    IC_FusedRetainAutoreleaseRV, ///< objc_retainAutoreleaseReturnValue
    IC_LoadWeakRetained,      ///< objc_loadWeakRetained (primitive)
    IC_StoreWeak,             ///< objc_storeWeak (primitive)
    IC_InitWeak,              ///< objc_initWeak (derived)
    IC_LoadWeak,              ///< objc_loadWeak (derived)
    IC_MoveWeak,              ///< objc_moveWeak (derived)
    IC_CopyWeak,              ///< objc_copyWeak (derived)
    IC_DestroyWeak,           ///< objc_destroyWeak (derived)
    IC_StoreStrong,           ///< objc_storeStrong (derived)
    IC_CallOrUser,            ///< could call objc_release and/or "use" pointers
    IC_Call,                  ///< could call objc_release
    IC_User,                  ///< could "use" a pointer
    IC_None                   ///< anything else
  };
}
/// IsPotentialUse - Test whether the given value is possibly a
/// reference-counted pointer.
static bool IsPotentialUse(const Value *Op) {
  // Pointers to static or stack storage are not reference-counted pointers.
  if (isa<Constant>(Op) || isa<AllocaInst>(Op))
    return false;
  // Special arguments are not reference-counted.
  if (const Argument *Arg = dyn_cast<Argument>(Op))
    if (Arg->hasByValAttr() ||
        Arg->hasNestAttr() ||
        Arg->hasStructRetAttr())
      return false;
  // Only consider values with pointer types.
  // It seems intuitive to exclude function pointer types as well, since
  // functions are never reference-counted; however, clang occasionally
  // bitcasts reference-counted pointers to function-pointer type
  // temporarily.
  PointerType *Ty = dyn_cast<PointerType>(Op->getType());
  if (!Ty)
    return false;
  // Conservatively assume anything else is a potential use.
  return true;
}
/// GetCallSiteClass - Helper for GetInstructionClass. Determines what kind
/// of construct CS is.
static InstructionClass GetCallSiteClass(ImmutableCallSite CS) {
  for (ImmutableCallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end();
       I != E; ++I)
    if (IsPotentialUse(*I))
      return CS.onlyReadsMemory() ? IC_User : IC_CallOrUser;

  return CS.onlyReadsMemory() ? IC_None : IC_Call;
}
/// GetFunctionClass - Determine if F is one of the special known Functions.
/// If it isn't, return IC_CallOrUser.
static InstructionClass GetFunctionClass(const Function *F) {
  Function::const_arg_iterator AI = F->arg_begin(), AE = F->arg_end();

  // No arguments.
  if (AI == AE)
    return StringSwitch<InstructionClass>(F->getName())
      .Case("objc_autoreleasePoolPush", IC_AutoreleasepoolPush)
      .Default(IC_CallOrUser);

  // One argument.
  const Argument *A0 = AI++;
  if (AI == AE)
    // Argument is a pointer.
    if (PointerType *PTy = dyn_cast<PointerType>(A0->getType())) {
      Type *ETy = PTy->getElementType();
      // Argument is i8*.
      if (ETy->isIntegerTy(8))
        return StringSwitch<InstructionClass>(F->getName())
          .Case("objc_retain", IC_Retain)
          .Case("objc_retainAutoreleasedReturnValue", IC_RetainRV)
          .Case("objc_retainBlock", IC_RetainBlock)
          .Case("objc_release", IC_Release)
          .Case("objc_autorelease", IC_Autorelease)
          .Case("objc_autoreleaseReturnValue", IC_AutoreleaseRV)
          .Case("objc_autoreleasePoolPop", IC_AutoreleasepoolPop)
          .Case("objc_retainedObject", IC_NoopCast)
          .Case("objc_unretainedObject", IC_NoopCast)
          .Case("objc_unretainedPointer", IC_NoopCast)
          .Case("objc_retain_autorelease", IC_FusedRetainAutorelease)
          .Case("objc_retainAutorelease", IC_FusedRetainAutorelease)
          .Case("objc_retainAutoreleaseReturnValue", IC_FusedRetainAutoreleaseRV)
          .Default(IC_CallOrUser);

      // Argument is i8**.
      if (PointerType *Pte = dyn_cast<PointerType>(ETy))
        if (Pte->getElementType()->isIntegerTy(8))
          return StringSwitch<InstructionClass>(F->getName())
            .Case("objc_loadWeakRetained", IC_LoadWeakRetained)
            .Case("objc_loadWeak", IC_LoadWeak)
            .Case("objc_destroyWeak", IC_DestroyWeak)
            .Default(IC_CallOrUser);
    }

  // Two arguments, first is i8**.
  const Argument *A1 = AI++;
  if (AI == AE)
    if (PointerType *PTy = dyn_cast<PointerType>(A0->getType()))
      if (PointerType *Pte = dyn_cast<PointerType>(PTy->getElementType()))
        if (Pte->getElementType()->isIntegerTy(8))
          if (PointerType *PTy1 = dyn_cast<PointerType>(A1->getType())) {
            Type *ETy1 = PTy1->getElementType();
            // Second argument is i8*.
            if (ETy1->isIntegerTy(8))
              return StringSwitch<InstructionClass>(F->getName())
                .Case("objc_storeWeak", IC_StoreWeak)
                .Case("objc_initWeak", IC_InitWeak)
                .Case("objc_storeStrong", IC_StoreStrong)
                .Default(IC_CallOrUser);
            // Second argument is i8**.
            if (PointerType *Pte1 = dyn_cast<PointerType>(ETy1))
              if (Pte1->getElementType()->isIntegerTy(8))
                return StringSwitch<InstructionClass>(F->getName())
                  .Case("objc_moveWeak", IC_MoveWeak)
                  .Case("objc_copyWeak", IC_CopyWeak)
                  .Default(IC_CallOrUser);
          }

  // Anything else.
  return IC_CallOrUser;
}
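// Illustrative example (not from the original source): a declaration such as
//
//   declare i8* @objc_retain(i8*)
//
// is matched by the one-argument i8* switch above and classified as
// IC_Retain, while
//
//   declare void @objc_copyWeak(i8**, i8**)
//
// matches the two-argument i8** form and is classified as IC_CopyWeak. Any
// signature that doesn't match falls through to IC_CallOrUser.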
/// GetInstructionClass - Determine what kind of construct V is.
static InstructionClass GetInstructionClass(const Value *V) {
  if (const Instruction *I = dyn_cast<Instruction>(V)) {
    // Any instruction other than bitcast and gep with a pointer operand has a
    // use of an objc pointer. Bitcasts, GEPs, Selects, PHIs transfer a pointer
    // to a subsequent use, rather than using it themselves, in this sense.
    // As a short cut, several other opcodes are known to have no pointer
    // operands of interest. And ret is never followed by a release, so it's
    // not interesting to examine.
    switch (I->getOpcode()) {
    case Instruction::Call: {
      const CallInst *CI = cast<CallInst>(I);
      // Check for calls to special functions.
      if (const Function *F = CI->getCalledFunction()) {
        InstructionClass Class = GetFunctionClass(F);
        if (Class != IC_CallOrUser)
          return Class;

        // None of the intrinsic functions do objc_release. For intrinsics, the
        // only question is whether or not they may be users.
        switch (F->getIntrinsicID()) {
        case 0: break;
        case Intrinsic::bswap: case Intrinsic::ctpop:
        case Intrinsic::ctlz: case Intrinsic::cttz:
        case Intrinsic::returnaddress: case Intrinsic::frameaddress:
        case Intrinsic::stacksave: case Intrinsic::stackrestore:
        case Intrinsic::vastart: case Intrinsic::vacopy: case Intrinsic::vaend:
        // Don't let dbg info affect our results.
        case Intrinsic::dbg_declare: case Intrinsic::dbg_value:
          // Short cut: Some intrinsics obviously don't use ObjC pointers.
          return IC_None;
        default:
          for (Function::const_arg_iterator AI = F->arg_begin(),
               AE = F->arg_end(); AI != AE; ++AI)
            if (IsPotentialUse(AI))
              return IC_User;
          return IC_None;
        }
      }
      return GetCallSiteClass(CI);
    }
    case Instruction::Invoke:
      return GetCallSiteClass(cast<InvokeInst>(I));
    case Instruction::BitCast:
    case Instruction::GetElementPtr:
    case Instruction::Select: case Instruction::PHI:
    case Instruction::Ret: case Instruction::Br:
    case Instruction::Switch: case Instruction::IndirectBr:
    case Instruction::Alloca: case Instruction::VAArg:
    case Instruction::Add: case Instruction::FAdd:
    case Instruction::Sub: case Instruction::FSub:
    case Instruction::Mul: case Instruction::FMul:
    case Instruction::SDiv: case Instruction::UDiv: case Instruction::FDiv:
    case Instruction::SRem: case Instruction::URem: case Instruction::FRem:
    case Instruction::Shl: case Instruction::LShr: case Instruction::AShr:
    case Instruction::And: case Instruction::Or: case Instruction::Xor:
    case Instruction::SExt: case Instruction::ZExt: case Instruction::Trunc:
    case Instruction::IntToPtr: case Instruction::FCmp:
    case Instruction::FPTrunc: case Instruction::FPExt:
    case Instruction::FPToUI: case Instruction::FPToSI:
    case Instruction::UIToFP: case Instruction::SIToFP:
    case Instruction::InsertElement: case Instruction::ExtractElement:
    case Instruction::ShuffleVector:
    case Instruction::ExtractValue:
      break;
    case Instruction::ICmp:
      // Comparing a pointer with null, or any other constant, isn't an
      // interesting use, because we don't care what the pointer points to, or
      // about the values of any other dynamic reference-counted pointers.
      if (IsPotentialUse(I->getOperand(1)))
        return IC_User;
      break;
    default:
      // For anything else, check all the operands.
      // Note that this includes both operands of a Store: while the first
      // operand isn't actually being dereferenced, it is being stored to
      // memory where we can no longer track who might read it and dereference
      // it, so we have to consider it potentially used.
      for (User::const_op_iterator OI = I->op_begin(), OE = I->op_end();
           OI != OE; ++OI)
        if (IsPotentialUse(*OI))
          return IC_User;
    }
  }

  // Otherwise, it's totally inert for ARC purposes.
  return IC_None;
}
/// GetBasicInstructionClass - Determine what kind of construct V is. This is
/// similar to GetInstructionClass except that it only detects objc runtime
/// calls. This allows it to be faster.
static InstructionClass GetBasicInstructionClass(const Value *V) {
  if (const CallInst *CI = dyn_cast<CallInst>(V)) {
    if (const Function *F = CI->getCalledFunction())
      return GetFunctionClass(F);
    // Otherwise, be conservative.
    return IC_CallOrUser;
  }

  // Otherwise, be conservative.
  return isa<InvokeInst>(V) ? IC_CallOrUser : IC_User;
}
/// IsRetain - Test if the given class is objc_retain or
/// objc_retainAutoreleasedReturnValue.
static bool IsRetain(InstructionClass Class) {
  return Class == IC_Retain ||
         Class == IC_RetainRV;
}

/// IsAutorelease - Test if the given class is objc_autorelease or
/// objc_autoreleaseReturnValue.
static bool IsAutorelease(InstructionClass Class) {
  return Class == IC_Autorelease ||
         Class == IC_AutoreleaseRV;
}
/// IsForwarding - Test if the given class represents instructions which return
/// their argument verbatim.
static bool IsForwarding(InstructionClass Class) {
  // objc_retainBlock technically doesn't always return its argument
  // verbatim, but it doesn't matter for our purposes here.
  return Class == IC_Retain ||
         Class == IC_RetainRV ||
         Class == IC_Autorelease ||
         Class == IC_AutoreleaseRV ||
         Class == IC_RetainBlock ||
         Class == IC_NoopCast;
}
/// IsNoopOnNull - Test if the given class represents instructions which do
/// nothing if passed a null pointer.
static bool IsNoopOnNull(InstructionClass Class) {
  return Class == IC_Retain ||
         Class == IC_RetainRV ||
         Class == IC_Release ||
         Class == IC_Autorelease ||
         Class == IC_AutoreleaseRV ||
         Class == IC_RetainBlock;
}
/// IsAlwaysTail - Test if the given class represents instructions which are
/// always safe to mark with the "tail" keyword.
static bool IsAlwaysTail(InstructionClass Class) {
  // IC_RetainBlock may be given a stack argument.
  return Class == IC_Retain ||
         Class == IC_RetainRV ||
         Class == IC_Autorelease ||
         Class == IC_AutoreleaseRV;
}
/// IsNoThrow - Test if the given class represents instructions which are always
/// safe to mark with the nounwind attribute.
static bool IsNoThrow(InstructionClass Class) {
  // objc_retainBlock is not nounwind because it calls user copy constructors
  // which could theoretically throw.
  return Class == IC_Retain ||
         Class == IC_RetainRV ||
         Class == IC_Release ||
         Class == IC_Autorelease ||
         Class == IC_AutoreleaseRV ||
         Class == IC_AutoreleasepoolPush ||
         Class == IC_AutoreleasepoolPop;
}
/// EraseInstruction - Erase the given instruction. ObjC calls return their
/// argument verbatim, so if it's such a call and the return value has users,
/// replace them with the argument value.
static void EraseInstruction(Instruction *CI) {
  Value *OldArg = cast<CallInst>(CI)->getArgOperand(0);

  bool Unused = CI->use_empty();

  if (!Unused) {
    // Replace the return value with the argument.
    assert(IsForwarding(GetBasicInstructionClass(CI)) &&
           "Can't delete non-forwarding instruction with users!");
    CI->replaceAllUsesWith(OldArg);
  }

  CI->eraseFromParent();

  if (Unused)
    RecursivelyDeleteTriviallyDeadInstructions(OldArg);
}
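// Illustrative example (not from the original source; @use is a hypothetical
// function): given
//
//   %1 = call i8* @objc_retain(i8* %0)
//   call void @use(i8* %1)
//
// EraseInstruction on the retain first rewrites the use of %1 to %0, since
// objc_retain returns its argument verbatim, and then deletes the call.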
/// GetUnderlyingObjCPtr - This is a wrapper around getUnderlyingObject which
/// also knows how to look through objc_retain and objc_autorelease calls, which
/// we know to return their argument verbatim.
static const Value *GetUnderlyingObjCPtr(const Value *V) {
  for (;;) {
    V = GetUnderlyingObject(V);
    if (!IsForwarding(GetBasicInstructionClass(V)))
      break;
    V = cast<CallInst>(V)->getArgOperand(0);
  }

  return V;
}
/// StripPointerCastsAndObjCCalls - This is a wrapper around
/// Value::stripPointerCasts which also knows how to look through objc_retain
/// and objc_autorelease calls, which we know to return their argument verbatim.
static const Value *StripPointerCastsAndObjCCalls(const Value *V) {
  for (;;) {
    V = V->stripPointerCasts();
    if (!IsForwarding(GetBasicInstructionClass(V)))
      break;
    V = cast<CallInst>(V)->getArgOperand(0);
  }
  return V;
}

/// StripPointerCastsAndObjCCalls - This is a wrapper around
/// Value::stripPointerCasts which also knows how to look through objc_retain
/// and objc_autorelease calls, which we know to return their argument verbatim.
static Value *StripPointerCastsAndObjCCalls(Value *V) {
  for (;;) {
    V = V->stripPointerCasts();
    if (!IsForwarding(GetBasicInstructionClass(V)))
      break;
    V = cast<CallInst>(V)->getArgOperand(0);
  }
  return V;
}
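// Illustrative example (not from the original source): in
//
//   %1 = bitcast i32* %0 to i8*
//   %2 = call i8* @objc_retain(i8* %1)
//
// StripPointerCastsAndObjCCalls looks through both the forwarding
// objc_retain call and the bitcast, so applying it to %2 yields %0.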
/// GetObjCArg - Assuming the given instruction is one of the special calls such
/// as objc_retain or objc_release, return the argument value, stripped of no-op
/// casts and forwarding calls.
static Value *GetObjCArg(Value *Inst) {
  return StripPointerCastsAndObjCCalls(cast<CallInst>(Inst)->getArgOperand(0));
}
/// IsObjCIdentifiedObject - This is similar to AliasAnalysis'
/// isObjCIdentifiedObject, except that it uses special knowledge of
/// ObjC conventions...
static bool IsObjCIdentifiedObject(const Value *V) {
  // Assume that call results and arguments have their own "provenance".
  // Constants (including GlobalVariables) and Allocas are never
  // reference-counted.
  if (isa<CallInst>(V) || isa<InvokeInst>(V) ||
      isa<Argument>(V) || isa<Constant>(V) ||
      isa<AllocaInst>(V))
    return true;

  if (const LoadInst *LI = dyn_cast<LoadInst>(V)) {
    const Value *Pointer =
      StripPointerCastsAndObjCCalls(LI->getPointerOperand());
    if (const GlobalVariable *GV = dyn_cast<GlobalVariable>(Pointer)) {
      // A constant pointer can't be pointing to an object on the heap. It may
      // be reference-counted, but it won't be deleted.
      if (GV->isConstant())
        return true;
      StringRef Name = GV->getName();
      // These special variables are known to hold values which are not
      // reference-counted pointers.
      if (Name.startswith("\01L_OBJC_SELECTOR_REFERENCES_") ||
          Name.startswith("\01L_OBJC_CLASSLIST_REFERENCES_") ||
          Name.startswith("\01L_OBJC_CLASSLIST_SUP_REFS_$_") ||
          Name.startswith("\01L_OBJC_METH_VAR_NAME_") ||
          Name.startswith("\01l_objc_msgSend_fixup_"))
        return true;
    }
  }

  return false;
}
/// FindSingleUseIdentifiedObject - This is similar to
/// StripPointerCastsAndObjCCalls but it stops as soon as it finds a value
/// with multiple uses.
static const Value *FindSingleUseIdentifiedObject(const Value *Arg) {
  if (Arg->hasOneUse()) {
    if (const BitCastInst *BC = dyn_cast<BitCastInst>(Arg))
      return FindSingleUseIdentifiedObject(BC->getOperand(0));
    if (const GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(Arg))
      if (GEP->hasAllZeroIndices())
        return FindSingleUseIdentifiedObject(GEP->getPointerOperand());
    if (IsForwarding(GetBasicInstructionClass(Arg)))
      return FindSingleUseIdentifiedObject(
               cast<CallInst>(Arg)->getArgOperand(0));
    if (!IsObjCIdentifiedObject(Arg))
      return 0;
    return Arg;
  }

  // If we found an identifiable object but it has multiple uses, and they
  // are all trivial uses, we can still consider this to be a single-use
  // value.
  if (IsObjCIdentifiedObject(Arg)) {
    for (Value::const_use_iterator UI = Arg->use_begin(), UE = Arg->use_end();
         UI != UE; ++UI) {
      const User *U = *UI;
      if (!U->use_empty() || StripPointerCastsAndObjCCalls(U) != Arg)
        return 0;
    }

    return Arg;
  }

  return 0;
}
/// ModuleHasARC - Test if the given module looks interesting to run ARC
/// optimization on.
static bool ModuleHasARC(const Module &M) {
  return
    M.getNamedValue("objc_retain") ||
    M.getNamedValue("objc_release") ||
    M.getNamedValue("objc_autorelease") ||
    M.getNamedValue("objc_retainAutoreleasedReturnValue") ||
    M.getNamedValue("objc_retainBlock") ||
    M.getNamedValue("objc_autoreleaseReturnValue") ||
    M.getNamedValue("objc_autoreleasePoolPush") ||
    M.getNamedValue("objc_loadWeakRetained") ||
    M.getNamedValue("objc_loadWeak") ||
    M.getNamedValue("objc_destroyWeak") ||
    M.getNamedValue("objc_storeWeak") ||
    M.getNamedValue("objc_initWeak") ||
    M.getNamedValue("objc_moveWeak") ||
    M.getNamedValue("objc_copyWeak") ||
    M.getNamedValue("objc_retainedObject") ||
    M.getNamedValue("objc_unretainedObject") ||
    M.getNamedValue("objc_unretainedPointer");
}
/// DoesObjCBlockEscape - Test whether the given pointer, which is an
/// Objective C block pointer, "escapes". This differs from regular
/// escape analysis in that a use as an argument to a call is not considered
/// an escape.
static bool DoesObjCBlockEscape(const Value *BlockPtr) {
  // Walk the def-use chains.
  SmallVector<const Value *, 4> Worklist;
  Worklist.push_back(BlockPtr);
  do {
    const Value *V = Worklist.pop_back_val();
    for (Value::const_use_iterator UI = V->use_begin(), UE = V->use_end();
         UI != UE; ++UI) {
      const User *UUser = *UI;
      // Special - Use by a call (callee or argument) is not considered
      // an escape.
      switch (GetBasicInstructionClass(UUser)) {
      case IC_StoreWeak:
      case IC_InitWeak:
      case IC_StoreStrong:
      case IC_Autorelease:
      case IC_AutoreleaseRV:
        // These special functions make copies of their pointer arguments.
        return true;
      case IC_User:
      case IC_None:
        // Use by an instruction which copies the value is an escape if the
        // result is an escape.
        if (isa<BitCastInst>(UUser) || isa<GetElementPtrInst>(UUser) ||
            isa<PHINode>(UUser) || isa<SelectInst>(UUser)) {
          Worklist.push_back(UUser);
          continue;
        }
        // Use by a load is not an escape.
        if (isa<LoadInst>(UUser))
          continue;
        // Use by a store is not an escape if the use is the address.
        if (const StoreInst *SI = dyn_cast<StoreInst>(UUser))
          if (V != SI->getValueOperand())
            continue;
        break;
      default:
        // Regular calls and other stuff are not considered escapes.
        continue;
      }
      // Otherwise, conservatively assume an escape.
      return true;
    }
  } while (!Worklist.empty());

  // No escapes found.
  return false;
}
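// Illustrative example (not from the original source; @use_block is a
// hypothetical function): a block pointer that is only retained, passed to
// calls, and released, e.g.
//
//   %1 = call i8* @objc_retainBlock(i8* %block)
//   call void @use_block(i8* %1)
//   call void @objc_release(i8* %1)
//
// does not escape by this definition, whereas storing %1 as the value
// operand of a store would conservatively count as an escape.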
//===----------------------------------------------------------------------===//
// ARC AliasAnalysis.
//===----------------------------------------------------------------------===//

#include "llvm/Pass.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/Passes.h"
namespace {
  /// ObjCARCAliasAnalysis - This is a simple alias analysis
  /// implementation that uses knowledge of ARC constructs to answer queries.
  ///
  /// TODO: This class could be generalized to know about other ObjC-specific
  /// tricks, such as knowing that ivars in the non-fragile ABI are non-aliasing
  /// even though their offsets are dynamic.
  class ObjCARCAliasAnalysis : public ImmutablePass,
                               public AliasAnalysis {
  public:
    static char ID; // Class identification, replacement for typeinfo
    ObjCARCAliasAnalysis() : ImmutablePass(ID) {
      initializeObjCARCAliasAnalysisPass(*PassRegistry::getPassRegistry());
    }

  private:
    virtual void initializePass() {
      InitializeAliasAnalysis(this);
    }

    /// getAdjustedAnalysisPointer - This method is used when a pass implements
    /// an analysis interface through multiple inheritance. If needed, it
    /// should override this to adjust the this pointer as needed for the
    /// specified pass info.
    virtual void *getAdjustedAnalysisPointer(const void *PI) {
      if (PI == &AliasAnalysis::ID)
        return (AliasAnalysis*)this;
      return this;
    }

    virtual void getAnalysisUsage(AnalysisUsage &AU) const;
    virtual AliasResult alias(const Location &LocA, const Location &LocB);
    virtual bool pointsToConstantMemory(const Location &Loc, bool OrLocal);
    virtual ModRefBehavior getModRefBehavior(ImmutableCallSite CS);
    virtual ModRefBehavior getModRefBehavior(const Function *F);
    virtual ModRefResult getModRefInfo(ImmutableCallSite CS,
                                       const Location &Loc);
    virtual ModRefResult getModRefInfo(ImmutableCallSite CS1,
                                       ImmutableCallSite CS2);
  };
} // End of anonymous namespace
// Register this pass...
char ObjCARCAliasAnalysis::ID = 0;
INITIALIZE_AG_PASS(ObjCARCAliasAnalysis, AliasAnalysis, "objc-arc-aa",
                   "ObjC-ARC-Based Alias Analysis", false, true, false)

ImmutablePass *llvm::createObjCARCAliasAnalysisPass() {
  return new ObjCARCAliasAnalysis();
}
void
ObjCARCAliasAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AliasAnalysis::getAnalysisUsage(AU);
}
AliasAnalysis::AliasResult
ObjCARCAliasAnalysis::alias(const Location &LocA, const Location &LocB) {
  if (!EnableARCOpts)
    return AliasAnalysis::alias(LocA, LocB);

  // First, strip off no-ops, including ObjC-specific no-ops, and try making a
  // precise alias query.
  const Value *SA = StripPointerCastsAndObjCCalls(LocA.Ptr);
  const Value *SB = StripPointerCastsAndObjCCalls(LocB.Ptr);
  AliasResult Result =
    AliasAnalysis::alias(Location(SA, LocA.Size, LocA.TBAATag),
                         Location(SB, LocB.Size, LocB.TBAATag));
  if (Result != MayAlias)
    return Result;

  // If that failed, climb to the underlying object, including climbing through
  // ObjC-specific no-ops, and try making an imprecise alias query.
  const Value *UA = GetUnderlyingObjCPtr(SA);
  const Value *UB = GetUnderlyingObjCPtr(SB);
  if (UA != SA || UB != SB) {
    Result = AliasAnalysis::alias(Location(UA), Location(UB));
    // We can't use MustAlias or PartialAlias results here because
    // GetUnderlyingObjCPtr may return an offsetted pointer value.
    if (Result == NoAlias)
      return NoAlias;
  }

  // If that failed, fail. We don't need to chain here, since that's covered
  // by the earlier precise query.
  return MayAlias;
}
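// Illustrative example (not from the original source): for a forwarding call
//
//   %y = call i8* @objc_retain(i8* %x)
//
// an alias query involving %y is first retried on %x after stripping the
// ObjC no-op, which the chained analyses can often answer precisely.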
bool
ObjCARCAliasAnalysis::pointsToConstantMemory(const Location &Loc,
                                             bool OrLocal) {
  if (!EnableARCOpts)
    return AliasAnalysis::pointsToConstantMemory(Loc, OrLocal);

  // First, strip off no-ops, including ObjC-specific no-ops, and try making
  // a precise alias query.
  const Value *S = StripPointerCastsAndObjCCalls(Loc.Ptr);
  if (AliasAnalysis::pointsToConstantMemory(Location(S, Loc.Size, Loc.TBAATag),
                                            OrLocal))
    return true;

  // If that failed, climb to the underlying object, including climbing through
  // ObjC-specific no-ops, and try making an imprecise alias query.
  const Value *U = GetUnderlyingObjCPtr(S);
  if (U != S)
    return AliasAnalysis::pointsToConstantMemory(Location(U), OrLocal);

  // If that failed, fail. We don't need to chain here, since that's covered
  // by the earlier precise query.
  return false;
}
AliasAnalysis::ModRefBehavior
ObjCARCAliasAnalysis::getModRefBehavior(ImmutableCallSite CS) {
  // We have nothing to do. Just chain to the next AliasAnalysis.
  return AliasAnalysis::getModRefBehavior(CS);
}
AliasAnalysis::ModRefBehavior
ObjCARCAliasAnalysis::getModRefBehavior(const Function *F) {
  if (!EnableARCOpts)
    return AliasAnalysis::getModRefBehavior(F);

  switch (GetFunctionClass(F)) {
  case IC_NoopCast:
    return DoesNotAccessMemory;
  default:
    break;
  }

  return AliasAnalysis::getModRefBehavior(F);
}
AliasAnalysis::ModRefResult
ObjCARCAliasAnalysis::getModRefInfo(ImmutableCallSite CS, const Location &Loc) {
  if (!EnableARCOpts)
    return AliasAnalysis::getModRefInfo(CS, Loc);

  switch (GetBasicInstructionClass(CS.getInstruction())) {
  case IC_Retain:
  case IC_RetainRV:
  case IC_Autorelease:
  case IC_AutoreleaseRV:
  case IC_NoopCast:
  case IC_AutoreleasepoolPush:
  case IC_FusedRetainAutorelease:
  case IC_FusedRetainAutoreleaseRV:
    // These functions don't access any memory visible to the compiler.
    // Note that this doesn't include objc_retainBlock, because it updates
    // pointers when it copies block data.
    return NoModRef;
  default:
    break;
  }

  return AliasAnalysis::getModRefInfo(CS, Loc);
}
AliasAnalysis::ModRefResult
ObjCARCAliasAnalysis::getModRefInfo(ImmutableCallSite CS1,
                                    ImmutableCallSite CS2) {
  // TODO: Theoretically we could check for dependencies between objc_* calls
  // and OnlyAccessesArgumentPointees calls or other well-behaved calls.
  return AliasAnalysis::getModRefInfo(CS1, CS2);
}
//===----------------------------------------------------------------------===//
// ARC expansion.
//===----------------------------------------------------------------------===//

#include "llvm/Support/InstIterator.h"
#include "llvm/Transforms/Scalar.h"
namespace {
  /// ObjCARCExpand - Early ARC transformations.
  class ObjCARCExpand : public FunctionPass {
    virtual void getAnalysisUsage(AnalysisUsage &AU) const;
    virtual bool doInitialization(Module &M);
    virtual bool runOnFunction(Function &F);

    /// Run - A flag indicating whether this optimization pass should run.
    bool Run;

  public:
    static char ID;
    ObjCARCExpand() : FunctionPass(ID) {
      initializeObjCARCExpandPass(*PassRegistry::getPassRegistry());
    }
  };
}

char ObjCARCExpand::ID = 0;
INITIALIZE_PASS(ObjCARCExpand,
                "objc-arc-expand", "ObjC ARC expansion", false, false)

Pass *llvm::createObjCARCExpandPass() {
  return new ObjCARCExpand();
}
void ObjCARCExpand::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesCFG();
}
bool ObjCARCExpand::doInitialization(Module &M) {
  Run = ModuleHasARC(M);
  return false;
}
bool ObjCARCExpand::runOnFunction(Function &F) {
  if (!EnableARCOpts)
    return false;

  // If nothing in the Module uses ARC, don't do anything.
  if (!Run)
    return false;

  bool Changed = false;

  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ++I) {
    Instruction *Inst = &*I;

    switch (GetBasicInstructionClass(Inst)) {
    case IC_Retain:
    case IC_RetainRV:
    case IC_Autorelease:
    case IC_AutoreleaseRV:
    case IC_FusedRetainAutorelease:
    case IC_FusedRetainAutoreleaseRV:
      // These calls return their argument verbatim, as a low-level
      // optimization. However, this makes high-level optimizations
      // harder. Undo any uses of this optimization that the front-end
      // emitted here. We'll redo them in the contract pass.
      Changed = true;
      Inst->replaceAllUsesWith(cast<CallInst>(Inst)->getArgOperand(0));
      break;
    default:
      break;
    }
  }

  return Changed;
}
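// Illustrative example (not from the original source; @callee is a
// hypothetical function): the front-end may emit
//
//   %1 = call i8* @objc_retain(i8* %0)
//   call void @callee(i8* %1)
//
// and this pass rewrites the use of %1 to
//
//   call void @callee(i8* %0)
//
// so that subsequent high-level optimizations see through the forwarding
// call; the contract pass later redoes the low-level forwarding.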
//===----------------------------------------------------------------------===//
// ARC autorelease pool elimination.
//===----------------------------------------------------------------------===//

#include "llvm/Constants.h"
namespace {
  /// ObjCARCAPElim - Autorelease pool elimination.
  class ObjCARCAPElim : public ModulePass {
    virtual void getAnalysisUsage(AnalysisUsage &AU) const;
    virtual bool runOnModule(Module &M);

    bool MayAutorelease(CallSite CS, unsigned Depth = 0);
    bool OptimizeBB(BasicBlock *BB);

  public:
    static char ID;
    ObjCARCAPElim() : ModulePass(ID) {
      initializeObjCARCAPElimPass(*PassRegistry::getPassRegistry());
    }
  };
}

char ObjCARCAPElim::ID = 0;
INITIALIZE_PASS(ObjCARCAPElim,
                "objc-arc-apelim",
                "ObjC ARC autorelease pool elimination",
                false, false)

Pass *llvm::createObjCARCAPElimPass() {
  return new ObjCARCAPElim();
}
void ObjCARCAPElim::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesCFG();
}
/// MayAutorelease - Interprocedurally determine if calls made by the
/// given call site can possibly produce autoreleases.
bool ObjCARCAPElim::MayAutorelease(CallSite CS, unsigned Depth) {
  if (Function *Callee = CS.getCalledFunction()) {
    if (Callee->isDeclaration() || Callee->mayBeOverridden())
      return true;
    for (Function::iterator I = Callee->begin(), E = Callee->end();
         I != E; ++I) {
      BasicBlock *BB = I;
      for (BasicBlock::iterator J = BB->begin(), F = BB->end(); J != F; ++J)
        if (CallSite JCS = CallSite(J))
          // This recursion depth limit is arbitrary. It's just large
          // enough to cover known interesting testcases.
          if (Depth < 3 &&
              !JCS.onlyReadsMemory() &&
              MayAutorelease(JCS, Depth + 1))
            return true;
    }
    return false;
  }

  return true;
}
bool ObjCARCAPElim::OptimizeBB(BasicBlock *BB) {
  bool Changed = false;

  Instruction *Push = 0;
  for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ) {
    Instruction *Inst = I++;
    switch (GetBasicInstructionClass(Inst)) {
    case IC_AutoreleasepoolPush:
      Push = Inst;
      break;
    case IC_AutoreleasepoolPop:
      // If this pop matches a push and nothing in between can autorelease,
      // zap the pair.
      if (Push && cast<CallInst>(Inst)->getArgOperand(0) == Push) {
        Changed = true;
        Inst->eraseFromParent();
        Push->eraseFromParent();
      }
      Push = 0;
      break;
    case IC_CallOrUser:
      if (MayAutorelease(CallSite(Inst)))
        Push = 0;
      break;
    default:
      break;
    }
  }

  return Changed;
}
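// Illustrative example (not from the original source;
// @known_not_to_autorelease stands for a visible function definition that
// provably makes no autoreleasing calls): in a block containing
//
//   %pool = call i8* @objc_autoreleasePoolPush()
//   call void @known_not_to_autorelease()
//   call void @objc_autoreleasePoolPop(i8* %pool)
//
// the push/pop pair is deleted, since nothing between the two calls can
// autorelease.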
bool ObjCARCAPElim::runOnModule(Module &M) {
  if (!EnableARCOpts)
    return false;

  // If nothing in the Module uses ARC, don't do anything.
  if (!ModuleHasARC(M))
    return false;

  // Find the llvm.global_ctors variable, as the first step in
  // identifying the global constructors. In theory, unnecessary autorelease
  // pools could occur anywhere, but in practice it's pretty rare. Global
  // ctors are a place where autorelease pools get inserted automatically,
  // so it's pretty common for them to be unnecessary, and it's pretty
  // profitable to eliminate them.
  GlobalVariable *GV = M.getGlobalVariable("llvm.global_ctors");
  if (!GV)
    return false;

  assert(GV->hasDefinitiveInitializer() &&
         "llvm.global_ctors is uncooperative!");

  bool Changed = false;

  // Dig the constructor functions out of GV's initializer.
  ConstantArray *Init = cast<ConstantArray>(GV->getInitializer());
  for (User::op_iterator OI = Init->op_begin(), OE = Init->op_end();
       OI != OE; ++OI) {
    Value *Op = *OI;
    // llvm.global_ctors is an array of pairs where the second members
    // are constructor functions.
    Function *F = dyn_cast<Function>(cast<ConstantStruct>(Op)->getOperand(1));
    // If the user used a constructor function with the wrong signature and
    // it got bitcasted or whatever, look the other way.
    if (!F)
      continue;
    // Only look at function definitions.
    if (F->isDeclaration())
      continue;
    // Only look at functions with one basic block.
    if (llvm::next(F->begin()) != F->end())
      continue;
    // Ok, a single-block constructor function definition. Try to optimize it.
    Changed |= OptimizeBB(F->begin());
  }

  return Changed;
}
//===----------------------------------------------------------------------===//
// ARC optimization.
//===----------------------------------------------------------------------===//

// TODO: On code like this:
//
// objc_retain(%x)
// stuff_that_cannot_release()
// objc_autorelease(%x)
// stuff_that_cannot_release()
// objc_retain(%x)
// stuff_that_cannot_release()
// objc_autorelease(%x)
//
// The second retain and autorelease can be deleted.

// TODO: It should be possible to delete
// objc_autoreleasePoolPush and objc_autoreleasePoolPop
// pairs if nothing is actually autoreleased between them. Also, autorelease
// calls followed by objc_autoreleasePoolPop calls (perhaps in ObjC++ code
// after inlining) can be turned into plain release calls.

// TODO: Critical-edge splitting. If the optimal insertion point is
// a critical edge, the current algorithm has to fail, because it doesn't
// know how to split edges. It should be possible to make the optimizer
// think in terms of edges, rather than blocks, and then split critical
// edges on demand.

// TODO: OptimizeSequences could be generalized to be interprocedural.

// TODO: Recognize that a bunch of other objc runtime calls have
// non-escaping arguments and non-releasing arguments, and may be
// non-autoreleasing.

// TODO: Sink autorelease calls as far as possible. Unfortunately we
// usually can't sink them past other calls, which would be the main
// case where it would be useful.

// TODO: The pointer returned from objc_loadWeakRetained is retained.

// TODO: Delete release+retain pairs (rare).
1096 #include "llvm/GlobalAlias.h"
1097 #include "llvm/Constants.h"
1098 #include "llvm/LLVMContext.h"
1099 #include "llvm/Support/ErrorHandling.h"
1100 #include "llvm/Support/CFG.h"
1101 #include "llvm/ADT/Statistic.h"
1102 #include "llvm/ADT/SmallPtrSet.h"
1103 #include "llvm/ADT/DenseSet.h"
STATISTIC(NumNoops,       "Number of no-op objc calls eliminated");
STATISTIC(NumPartialNoops, "Number of partially no-op objc calls eliminated");
STATISTIC(NumAutoreleases, "Number of autoreleases converted to releases");
STATISTIC(NumRets,        "Number of return value forwarding "
                          "retain+autoreleases eliminated");
STATISTIC(NumRRs,         "Number of retain+release paths eliminated");
STATISTIC(NumPeeps,       "Number of calls peephole-optimized");
namespace {
  /// ProvenanceAnalysis - This is similar to BasicAliasAnalysis, and it
  /// uses many of the same techniques, except it uses special ObjC-specific
  /// reasoning about pointer relationships.
  class ProvenanceAnalysis {
    AliasAnalysis *AA;

    typedef std::pair<const Value *, const Value *> ValuePairTy;
    typedef DenseMap<ValuePairTy, bool> CachedResultsTy;
    CachedResultsTy CachedResults;

    bool relatedCheck(const Value *A, const Value *B);
    bool relatedSelect(const SelectInst *A, const Value *B);
    bool relatedPHI(const PHINode *A, const Value *B);

    // Do not implement.
    void operator=(const ProvenanceAnalysis &);
    ProvenanceAnalysis(const ProvenanceAnalysis &);

  public:
    ProvenanceAnalysis() {}

    void setAA(AliasAnalysis *aa) { AA = aa; }

    AliasAnalysis *getAA() const { return AA; }

    bool related(const Value *A, const Value *B);

    void clear() {
      CachedResults.clear();
    }
  };
}
bool ProvenanceAnalysis::relatedSelect(const SelectInst *A, const Value *B) {
  // If the values are Selects with the same condition, we can do a more precise
  // check: just check for relations between the values on corresponding arms.
  if (const SelectInst *SB = dyn_cast<SelectInst>(B))
    if (A->getCondition() == SB->getCondition()) {
      if (related(A->getTrueValue(), SB->getTrueValue()))
        return true;
      if (related(A->getFalseValue(), SB->getFalseValue()))
        return true;
      return false;
    }

  // Check both arms of the Select node individually.
  if (related(A->getTrueValue(), B))
    return true;
  if (related(A->getFalseValue(), B))
    return true;

  // The arms both checked out.
  return false;
}
bool ProvenanceAnalysis::relatedPHI(const PHINode *A, const Value *B) {
  // If the values are PHIs in the same block, we can do a more precise as well
  // as efficient check: just check for relations between the values on
  // corresponding edges.
  if (const PHINode *PNB = dyn_cast<PHINode>(B))
    if (PNB->getParent() == A->getParent()) {
      for (unsigned i = 0, e = A->getNumIncomingValues(); i != e; ++i)
        if (related(A->getIncomingValue(i),
                    PNB->getIncomingValueForBlock(A->getIncomingBlock(i))))
          return true;
      return false;
    }

  // Check each unique source of the PHI node against B.
  SmallPtrSet<const Value *, 4> UniqueSrc;
  for (unsigned i = 0, e = A->getNumIncomingValues(); i != e; ++i) {
    const Value *PV1 = A->getIncomingValue(i);
    if (UniqueSrc.insert(PV1) && related(PV1, B))
      return true;
  }

  // All of the arms checked out.
  return false;
}
/// isStoredObjCPointer - Test if the value of P, or any value covered by its
/// provenance, is ever stored within the function (not counting callees).
static bool isStoredObjCPointer(const Value *P) {
  SmallPtrSet<const Value *, 8> Visited;
  SmallVector<const Value *, 8> Worklist;
  Worklist.push_back(P);
  Visited.insert(P);
  do {
    P = Worklist.pop_back_val();
    for (Value::const_use_iterator UI = P->use_begin(), UE = P->use_end();
         UI != UE; ++UI) {
      const User *Ur = *UI;
      if (isa<StoreInst>(Ur)) {
        if (UI.getOperandNo() == 0)
          // The pointer is stored.
          return true;
        // Something is stored through the pointer.
        continue;
      }
      if (isa<CallInst>(Ur))
        // The pointer is passed as an argument, ignore this.
        continue;
      if (isa<PtrToIntInst>(P))
        // Assume the worst.
        return true;
      if (Visited.insert(Ur))
        Worklist.push_back(Ur);
    }
  } while (!Worklist.empty());

  // Everything checked out.
  return false;
}
bool ProvenanceAnalysis::relatedCheck(const Value *A, const Value *B) {
  // Skip past provenance pass-throughs.
  A = GetUnderlyingObjCPtr(A);
  B = GetUnderlyingObjCPtr(B);

  // Quick check.
  if (A == B)
    return true;

  // Ask regular AliasAnalysis, for a first approximation.
  switch (AA->alias(A, B)) {
  case AliasAnalysis::NoAlias:
    return false;
  case AliasAnalysis::MustAlias:
  case AliasAnalysis::PartialAlias:
    return true;
  case AliasAnalysis::MayAlias:
    break;
  }

  bool AIsIdentified = IsObjCIdentifiedObject(A);
  bool BIsIdentified = IsObjCIdentifiedObject(B);

  // An ObjC-Identified object can't alias a load if it is never locally stored.
  if (AIsIdentified) {
    if (BIsIdentified) {
      // If both pointers have provenance, they can be directly compared.
      if (A != B)
        return false;
    } else {
      if (isa<LoadInst>(B))
        return isStoredObjCPointer(A);
    }
  } else {
    if (BIsIdentified && isa<LoadInst>(A))
      return isStoredObjCPointer(B);
  }

  // Special handling for PHI and Select.
  if (const PHINode *PN = dyn_cast<PHINode>(A))
    return relatedPHI(PN, B);
  if (const PHINode *PN = dyn_cast<PHINode>(B))
    return relatedPHI(PN, A);
  if (const SelectInst *S = dyn_cast<SelectInst>(A))
    return relatedSelect(S, B);
  if (const SelectInst *S = dyn_cast<SelectInst>(B))
    return relatedSelect(S, A);

  // Conservative.
  return true;
}
bool ProvenanceAnalysis::related(const Value *A, const Value *B) {
  // Begin by inserting a conservative value into the map. If the insertion
  // fails, we have the answer already. If it succeeds, leave it there until we
  // compute the real answer to guard against recursive queries.
  if (A > B) std::swap(A, B);
  std::pair<CachedResultsTy::iterator, bool> Pair =
    CachedResults.insert(std::make_pair(ValuePairTy(A, B), true));
  if (!Pair.second)
    return Pair.first->second;

  bool Result = relatedCheck(A, B);
  CachedResults[ValuePairTy(A, B)] = Result;
  return Result;
}
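// Illustrative usage sketch (not from the original source; PtrA and PtrB are
// hypothetical): a client seeds the analysis with an AliasAnalysis and then
// issues cached queries:
//
//   ProvenanceAnalysis PA;
//   PA.setAA(&getAnalysis<AliasAnalysis>());
//   if (!PA.related(PtrA, PtrB))
//     ; // The pointers are known to have unrelated provenance.
//
// Queries are canonicalized by pointer order, so related(A, B) and
// related(B, A) share one cache entry, and the conservative placeholder
// inserted up front terminates recursive queries through PHI cycles.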
namespace {
  // Sequence - A sequence of states that a pointer may go through in which an
  // objc_retain and objc_release are actually needed.
  enum Sequence {
    S_None,
    S_Retain,         ///< objc_retain(x)
    S_CanRelease,     ///< foo(x) -- x could possibly see a ref count decrement
    S_Use,            ///< any use of x
    S_Stop,           ///< like S_Release, but code motion is stopped
    S_Release,        ///< objc_release(x)
    S_MovableRelease  ///< objc_release(x), !clang.imprecise_release
  };
}
static Sequence MergeSeqs(Sequence A, Sequence B, bool TopDown) {
  // The easy cases.
  if (A == B)
    return A;
  if (A == S_None || B == S_None)
    return S_None;

  if (A > B) std::swap(A, B);
  if (TopDown) {
    // Choose the side which is further along in the sequence.
    if ((A == S_Retain || A == S_CanRelease) &&
        (B == S_CanRelease || B == S_Use))
      return B;
  } else {
    // Choose the side which is further along in the sequence.
    if ((A == S_Use || A == S_CanRelease) &&
        (B == S_Use || B == S_Release || B == S_Stop || B == S_MovableRelease))
      return A;
    // If both sides are releases, choose the more conservative one.
    if (A == S_Stop && (B == S_Release || B == S_MovableRelease))
      return A;
    if (A == S_Release && B == S_MovableRelease)
      return A;
  }

  return S_None;
}
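// Illustrative example (not from the original source): in a bottom-up merge,
// MergeSeqs(S_Use, S_MovableRelease, /*TopDown=*/false) yields S_Use, the
// side further along toward the matching retain, while
// MergeSeqs(S_Stop, S_MovableRelease, /*TopDown=*/false) yields S_Stop, the
// more conservative release state. Any merge involving S_None is S_None.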
namespace {
  /// RRInfo - Unidirectional information about either a
  /// retain-decrement-use-release sequence or release-use-decrement-retain
  /// reverse sequence.
  struct RRInfo {
    /// KnownSafe - After an objc_retain, the reference count of the referenced
    /// object is known to be positive. Similarly, before an objc_release, the
    /// reference count of the referenced object is known to be positive. If
    /// there are retain-release pairs in code regions where the retain count
    /// is known to be positive, they can be eliminated, regardless of any side
    /// effects between them.
    ///
    /// Also, a retain+release pair nested within another retain+release
    /// pair all on the known same pointer value can be eliminated, regardless
    /// of any intervening side effects.
    ///
    /// KnownSafe is true when either of these conditions is satisfied.
    bool KnownSafe;

    /// IsRetainBlock - True if the Calls are objc_retainBlock calls (as
    /// opposed to objc_retain calls).
    bool IsRetainBlock;

    /// IsTailCallRelease - True if the objc_release calls are all marked
    /// with the "tail" keyword.
    bool IsTailCallRelease;

    /// Partial - True if we've seen an opportunity for partial RR elimination,
    /// such as pushing calls into a CFG triangle or into one side of a
    /// CFG diamond.
    /// TODO: Consider moving this to PtrState.
    bool Partial;

    /// ReleaseMetadata - If the Calls are objc_release calls and they all have
    /// a clang.imprecise_release tag, this is the metadata tag.
    MDNode *ReleaseMetadata;

    /// Calls - For a top-down sequence, the set of objc_retains or
    /// objc_retainBlocks. For bottom-up, the set of objc_releases.
    SmallPtrSet<Instruction *, 2> Calls;

    /// ReverseInsertPts - The set of optimal insert positions for
    /// moving calls in the opposite sequence.
    SmallPtrSet<Instruction *, 2> ReverseInsertPts;

    RRInfo() :
      KnownSafe(false), IsRetainBlock(false),
      IsTailCallRelease(false), Partial(false),
      ReleaseMetadata(0) {}

    void clear();
  };
}

void RRInfo::clear() {
  KnownSafe = false;
  IsRetainBlock = false;
  IsTailCallRelease = false;
  Partial = false;
  ReleaseMetadata = 0;
  Calls.clear();
  ReverseInsertPts.clear();
}
namespace {
  /// PtrState - This class summarizes several per-pointer runtime properties
  /// which are propagated through the flow graph.
  class PtrState {
    /// RefCount - The known minimum number of reference count increments.
    unsigned RefCount;

    /// NestCount - The known minimum level of retain+release nesting.
    unsigned NestCount;

    /// Seq - The current position in the sequence.
    Sequence Seq;

  public:
    /// RRI - Unidirectional information about the current sequence.
    /// TODO: Encapsulate this better.
    RRInfo RRI;

    PtrState() : RefCount(0), NestCount(0), Seq(S_None) {}

    void SetAtLeastOneRefCount() {
      if (RefCount == 0) RefCount = 1;
    }

    void IncrementRefCount() {
      if (RefCount != UINT_MAX) ++RefCount;
    }

    void DecrementRefCount() {
      if (RefCount != 0) --RefCount;
    }

    bool IsKnownIncremented() const {
      return RefCount > 0;
    }

    void IncrementNestCount() {
      if (NestCount != UINT_MAX) ++NestCount;
    }

    void DecrementNestCount() {
      if (NestCount != 0) --NestCount;
    }

    bool IsKnownNested() const {
      return NestCount > 0;
    }

    void SetSeq(Sequence NewSeq) {
      Seq = NewSeq;
    }

    Sequence GetSeq() const {
      return Seq;
    }

    void ClearSequenceProgress() {
      Seq = S_None;
      RRI.clear();
    }

    void Merge(const PtrState &Other, bool TopDown);
  };
}
void
PtrState::Merge(const PtrState &Other, bool TopDown) {
  Seq = MergeSeqs(Seq, Other.Seq, TopDown);
  RefCount = std::min(RefCount, Other.RefCount);
  NestCount = std::min(NestCount, Other.NestCount);

  // We can't merge a plain objc_retain with an objc_retainBlock.
  if (RRI.IsRetainBlock != Other.RRI.IsRetainBlock)
    Seq = S_None;

  // If we're not in a sequence (anymore), drop all associated state.
  if (Seq == S_None) {
    RRI.clear();
  } else if (RRI.Partial || Other.RRI.Partial) {
    // If we're doing a merge on a path that's previously seen a partial
    // merge, conservatively drop the sequence, to avoid doing partial
    // RR elimination. If the branch predicates for the two merges differ,
    // mixing them is unsafe.
    Seq = S_None;
    RRI.clear();
  } else {
    // Conservatively merge the ReleaseMetadata information.
    if (RRI.ReleaseMetadata != Other.RRI.ReleaseMetadata)
      RRI.ReleaseMetadata = 0;

    RRI.KnownSafe = RRI.KnownSafe && Other.RRI.KnownSafe;
    RRI.IsTailCallRelease = RRI.IsTailCallRelease && Other.RRI.IsTailCallRelease;
    RRI.Calls.insert(Other.RRI.Calls.begin(), Other.RRI.Calls.end());

    // Merge the insert point sets. If there are any differences,
    // that makes this a partial merge.
    RRI.Partial = RRI.ReverseInsertPts.size() !=
                  Other.RRI.ReverseInsertPts.size();
    for (SmallPtrSet<Instruction *, 2>::const_iterator
         I = Other.RRI.ReverseInsertPts.begin(),
         E = Other.RRI.ReverseInsertPts.end(); I != E; ++I)
      RRI.Partial |= RRI.ReverseInsertPts.insert(*I);
  }
}
namespace {
  /// BBState - Per-BasicBlock state.
  class BBState {
    /// TopDownPathCount - The number of unique control paths from the entry
    /// which can reach this block.
    unsigned TopDownPathCount;

    /// BottomUpPathCount - The number of unique control paths to exits
    /// from this block.
    unsigned BottomUpPathCount;

    /// MapTy - A type for PerPtrTopDown and PerPtrBottomUp.
    typedef MapVector<const Value *, PtrState> MapTy;

    /// PerPtrTopDown - The top-down traversal uses this to record information
    /// known about a pointer at the bottom of each block.
    MapTy PerPtrTopDown;

    /// PerPtrBottomUp - The bottom-up traversal uses this to record information
    /// known about a pointer at the top of each block.
    MapTy PerPtrBottomUp;

  public:
    BBState() : TopDownPathCount(0), BottomUpPathCount(0) {}

    typedef MapTy::iterator ptr_iterator;
    typedef MapTy::const_iterator ptr_const_iterator;

    ptr_iterator top_down_ptr_begin() { return PerPtrTopDown.begin(); }
    ptr_iterator top_down_ptr_end() { return PerPtrTopDown.end(); }
    ptr_const_iterator top_down_ptr_begin() const {
      return PerPtrTopDown.begin();
    }
    ptr_const_iterator top_down_ptr_end() const {
      return PerPtrTopDown.end();
    }

    ptr_iterator bottom_up_ptr_begin() { return PerPtrBottomUp.begin(); }
    ptr_iterator bottom_up_ptr_end() { return PerPtrBottomUp.end(); }
    ptr_const_iterator bottom_up_ptr_begin() const {
      return PerPtrBottomUp.begin();
    }
    ptr_const_iterator bottom_up_ptr_end() const {
      return PerPtrBottomUp.end();
    }

    /// SetAsEntry - Mark this block as being an entry block, which has one
    /// path from the entry by definition.
    void SetAsEntry() { TopDownPathCount = 1; }

    /// SetAsExit - Mark this block as being an exit block, which has one
    /// path to an exit by definition.
    void SetAsExit() { BottomUpPathCount = 1; }

    PtrState &getPtrTopDownState(const Value *Arg) {
      return PerPtrTopDown[Arg];
    }

    PtrState &getPtrBottomUpState(const Value *Arg) {
      return PerPtrBottomUp[Arg];
    }

    void clearBottomUpPointers() {
      PerPtrBottomUp.clear();
    }

    void clearTopDownPointers() {
      PerPtrTopDown.clear();
    }

    void InitFromPred(const BBState &Other);
    void InitFromSucc(const BBState &Other);
    void MergePred(const BBState &Other);
    void MergeSucc(const BBState &Other);

    /// GetAllPathCount - Return the number of possible unique paths from an
    /// entry to an exit which pass through this block. This is only valid
    /// after both the top-down and bottom-up traversals are complete.
    unsigned GetAllPathCount() const {
      return TopDownPathCount * BottomUpPathCount;
    }

    /// IsVisitedTopDown - Test whether the block for this BBState has been
    /// visited by the top-down portion of the algorithm.
    bool isVisitedTopDown() const {
      return TopDownPathCount != 0;
    }
  };
}
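// Illustrative example (not from the original source): in a CFG diamond
//
//   entry -> a; entry -> b; a -> exit; b -> exit
//
// the exit block merges TopDownPathCount = 1 + 1 = 2 and, as an exit, has
// BottomUpPathCount = 1, so its GetAllPathCount() reports 2 unique
// entry-to-exit paths passing through it.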
void BBState::InitFromPred(const BBState &Other) {
  PerPtrTopDown = Other.PerPtrTopDown;
  TopDownPathCount = Other.TopDownPathCount;
}

void BBState::InitFromSucc(const BBState &Other) {
  PerPtrBottomUp = Other.PerPtrBottomUp;
  BottomUpPathCount = Other.BottomUpPathCount;
}

/// MergePred - The top-down traversal uses this to merge information about
/// predecessors to form the initial state for a new block.
void BBState::MergePred(const BBState &Other) {
  // Other.TopDownPathCount can be 0, in which case it is either dead or a
  // loop backedge. Loop backedges are special.
  TopDownPathCount += Other.TopDownPathCount;

  // For each entry in the other set, if our set has an entry with the same key,
  // merge the entries. Otherwise, copy the entry and merge it with an empty
  // entry.
  for (ptr_const_iterator MI = Other.top_down_ptr_begin(),
       ME = Other.top_down_ptr_end(); MI != ME; ++MI) {
    std::pair<ptr_iterator, bool> Pair = PerPtrTopDown.insert(*MI);
    Pair.first->second.Merge(Pair.second ? PtrState() : MI->second,
                             /*TopDown=*/true);
  }

  // For each entry in our set, if the other set doesn't have an entry with the
  // same key, force it to merge with an empty entry.
  for (ptr_iterator MI = top_down_ptr_begin(),
       ME = top_down_ptr_end(); MI != ME; ++MI)
    if (Other.PerPtrTopDown.find(MI->first) == Other.PerPtrTopDown.end())
      MI->second.Merge(PtrState(), /*TopDown=*/true);
}

/// MergeSucc - The bottom-up traversal uses this to merge information about
/// successors to form the initial state for a new block.
void BBState::MergeSucc(const BBState &Other) {
  // Other.BottomUpPathCount can be 0, in which case it is either dead or a
  // loop backedge. Loop backedges are special.
  BottomUpPathCount += Other.BottomUpPathCount;

  // For each entry in the other set, if our set has an entry with the
  // same key, merge the entries. Otherwise, copy the entry and merge
  // it with an empty entry.
  for (ptr_const_iterator MI = Other.bottom_up_ptr_begin(),
       ME = Other.bottom_up_ptr_end(); MI != ME; ++MI) {
    std::pair<ptr_iterator, bool> Pair = PerPtrBottomUp.insert(*MI);
    Pair.first->second.Merge(Pair.second ? PtrState() : MI->second,
                             /*TopDown=*/false);
  }

  // For each entry in our set, if the other set doesn't have an entry
  // with the same key, force it to merge with an empty entry.
  for (ptr_iterator MI = bottom_up_ptr_begin(),
       ME = bottom_up_ptr_end(); MI != ME; ++MI)
    if (Other.PerPtrBottomUp.find(MI->first) == Other.PerPtrBottomUp.end())
      MI->second.Merge(PtrState(), /*TopDown=*/false);
}
namespace {
  /// ObjCARCOpt - The main ARC optimization pass.
  class ObjCARCOpt : public FunctionPass {
    bool Changed;
    ProvenanceAnalysis PA;

    /// Run - A flag indicating whether this optimization pass should run.
    bool Run;

    /// RetainRVCallee, etc. - Declarations for ObjC runtime
    /// functions, for use in creating calls to them. These are initialized
    /// lazily to avoid cluttering up the Module with unused declarations.
    Constant *RetainRVCallee, *AutoreleaseRVCallee, *ReleaseCallee,
             *RetainCallee, *RetainBlockCallee, *AutoreleaseCallee;

    /// UsedInThisFunction - Flags which determine whether each of the
    /// interesting runtime functions is in fact used in the current function.
    unsigned UsedInThisFunction;

    /// ImpreciseReleaseMDKind - The Metadata Kind for clang.imprecise_release
    /// metadata.
    unsigned ImpreciseReleaseMDKind;

    /// CopyOnEscapeMDKind - The Metadata Kind for clang.arc.copy_on_escape
    /// metadata.
    unsigned CopyOnEscapeMDKind;

    /// NoObjCARCExceptionsMDKind - The Metadata Kind for
    /// clang.arc.no_objc_arc_exceptions metadata.
    unsigned NoObjCARCExceptionsMDKind;

    Constant *getRetainRVCallee(Module *M);
    Constant *getAutoreleaseRVCallee(Module *M);
    Constant *getReleaseCallee(Module *M);
    Constant *getRetainCallee(Module *M);
    Constant *getRetainBlockCallee(Module *M);
    Constant *getAutoreleaseCallee(Module *M);

    bool IsRetainBlockOptimizable(const Instruction *Inst);

    void OptimizeRetainCall(Function &F, Instruction *Retain);
    bool OptimizeRetainRVCall(Function &F, Instruction *RetainRV);
    void OptimizeAutoreleaseRVCall(Function &F, Instruction *AutoreleaseRV);
    void OptimizeIndividualCalls(Function &F);

    void CheckForCFGHazards(const BasicBlock *BB,
                            DenseMap<const BasicBlock *, BBState> &BBStates,
                            BBState &MyStates) const;
    bool VisitInstructionBottomUp(Instruction *Inst,
                                  BasicBlock *BB,
                                  MapVector<Value *, RRInfo> &Retains,
                                  BBState &MyStates);
    bool VisitBottomUp(BasicBlock *BB,
                       DenseMap<const BasicBlock *, BBState> &BBStates,
                       MapVector<Value *, RRInfo> &Retains);
    bool VisitInstructionTopDown(Instruction *Inst,
                                 DenseMap<Value *, RRInfo> &Releases,
                                 BBState &MyStates);
    bool VisitTopDown(BasicBlock *BB,
                      DenseMap<const BasicBlock *, BBState> &BBStates,
                      DenseMap<Value *, RRInfo> &Releases);
    bool Visit(Function &F,
               DenseMap<const BasicBlock *, BBState> &BBStates,
               MapVector<Value *, RRInfo> &Retains,
               DenseMap<Value *, RRInfo> &Releases);

    void MoveCalls(Value *Arg, RRInfo &RetainsToMove, RRInfo &ReleasesToMove,
                   MapVector<Value *, RRInfo> &Retains,
                   DenseMap<Value *, RRInfo> &Releases,
                   SmallVectorImpl<Instruction *> &DeadInsts,
                   Module *M);

    bool PerformCodePlacement(DenseMap<const BasicBlock *, BBState> &BBStates,
                              MapVector<Value *, RRInfo> &Retains,
                              DenseMap<Value *, RRInfo> &Releases,
                              Module *M);

    void OptimizeWeakCalls(Function &F);

    bool OptimizeSequences(Function &F);

    void OptimizeReturns(Function &F);

    virtual void getAnalysisUsage(AnalysisUsage &AU) const;
    virtual bool doInitialization(Module &M);
    virtual bool runOnFunction(Function &F);
    virtual void releaseMemory();

  public:
    static char ID;
    ObjCARCOpt() : FunctionPass(ID) {
      initializeObjCARCOptPass(*PassRegistry::getPassRegistry());
    }
  };
}
char ObjCARCOpt::ID = 0;
INITIALIZE_PASS_BEGIN(ObjCARCOpt,
                      "objc-arc", "ObjC ARC optimization", false, false)
INITIALIZE_PASS_DEPENDENCY(ObjCARCAliasAnalysis)
INITIALIZE_PASS_END(ObjCARCOpt,
                    "objc-arc", "ObjC ARC optimization", false, false)

Pass *llvm::createObjCARCOptPass() {
  return new ObjCARCOpt();
}
1763 void ObjCARCOpt::getAnalysisUsage(AnalysisUsage &AU) const {
1764 AU.addRequired<ObjCARCAliasAnalysis>();
1765 AU.addRequired<AliasAnalysis>();
1766 // ARC optimization doesn't currently split critical edges.
1767 AU.setPreservesCFG();
1770 bool ObjCARCOpt::IsRetainBlockOptimizable(const Instruction *Inst) {
1771 // Without the magic metadata tag, we have to assume this might be an
1772 // objc_retainBlock call inserted to convert a block pointer to an id,
1773 // in which case it really is needed.
1774 if (!Inst->getMetadata(CopyOnEscapeMDKind))
1777 // If the pointer "escapes" (not including being used in a call),
1778 // the copy may be needed.
1779 if (DoesObjCBlockEscape(Inst))
1782 // Otherwise, it's not needed.
1786 Constant *ObjCARCOpt::getRetainRVCallee(Module *M) {
1787 if (!RetainRVCallee) {
1788 LLVMContext &C = M->getContext();
1789 Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
1790 std::vector<Type *> Params;
1791 Params.push_back(I8X);
1793 FunctionType::get(I8X, Params, /*isVarArg=*/false);
1794 AttrListPtr Attributes;
1795 Attributes.addAttr(~0u, Attribute::NoUnwind);
1797 M->getOrInsertFunction("objc_retainAutoreleasedReturnValue", FTy,
1800 return RetainRVCallee;
1803 Constant *ObjCARCOpt::getAutoreleaseRVCallee(Module *M) {
1804 if (!AutoreleaseRVCallee) {
1805 LLVMContext &C = M->getContext();
1806 Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
1807 std::vector<Type *> Params;
1808 Params.push_back(I8X);
1810 FunctionType::get(I8X, Params, /*isVarArg=*/false);
1811 AttrListPtr Attributes;
1812 Attributes.addAttr(~0u, Attribute::NoUnwind);
1813 AutoreleaseRVCallee =
1814 M->getOrInsertFunction("objc_autoreleaseReturnValue", FTy,
1817 return AutoreleaseRVCallee;
1820 Constant *ObjCARCOpt::getReleaseCallee(Module *M) {
1821 if (!ReleaseCallee) {
1822 LLVMContext &C = M->getContext();
1823 std::vector<Type *> Params;
1824 Params.push_back(PointerType::getUnqual(Type::getInt8Ty(C)));
1825 AttrListPtr Attributes;
1826 Attributes.addAttr(~0u, Attribute::NoUnwind);
1828 M->getOrInsertFunction(
1830 FunctionType::get(Type::getVoidTy(C), Params, /*isVarArg=*/false),
1833 return ReleaseCallee;
1836 Constant *ObjCARCOpt::getRetainCallee(Module *M) {
1837 if (!RetainCallee) {
1838 LLVMContext &C = M->getContext();
1839 std::vector<Type *> Params;
1840 Params.push_back(PointerType::getUnqual(Type::getInt8Ty(C)));
1841 AttrListPtr Attributes;
1842 Attributes.addAttr(~0u, Attribute::NoUnwind);
1844 M->getOrInsertFunction(
1846 FunctionType::get(Params[0], Params, /*isVarArg=*/false),
1849 return RetainCallee;
1852 Constant *ObjCARCOpt::getRetainBlockCallee(Module *M) {
1853 if (!RetainBlockCallee) {
1854 LLVMContext &C = M->getContext();
1855 std::vector<Type *> Params;
1856 Params.push_back(PointerType::getUnqual(Type::getInt8Ty(C)));
1857 AttrListPtr Attributes;
1858 // objc_retainBlock is not nounwind because it calls user copy constructors
1859 // which could theoretically throw.
1861 M->getOrInsertFunction(
1863 FunctionType::get(Params[0], Params, /*isVarArg=*/false),
1866 return RetainBlockCallee;
1869 Constant *ObjCARCOpt::getAutoreleaseCallee(Module *M) {
1870 if (!AutoreleaseCallee) {
1871 LLVMContext &C = M->getContext();
1872 std::vector<Type *> Params;
1873 Params.push_back(PointerType::getUnqual(Type::getInt8Ty(C)));
1874 AttrListPtr Attributes;
1875 Attributes.addAttr(~0u, Attribute::NoUnwind);
1877 M->getOrInsertFunction(
1879 FunctionType::get(Params[0], Params, /*isVarArg=*/false),
1882 return AutoreleaseCallee;
/// CanAlterRefCount - Test whether the given instruction can result in a
/// reference count modification (positive or negative) for the pointer's
/// object.
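///
/// As an illustrative sketch: a call to an arbitrary external function must
/// conservatively be assumed able to retain or release any reachable object,
/// whereas a call that AliasAnalysis proves only reads memory can never
/// alter a reference count.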
1889 CanAlterRefCount(const Instruction *Inst, const Value *Ptr,
1890 ProvenanceAnalysis &PA, InstructionClass Class) {
1892 case IC_Autorelease:
1893 case IC_AutoreleaseRV:
1895 // These operations never directly modify a reference count.
1900 ImmutableCallSite CS = static_cast<const Value *>(Inst);
1901 assert(CS && "Only calls can alter reference counts!");
1903 // See if AliasAnalysis can help us with the call.
1904 AliasAnalysis::ModRefBehavior MRB = PA.getAA()->getModRefBehavior(CS);
1905 if (AliasAnalysis::onlyReadsMemory(MRB))
1907 if (AliasAnalysis::onlyAccessesArgPointees(MRB)) {
1908 for (ImmutableCallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end();
1910 const Value *Op = *I;
1911 if (IsPotentialUse(Op) && PA.related(Ptr, Op))
1917 // Assume the worst.
1921 /// CanUse - Test whether the given instruction can "use" the given pointer's
1922 /// object in a way that requires the reference count to be positive.
1924 CanUse(const Instruction *Inst, const Value *Ptr, ProvenanceAnalysis &PA,
1925 InstructionClass Class) {
1926 // IC_Call operations (as opposed to IC_CallOrUser) never "use" objc pointers.
1927 if (Class == IC_Call)
// Consider various instructions which may have pointer arguments which are
// not "uses".
1932 if (const ICmpInst *ICI = dyn_cast<ICmpInst>(Inst)) {
1933 // Comparing a pointer with null, or any other constant, isn't really a use,
1934 // because we don't care what the pointer points to, or about the values
1935 // of any other dynamic reference-counted pointers.
1936 if (!IsPotentialUse(ICI->getOperand(1)))
1938 } else if (ImmutableCallSite CS = static_cast<const Value *>(Inst)) {
1939 // For calls, just check the arguments (and not the callee operand).
1940 for (ImmutableCallSite::arg_iterator OI = CS.arg_begin(),
1941 OE = CS.arg_end(); OI != OE; ++OI) {
1942 const Value *Op = *OI;
1943 if (IsPotentialUse(Op) && PA.related(Ptr, Op))
1947 } else if (const StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
1948 // Special-case stores, because we don't care about the stored value, just
1949 // the store address.
1950 const Value *Op = GetUnderlyingObjCPtr(SI->getPointerOperand());
// If we can't tell what the underlying object was, assume there is a
// dependence.
1953 return IsPotentialUse(Op) && PA.related(Op, Ptr);
1956 // Check each operand for a match.
1957 for (User::const_op_iterator OI = Inst->op_begin(), OE = Inst->op_end();
1959 const Value *Op = *OI;
1960 if (IsPotentialUse(Op) && PA.related(Ptr, Op))
1966 /// CanInterruptRV - Test whether the given instruction can autorelease
1967 /// any pointer or cause an autoreleasepool pop.
1969 CanInterruptRV(InstructionClass Class) {
1971 case IC_AutoreleasepoolPop:
1974 case IC_Autorelease:
1975 case IC_AutoreleaseRV:
1976 case IC_FusedRetainAutorelease:
1977 case IC_FusedRetainAutoreleaseRV:
/// DependenceKind - There are several kinds of dependence-like concepts in
/// use here.
1987 enum DependenceKind {
1988 NeedsPositiveRetainCount,
1989 AutoreleasePoolBoundary,
1990 CanChangeRetainCount,
1991 RetainAutoreleaseDep, ///< Blocks objc_retainAutorelease.
1992 RetainAutoreleaseRVDep, ///< Blocks objc_retainAutoreleaseReturnValue.
1993 RetainRVDep ///< Blocks objc_retainAutoreleasedReturnValue.
1997 /// Depends - Test if there can be dependencies on Inst through Arg. This
1998 /// function only tests dependencies relevant for removing pairs of calls.
2000 Depends(DependenceKind Flavor, Instruction *Inst, const Value *Arg,
2001 ProvenanceAnalysis &PA) {
2002 // If we've reached the definition of Arg, stop.
2007 case NeedsPositiveRetainCount: {
2008 InstructionClass Class = GetInstructionClass(Inst);
2010 case IC_AutoreleasepoolPop:
2011 case IC_AutoreleasepoolPush:
2015 return CanUse(Inst, Arg, PA, Class);
2019 case AutoreleasePoolBoundary: {
2020 InstructionClass Class = GetInstructionClass(Inst);
2022 case IC_AutoreleasepoolPop:
2023 case IC_AutoreleasepoolPush:
2024 // These mark the end and begin of an autorelease pool scope.
2027 // Nothing else does this.
2032 case CanChangeRetainCount: {
2033 InstructionClass Class = GetInstructionClass(Inst);
2035 case IC_AutoreleasepoolPop:
2036 // Conservatively assume this can decrement any count.
2038 case IC_AutoreleasepoolPush:
2042 return CanAlterRefCount(Inst, Arg, PA, Class);
2046 case RetainAutoreleaseDep:
2047 switch (GetBasicInstructionClass(Inst)) {
2048 case IC_AutoreleasepoolPop:
2049 case IC_AutoreleasepoolPush:
2050 // Don't merge an objc_autorelease with an objc_retain inside a different
2051 // autoreleasepool scope.
2055 // Check for a retain of the same pointer for merging.
2056 return GetObjCArg(Inst) == Arg;
2058 // Nothing else matters for objc_retainAutorelease formation.
2062 case RetainAutoreleaseRVDep: {
2063 InstructionClass Class = GetBasicInstructionClass(Inst);
2067 // Check for a retain of the same pointer for merging.
2068 return GetObjCArg(Inst) == Arg;
2070 // Anything that can autorelease interrupts
2071 // retainAutoreleaseReturnValue formation.
2072 return CanInterruptRV(Class);
2077 return CanInterruptRV(GetBasicInstructionClass(Inst));
2080 llvm_unreachable("Invalid dependence flavor");
2083 /// FindDependencies - Walk up the CFG from StartPos (which is in StartBB) and
2084 /// find local and non-local dependencies on Arg.
2085 /// TODO: Cache results?
2087 FindDependencies(DependenceKind Flavor,
2089 BasicBlock *StartBB, Instruction *StartInst,
2090 SmallPtrSet<Instruction *, 4> &DependingInstructions,
2091 SmallPtrSet<const BasicBlock *, 4> &Visited,
2092 ProvenanceAnalysis &PA) {
2093 BasicBlock::iterator StartPos = StartInst;
2095 SmallVector<std::pair<BasicBlock *, BasicBlock::iterator>, 4> Worklist;
2096 Worklist.push_back(std::make_pair(StartBB, StartPos));
2098 std::pair<BasicBlock *, BasicBlock::iterator> Pair =
2099 Worklist.pop_back_val();
2100 BasicBlock *LocalStartBB = Pair.first;
2101 BasicBlock::iterator LocalStartPos = Pair.second;
2102 BasicBlock::iterator StartBBBegin = LocalStartBB->begin();
2104 if (LocalStartPos == StartBBBegin) {
2105 pred_iterator PI(LocalStartBB), PE(LocalStartBB, false);
2107 // If we've reached the function entry, produce a null dependence.
2108 DependingInstructions.insert(0);
2110 // Add the predecessors to the worklist.
2112 BasicBlock *PredBB = *PI;
2113 if (Visited.insert(PredBB))
2114 Worklist.push_back(std::make_pair(PredBB, PredBB->end()));
2115 } while (++PI != PE);
2119 Instruction *Inst = --LocalStartPos;
2120 if (Depends(Flavor, Inst, Arg, PA)) {
2121 DependingInstructions.insert(Inst);
2125 } while (!Worklist.empty());
// Determine whether the original StartBB post-dominates all of the blocks we
// visited. If not, insert a sentinel indicating that most optimizations are
// not safe.
2130 for (SmallPtrSet<const BasicBlock *, 4>::const_iterator I = Visited.begin(),
2131 E = Visited.end(); I != E; ++I) {
2132 const BasicBlock *BB = *I;
2135 const TerminatorInst *TI = cast<TerminatorInst>(&BB->back());
2136 for (succ_const_iterator SI(TI), SE(TI, false); SI != SE; ++SI) {
2137 const BasicBlock *Succ = *SI;
2138 if (Succ != StartBB && !Visited.count(Succ)) {
2139 DependingInstructions.insert(reinterpret_cast<Instruction *>(-1));
2146 static bool isNullOrUndef(const Value *V) {
2147 return isa<ConstantPointerNull>(V) || isa<UndefValue>(V);
2150 static bool isNoopInstruction(const Instruction *I) {
2151 return isa<BitCastInst>(I) ||
2152 (isa<GetElementPtrInst>(I) &&
2153 cast<GetElementPtrInst>(I)->hasAllZeroIndices());
2156 /// OptimizeRetainCall - Turn objc_retain into
2157 /// objc_retainAutoreleasedReturnValue if the operand is a return value.
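///
/// A minimal sketch of the rewrite (the callee @something is hypothetical):
///   %call = tail call i8* @something()
///   %0 = call i8* @objc_retain(i8* %call)
/// becomes
///   %call = tail call i8* @something()
///   %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call)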
2159 ObjCARCOpt::OptimizeRetainCall(Function &F, Instruction *Retain) {
2160 CallSite CS(GetObjCArg(Retain));
2161 Instruction *Call = CS.getInstruction();
2163 if (Call->getParent() != Retain->getParent()) return;
2165 // Check that the call is next to the retain.
2166 BasicBlock::iterator I = Call;
2168 while (isNoopInstruction(I)) ++I;
// Turn it into an objc_retainAutoreleasedReturnValue call.
2175 cast<CallInst>(Retain)->setCalledFunction(getRetainRVCallee(F.getParent()));
2178 /// OptimizeRetainRVCall - Turn objc_retainAutoreleasedReturnValue into
2179 /// objc_retain if the operand is not a return value. Or, if it can be
/// paired with an objc_autoreleaseReturnValue, delete the pair and
/// return true.
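///
/// A sketch of the pair-deletion case: when
///   %0 = call i8* @objc_autoreleaseReturnValue(i8* %p)
///   %1 = call i8* @objc_retainAutoreleasedReturnValue(i8* %0)
/// appear back to back on the same underlying pointer (%p is hypothetical),
/// both calls simply return their argument, so the pair can be erased.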
2183 ObjCARCOpt::OptimizeRetainRVCall(Function &F, Instruction *RetainRV) {
2184 // Check for the argument being from an immediately preceding call or invoke.
2185 Value *Arg = GetObjCArg(RetainRV);
2187 if (Instruction *Call = CS.getInstruction()) {
2188 if (Call->getParent() == RetainRV->getParent()) {
2189 BasicBlock::iterator I = Call;
2191 while (isNoopInstruction(I)) ++I;
2192 if (&*I == RetainRV)
2194 } else if (InvokeInst *II = dyn_cast<InvokeInst>(Call)) {
2195 BasicBlock *RetainRVParent = RetainRV->getParent();
2196 if (II->getNormalDest() == RetainRVParent) {
2197 BasicBlock::iterator I = RetainRVParent->begin();
2198 while (isNoopInstruction(I)) ++I;
2199 if (&*I == RetainRV)
2205 // Check for being preceded by an objc_autoreleaseReturnValue on the same
2206 // pointer. In this case, we can delete the pair.
2207 BasicBlock::iterator I = RetainRV, Begin = RetainRV->getParent()->begin();
2209 do --I; while (I != Begin && isNoopInstruction(I));
2210 if (GetBasicInstructionClass(I) == IC_AutoreleaseRV &&
2211 GetObjCArg(I) == Arg) {
2214 EraseInstruction(I);
2215 EraseInstruction(RetainRV);
// Turn it into a plain objc_retain call.
2223 cast<CallInst>(RetainRV)->setCalledFunction(getRetainCallee(F.getParent()));
2227 /// OptimizeAutoreleaseRVCall - Turn objc_autoreleaseReturnValue into
2228 /// objc_autorelease if the result is not used as a return value.
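///
/// A minimal sketch, assuming the result is not subsequently returned
/// (%p is hypothetical):
///   %0 = call i8* @objc_autoreleaseReturnValue(i8* %p)
/// becomes
///   %0 = call i8* @objc_autorelease(i8* %p)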
2230 ObjCARCOpt::OptimizeAutoreleaseRVCall(Function &F, Instruction *AutoreleaseRV) {
2231 // Check for a return of the pointer value.
2232 const Value *Ptr = GetObjCArg(AutoreleaseRV);
2233 SmallVector<const Value *, 2> Users;
2234 Users.push_back(Ptr);
2236 Ptr = Users.pop_back_val();
2237 for (Value::const_use_iterator UI = Ptr->use_begin(), UE = Ptr->use_end();
2239 const User *I = *UI;
2240 if (isa<ReturnInst>(I) || GetBasicInstructionClass(I) == IC_RetainRV)
2242 if (isa<BitCastInst>(I))
2245 } while (!Users.empty());
2249 cast<CallInst>(AutoreleaseRV)->
2250 setCalledFunction(getAutoreleaseCallee(F.getParent()));
2253 /// OptimizeIndividualCalls - Visit each call, one at a time, and make
2254 /// simplifications without doing any additional analysis.
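///
/// For illustration, the peepholes here include deleting no-op casts,
/// deleting ARC calls whose argument is known to be null or undef, and
/// rewriting objc_autorelease(x) to objc_release(x) when x has no other
/// uses; e.g. a call such as
///   %0 = call i8* @objc_retain(i8* null)
/// is a no-op and is simply erased.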
2255 void ObjCARCOpt::OptimizeIndividualCalls(Function &F) {
2256 // Reset all the flags in preparation for recomputing them.
2257 UsedInThisFunction = 0;
2259 // Visit all objc_* calls in F.
2260 for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
2261 Instruction *Inst = &*I++;
2262 InstructionClass Class = GetBasicInstructionClass(Inst);
2267 // Delete no-op casts. These function calls have special semantics, but
2268 // the semantics are entirely implemented via lowering in the front-end,
2269 // so by the time they reach the optimizer, they are just no-op calls
2270 // which return their argument.
2272 // There are gray areas here, as the ability to cast reference-counted
2273 // pointers to raw void* and back allows code to break ARC assumptions,
2274 // however these are currently considered to be unimportant.
2278 EraseInstruction(Inst);
2281 // If the pointer-to-weak-pointer is null, it's undefined behavior.
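// As an illustrative sketch, a call such as
//   %0 = call i8* @objc_loadWeak(i8** null)
// is replaced below with a store of undef through the null pointer (making
// the undefined behavior explicit to later passes), and its uses become
// undef.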
2284 case IC_LoadWeakRetained:
2286 case IC_DestroyWeak: {
2287 CallInst *CI = cast<CallInst>(Inst);
2288 if (isNullOrUndef(CI->getArgOperand(0))) {
2290 Type *Ty = CI->getArgOperand(0)->getType();
2291 new StoreInst(UndefValue::get(cast<PointerType>(Ty)->getElementType()),
2292 Constant::getNullValue(Ty),
2294 CI->replaceAllUsesWith(UndefValue::get(CI->getType()));
2295 CI->eraseFromParent();
2302 CallInst *CI = cast<CallInst>(Inst);
2303 if (isNullOrUndef(CI->getArgOperand(0)) ||
2304 isNullOrUndef(CI->getArgOperand(1))) {
2306 Type *Ty = CI->getArgOperand(0)->getType();
2307 new StoreInst(UndefValue::get(cast<PointerType>(Ty)->getElementType()),
2308 Constant::getNullValue(Ty),
2310 CI->replaceAllUsesWith(UndefValue::get(CI->getType()));
2311 CI->eraseFromParent();
2317 OptimizeRetainCall(F, Inst);
2320 if (OptimizeRetainRVCall(F, Inst))
2323 case IC_AutoreleaseRV:
2324 OptimizeAutoreleaseRVCall(F, Inst);
2328 // objc_autorelease(x) -> objc_release(x) if x is otherwise unused.
2329 if (IsAutorelease(Class) && Inst->use_empty()) {
2330 CallInst *Call = cast<CallInst>(Inst);
2331 const Value *Arg = Call->getArgOperand(0);
2332 Arg = FindSingleUseIdentifiedObject(Arg);
2337 // Create the declaration lazily.
2338 LLVMContext &C = Inst->getContext();
2340 CallInst::Create(getReleaseCallee(F.getParent()),
2341 Call->getArgOperand(0), "", Call);
2342 NewCall->setMetadata(ImpreciseReleaseMDKind,
2343 MDNode::get(C, ArrayRef<Value *>()));
2344 EraseInstruction(Call);
2350 // For functions which can never be passed stack arguments, add
2352 if (IsAlwaysTail(Class)) {
2354 cast<CallInst>(Inst)->setTailCall();
2357 // Set nounwind as needed.
2358 if (IsNoThrow(Class)) {
2360 cast<CallInst>(Inst)->setDoesNotThrow();
2363 if (!IsNoopOnNull(Class)) {
2364 UsedInThisFunction |= 1 << Class;
2368 const Value *Arg = GetObjCArg(Inst);
2370 // ARC calls with null are no-ops. Delete them.
2371 if (isNullOrUndef(Arg)) {
2374 EraseInstruction(Inst);
2378 // Keep track of which of retain, release, autorelease, and retain_block
2379 // are actually present in this function.
2380 UsedInThisFunction |= 1 << Class;
2382 // If Arg is a PHI, and one or more incoming values to the
2383 // PHI are null, and the call is control-equivalent to the PHI, and there
2384 // are no relevant side effects between the PHI and the call, the call
2385 // could be pushed up to just those paths with non-null incoming values.
2386 // For now, don't bother splitting critical edges for this.
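// A sketch with hypothetical predecessor blocks %bb1 and %bb2:
//   %p = phi i8* [ null, %bb1 ], [ %x, %bb2 ]
//   call void @objc_release(i8* %p)
// The call is a no-op along the %bb1 path, so it can be cloned into %bb2
// (operating directly on %x) and the original call erased.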
2387 SmallVector<std::pair<Instruction *, const Value *>, 4> Worklist;
2388 Worklist.push_back(std::make_pair(Inst, Arg));
2390 std::pair<Instruction *, const Value *> Pair = Worklist.pop_back_val();
2394 const PHINode *PN = dyn_cast<PHINode>(Arg);
// Determine if the PHI has any null operands, or any incoming
// critical edges.
2399 bool HasNull = false;
2400 bool HasCriticalEdges = false;
2401 for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
2403 StripPointerCastsAndObjCCalls(PN->getIncomingValue(i));
2404 if (isNullOrUndef(Incoming))
2406 else if (cast<TerminatorInst>(PN->getIncomingBlock(i)->back())
2407 .getNumSuccessors() != 1) {
2408 HasCriticalEdges = true;
2412 // If we have null operands and no critical edges, optimize.
2413 if (!HasCriticalEdges && HasNull) {
2414 SmallPtrSet<Instruction *, 4> DependingInstructions;
2415 SmallPtrSet<const BasicBlock *, 4> Visited;
2417 // Check that there is nothing that cares about the reference
2418 // count between the call and the phi.
2421 case IC_RetainBlock:
2422 // These can always be moved up.
2425 // These can't be moved across things that care about the retain count.
2426 FindDependencies(NeedsPositiveRetainCount, Arg,
2427 Inst->getParent(), Inst,
2428 DependingInstructions, Visited, PA);
2430 case IC_Autorelease:
2431 // These can't be moved across autorelease pool scope boundaries.
2432 FindDependencies(AutoreleasePoolBoundary, Arg,
2433 Inst->getParent(), Inst,
2434 DependingInstructions, Visited, PA);
2437 case IC_AutoreleaseRV:
2438 // Don't move these; the RV optimization depends on the autoreleaseRV
2439 // being tail called, and the retainRV being immediately after a call
2440 // (which might still happen if we get lucky with codegen layout, but
2441 // it's not worth taking the chance).
2444 llvm_unreachable("Invalid dependence flavor");
2447 if (DependingInstructions.size() == 1 &&
2448 *DependingInstructions.begin() == PN) {
2451 // Clone the call into each predecessor that has a non-null value.
2452 CallInst *CInst = cast<CallInst>(Inst);
2453 Type *ParamTy = CInst->getArgOperand(0)->getType();
2454 for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
2456 StripPointerCastsAndObjCCalls(PN->getIncomingValue(i));
2457 if (!isNullOrUndef(Incoming)) {
2458 CallInst *Clone = cast<CallInst>(CInst->clone());
2459 Value *Op = PN->getIncomingValue(i);
2460 Instruction *InsertPos = &PN->getIncomingBlock(i)->back();
2461 if (Op->getType() != ParamTy)
2462 Op = new BitCastInst(Op, ParamTy, "", InsertPos);
2463 Clone->setArgOperand(0, Op);
2464 Clone->insertBefore(InsertPos);
2465 Worklist.push_back(std::make_pair(Clone, Incoming));
2468 // Erase the original call.
2469 EraseInstruction(CInst);
2473 } while (!Worklist.empty());
2477 /// CheckForCFGHazards - Check for critical edges, loop boundaries, irreducible
2478 /// control flow, or other CFG structures where moving code across the edge
/// would result in it being executed more often than intended.
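///
/// For example (a hedged sketch): if a retain before a loop is tracked with
/// a matching release after the loop, sinking the retain into the loop body
/// would execute it once per iteration rather than once, so the tracked
/// sequence must be forgotten instead of moved.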
2481 ObjCARCOpt::CheckForCFGHazards(const BasicBlock *BB,
2482 DenseMap<const BasicBlock *, BBState> &BBStates,
2483 BBState &MyStates) const {
2484 // If any top-down local-use or possible-dec has a succ which is earlier in
2485 // the sequence, forget it.
2486 for (BBState::ptr_iterator I = MyStates.top_down_ptr_begin(),
2487 E = MyStates.top_down_ptr_end(); I != E; ++I)
2488 switch (I->second.GetSeq()) {
2491 const Value *Arg = I->first;
2492 const TerminatorInst *TI = cast<TerminatorInst>(&BB->back());
2493 bool SomeSuccHasSame = false;
2494 bool AllSuccsHaveSame = true;
2495 PtrState &S = I->second;
2496 succ_const_iterator SI(TI), SE(TI, false);
2498 // If the terminator is an invoke marked with the
2499 // clang.arc.no_objc_arc_exceptions metadata, the unwind edge can be
2500 // ignored, for ARC purposes.
2501 if (isa<InvokeInst>(TI) && TI->getMetadata(NoObjCARCExceptionsMDKind))
2504 for (; SI != SE; ++SI) {
2505 Sequence SuccSSeq = S_None;
2506 bool SuccSRRIKnownSafe = false;
2507 // If VisitBottomUp has visited this successor, take what we know about it.
2508 DenseMap<const BasicBlock *, BBState>::iterator BBI = BBStates.find(*SI);
2509 if (BBI != BBStates.end()) {
2510 const PtrState &SuccS = BBI->second.getPtrBottomUpState(Arg);
2511 SuccSSeq = SuccS.GetSeq();
2512 SuccSRRIKnownSafe = SuccS.RRI.KnownSafe;
2516 case S_CanRelease: {
2517 if (!S.RRI.KnownSafe && !SuccSRRIKnownSafe) {
2518 S.ClearSequenceProgress();
2524 SomeSuccHasSame = true;
2528 case S_MovableRelease:
2529 if (!S.RRI.KnownSafe && !SuccSRRIKnownSafe)
2530 AllSuccsHaveSame = false;
2533 llvm_unreachable("bottom-up pointer in retain state!");
2536 // If the state at the other end of any of the successor edges
2537 // matches the current state, require all edges to match. This
2538 // guards against loops in the middle of a sequence.
2539 if (SomeSuccHasSame && !AllSuccsHaveSame)
2540 S.ClearSequenceProgress();
2543 case S_CanRelease: {
2544 const Value *Arg = I->first;
2545 const TerminatorInst *TI = cast<TerminatorInst>(&BB->back());
2546 bool SomeSuccHasSame = false;
2547 bool AllSuccsHaveSame = true;
2548 PtrState &S = I->second;
2549 succ_const_iterator SI(TI), SE(TI, false);
2551 // If the terminator is an invoke marked with the
2552 // clang.arc.no_objc_arc_exceptions metadata, the unwind edge can be
2553 // ignored, for ARC purposes.
2554 if (isa<InvokeInst>(TI) && TI->getMetadata(NoObjCARCExceptionsMDKind))
2557 for (; SI != SE; ++SI) {
2558 Sequence SuccSSeq = S_None;
2559 bool SuccSRRIKnownSafe = false;
2560 // If VisitBottomUp has visited this successor, take what we know about it.
2561 DenseMap<const BasicBlock *, BBState>::iterator BBI = BBStates.find(*SI);
2562 if (BBI != BBStates.end()) {
2563 const PtrState &SuccS = BBI->second.getPtrBottomUpState(Arg);
2564 SuccSSeq = SuccS.GetSeq();
2565 SuccSRRIKnownSafe = SuccS.RRI.KnownSafe;
2569 if (!S.RRI.KnownSafe && !SuccSRRIKnownSafe) {
2570 S.ClearSequenceProgress();
2576 SomeSuccHasSame = true;
2580 case S_MovableRelease:
2582 if (!S.RRI.KnownSafe && !SuccSRRIKnownSafe)
2583 AllSuccsHaveSame = false;
2586 llvm_unreachable("bottom-up pointer in retain state!");
2589 // If the state at the other end of any of the successor edges
2590 // matches the current state, require all edges to match. This
2591 // guards against loops in the middle of a sequence.
2592 if (SomeSuccHasSame && !AllSuccsHaveSame)
2593 S.ClearSequenceProgress();
2600 ObjCARCOpt::VisitInstructionBottomUp(Instruction *Inst,
2602 MapVector<Value *, RRInfo> &Retains,
2603 BBState &MyStates) {
2604 bool NestingDetected = false;
2605 InstructionClass Class = GetInstructionClass(Inst);
2606 const Value *Arg = 0;
2610 Arg = GetObjCArg(Inst);
2612 PtrState &S = MyStates.getPtrBottomUpState(Arg);
// Check for a situation where we see two releases in a row on the same
// pointer. If so, make a note, and we'll circle back to revisit it after
// we've hopefully eliminated the second release, which may allow us to
// eliminate the first release too.
2618 // Theoretically we could implement removal of nested retain+release
2619 // pairs by making PtrState hold a stack of states, but this is
2620 // simple and avoids adding overhead for the non-nested case.
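// A sketch of the nesting in question (%x is hypothetical):
//   %0 = call i8* @objc_retain(i8* %x)
//   %1 = call i8* @objc_retain(i8* %x)
//   call void @objc_release(i8* %x)
//   call void @objc_release(i8* %x)
// Walking bottom-up, the later release is seen first; seeing another
// release on %x while already in a release state flags the nesting.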
2621 if (S.GetSeq() == S_Release || S.GetSeq() == S_MovableRelease)
2622 NestingDetected = true;
2626 MDNode *ReleaseMetadata = Inst->getMetadata(ImpreciseReleaseMDKind);
2627 S.SetSeq(ReleaseMetadata ? S_MovableRelease : S_Release);
2628 S.RRI.ReleaseMetadata = ReleaseMetadata;
2629 S.RRI.KnownSafe = S.IsKnownNested() || S.IsKnownIncremented();
2630 S.RRI.IsTailCallRelease = cast<CallInst>(Inst)->isTailCall();
2631 S.RRI.Calls.insert(Inst);
2633 S.IncrementRefCount();
2634 S.IncrementNestCount();
2637 case IC_RetainBlock:
2638 // An objc_retainBlock call with just a use may need to be kept,
2639 // because it may be copying a block from the stack to the heap.
2640 if (!IsRetainBlockOptimizable(Inst))
2645 Arg = GetObjCArg(Inst);
2647 PtrState &S = MyStates.getPtrBottomUpState(Arg);
2648 S.DecrementRefCount();
2649 S.SetAtLeastOneRefCount();
2650 S.DecrementNestCount();
2652 switch (S.GetSeq()) {
2655 case S_MovableRelease:
2657 S.RRI.ReverseInsertPts.clear();
2660 // Don't do retain+release tracking for IC_RetainRV, because it's
2661 // better to let it remain as the first instruction after a call.
2662 if (Class != IC_RetainRV) {
2663 S.RRI.IsRetainBlock = Class == IC_RetainBlock;
2664 Retains[Inst] = S.RRI;
2666 S.ClearSequenceProgress();
2671 llvm_unreachable("bottom-up pointer in retain state!");
2673 return NestingDetected;
2675 case IC_AutoreleasepoolPop:
2676 // Conservatively, clear MyStates for all known pointers.
2677 MyStates.clearBottomUpPointers();
2678 return NestingDetected;
2679 case IC_AutoreleasepoolPush:
2681 // These are irrelevant.
2682 return NestingDetected;
2687 // Consider any other possible effects of this instruction on each
2688 // pointer being tracked.
2689 for (BBState::ptr_iterator MI = MyStates.bottom_up_ptr_begin(),
2690 ME = MyStates.bottom_up_ptr_end(); MI != ME; ++MI) {
2691 const Value *Ptr = MI->first;
2693 continue; // Handled above.
2694 PtrState &S = MI->second;
2695 Sequence Seq = S.GetSeq();
2697 // Check for possible releases.
2698 if (CanAlterRefCount(Inst, Ptr, PA, Class)) {
2699 S.DecrementRefCount();
2702 S.SetSeq(S_CanRelease);
2706 case S_MovableRelease:
2711 llvm_unreachable("bottom-up pointer in retain state!");
2715 // Check for possible direct uses.
2718 case S_MovableRelease:
2719 if (CanUse(Inst, Ptr, PA, Class)) {
2720 assert(S.RRI.ReverseInsertPts.empty());
2721 // If this is an invoke instruction, we're scanning it as part of
2722 // one of its successor blocks, since we can't insert code after it
2723 // in its own block, and we don't want to split critical edges.
2724 if (isa<InvokeInst>(Inst))
2725 S.RRI.ReverseInsertPts.insert(BB->getFirstInsertionPt());
2727 S.RRI.ReverseInsertPts.insert(llvm::next(BasicBlock::iterator(Inst)));
2729 } else if (Seq == S_Release &&
2730 (Class == IC_User || Class == IC_CallOrUser)) {
2731 // Non-movable releases depend on any possible objc pointer use.
2733 assert(S.RRI.ReverseInsertPts.empty());
2734 // As above; handle invoke specially.
2735 if (isa<InvokeInst>(Inst))
2736 S.RRI.ReverseInsertPts.insert(BB->getFirstInsertionPt());
2738 S.RRI.ReverseInsertPts.insert(llvm::next(BasicBlock::iterator(Inst)));
2742 if (CanUse(Inst, Ptr, PA, Class))
2750 llvm_unreachable("bottom-up pointer in retain state!");
2754 return NestingDetected;
2758 ObjCARCOpt::VisitBottomUp(BasicBlock *BB,
2759 DenseMap<const BasicBlock *, BBState> &BBStates,
2760 MapVector<Value *, RRInfo> &Retains) {
2761 bool NestingDetected = false;
2762 BBState &MyStates = BBStates[BB];
2764 // Merge the states from each successor to compute the initial state
2765 // for the current block.
2766 const TerminatorInst *TI = cast<TerminatorInst>(&BB->back());
2767 succ_const_iterator SI(TI), SE(TI, false);
2769 MyStates.SetAsExit();
2771 // If the terminator is an invoke marked with the
2772 // clang.arc.no_objc_arc_exceptions metadata, the unwind edge can be
2773 // ignored, for ARC purposes.
2774 if (isa<InvokeInst>(TI) && TI->getMetadata(NoObjCARCExceptionsMDKind))
2778 const BasicBlock *Succ = *SI++;
2781 DenseMap<const BasicBlock *, BBState>::iterator I = BBStates.find(Succ);
2782 // If we haven't seen this node yet, then we've found a CFG cycle.
// Be optimistic here; it's CheckForCFGHazards' job to detect trouble.
2784 if (I == BBStates.end())
2786 MyStates.InitFromSucc(I->second);
2790 I = BBStates.find(Succ);
2791 if (I != BBStates.end())
2792 MyStates.MergeSucc(I->second);
2799 // Visit all the instructions, bottom-up.
2800 for (BasicBlock::iterator I = BB->end(), E = BB->begin(); I != E; --I) {
2801 Instruction *Inst = llvm::prior(I);
2803 // Invoke instructions are visited as part of their successors (below).
2804 if (isa<InvokeInst>(Inst))
2807 NestingDetected |= VisitInstructionBottomUp(Inst, BB, Retains, MyStates);
2810 // If there's a predecessor with an invoke, visit the invoke as
2811 // if it were part of this block, since we can't insert code after
2812 // an invoke in its own block, and we don't want to split critical
2814 for (pred_iterator PI(BB), PE(BB, false); PI != PE; ++PI) {
2815 BasicBlock *Pred = *PI;
2816 TerminatorInst *PredTI = cast<TerminatorInst>(&Pred->back());
2817 if (isa<InvokeInst>(PredTI))
2818 NestingDetected |= VisitInstructionBottomUp(PredTI, BB, Retains, MyStates);
2821 return NestingDetected;
2825 ObjCARCOpt::VisitInstructionTopDown(Instruction *Inst,
2826 DenseMap<Value *, RRInfo> &Releases,
2827 BBState &MyStates) {
2828 bool NestingDetected = false;
2829 InstructionClass Class = GetInstructionClass(Inst);
2830 const Value *Arg = 0;
2833 case IC_RetainBlock:
2834 // An objc_retainBlock call with just a use may need to be kept,
2835 // because it may be copying a block from the stack to the heap.
2836 if (!IsRetainBlockOptimizable(Inst))
2841 Arg = GetObjCArg(Inst);
2843 PtrState &S = MyStates.getPtrTopDownState(Arg);
2845 // Don't do retain+release tracking for IC_RetainRV, because it's
2846 // better to let it remain as the first instruction after a call.
2847 if (Class != IC_RetainRV) {
// Check for a situation where we see two retains in a row on the same
// pointer. If so, make a note, and we'll circle back to revisit it after
// we've hopefully eliminated the second retain, which may allow us to
// eliminate the first retain too.
2852 // Theoretically we could implement removal of nested retain+release
2853 // pairs by making PtrState hold a stack of states, but this is
2854 // simple and avoids adding overhead for the non-nested case.
2855 if (S.GetSeq() == S_Retain)
2856 NestingDetected = true;
2860 S.RRI.IsRetainBlock = Class == IC_RetainBlock;
// Don't check S.IsKnownIncremented() here because it's not
// sufficient.
2863 S.RRI.KnownSafe = S.IsKnownNested();
2864 S.RRI.Calls.insert(Inst);
2867 S.SetAtLeastOneRefCount();
2868 S.IncrementRefCount();
2869 S.IncrementNestCount();
2870 return NestingDetected;
2873 Arg = GetObjCArg(Inst);
2875 PtrState &S = MyStates.getPtrTopDownState(Arg);
2876 S.DecrementRefCount();
2877 S.DecrementNestCount();
2879 switch (S.GetSeq()) {
2882 S.RRI.ReverseInsertPts.clear();
2885 S.RRI.ReleaseMetadata = Inst->getMetadata(ImpreciseReleaseMDKind);
2886 S.RRI.IsTailCallRelease = cast<CallInst>(Inst)->isTailCall();
2887 Releases[Inst] = S.RRI;
2888 S.ClearSequenceProgress();
2894 case S_MovableRelease:
2895 llvm_unreachable("top-down pointer in release state!");
2899 case IC_AutoreleasepoolPop:
2900 // Conservatively, clear MyStates for all known pointers.
2901 MyStates.clearTopDownPointers();
2902 return NestingDetected;
2903 case IC_AutoreleasepoolPush:
2905 // These are irrelevant.
2906 return NestingDetected;
2911 // Consider any other possible effects of this instruction on each
2912 // pointer being tracked.
2913 for (BBState::ptr_iterator MI = MyStates.top_down_ptr_begin(),
2914 ME = MyStates.top_down_ptr_end(); MI != ME; ++MI) {
2915 const Value *Ptr = MI->first;
2917 continue; // Handled above.
2918 PtrState &S = MI->second;
2919 Sequence Seq = S.GetSeq();
2921 // Check for possible releases.
2922 if (CanAlterRefCount(Inst, Ptr, PA, Class)) {
2923 S.DecrementRefCount();
2926 S.SetSeq(S_CanRelease);
2927 assert(S.RRI.ReverseInsertPts.empty());
2928 S.RRI.ReverseInsertPts.insert(Inst);
// One call can't cause a transition from S_Retain to S_CanRelease
// and S_CanRelease to S_Use. If we've made the first transition,
// we're done.
2940 case S_MovableRelease:
2941 llvm_unreachable("top-down pointer in release state!");
2945 // Check for possible direct uses.
2948 if (CanUse(Inst, Ptr, PA, Class))
2957 case S_MovableRelease:
2958 llvm_unreachable("top-down pointer in release state!");
2962 return NestingDetected;
2966 ObjCARCOpt::VisitTopDown(BasicBlock *BB,
2967 DenseMap<const BasicBlock *, BBState> &BBStates,
2968 DenseMap<Value *, RRInfo> &Releases) {
2969 bool NestingDetected = false;
2970 BBState &MyStates = BBStates[BB];
2972 // Merge the states from each predecessor to compute the initial state
2973 // for the current block.
2974 const_pred_iterator PI(BB), PE(BB, false);
2976 MyStates.SetAsEntry();
2979 unsigned OperandNo = PI.getOperandNo();
2980 const Use &Us = PI.getUse();
2983 // Skip invoke unwind edges on invoke instructions marked with
2984 // clang.arc.no_objc_arc_exceptions.
2985 if (const InvokeInst *II = dyn_cast<InvokeInst>(Us.getUser()))
2986 if (OperandNo == II->getNumArgOperands() + 2 &&
2987 II->getMetadata(NoObjCARCExceptionsMDKind))
2990 const BasicBlock *Pred = cast<TerminatorInst>(Us.getUser())->getParent();
2993 DenseMap<const BasicBlock *, BBState>::iterator I = BBStates.find(Pred);
2994 // If we haven't seen this node yet, then we've found a CFG cycle.
// Be optimistic here; it's CheckForCFGHazards' job to detect trouble.
2996 if (I == BBStates.end() || !I->second.isVisitedTopDown())
2998 MyStates.InitFromPred(I->second);
3002 I = BBStates.find(Pred);
3003 if (I != BBStates.end() && I->second.isVisitedTopDown())
3004 MyStates.MergePred(I->second);
3010 // Visit all the instructions, top-down.
3011 for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
3012 Instruction *Inst = I;
3013 NestingDetected |= VisitInstructionTopDown(Inst, Releases, MyStates);
3016 CheckForCFGHazards(BB, BBStates, MyStates);
3017 return NestingDetected;
3021 ComputePostOrders(Function &F,
3022 SmallVectorImpl<BasicBlock *> &PostOrder,
3023 SmallVectorImpl<BasicBlock *> &ReverseCFGPostOrder) {
3024 /// Backedges - Backedges detected in the DFS. These edges will be
3025 /// ignored in the reverse-CFG DFS, so that loops with multiple exits will be
3026 /// traversed in the desired order.
3027 DenseSet<std::pair<BasicBlock *, BasicBlock *> > Backedges;
3029 /// Visited - The visited set, for doing DFS walks.
3030 SmallPtrSet<BasicBlock *, 16> Visited;
3032 // Do DFS, computing the PostOrder.
3033 SmallPtrSet<BasicBlock *, 16> OnStack;
3034 SmallVector<std::pair<BasicBlock *, succ_iterator>, 16> SuccStack;
3035 BasicBlock *EntryBB = &F.getEntryBlock();
3036 SuccStack.push_back(std::make_pair(EntryBB, succ_begin(EntryBB)));
3037 Visited.insert(EntryBB);
3038 OnStack.insert(EntryBB);
3041 TerminatorInst *TI = cast<TerminatorInst>(&SuccStack.back().first->back());
3042 succ_iterator End = succ_iterator(TI, true);
3043 while (SuccStack.back().second != End) {
3044 BasicBlock *BB = *SuccStack.back().second++;
3045 if (Visited.insert(BB)) {
3046 SuccStack.push_back(std::make_pair(BB, succ_begin(BB)));
3050 if (OnStack.count(BB))
3051 Backedges.insert(std::make_pair(SuccStack.back().first, BB));
3053 OnStack.erase(SuccStack.back().first);
3054 PostOrder.push_back(SuccStack.pop_back_val().first);
3055 } while (!SuccStack.empty());
// Compute the exits, which are the starting points for reverse-CFG DFS.
// This includes blocks where all the successors are backedges that
// would be ignored in the reverse-CFG DFS, since such blocks would
// otherwise never be visited.
3062 SmallVector<BasicBlock *, 4> Exits;
3063 for (Function::iterator I = F.begin(), E = F.end(); I != E; ++I) {
3065 TerminatorInst *TI = cast<TerminatorInst>(&BB->back());
3066 for (succ_iterator SI(TI), SE(TI, true); SI != SE; ++SI)
3067 if (!Backedges.count(std::make_pair(BB, *SI)))
3068 goto HasNonBackedgeSucc;
3069 Exits.push_back(BB);
3070 HasNonBackedgeSucc:;
3073 // Do reverse-CFG DFS, computing the reverse-CFG PostOrder.
3074 SmallVector<std::pair<BasicBlock *, pred_iterator>, 16> PredStack;
3075 for (SmallVectorImpl<BasicBlock *>::iterator I = Exits.begin(), E = Exits.end();
3077 BasicBlock *ExitBB = *I;
3078 PredStack.push_back(std::make_pair(ExitBB, pred_begin(ExitBB)));
3079 Visited.insert(ExitBB);
3080 while (!PredStack.empty()) {
3081 reverse_dfs_next_succ:
3082 pred_iterator End = pred_end(PredStack.back().first);
3083 while (PredStack.back().second != End) {
3084 BasicBlock *BB = *PredStack.back().second++;
3085 // Skip backedges detected in the forward-CFG DFS.
3086 if (Backedges.count(std::make_pair(BB, PredStack.back().first)))
3088 if (Visited.insert(BB)) {
3089 PredStack.push_back(std::make_pair(BB, pred_begin(BB)));
3090 goto reverse_dfs_next_succ;
3093 ReverseCFGPostOrder.push_back(PredStack.pop_back_val().first);
3098 // Visit - Visit the function both top-down and bottom-up.
3100 ObjCARCOpt::Visit(Function &F,
3101 DenseMap<const BasicBlock *, BBState> &BBStates,
3102 MapVector<Value *, RRInfo> &Retains,
3103 DenseMap<Value *, RRInfo> &Releases) {
3105 // Use reverse-postorder traversals, because we magically know that loops
3106 // will be well behaved, i.e. they won't repeatedly call retain on a single
3107 // pointer without doing a release. We can't use the ReversePostOrderTraversal
3108 // class here because we want the reverse-CFG postorder to consider each
3109 // function exit point, and we want to ignore selected cycle edges.
3110 SmallVector<BasicBlock *, 16> PostOrder;
3111 SmallVector<BasicBlock *, 16> ReverseCFGPostOrder;
3112 ComputePostOrders(F, PostOrder, ReverseCFGPostOrder);
3114 // Use reverse-postorder on the reverse CFG for bottom-up.
3115 bool BottomUpNestingDetected = false;
3116 for (SmallVectorImpl<BasicBlock *>::const_reverse_iterator I =
3117 ReverseCFGPostOrder.rbegin(), E = ReverseCFGPostOrder.rend();
3119 BottomUpNestingDetected |= VisitBottomUp(*I, BBStates, Retains);
3121 // Use reverse-postorder for top-down.
3122 bool TopDownNestingDetected = false;
3123 for (SmallVectorImpl<BasicBlock *>::const_reverse_iterator I =
3124 PostOrder.rbegin(), E = PostOrder.rend();
3126 TopDownNestingDetected |= VisitTopDown(*I, BBStates, Releases);
3128 return TopDownNestingDetected && BottomUpNestingDetected;
3131 /// MoveCalls - Move the calls in RetainsToMove and ReleasesToMove.
3132 void ObjCARCOpt::MoveCalls(Value *Arg,
3133 RRInfo &RetainsToMove,
3134 RRInfo &ReleasesToMove,
3135 MapVector<Value *, RRInfo> &Retains,
3136 DenseMap<Value *, RRInfo> &Releases,
3137 SmallVectorImpl<Instruction *> &DeadInsts,
3139 Type *ArgTy = Arg->getType();
3140 Type *ParamTy = PointerType::getUnqual(Type::getInt8Ty(ArgTy->getContext()));
3142 // Insert the new retain and release calls.
3143 for (SmallPtrSet<Instruction *, 2>::const_iterator
3144 PI = ReleasesToMove.ReverseInsertPts.begin(),
3145 PE = ReleasesToMove.ReverseInsertPts.end(); PI != PE; ++PI) {
3146 Instruction *InsertPt = *PI;
3147 Value *MyArg = ArgTy == ParamTy ? Arg :
3148 new BitCastInst(Arg, ParamTy, "", InsertPt);
3150 CallInst::Create(RetainsToMove.IsRetainBlock ?
3151 getRetainBlockCallee(M) : getRetainCallee(M),
3152 MyArg, "", InsertPt);
3153 Call->setDoesNotThrow();
3154 if (RetainsToMove.IsRetainBlock)
3155 Call->setMetadata(CopyOnEscapeMDKind,
3156 MDNode::get(M->getContext(), ArrayRef<Value *>()));
3158 Call->setTailCall();
3160 for (SmallPtrSet<Instruction *, 2>::const_iterator
3161 PI = RetainsToMove.ReverseInsertPts.begin(),
3162 PE = RetainsToMove.ReverseInsertPts.end(); PI != PE; ++PI) {
3163 Instruction *InsertPt = *PI;
3164 Value *MyArg = ArgTy == ParamTy ? Arg :
3165 new BitCastInst(Arg, ParamTy, "", InsertPt);
3166 CallInst *Call = CallInst::Create(getReleaseCallee(M), MyArg,
3168 // Attach a clang.imprecise_release metadata tag, if appropriate.
3169 if (MDNode *M = ReleasesToMove.ReleaseMetadata)
3170 Call->setMetadata(ImpreciseReleaseMDKind, M);
3171 Call->setDoesNotThrow();
3172 if (ReleasesToMove.IsTailCallRelease)
3173 Call->setTailCall();
3176 // Delete the original retain and release calls.
3177 for (SmallPtrSet<Instruction *, 2>::const_iterator
3178 AI = RetainsToMove.Calls.begin(),
3179 AE = RetainsToMove.Calls.end(); AI != AE; ++AI) {
3180 Instruction *OrigRetain = *AI;
3181 Retains.blot(OrigRetain);
3182 DeadInsts.push_back(OrigRetain);
3184 for (SmallPtrSet<Instruction *, 2>::const_iterator
3185 AI = ReleasesToMove.Calls.begin(),
3186 AE = ReleasesToMove.Calls.end(); AI != AE; ++AI) {
3187 Instruction *OrigRelease = *AI;
3188 Releases.erase(OrigRelease);
3189 DeadInsts.push_back(OrigRelease);
3193 /// PerformCodePlacement - Identify pairings between the retains and releases,
3194 /// and delete and/or move them.
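///
/// A minimal sketch of the payoff (%p is hypothetical): given
///   %0 = call i8* @objc_retain(i8* %p)
///   ...code proven unable to decrement or use %p's reference count...
///   call void @objc_release(i8* %p)
/// the pair is deleted outright; when outright deletion isn't safe, the
/// calls are instead moved toward each other along each path.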
3196 ObjCARCOpt::PerformCodePlacement(DenseMap<const BasicBlock *, BBState>
3198 MapVector<Value *, RRInfo> &Retains,
3199 DenseMap<Value *, RRInfo> &Releases,
3201 bool AnyPairsCompletelyEliminated = false;
3202 RRInfo RetainsToMove;
3203 RRInfo ReleasesToMove;
3204 SmallVector<Instruction *, 4> NewRetains;
3205 SmallVector<Instruction *, 4> NewReleases;
3206 SmallVector<Instruction *, 8> DeadInsts;
3208 // Visit each retain.
3209 for (MapVector<Value *, RRInfo>::const_iterator I = Retains.begin(),
3210 E = Retains.end(); I != E; ++I) {
3211 Value *V = I->first;
3212 if (!V) continue; // blotted
3214 Instruction *Retain = cast<Instruction>(V);
3215 Value *Arg = GetObjCArg(Retain);
3217 // If the object being released is in static or stack storage, we know it's
3218 // not being managed by ObjC reference counting, so we can delete pairs
3219 // regardless of what possible decrements or uses lie between them.
3220 bool KnownSafe = isa<Constant>(Arg) || isa<AllocaInst>(Arg);
3222 // A constant pointer can't be pointing to an object on the heap. It may
3223 // be reference-counted, but it won't be deleted.
3224 if (const LoadInst *LI = dyn_cast<LoadInst>(Arg))
3225 if (const GlobalVariable *GV =
3226 dyn_cast<GlobalVariable>(
3227 StripPointerCastsAndObjCCalls(LI->getPointerOperand())))
3228 if (GV->isConstant())
3231 // If a pair happens in a region where it is known that the reference count
3232 // is already incremented, we can similarly ignore possible decrements.
3233 bool KnownSafeTD = true, KnownSafeBU = true;
3235 // Connect the dots between the top-down-collected RetainsToMove and
3236 // bottom-up-collected ReleasesToMove to form sets of related calls.
3237 // This is an iterative process so that we connect multiple releases
3238 // to multiple retains if needed.
3239 unsigned OldDelta = 0;
3240 unsigned NewDelta = 0;
3241 unsigned OldCount = 0;
3242 unsigned NewCount = 0;
3243 bool FirstRelease = true;
3244 bool FirstRetain = true;
3245 NewRetains.push_back(Retain);
3247 for (SmallVectorImpl<Instruction *>::const_iterator
3248 NI = NewRetains.begin(), NE = NewRetains.end(); NI != NE; ++NI) {
3249 Instruction *NewRetain = *NI;
3250 MapVector<Value *, RRInfo>::const_iterator It = Retains.find(NewRetain);
3251 assert(It != Retains.end());
3252 const RRInfo &NewRetainRRI = It->second;
3253 KnownSafeTD &= NewRetainRRI.KnownSafe;
3254 for (SmallPtrSet<Instruction *, 2>::const_iterator
3255 LI = NewRetainRRI.Calls.begin(),
3256 LE = NewRetainRRI.Calls.end(); LI != LE; ++LI) {
3257 Instruction *NewRetainRelease = *LI;
3258 DenseMap<Value *, RRInfo>::const_iterator Jt =
3259 Releases.find(NewRetainRelease);
3260 if (Jt == Releases.end())
3262 const RRInfo &NewRetainReleaseRRI = Jt->second;
3263 assert(NewRetainReleaseRRI.Calls.count(NewRetain));
3264 if (ReleasesToMove.Calls.insert(NewRetainRelease)) {
3266 BBStates[NewRetainRelease->getParent()].GetAllPathCount();
3268 // Merge the ReleaseMetadata and IsTailCallRelease values.
3270 ReleasesToMove.ReleaseMetadata =
3271 NewRetainReleaseRRI.ReleaseMetadata;
3272 ReleasesToMove.IsTailCallRelease =
3273 NewRetainReleaseRRI.IsTailCallRelease;
3274 FirstRelease = false;
3276 if (ReleasesToMove.ReleaseMetadata !=
3277 NewRetainReleaseRRI.ReleaseMetadata)
3278 ReleasesToMove.ReleaseMetadata = 0;
3279 if (ReleasesToMove.IsTailCallRelease !=
3280 NewRetainReleaseRRI.IsTailCallRelease)
3281 ReleasesToMove.IsTailCallRelease = false;
3284 // Collect the optimal insertion points.
3286 for (SmallPtrSet<Instruction *, 2>::const_iterator
3287 RI = NewRetainReleaseRRI.ReverseInsertPts.begin(),
3288 RE = NewRetainReleaseRRI.ReverseInsertPts.end();
3290 Instruction *RIP = *RI;
3291 if (ReleasesToMove.ReverseInsertPts.insert(RIP))
3292 NewDelta -= BBStates[RIP->getParent()].GetAllPathCount();
3294 NewReleases.push_back(NewRetainRelease);
3299 if (NewReleases.empty()) break;
3301 // Back the other way.
3302 for (SmallVectorImpl<Instruction *>::const_iterator
3303 NI = NewReleases.begin(), NE = NewReleases.end(); NI != NE; ++NI) {
3304 Instruction *NewRelease = *NI;
3305 DenseMap<Value *, RRInfo>::const_iterator It =
3306 Releases.find(NewRelease);
3307 assert(It != Releases.end());
3308 const RRInfo &NewReleaseRRI = It->second;
3309 KnownSafeBU &= NewReleaseRRI.KnownSafe;
3310 for (SmallPtrSet<Instruction *, 2>::const_iterator
3311 LI = NewReleaseRRI.Calls.begin(),
3312 LE = NewReleaseRRI.Calls.end(); LI != LE; ++LI) {
3313 Instruction *NewReleaseRetain = *LI;
3314 MapVector<Value *, RRInfo>::const_iterator Jt =
3315 Retains.find(NewReleaseRetain);
3316 if (Jt == Retains.end())
3318 const RRInfo &NewReleaseRetainRRI = Jt->second;
3319 assert(NewReleaseRetainRRI.Calls.count(NewRelease));
3320 if (RetainsToMove.Calls.insert(NewReleaseRetain)) {
3321 unsigned PathCount =
3322 BBStates[NewReleaseRetain->getParent()].GetAllPathCount();
3323 OldDelta += PathCount;
3324 OldCount += PathCount;
3326 // Merge the IsRetainBlock values.
3328 RetainsToMove.IsRetainBlock = NewReleaseRetainRRI.IsRetainBlock;
3329 FirstRetain = false;
} else if (RetainsToMove.IsRetainBlock !=
3331 NewReleaseRetainRRI.IsRetainBlock)
3332 // It's not possible to merge the sequences if one uses
3333 // objc_retain and the other uses objc_retainBlock.
3336 // Collect the optimal insertion points.
3338 for (SmallPtrSet<Instruction *, 2>::const_iterator
3339 RI = NewReleaseRetainRRI.ReverseInsertPts.begin(),
3340 RE = NewReleaseRetainRRI.ReverseInsertPts.end();
3342 Instruction *RIP = *RI;
3343 if (RetainsToMove.ReverseInsertPts.insert(RIP)) {
3344 PathCount = BBStates[RIP->getParent()].GetAllPathCount();
3345 NewDelta += PathCount;
3346 NewCount += PathCount;
3349 NewRetains.push_back(NewReleaseRetain);
3353 NewReleases.clear();
3354 if (NewRetains.empty()) break;
3357 // If the pointer is known incremented or nested, we can safely delete the
3358 // pair regardless of what's between them.
3359 if (KnownSafeTD || KnownSafeBU) {
3360 RetainsToMove.ReverseInsertPts.clear();
3361 ReleasesToMove.ReverseInsertPts.clear();
3364 // Determine whether the new insertion points we computed preserve the
3365 // balance of retain and release calls through the program.
3366 // TODO: If the fully aggressive solution isn't valid, try to find a
3367 // less aggressive solution which is.
// Determine whether the original call points are balanced in the retain and
// release calls through the program. If not, conservatively don't touch
// them.
// TODO: It's theoretically possible to do code motion in this case, as
// long as the existing imbalances are maintained.
3380 // Ok, everything checks out and we're all set. Let's move some code!
3382 AnyPairsCompletelyEliminated = NewCount == 0;
3383 NumRRs += OldCount - NewCount;
3384 MoveCalls(Arg, RetainsToMove, ReleasesToMove,
3385 Retains, Releases, DeadInsts, M);
3388 NewReleases.clear();
3390 RetainsToMove.clear();
3391 ReleasesToMove.clear();
3394 // Now that we're done moving everything, we can delete the newly dead
3395 // instructions, as we no longer need them as insert points.
3396 while (!DeadInsts.empty())
3397 EraseInstruction(DeadInsts.pop_back_val());
3399 return AnyPairsCompletelyEliminated;
3402 /// OptimizeWeakCalls - Weak pointer optimizations.
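///
/// An illustrative sketch of the redundant-load case (%w is a hypothetical
/// weak slot):
///   %0 = call i8* @objc_loadWeak(i8** %w)
///   %1 = call i8* @objc_loadWeak(i8** %w)
/// With nothing in between that can modify %w, uses of %1 are replaced with
/// %0 and the second load is erased.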
3403 void ObjCARCOpt::OptimizeWeakCalls(Function &F) {
// First, do memdep-style RLE and S2L optimizations. We can't use memdep
// itself because it uses AliasAnalysis and we need to do provenance
// queries instead.
3407 for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
3408 Instruction *Inst = &*I++;
3409 InstructionClass Class = GetBasicInstructionClass(Inst);
3410 if (Class != IC_LoadWeak && Class != IC_LoadWeakRetained)
3413 // Delete objc_loadWeak calls with no users.
3414 if (Class == IC_LoadWeak && Inst->use_empty()) {
3415 Inst->eraseFromParent();
3419 // TODO: For now, just look for an earlier available version of this value
3420 // within the same block. Theoretically, we could do memdep-style non-local
// analysis too, but that would require caching. A better approach would be to
3422 // use the technique that EarlyCSE uses.
3423 inst_iterator Current = llvm::prior(I);
3424 BasicBlock *CurrentBB = Current.getBasicBlockIterator();
3425 for (BasicBlock::iterator B = CurrentBB->begin(),
3426 J = Current.getInstructionIterator();
3428 Instruction *EarlierInst = &*llvm::prior(J);
3429 InstructionClass EarlierClass = GetInstructionClass(EarlierInst);
3430 switch (EarlierClass) {
3432 case IC_LoadWeakRetained: {
// If this is loading from the same pointer, replace this load's value
// with the previously loaded value.
3435 CallInst *Call = cast<CallInst>(Inst);
3436 CallInst *EarlierCall = cast<CallInst>(EarlierInst);
3437 Value *Arg = Call->getArgOperand(0);
3438 Value *EarlierArg = EarlierCall->getArgOperand(0);
3439 switch (PA.getAA()->alias(Arg, EarlierArg)) {
3440 case AliasAnalysis::MustAlias:
3442 // If the load has a builtin retain, insert a plain retain for it.
3443 if (Class == IC_LoadWeakRetained) {
3445 CallInst::Create(getRetainCallee(F.getParent()), EarlierCall,
3449 // Zap the fully redundant load.
3450 Call->replaceAllUsesWith(EarlierCall);
3451 Call->eraseFromParent();
3453 case AliasAnalysis::MayAlias:
3454 case AliasAnalysis::PartialAlias:
3456 case AliasAnalysis::NoAlias:
// If this is storing to the same pointer and has the same size etc.,
// replace this load's value with the stored value.
3465 CallInst *Call = cast<CallInst>(Inst);
3466 CallInst *EarlierCall = cast<CallInst>(EarlierInst);
3467 Value *Arg = Call->getArgOperand(0);
3468 Value *EarlierArg = EarlierCall->getArgOperand(0);
3469 switch (PA.getAA()->alias(Arg, EarlierArg)) {
3470 case AliasAnalysis::MustAlias:
3472 // If the load has a builtin retain, insert a plain retain for it.
3473 if (Class == IC_LoadWeakRetained) {
3475 CallInst::Create(getRetainCallee(F.getParent()), EarlierCall,
3479 // Zap the fully redundant load.
3480 Call->replaceAllUsesWith(EarlierCall->getArgOperand(1));
3481 Call->eraseFromParent();
3483 case AliasAnalysis::MayAlias:
3484 case AliasAnalysis::PartialAlias:
3486 case AliasAnalysis::NoAlias:
// TODO: Grab the copied value.
3495 case IC_AutoreleasepoolPush:
3498 // Weak pointers are only modified through the weak entry points
3499 // (and arbitrary calls, which could call the weak entry points).
3502 // Anything else could modify the weak pointer.
3509 // Then, for each destroyWeak with an alloca operand, check to see if
3510 // the alloca and all its users can be zapped.
3511 for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
3512 Instruction *Inst = &*I++;
3513 InstructionClass Class = GetBasicInstructionClass(Inst);
3514 if (Class != IC_DestroyWeak)
3517 CallInst *Call = cast<CallInst>(Inst);
3518 Value *Arg = Call->getArgOperand(0);
3519 if (AllocaInst *Alloca = dyn_cast<AllocaInst>(Arg)) {
3520 for (Value::use_iterator UI = Alloca->use_begin(),
3521 UE = Alloca->use_end(); UI != UE; ++UI) {
3522 Instruction *UserInst = cast<Instruction>(*UI);
3523 switch (GetBasicInstructionClass(UserInst)) {
3526 case IC_DestroyWeak:
3533 for (Value::use_iterator UI = Alloca->use_begin(),
3534 UE = Alloca->use_end(); UI != UE; ) {
3535 CallInst *UserInst = cast<CallInst>(*UI++);
3536 if (!UserInst->use_empty())
3537 UserInst->replaceAllUsesWith(UserInst->getArgOperand(0));
3538 UserInst->eraseFromParent();
3540 Alloca->eraseFromParent();
3546 /// OptimizeSequences - Identify program paths which execute sequences of
3547 /// retains and releases which can be eliminated.
3548 bool ObjCARCOpt::OptimizeSequences(Function &F) {
3549 /// Releases, Retains - These are used to store the results of the main flow
3550 /// analysis. These use Value* as the key instead of Instruction* so that the
3551 /// map stays valid when we get around to rewriting code and calls get
3552 /// replaced by arguments.
3553 DenseMap<Value *, RRInfo> Releases;
3554 MapVector<Value *, RRInfo> Retains;
/// BBStates - This is used during the traversal of the function to track the
/// states for each identified object at each block.
3558 DenseMap<const BasicBlock *, BBState> BBStates;
3560 // Analyze the CFG of the function, and all instructions.
3561 bool NestingDetected = Visit(F, BBStates, Retains, Releases);
return PerformCodePlacement(BBStates, Retains, Releases, F.getParent()) &&
       NestingDetected;
3568 /// OptimizeReturns - Look for this pattern:
3570 /// %call = call i8* @something(...)
3571 /// %2 = call i8* @objc_retain(i8* %call)
3572 /// %3 = call i8* @objc_autorelease(i8* %2)
3575 /// And delete the retain and autorelease.
3577 /// Otherwise if it's just this:
3579 /// %3 = call i8* @objc_autorelease(i8* %2)
3582 /// convert the autorelease to autoreleaseRV.
3583 void ObjCARCOpt::OptimizeReturns(Function &F) {
3584 if (!F.getReturnType()->isPointerTy())
3587 SmallPtrSet<Instruction *, 4> DependingInstructions;
3588 SmallPtrSet<const BasicBlock *, 4> Visited;
3589 for (Function::iterator FI = F.begin(), FE = F.end(); FI != FE; ++FI) {
3590 BasicBlock *BB = FI;
3591 ReturnInst *Ret = dyn_cast<ReturnInst>(&BB->back());
3594 const Value *Arg = StripPointerCastsAndObjCCalls(Ret->getOperand(0));
3595 FindDependencies(NeedsPositiveRetainCount, Arg,
3596 BB, Ret, DependingInstructions, Visited, PA);
3597 if (DependingInstructions.size() != 1)
3601 CallInst *Autorelease =
3602 dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
3605 InstructionClass AutoreleaseClass =
3606 GetBasicInstructionClass(Autorelease);
3607 if (!IsAutorelease(AutoreleaseClass))
3609 if (GetObjCArg(Autorelease) != Arg)
3612 DependingInstructions.clear();
3615 // Check that there is nothing that can affect the reference
3616 // count between the autorelease and the retain.
3617 FindDependencies(CanChangeRetainCount, Arg,
3618 BB, Autorelease, DependingInstructions, Visited, PA);
3619 if (DependingInstructions.size() != 1)
3624 dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
3626 // Check that we found a retain with the same argument.
3628 !IsRetain(GetBasicInstructionClass(Retain)) ||
3629 GetObjCArg(Retain) != Arg)
3632 DependingInstructions.clear();
3635 // Convert the autorelease to an autoreleaseRV, since it's
3636 // returning the value.
3637 if (AutoreleaseClass == IC_Autorelease) {
3638 Autorelease->setCalledFunction(getAutoreleaseRVCallee(F.getParent()));
3639 AutoreleaseClass = IC_AutoreleaseRV;
3642 // Check that there is nothing that can affect the reference
3643 // count between the retain and the call.
3644 // Note that Retain need not be in BB.
3645 FindDependencies(CanChangeRetainCount, Arg, Retain->getParent(), Retain,
3646 DependingInstructions, Visited, PA);
3647 if (DependingInstructions.size() != 1)
3652 dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
3654 // Check that the pointer is the return value of the call.
3655 if (!Call || Arg != Call)
3658 // Check that the call is a regular call.
3659 InstructionClass Class = GetBasicInstructionClass(Call);
3660 if (Class != IC_CallOrUser && Class != IC_Call)
3663 // If so, we can zap the retain and autorelease.
3666 EraseInstruction(Retain);
3667 EraseInstruction(Autorelease);
3673 DependingInstructions.clear();
bool ObjCARCOpt::doInitialization(Module &M) {
  if (!EnableARCOpts)
    return false;

  // If nothing in the Module uses ARC, don't do anything.
  Run = ModuleHasARC(M);
  if (!Run)
    return false;

  // Identify the imprecise release metadata kind.
  ImpreciseReleaseMDKind =
    M.getContext().getMDKindID("clang.imprecise_release");
  CopyOnEscapeMDKind =
    M.getContext().getMDKindID("clang.arc.copy_on_escape");
  NoObjCARCExceptionsMDKind =
    M.getContext().getMDKindID("clang.arc.no_objc_arc_exceptions");
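  // These kind IDs match metadata the frontend attaches to ARC calls; for
  // example, a release whose timing need not be precise appears in the IR
  // as (schematically):
  //
  //   call void @objc_release(i8* %x), !clang.imprecise_release !0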
  // Intuitively, objc_retain and others are nocapture; in practice, however,
  // they are not, because they return their argument value. And objc_release
  // calls finalizers.

  // These are initialized lazily.
  RetainRVCallee = 0;
  AutoreleaseRVCallee = 0;
  ReleaseCallee = 0;
  RetainCallee = 0;
  RetainBlockCallee = 0;
  AutoreleaseCallee = 0;

  return false;
}
bool ObjCARCOpt::runOnFunction(Function &F) {
  if (!EnableARCOpts)
    return false;

  // If nothing in the Module uses ARC, don't do anything.
  if (!Run)
    return false;

  Changed = false;
  PA.setAA(&getAnalysis<AliasAnalysis>());

  // This pass performs several distinct transformations. As a compile-time aid
  // when compiling code that isn't ObjC, skip these if the relevant ObjC
  // library functions aren't declared.

  // Preliminary optimizations. This also computes UsedInThisFunction.
  OptimizeIndividualCalls(F);
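  // UsedInThisFunction is a bitmask with one bit per InstructionClass, so a
  // test below such as (UsedInThisFunction & (1 << IC_StoreWeak)) asks
  // whether OptimizeIndividualCalls saw any call of that class here.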
  // Optimizations for weak pointers.
  if (UsedInThisFunction & ((1 << IC_LoadWeak) |
                            (1 << IC_LoadWeakRetained) |
                            (1 << IC_StoreWeak) |
                            (1 << IC_InitWeak) |
                            (1 << IC_CopyWeak) |
                            (1 << IC_MoveWeak) |
                            (1 << IC_DestroyWeak)))
    OptimizeWeakCalls(F);

  // Optimizations for retain+release pairs.
  if (UsedInThisFunction & ((1 << IC_Retain) |
                            (1 << IC_RetainRV) |
                            (1 << IC_RetainBlock)))
    if (UsedInThisFunction & (1 << IC_Release))
      // Run OptimizeSequences until it either stops making changes or
      // no retain+release pair nesting is detected.
      while (OptimizeSequences(F)) {}

  // Optimizations if objc_autorelease is used.
  if (UsedInThisFunction &
      ((1 << IC_Autorelease) | (1 << IC_AutoreleaseRV)))
    OptimizeReturns(F);

  return Changed;
}

void ObjCARCOpt::releaseMemory() {
  PA.clear();
}
//===----------------------------------------------------------------------===//
// ARC contraction.
//===----------------------------------------------------------------------===//

// TODO: ObjCARCContract could insert PHI nodes when uses aren't
// dominated by single calls.

#include "llvm/Operator.h"
#include "llvm/InlineAsm.h"
#include "llvm/Analysis/Dominators.h"

STATISTIC(NumStoreStrongs, "Number of objc_storeStrong calls formed");
namespace {
  /// ObjCARCContract - Late ARC optimizations. These change the IR in a way
  /// that makes it difficult for ObjCARCOpt to analyze, so it's run late.
  class ObjCARCContract : public FunctionPass {
    bool Changed;
    AliasAnalysis *AA;
    DominatorTree *DT;
    ProvenanceAnalysis PA;

    /// Run - A flag indicating whether this optimization pass should run.
    bool Run;

    /// StoreStrongCallee, etc. - Declarations for ObjC runtime
    /// functions, for use in creating calls to them. These are initialized
    /// lazily to avoid cluttering up the Module with unused declarations.
    Constant *StoreStrongCallee,
             *RetainAutoreleaseCallee, *RetainAutoreleaseRVCallee;

    /// RetainRVMarker - The inline asm string to insert between calls and
    /// RetainRV calls to make the optimization work on targets which need it.
    const MDString *RetainRVMarker;

    /// StoreStrongCalls - The set of inserted objc_storeStrong calls. If
    /// at the end of walking the function we have found no alloca
    /// instructions, these calls can be marked "tail".
    DenseSet<CallInst *> StoreStrongCalls;
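    // For example (schematic; names are illustrative), a call emitted during
    // the walk as
    //     call void @objc_storeStrong(i8** %p, i8* %x)
    // can later be marked
    //     tail call void @objc_storeStrong(i8** %p, i8* %x)
    // because with no allocas present, %p cannot point into the caller's
    // stack frame, which a tail call would deallocate.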
    Constant *getStoreStrongCallee(Module *M);
    Constant *getRetainAutoreleaseCallee(Module *M);
    Constant *getRetainAutoreleaseRVCallee(Module *M);

    bool ContractAutorelease(Function &F, Instruction *Autorelease,
                             InstructionClass Class,
                             SmallPtrSet<Instruction *, 4>
                               &DependingInstructions,
                             SmallPtrSet<const BasicBlock *, 4>
                               &Visited);

    void ContractRelease(Instruction *Release,
                         inst_iterator &Iter);

    virtual void getAnalysisUsage(AnalysisUsage &AU) const;
    virtual bool doInitialization(Module &M);
    virtual bool runOnFunction(Function &F);

  public:
    static char ID;
    ObjCARCContract() : FunctionPass(ID) {
      initializeObjCARCContractPass(*PassRegistry::getPassRegistry());
    }
  };
}

char ObjCARCContract::ID = 0;
INITIALIZE_PASS_BEGIN(ObjCARCContract,
                      "objc-arc-contract", "ObjC ARC contraction", false, false)
INITIALIZE_AG_DEPENDENCY(AliasAnalysis)
INITIALIZE_PASS_DEPENDENCY(DominatorTree)
INITIALIZE_PASS_END(ObjCARCContract,
                    "objc-arc-contract", "ObjC ARC contraction", false, false)

Pass *llvm::createObjCARCContractPass() {
  return new ObjCARCContract();
}

void ObjCARCContract::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<AliasAnalysis>();
  AU.addRequired<DominatorTree>();
  AU.setPreservesCFG();
}
Constant *ObjCARCContract::getStoreStrongCallee(Module *M) {
  if (!StoreStrongCallee) {
    LLVMContext &C = M->getContext();
    Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
    Type *I8XX = PointerType::getUnqual(I8X);
    std::vector<Type *> Params;
    Params.push_back(I8XX);
    Params.push_back(I8X);

    AttrListPtr Attributes;
    Attributes.addAttr(~0u, Attribute::NoUnwind);
    Attributes.addAttr(1, Attribute::NoCapture);

    StoreStrongCallee =
      M->getOrInsertFunction(
        "objc_storeStrong",
        FunctionType::get(Type::getVoidTy(C), Params, /*isVarArg=*/false),
        Attributes);
  }
  return StoreStrongCallee;
}
Constant *ObjCARCContract::getRetainAutoreleaseCallee(Module *M) {
  if (!RetainAutoreleaseCallee) {
    LLVMContext &C = M->getContext();
    Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
    std::vector<Type *> Params;
    Params.push_back(I8X);
    FunctionType *FTy =
      FunctionType::get(I8X, Params, /*isVarArg=*/false);
    AttrListPtr Attributes;
    Attributes.addAttr(~0u, Attribute::NoUnwind);
    RetainAutoreleaseCallee =
      M->getOrInsertFunction("objc_retainAutorelease", FTy, Attributes);
  }
  return RetainAutoreleaseCallee;
}
Constant *ObjCARCContract::getRetainAutoreleaseRVCallee(Module *M) {
  if (!RetainAutoreleaseRVCallee) {
    LLVMContext &C = M->getContext();
    Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
    std::vector<Type *> Params;
    Params.push_back(I8X);
    FunctionType *FTy =
      FunctionType::get(I8X, Params, /*isVarArg=*/false);
    AttrListPtr Attributes;
    Attributes.addAttr(~0u, Attribute::NoUnwind);
    RetainAutoreleaseRVCallee =
      M->getOrInsertFunction("objc_retainAutoreleaseReturnValue", FTy,
                             Attributes);
  }
  return RetainAutoreleaseRVCallee;
}
/// ContractAutorelease - Merge an autorelease with a retain into a fused
/// call.
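///
/// For example (schematic; the SSA names are illustrative), the pair
///
///    %2 = call i8* @objc_retain(i8* %x)
///    %3 = call i8* @objc_autorelease(i8* %2)
///
/// becomes
///
///    %2 = call i8* @objc_retainAutorelease(i8* %x)
///
/// (or @objc_retainAutoreleaseReturnValue when the autorelease was an
/// autoreleaseRV).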
bool
ObjCARCContract::ContractAutorelease(Function &F, Instruction *Autorelease,
                                     InstructionClass Class,
                                     SmallPtrSet<Instruction *, 4>
                                       &DependingInstructions,
                                     SmallPtrSet<const BasicBlock *, 4>
                                       &Visited) {
  const Value *Arg = GetObjCArg(Autorelease);

  // Check that there are no instructions between the retain and the
  // autorelease (such as an autorelease_pop) which may change the count.
  CallInst *Retain = 0;
  if (Class == IC_AutoreleaseRV)
    FindDependencies(RetainAutoreleaseRVDep, Arg,
                     Autorelease->getParent(), Autorelease,
                     DependingInstructions, Visited, PA);
  else
    FindDependencies(RetainAutoreleaseDep, Arg,
                     Autorelease->getParent(), Autorelease,
                     DependingInstructions, Visited, PA);

  Visited.clear();
  if (DependingInstructions.size() != 1) {
    DependingInstructions.clear();
    return false;
  }

  Retain = dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
  DependingInstructions.clear();

  if (!Retain ||
      GetBasicInstructionClass(Retain) != IC_Retain ||
      GetObjCArg(Retain) != Arg)
    return false;

  Changed = true;
  ++NumPeeps;

  if (Class == IC_AutoreleaseRV)
    Retain->setCalledFunction(getRetainAutoreleaseRVCallee(F.getParent()));
  else
    Retain->setCalledFunction(getRetainAutoreleaseCallee(F.getParent()));

  EraseInstruction(Autorelease);
  return true;
}
/// ContractRelease - Attempt to merge an objc_release with a store, load, and
/// objc_retain to form an objc_storeStrong. This can be a little tricky because
/// the instructions don't always appear in order, and there may be unrelated
/// intervening instructions.
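///
/// For example (schematic; names are illustrative), the sequence
///
///    %old = load i8** %p
///    %1 = call i8* @objc_retain(i8* %new)
///    store i8* %new, i8** %p
///    call void @objc_release(i8* %old)
///
/// becomes
///
///    call void @objc_storeStrong(i8** %p, i8* %new)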
void ObjCARCContract::ContractRelease(Instruction *Release,
                                      inst_iterator &Iter) {
  LoadInst *Load = dyn_cast<LoadInst>(GetObjCArg(Release));
  if (!Load || !Load->isSimple()) return;

  // For now, require everything to be in one basic block.
  BasicBlock *BB = Release->getParent();
  if (Load->getParent() != BB) return;

  // Walk down to find the store.
  BasicBlock::iterator I = Load, End = BB->end();
  ++I;
  AliasAnalysis::Location Loc = AA->getLocation(Load);
  while (I != End &&
         (&*I == Release ||
          IsRetain(GetBasicInstructionClass(I)) ||
          !(AA->getModRefInfo(I, Loc) & AliasAnalysis::Mod)))
    ++I;
  StoreInst *Store = dyn_cast<StoreInst>(I);
  if (!Store || !Store->isSimple()) return;
  if (Store->getPointerOperand() != Loc.Ptr) return;

  Value *New = StripPointerCastsAndObjCCalls(Store->getValueOperand());

  // Walk up to find the retain.
  I = Store;
  BasicBlock::iterator Begin = BB->begin();
  while (I != Begin && GetBasicInstructionClass(I) != IC_Retain)
    --I;
  Instruction *Retain = I;
  if (GetBasicInstructionClass(Retain) != IC_Retain) return;
  if (GetObjCArg(Retain) != New) return;
  Changed = true;
  ++NumStoreStrongs;

  LLVMContext &C = Release->getContext();
  Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
  Type *I8XX = PointerType::getUnqual(I8X);

  Value *Args[] = { Load->getPointerOperand(), New };
  if (Args[0]->getType() != I8XX)
    Args[0] = new BitCastInst(Args[0], I8XX, "", Store);
  if (Args[1]->getType() != I8X)
    Args[1] = new BitCastInst(Args[1], I8X, "", Store);
  CallInst *StoreStrong =
    CallInst::Create(getStoreStrongCallee(BB->getParent()->getParent()),
                     Args, "", Store);
  StoreStrong->setDoesNotThrow();
  StoreStrong->setDebugLoc(Store->getDebugLoc());

  // We can't set the tail flag yet, because we haven't yet determined
  // whether there are any escaping allocas. Remember this call, so that
  // we can set the tail flag once we know it's safe.
  StoreStrongCalls.insert(StoreStrong);

  if (&*Iter == Store) ++Iter;
  Store->eraseFromParent();
  Release->eraseFromParent();
  EraseInstruction(Retain);
  if (Load->use_empty())
    Load->eraseFromParent();
}
bool ObjCARCContract::doInitialization(Module &M) {
  // If nothing in the Module uses ARC, don't do anything.
  Run = ModuleHasARC(M);
  if (!Run)
    return false;

  // These are initialized lazily.
  StoreStrongCallee = 0;
  RetainAutoreleaseCallee = 0;
  RetainAutoreleaseRVCallee = 0;

  // Initialize RetainRVMarker.
  RetainRVMarker = 0;
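  // The marker arrives from the frontend as module-level named metadata
  // holding a single string; schematically (the string is target-specific
  // and shown here only as an illustration):
  //
  //   !clang.arc.retainAutoreleasedReturnValueMarker = !{!0}
  //   !0 = metadata !{metadata !"mov\09r7, r7\09\09@ marker"}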
  if (NamedMDNode *NMD =
        M.getNamedMetadata("clang.arc.retainAutoreleasedReturnValueMarker"))
    if (NMD->getNumOperands() == 1) {
      const MDNode *N = NMD->getOperand(0);
      if (N->getNumOperands() == 1)
        if (const MDString *S = dyn_cast<MDString>(N->getOperand(0)))
          RetainRVMarker = S;
    }

  return false;
}
bool ObjCARCContract::runOnFunction(Function &F) {
  if (!EnableARCOpts)
    return false;

  // If nothing in the Module uses ARC, don't do anything.
  if (!Run)
    return false;

  Changed = false;
  AA = &getAnalysis<AliasAnalysis>();
  DT = &getAnalysis<DominatorTree>();

  PA.setAA(&getAnalysis<AliasAnalysis>());

  // Track whether it's ok to mark objc_storeStrong calls with the "tail"
  // keyword. Be conservative if the function has variadic arguments.
  // It seems that functions which "return twice" are also unsafe for the
  // "tail" keyword, because they call setjmp, which may need to return
  // to an earlier stack state.
  bool TailOkForStoreStrongs = !F.isVarArg() &&
                               !F.callsFunctionThatReturnsTwice();

  // For ObjC library calls which return their argument, replace uses of the
  // argument with uses of the call return value, if it dominates the use. This
  // reduces register pressure.
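  //
  // For example (schematic; names are illustrative), since objc_retain
  // returns its operand, in
  //
  //   %0 = call i8* @objc_retain(i8* %x)
  //   store i8* %x, i8** %p
  //
  // the store can be rewritten to use %0 whenever %0 dominates it:
  //
  //   store i8* %0, i8** %p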
  SmallPtrSet<Instruction *, 4> DependingInstructions;
  SmallPtrSet<const BasicBlock *, 4> Visited;
  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
    Instruction *Inst = &*I++;

    // Only these library routines return their argument. In particular,
    // objc_retainBlock does not necessarily return its argument.
    InstructionClass Class = GetBasicInstructionClass(Inst);
    switch (Class) {
    case IC_Retain:
    case IC_FusedRetainAutorelease:
    case IC_FusedRetainAutoreleaseRV:
      break;
    case IC_Autorelease:
    case IC_AutoreleaseRV:
      if (ContractAutorelease(F, Inst, Class, DependingInstructions, Visited))
        continue;
      break;
    case IC_RetainRV: {
      // If we're compiling for a target which needs a special inline-asm
      // marker to do the retainAutoreleasedReturnValue optimization,
      // insert it now.
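      //
      // The inserted marker ends up immediately before the retainRV call;
      // schematically:
      //
      //   %call = call i8* @something(...)
      //   call void asm sideeffect "<marker string>", ""()
      //   %1 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call)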
      if (!RetainRVMarker)
        break;
      BasicBlock::iterator BBI = Inst;
      --BBI;
      while (isNoopInstruction(BBI)) --BBI;
      if (&*BBI == GetObjCArg(Inst)) {
        InlineAsm *IA =
          InlineAsm::get(FunctionType::get(Type::getVoidTy(Inst->getContext()),
                                           /*isVarArg=*/false),
                         RetainRVMarker->getString(),
                         /*Constraints=*/"", /*hasSideEffects=*/true);
        CallInst::Create(IA, "", Inst);
      }
      break;
    }
    case IC_InitWeak: {
      // objc_initWeak(p, null) => *p = null
      CallInst *CI = cast<CallInst>(Inst);
      if (isNullOrUndef(CI->getArgOperand(1))) {
        Value *Null =
          ConstantPointerNull::get(cast<PointerType>(CI->getType()));
        Changed = true;
        new StoreInst(Null, CI->getArgOperand(0), CI);
        CI->replaceAllUsesWith(Null);
        CI->eraseFromParent();
      }
      break;
    }
    case IC_Release:
      ContractRelease(Inst, I);
      continue;
    case IC_User:
      // Be conservative if the function has any alloca instructions.
      // Technically we only care about escaping alloca instructions,
      // but this is sufficient to handle some interesting cases.
      if (isa<AllocaInst>(Inst))
        TailOkForStoreStrongs = false;
      continue;
    default:
      continue;
    }
    // Don't use GetObjCArg because we don't want to look through bitcasts
    // and such; to do the replacement, the argument must have type i8*.
    const Value *Arg = cast<CallInst>(Inst)->getArgOperand(0);
    for (;;) {
      // If we're compiling bugpointed code, don't get in trouble.
      if (!isa<Instruction>(Arg) && !isa<Argument>(Arg))
        break;
      // Look through the uses of the pointer.
      for (Value::const_use_iterator UI = Arg->use_begin(), UE = Arg->use_end();
           UI != UE; ) {
        Use &U = UI.getUse();
        unsigned OperandNo = UI.getOperandNo();
        ++UI; // Increment UI now, because we may unlink its element.
        // If the call's return value dominates a use of the call's argument
        // value, rewrite the use to use the return value. We check for
        // reachability here because an unreachable call is considered to
        // trivially dominate itself, which would lead us to rewriting its
        // argument in terms of its return value, which would lead to
        // infinite loops in GetObjCArg.
        if (DT->isReachableFromEntry(U) &&
            DT->dominates(Inst, U)) {
          Changed = true;
          Instruction *Replacement = Inst;
          Type *UseTy = U.get()->getType();
          if (PHINode *PHI = dyn_cast<PHINode>(U.getUser())) {
            // For PHI nodes, insert the bitcast in the predecessor block.
            unsigned ValNo =
              PHINode::getIncomingValueNumForOperand(OperandNo);
            BasicBlock *BB =
              PHI->getIncomingBlock(ValNo);
            if (Replacement->getType() != UseTy)
              Replacement = new BitCastInst(Replacement, UseTy, "",
                                            &BB->back());
            // While we're here, rewrite all edges for this PHI, rather
            // than just one use at a time, to minimize the number of
            // bitcasts we emit.
            for (unsigned i = 0, e = PHI->getNumIncomingValues();
                 i != e; ++i)
              if (PHI->getIncomingBlock(i) == BB) {
                // Keep the UI iterator valid.
                if (&PHI->getOperandUse(
                      PHINode::getOperandNumForIncomingValue(i)) ==
                    &UI.getUse())
                  ++UI;
                PHI->setIncomingValue(i, Replacement);
              }
          } else {
            if (Replacement->getType() != UseTy)
              Replacement = new BitCastInst(Replacement, UseTy, "",
                                            cast<Instruction>(U.getUser()));
            U.set(Replacement);
          }
        }
      }
      // If Arg is a no-op casted pointer, strip one level of casts and
      // iterate.
      if (const BitCastInst *BI = dyn_cast<BitCastInst>(Arg))
        Arg = BI->getOperand(0);
      else if (isa<GEPOperator>(Arg) &&
               cast<GEPOperator>(Arg)->hasAllZeroIndices())
        Arg = cast<GEPOperator>(Arg)->getPointerOperand();
      else if (isa<GlobalAlias>(Arg) &&
               !cast<GlobalAlias>(Arg)->mayBeOverridden())
        Arg = cast<GlobalAlias>(Arg)->getAliasee();
      else
        break;
    }
  }
  // If this function has no escaping allocas or suspicious vararg usage,
  // objc_storeStrong calls can be marked with the "tail" keyword.
  if (TailOkForStoreStrongs)
    for (DenseSet<CallInst *>::iterator I = StoreStrongCalls.begin(),
         E = StoreStrongCalls.end(); I != E; ++I)
      (*I)->setTailCall();
  StoreStrongCalls.clear();

  return Changed;
}