//===- ObjCARC.cpp - ObjC ARC Optimization --------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines ObjC ARC optimizations. ARC stands for
// Automatic Reference Counting and is a system for managing reference counts
// for objects in Objective C.
//
// The optimizations performed include elimination of redundant, partially
// redundant, and inconsequential reference count operations, elimination of
// redundant weak pointer operations, pattern-matching and replacement of
// low-level operations into higher-level operations, and numerous minor
// simplifications.
//
// This file also defines a simple ARC-aware AliasAnalysis.
//
// WARNING: This file knows about certain library functions. It recognizes them
// by name, and hardwires knowledge of their semantics.
//
// WARNING: This file knows about how certain Objective-C library functions are
// used. Naive LLVM IR transformations which would otherwise be
// behavior-preserving may break these assumptions.
//
//===----------------------------------------------------------------------===//
#define DEBUG_TYPE "objc-arc"
#include "llvm/Function.h"
#include "llvm/Intrinsics.h"
#include "llvm/GlobalVariable.h"
#include "llvm/DerivedTypes.h"
#include "llvm/Module.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/Support/CallSite.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/STLExtras.h"
using namespace llvm;

// A handy option to enable/disable all optimizations in this file.
static cl::opt<bool> EnableARCOpts("enable-objc-arc-opts", cl::init(true));

//===----------------------------------------------------------------------===//
// Misc. Utilities
//===----------------------------------------------------------------------===//

namespace {
  /// MapVector - An associative container with fast insertion-order
  /// (deterministic) iteration over its elements. Plus the special
  /// blot operation.
  template<class KeyT, class ValueT>
  class MapVector {
    /// Map - Map keys to indices in Vector.
    typedef DenseMap<KeyT, size_t> MapTy;
    MapTy Map;

    /// Vector - Keys and values.
    typedef std::vector<std::pair<KeyT, ValueT> > VectorTy;
    VectorTy Vector;

  public:
    typedef typename VectorTy::iterator iterator;
    typedef typename VectorTy::const_iterator const_iterator;
    iterator begin() { return Vector.begin(); }
    iterator end() { return Vector.end(); }
    const_iterator begin() const { return Vector.begin(); }
    const_iterator end() const { return Vector.end(); }
    ~MapVector() {
      assert(Vector.size() >= Map.size()); // May differ due to blotting.
      for (typename MapTy::const_iterator I = Map.begin(), E = Map.end();
           I != E; ++I) {
        assert(I->second < Vector.size());
        assert(Vector[I->second].first == I->first);
      }
      for (typename VectorTy::const_iterator I = Vector.begin(),
           E = Vector.end(); I != E; ++I)
        assert(!I->first ||
               (Map.count(I->first) &&
                Map[I->first] == size_t(I - Vector.begin())));
    }
    ValueT &operator[](KeyT Arg) {
      std::pair<typename MapTy::iterator, bool> Pair =
        Map.insert(std::make_pair(Arg, size_t(0)));
      if (Pair.second) {
        Pair.first->second = Vector.size();
        Vector.push_back(std::make_pair(Arg, ValueT()));
        return Vector.back().second;
      }
      return Vector[Pair.first->second].second;
    }
    std::pair<iterator, bool>
    insert(const std::pair<KeyT, ValueT> &InsertPair) {
      std::pair<typename MapTy::iterator, bool> Pair =
        Map.insert(std::make_pair(InsertPair.first, size_t(0)));
      if (Pair.second) {
        Pair.first->second = Vector.size();
        Vector.push_back(InsertPair);
        return std::make_pair(llvm::prior(Vector.end()), true);
      }
      return std::make_pair(Vector.begin() + Pair.first->second, false);
    }
    const_iterator find(KeyT Key) const {
      typename MapTy::const_iterator It = Map.find(Key);
      if (It == Map.end()) return Vector.end();
      return Vector.begin() + It->second;
    }
    /// blot - This is similar to erase, but instead of removing the element
    /// from the vector, it just zeros out the key in the vector. This leaves
    /// iterators intact, but clients must be prepared for zeroed-out keys when
    /// iterating.
    void blot(KeyT Key) {
      typename MapTy::iterator It = Map.find(Key);
      if (It == Map.end()) return;
      Vector[It->second].first = KeyT();
      Map.erase(It);
    }

    void clear() {
      Map.clear();
      Vector.clear();
    }
  };
}
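// A small usage sketch of MapVector (illustrative only, not part of the
// original file): insertion order is preserved during iteration, and blot
// zeroes out a key in place instead of shifting later elements, so iterators
// stay valid:
//
//   MapVector<const Value *, unsigned> MV;
//   MV[A] = 1;    // A is appended at index 0
//   MV[B] = 2;    // B is appended at index 1
//   MV.blot(A);   // index 0 keeps its slot, but with a zeroed-out key
//   // Iteration still visits index 0 (zeroed key) and then index 1 (B).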

//===----------------------------------------------------------------------===//
// ARC Utilities.
//===----------------------------------------------------------------------===//

namespace {
  /// InstructionClass - A simple classification for instructions.
  enum InstructionClass {
    IC_Retain,              ///< objc_retain
    IC_RetainRV,            ///< objc_retainAutoreleasedReturnValue
    IC_RetainBlock,         ///< objc_retainBlock
    IC_Release,             ///< objc_release
    IC_Autorelease,         ///< objc_autorelease
    IC_AutoreleaseRV,       ///< objc_autoreleaseReturnValue
    IC_AutoreleasepoolPush, ///< objc_autoreleasePoolPush
    IC_AutoreleasepoolPop,  ///< objc_autoreleasePoolPop
    IC_NoopCast,            ///< objc_retainedObject, etc.
    IC_FusedRetainAutorelease, ///< objc_retainAutorelease
    IC_FusedRetainAutoreleaseRV, ///< objc_retainAutoreleaseReturnValue
    IC_LoadWeakRetained,    ///< objc_loadWeakRetained (primitive)
    IC_StoreWeak,           ///< objc_storeWeak (primitive)
    IC_InitWeak,            ///< objc_initWeak (derived)
    IC_LoadWeak,            ///< objc_loadWeak (derived)
    IC_MoveWeak,            ///< objc_moveWeak (derived)
    IC_CopyWeak,            ///< objc_copyWeak (derived)
    IC_DestroyWeak,         ///< objc_destroyWeak (derived)
    IC_CallOrUser,          ///< could call objc_release and/or "use" pointers
    IC_Call,                ///< could call objc_release
    IC_User,                ///< could "use" a pointer
    IC_None                 ///< anything else
  };
}
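// For example (illustrative IR, not taken from this file):
//   %0 = tail call i8* @objc_retain(i8* %x)  ; classified as IC_Retain
//   store i8* %x, i8** %p                    ; classified as IC_User
//   call void @opaque()                      ; classified as IC_CallOrUser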

/// IsPotentialUse - Test whether the given value is possibly a
/// reference-counted pointer.
static bool IsPotentialUse(const Value *Op) {
  // Pointers to static or stack storage are not reference-counted pointers.
  if (isa<Constant>(Op) || isa<AllocaInst>(Op))
    return false;
  // Special arguments are not reference-counted.
  if (const Argument *Arg = dyn_cast<Argument>(Op))
    if (Arg->hasByValAttr() ||
        Arg->hasNestAttr() ||
        Arg->hasStructRetAttr())
      return false;
  // Only consider values with pointer types, and not function pointers.
  PointerType *Ty = dyn_cast<PointerType>(Op->getType());
  if (!Ty || isa<FunctionType>(Ty->getElementType()))
    return false;
  // Conservatively assume anything else is a potential use.
  return true;
}

/// GetCallSiteClass - Helper for GetInstructionClass. Determines what kind
/// of construct CS is.
static InstructionClass GetCallSiteClass(ImmutableCallSite CS) {
  for (ImmutableCallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end();
       I != E; ++I)
    if (IsPotentialUse(*I))
      return CS.onlyReadsMemory() ? IC_User : IC_CallOrUser;

  return CS.onlyReadsMemory() ? IC_None : IC_Call;
}

/// GetFunctionClass - Determine if F is one of the special known Functions.
/// If it isn't, return IC_CallOrUser.
static InstructionClass GetFunctionClass(const Function *F) {
  Function::const_arg_iterator AI = F->arg_begin(), AE = F->arg_end();

  // No arguments.
  if (AI == AE)
    return StringSwitch<InstructionClass>(F->getName())
      .Case("objc_autoreleasePoolPush", IC_AutoreleasepoolPush)
      .Default(IC_CallOrUser);

  // One argument.
  const Argument *A0 = AI++;
  if (AI == AE)
    // Argument is a pointer.
    if (PointerType *PTy = dyn_cast<PointerType>(A0->getType())) {
      Type *ETy = PTy->getElementType();
      // Argument is i8*.
      if (ETy->isIntegerTy(8))
        return StringSwitch<InstructionClass>(F->getName())
          .Case("objc_retain", IC_Retain)
          .Case("objc_retainAutoreleasedReturnValue", IC_RetainRV)
          .Case("objc_retainBlock", IC_RetainBlock)
          .Case("objc_release", IC_Release)
          .Case("objc_autorelease", IC_Autorelease)
          .Case("objc_autoreleaseReturnValue", IC_AutoreleaseRV)
          .Case("objc_autoreleasePoolPop", IC_AutoreleasepoolPop)
          .Case("objc_retainedObject", IC_NoopCast)
          .Case("objc_unretainedObject", IC_NoopCast)
          .Case("objc_unretainedPointer", IC_NoopCast)
          .Case("objc_retain_autorelease", IC_FusedRetainAutorelease)
          .Case("objc_retainAutorelease", IC_FusedRetainAutorelease)
          .Case("objc_retainAutoreleaseReturnValue",IC_FusedRetainAutoreleaseRV)
          .Default(IC_CallOrUser);

      // Argument is i8**.
      if (PointerType *Pte = dyn_cast<PointerType>(ETy))
        if (Pte->getElementType()->isIntegerTy(8))
          return StringSwitch<InstructionClass>(F->getName())
            .Case("objc_loadWeakRetained", IC_LoadWeakRetained)
            .Case("objc_loadWeak", IC_LoadWeak)
            .Case("objc_destroyWeak", IC_DestroyWeak)
            .Default(IC_CallOrUser);
    }

  // Two arguments, first is i8**.
  const Argument *A1 = AI++;
  if (AI == AE)
    if (PointerType *PTy = dyn_cast<PointerType>(A0->getType()))
      if (PointerType *Pte = dyn_cast<PointerType>(PTy->getElementType()))
        if (Pte->getElementType()->isIntegerTy(8))
          if (PointerType *PTy1 = dyn_cast<PointerType>(A1->getType())) {
            Type *ETy1 = PTy1->getElementType();
            // Second argument is i8*.
            if (ETy1->isIntegerTy(8))
              return StringSwitch<InstructionClass>(F->getName())
                .Case("objc_storeWeak", IC_StoreWeak)
                .Case("objc_initWeak", IC_InitWeak)
                .Default(IC_CallOrUser);
            // Second argument is i8**.
            if (PointerType *Pte1 = dyn_cast<PointerType>(ETy1))
              if (Pte1->getElementType()->isIntegerTy(8))
                return StringSwitch<InstructionClass>(F->getName())
                  .Case("objc_moveWeak", IC_MoveWeak)
                  .Case("objc_copyWeak", IC_CopyWeak)
                  .Default(IC_CallOrUser);
          }

  // Anything else.
  return IC_CallOrUser;
}
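// For example, these runtime declarations (a sketch, not from this file) would
// be recognized by the signature checks above:
//   declare i8* @objc_retain(i8*)          ; one i8* argument   -> IC_Retain
//   declare i8* @objc_loadWeak(i8**)       ; one i8** argument  -> IC_LoadWeak
//   declare i8* @objc_storeWeak(i8**, i8*) ; (i8**, i8*)        -> IC_StoreWeak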

/// GetInstructionClass - Determine what kind of construct V is.
static InstructionClass GetInstructionClass(const Value *V) {
  if (const Instruction *I = dyn_cast<Instruction>(V)) {
    // Any instruction other than bitcast and gep with a pointer operand has a
    // use of an objc pointer. Bitcasts, GEPs, Selects, PHIs transfer a pointer
    // to a subsequent use, rather than using it themselves, in this sense.
    // As a short cut, several other opcodes are known to have no pointer
    // operands of interest. And ret is never followed by a release, so it's
    // not interesting to examine.
    switch (I->getOpcode()) {
    case Instruction::Call: {
      const CallInst *CI = cast<CallInst>(I);
      // Check for calls to special functions.
      if (const Function *F = CI->getCalledFunction()) {
        InstructionClass Class = GetFunctionClass(F);
        if (Class != IC_CallOrUser)
          return Class;

        // None of the intrinsic functions do objc_release. For intrinsics, the
        // only question is whether or not they may be users.
        switch (F->getIntrinsicID()) {
        case 0: break;
        case Intrinsic::bswap: case Intrinsic::ctpop:
        case Intrinsic::ctlz: case Intrinsic::cttz:
        case Intrinsic::returnaddress: case Intrinsic::frameaddress:
        case Intrinsic::stacksave: case Intrinsic::stackrestore:
        case Intrinsic::vastart: case Intrinsic::vacopy: case Intrinsic::vaend:
        // Don't let dbg info affect our results.
        case Intrinsic::dbg_declare: case Intrinsic::dbg_value:
          // Short cut: Some intrinsics obviously don't use ObjC pointers.
          return IC_None;
        default:
          for (Function::const_arg_iterator AI = F->arg_begin(),
               AE = F->arg_end(); AI != AE; ++AI)
            if (IsPotentialUse(AI))
              return IC_User;
          return IC_None;
        }
      }
      return GetCallSiteClass(CI);
    }
    case Instruction::Invoke:
      return GetCallSiteClass(cast<InvokeInst>(I));
    case Instruction::BitCast:
    case Instruction::GetElementPtr:
    case Instruction::Select: case Instruction::PHI:
    case Instruction::Ret: case Instruction::Br:
    case Instruction::Switch: case Instruction::IndirectBr:
    case Instruction::Alloca: case Instruction::VAArg:
    case Instruction::Add: case Instruction::FAdd:
    case Instruction::Sub: case Instruction::FSub:
    case Instruction::Mul: case Instruction::FMul:
    case Instruction::SDiv: case Instruction::UDiv: case Instruction::FDiv:
    case Instruction::SRem: case Instruction::URem: case Instruction::FRem:
    case Instruction::Shl: case Instruction::LShr: case Instruction::AShr:
    case Instruction::And: case Instruction::Or: case Instruction::Xor:
    case Instruction::SExt: case Instruction::ZExt: case Instruction::Trunc:
    case Instruction::IntToPtr: case Instruction::FCmp:
    case Instruction::FPTrunc: case Instruction::FPExt:
    case Instruction::FPToUI: case Instruction::FPToSI:
    case Instruction::UIToFP: case Instruction::SIToFP:
    case Instruction::InsertElement: case Instruction::ExtractElement:
    case Instruction::ShuffleVector:
    case Instruction::ExtractValue:
      break;
    case Instruction::ICmp:
      // Comparing a pointer with null, or any other constant, isn't an
      // interesting use, because we don't care what the pointer points to, or
      // about the values of any other dynamic reference-counted pointers.
      if (IsPotentialUse(I->getOperand(1)))
        return IC_User;
      break;
    default:
      // For anything else, check all the operands.
      // Note that this includes both operands of a Store: while the first
      // operand isn't actually being dereferenced, it is being stored to
      // memory where we can no longer track who might read it and dereference
      // it, so we have to consider it potentially used.
      for (User::const_op_iterator OI = I->op_begin(), OE = I->op_end();
           OI != OE; ++OI)
        if (IsPotentialUse(*OI))
          return IC_User;
    }
  }

  // Otherwise, it's totally inert for ARC purposes.
  return IC_None;
}

/// GetBasicInstructionClass - Determine what kind of construct V is. This is
/// similar to GetInstructionClass except that it only detects objc runtime
/// calls. This allows it to be faster.
static InstructionClass GetBasicInstructionClass(const Value *V) {
  if (const CallInst *CI = dyn_cast<CallInst>(V)) {
    if (const Function *F = CI->getCalledFunction())
      return GetFunctionClass(F);
    // Otherwise, be conservative.
    return IC_CallOrUser;
  }

  // Otherwise, be conservative.
  return isa<InvokeInst>(V) ? IC_CallOrUser : IC_User;
}

/// IsRetain - Test if the given class is objc_retain or
/// objc_retainAutoreleasedReturnValue.
static bool IsRetain(InstructionClass Class) {
  return Class == IC_Retain ||
         Class == IC_RetainRV;
}

/// IsAutorelease - Test if the given class is objc_autorelease or
/// objc_autoreleaseReturnValue.
static bool IsAutorelease(InstructionClass Class) {
  return Class == IC_Autorelease ||
         Class == IC_AutoreleaseRV;
}

/// IsForwarding - Test if the given class represents instructions which return
/// their argument verbatim.
static bool IsForwarding(InstructionClass Class) {
  // objc_retainBlock technically doesn't always return its argument
  // verbatim, but it doesn't matter for our purposes here.
  return Class == IC_Retain ||
         Class == IC_RetainRV ||
         Class == IC_Autorelease ||
         Class == IC_AutoreleaseRV ||
         Class == IC_RetainBlock ||
         Class == IC_NoopCast;
}

/// IsNoopOnNull - Test if the given class represents instructions which do
/// nothing if passed a null pointer.
static bool IsNoopOnNull(InstructionClass Class) {
  return Class == IC_Retain ||
         Class == IC_RetainRV ||
         Class == IC_Release ||
         Class == IC_Autorelease ||
         Class == IC_AutoreleaseRV ||
         Class == IC_RetainBlock;
}

/// IsAlwaysTail - Test if the given class represents instructions which are
/// always safe to mark with the "tail" keyword.
static bool IsAlwaysTail(InstructionClass Class) {
  // IC_RetainBlock may be given a stack argument.
  return Class == IC_Retain ||
         Class == IC_RetainRV ||
         Class == IC_Autorelease ||
         Class == IC_AutoreleaseRV;
}

/// IsNoThrow - Test if the given class represents instructions which are always
/// safe to mark with the nounwind attribute.
static bool IsNoThrow(InstructionClass Class) {
  return Class == IC_Retain ||
         Class == IC_RetainRV ||
         Class == IC_RetainBlock ||
         Class == IC_Release ||
         Class == IC_Autorelease ||
         Class == IC_AutoreleaseRV ||
         Class == IC_AutoreleasepoolPush ||
         Class == IC_AutoreleasepoolPop;
}

/// EraseInstruction - Erase the given instruction. ObjC calls return their
/// argument verbatim, so if it's such a call and the return value has users,
/// replace them with the argument value.
static void EraseInstruction(Instruction *CI) {
  Value *OldArg = cast<CallInst>(CI)->getArgOperand(0);

  bool Unused = CI->use_empty();

  if (!Unused) {
    // Replace the return value with the argument.
    assert(IsForwarding(GetBasicInstructionClass(CI)) &&
           "Can't delete non-forwarding instruction with users!");
    CI->replaceAllUsesWith(OldArg);
  }

  CI->eraseFromParent();

  if (Unused)
    RecursivelyDeleteTriviallyDeadInstructions(OldArg);
}
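// For example (illustrative): erasing
//   %1 = call i8* @objc_retain(i8* %0)
// when %1 still has users first rewrites those users to refer to %0 directly,
// which is safe because the call returns its argument verbatim.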

/// GetUnderlyingObjCPtr - This is a wrapper around getUnderlyingObject which
/// also knows how to look through objc_retain and objc_autorelease calls, which
/// we know to return their argument verbatim.
static const Value *GetUnderlyingObjCPtr(const Value *V) {
  for (;;) {
    V = GetUnderlyingObject(V);
    if (!IsForwarding(GetBasicInstructionClass(V)))
      break;
    V = cast<CallInst>(V)->getArgOperand(0);
  }

  return V;
}

/// StripPointerCastsAndObjCCalls - This is a wrapper around
/// Value::stripPointerCasts which also knows how to look through objc_retain
/// and objc_autorelease calls, which we know to return their argument verbatim.
static const Value *StripPointerCastsAndObjCCalls(const Value *V) {
  for (;;) {
    V = V->stripPointerCasts();
    if (!IsForwarding(GetBasicInstructionClass(V)))
      break;
    V = cast<CallInst>(V)->getArgOperand(0);
  }
  return V;
}

/// StripPointerCastsAndObjCCalls - This is a wrapper around
/// Value::stripPointerCasts which also knows how to look through objc_retain
/// and objc_autorelease calls, which we know to return their argument verbatim.
static Value *StripPointerCastsAndObjCCalls(Value *V) {
  for (;;) {
    V = V->stripPointerCasts();
    if (!IsForwarding(GetBasicInstructionClass(V)))
      break;
    V = cast<CallInst>(V)->getArgOperand(0);
  }
  return V;
}
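// For example (illustrative): given
//   %1 = call i8* @objc_retain(i8* %0)
//   %2 = bitcast i8* %1 to i8*
// StripPointerCastsAndObjCCalls(%2) returns %0, looking through both the
// bitcast and the forwarding objc_retain call.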

/// GetObjCArg - Assuming the given instruction is one of the special calls such
/// as objc_retain or objc_release, return the argument value, stripped of no-op
/// casts and forwarding calls.
static Value *GetObjCArg(Value *Inst) {
  return StripPointerCastsAndObjCCalls(cast<CallInst>(Inst)->getArgOperand(0));
}

/// IsObjCIdentifiedObject - This is similar to AliasAnalysis'
/// isObjCIdentifiedObject, except that it uses special knowledge of
/// ObjC conventions...
static bool IsObjCIdentifiedObject(const Value *V) {
  // Assume that call results and arguments have their own "provenance".
  // Constants (including GlobalVariables) and Allocas are never
  // reference-counted.
  if (isa<CallInst>(V) || isa<InvokeInst>(V) ||
      isa<Argument>(V) || isa<Constant>(V) ||
      isa<AllocaInst>(V))
    return true;

  if (const LoadInst *LI = dyn_cast<LoadInst>(V)) {
    const Value *Pointer =
      StripPointerCastsAndObjCCalls(LI->getPointerOperand());
    if (const GlobalVariable *GV = dyn_cast<GlobalVariable>(Pointer)) {
      // A constant pointer can't be pointing to an object on the heap. It may
      // be reference-counted, but it won't be deleted.
      if (GV->isConstant())
        return true;
      StringRef Name = GV->getName();
      // These special variables are known to hold values which are not
      // reference-counted pointers.
      if (Name.startswith("\01L_OBJC_SELECTOR_REFERENCES_") ||
          Name.startswith("\01L_OBJC_CLASSLIST_REFERENCES_") ||
          Name.startswith("\01L_OBJC_CLASSLIST_SUP_REFS_$_") ||
          Name.startswith("\01L_OBJC_METH_VAR_NAME_") ||
          Name.startswith("\01l_objc_msgSend_fixup_"))
        return true;
    }
  }

  return false;
}

/// FindSingleUseIdentifiedObject - This is similar to
/// StripPointerCastsAndObjCCalls but it stops as soon as it finds a value
/// with multiple uses.
static const Value *FindSingleUseIdentifiedObject(const Value *Arg) {
  if (Arg->hasOneUse()) {
    if (const BitCastInst *BC = dyn_cast<BitCastInst>(Arg))
      return FindSingleUseIdentifiedObject(BC->getOperand(0));
    if (const GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(Arg))
      if (GEP->hasAllZeroIndices())
        return FindSingleUseIdentifiedObject(GEP->getPointerOperand());
    if (IsForwarding(GetBasicInstructionClass(Arg)))
      return FindSingleUseIdentifiedObject(
               cast<CallInst>(Arg)->getArgOperand(0));
    if (!IsObjCIdentifiedObject(Arg))
      return 0;
    return Arg;
  }

  // If we found an identifiable object that has multiple uses, but they
  // are all trivial uses, we can still consider this to be a single-use
  // value.
  if (IsObjCIdentifiedObject(Arg)) {
    for (Value::const_use_iterator UI = Arg->use_begin(), UE = Arg->use_end();
         UI != UE; ++UI) {
      const User *U = *UI;
      if (!U->use_empty() || StripPointerCastsAndObjCCalls(U) != Arg)
        return 0;
    }

    return Arg;
  }

  return 0;
}

/// ModuleHasARC - Test if the given module looks interesting to run ARC
/// optimization on.
static bool ModuleHasARC(const Module &M) {
  return
    M.getNamedValue("objc_retain") ||
    M.getNamedValue("objc_release") ||
    M.getNamedValue("objc_autorelease") ||
    M.getNamedValue("objc_retainAutoreleasedReturnValue") ||
    M.getNamedValue("objc_retainBlock") ||
    M.getNamedValue("objc_autoreleaseReturnValue") ||
    M.getNamedValue("objc_autoreleasePoolPush") ||
    M.getNamedValue("objc_loadWeakRetained") ||
    M.getNamedValue("objc_loadWeak") ||
    M.getNamedValue("objc_destroyWeak") ||
    M.getNamedValue("objc_storeWeak") ||
    M.getNamedValue("objc_initWeak") ||
    M.getNamedValue("objc_moveWeak") ||
    M.getNamedValue("objc_copyWeak") ||
    M.getNamedValue("objc_retainedObject") ||
    M.getNamedValue("objc_unretainedObject") ||
    M.getNamedValue("objc_unretainedPointer");
}

//===----------------------------------------------------------------------===//
// ARC AliasAnalysis.
//===----------------------------------------------------------------------===//

#include "llvm/Pass.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/Passes.h"

namespace {
  /// ObjCARCAliasAnalysis - This is a simple alias analysis
  /// implementation that uses knowledge of ARC constructs to answer queries.
  ///
  /// TODO: This class could be generalized to know about other ObjC-specific
  /// tricks, such as knowing that ivars in the non-fragile ABI are non-aliasing
  /// even though their offsets are dynamic.
  class ObjCARCAliasAnalysis : public ImmutablePass,
                               public AliasAnalysis {
  public:
    static char ID; // Class identification, replacement for typeinfo
    ObjCARCAliasAnalysis() : ImmutablePass(ID) {
      initializeObjCARCAliasAnalysisPass(*PassRegistry::getPassRegistry());
    }

  private:
    virtual void initializePass() {
      InitializeAliasAnalysis(this);
    }

    /// getAdjustedAnalysisPointer - This method is used when a pass implements
    /// an analysis interface through multiple inheritance. If needed, it
    /// should override this to adjust the this pointer as needed for the
    /// specified pass info.
    virtual void *getAdjustedAnalysisPointer(const void *PI) {
      if (PI == &AliasAnalysis::ID)
        return (AliasAnalysis*)this;
      return this;
    }

    virtual void getAnalysisUsage(AnalysisUsage &AU) const;
    virtual AliasResult alias(const Location &LocA, const Location &LocB);
    virtual bool pointsToConstantMemory(const Location &Loc, bool OrLocal);
    virtual ModRefBehavior getModRefBehavior(ImmutableCallSite CS);
    virtual ModRefBehavior getModRefBehavior(const Function *F);
    virtual ModRefResult getModRefInfo(ImmutableCallSite CS,
                                       const Location &Loc);
    virtual ModRefResult getModRefInfo(ImmutableCallSite CS1,
                                       ImmutableCallSite CS2);
  };
} // End of anonymous namespace

// Register this pass...
char ObjCARCAliasAnalysis::ID = 0;
INITIALIZE_AG_PASS(ObjCARCAliasAnalysis, AliasAnalysis, "objc-arc-aa",
                   "ObjC-ARC-Based Alias Analysis", false, true, false)

ImmutablePass *llvm::createObjCARCAliasAnalysisPass() {
  return new ObjCARCAliasAnalysis();
}

void
ObjCARCAliasAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AliasAnalysis::getAnalysisUsage(AU);
}

AliasAnalysis::AliasResult
ObjCARCAliasAnalysis::alias(const Location &LocA, const Location &LocB) {
  if (!EnableARCOpts)
    return AliasAnalysis::alias(LocA, LocB);

  // First, strip off no-ops, including ObjC-specific no-ops, and try making a
  // precise alias query.
  const Value *SA = StripPointerCastsAndObjCCalls(LocA.Ptr);
  const Value *SB = StripPointerCastsAndObjCCalls(LocB.Ptr);
  AliasResult Result =
    AliasAnalysis::alias(Location(SA, LocA.Size, LocA.TBAATag),
                         Location(SB, LocB.Size, LocB.TBAATag));
  if (Result != MayAlias)
    return Result;

  // If that failed, climb to the underlying object, including climbing through
  // ObjC-specific no-ops, and try making an imprecise alias query.
  const Value *UA = GetUnderlyingObjCPtr(SA);
  const Value *UB = GetUnderlyingObjCPtr(SB);
  if (UA != SA || UB != SB) {
    Result = AliasAnalysis::alias(Location(UA), Location(UB));
    // We can't use MustAlias or PartialAlias results here because
    // GetUnderlyingObjCPtr may return an offsetted pointer value.
    if (Result == NoAlias)
      return NoAlias;
  }

  // If that failed, fail. We don't need to chain here, since that's covered
  // by the earlier precise query.
  return MayAlias;
}
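// For example (illustrative): a query on (%1, %2) where
//   %1 = call i8* @objc_retain(i8* %p)
//   %2 = bitcast i8* %p to i8*
// first strips both values back to %p and retries precisely; only if that
// still reports MayAlias does it fall back to the underlying objects, where
// only a NoAlias answer is trusted.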

bool
ObjCARCAliasAnalysis::pointsToConstantMemory(const Location &Loc,
                                             bool OrLocal) {
  if (!EnableARCOpts)
    return AliasAnalysis::pointsToConstantMemory(Loc, OrLocal);

  // First, strip off no-ops, including ObjC-specific no-ops, and try making
  // a precise alias query.
  const Value *S = StripPointerCastsAndObjCCalls(Loc.Ptr);
  if (AliasAnalysis::pointsToConstantMemory(Location(S, Loc.Size, Loc.TBAATag),
                                            OrLocal))
    return true;

  // If that failed, climb to the underlying object, including climbing through
  // ObjC-specific no-ops, and try making an imprecise alias query.
  const Value *U = GetUnderlyingObjCPtr(S);
  if (U != S)
    return AliasAnalysis::pointsToConstantMemory(Location(U), OrLocal);

  // If that failed, fail. We don't need to chain here, since that's covered
  // by the earlier precise query.
  return false;
}

AliasAnalysis::ModRefBehavior
ObjCARCAliasAnalysis::getModRefBehavior(ImmutableCallSite CS) {
  // We have nothing to do. Just chain to the next AliasAnalysis.
  return AliasAnalysis::getModRefBehavior(CS);
}

AliasAnalysis::ModRefBehavior
ObjCARCAliasAnalysis::getModRefBehavior(const Function *F) {
  if (!EnableARCOpts)
    return AliasAnalysis::getModRefBehavior(F);

  switch (GetFunctionClass(F)) {
  case IC_NoopCast:
    return DoesNotAccessMemory;
  default:
    break;
  }

  return AliasAnalysis::getModRefBehavior(F);
}

AliasAnalysis::ModRefResult
ObjCARCAliasAnalysis::getModRefInfo(ImmutableCallSite CS, const Location &Loc) {
  if (!EnableARCOpts)
    return AliasAnalysis::getModRefInfo(CS, Loc);

  switch (GetBasicInstructionClass(CS.getInstruction())) {
  case IC_Retain:
  case IC_RetainRV:
  case IC_RetainBlock:
  case IC_Autorelease:
  case IC_AutoreleaseRV:
  case IC_NoopCast:
  case IC_AutoreleasepoolPush:
  case IC_FusedRetainAutorelease:
  case IC_FusedRetainAutoreleaseRV:
    // These functions don't access any memory visible to the compiler.
    return NoModRef;
  default:
    break;
  }

  return AliasAnalysis::getModRefInfo(CS, Loc);
}

AliasAnalysis::ModRefResult
ObjCARCAliasAnalysis::getModRefInfo(ImmutableCallSite CS1,
                                    ImmutableCallSite CS2) {
  // TODO: Theoretically we could check for dependencies between objc_* calls
  // and OnlyAccessesArgumentPointees calls or other well-behaved calls.
  return AliasAnalysis::getModRefInfo(CS1, CS2);
}

//===----------------------------------------------------------------------===//
// ARC expansion.
//===----------------------------------------------------------------------===//

#include "llvm/Support/InstIterator.h"
#include "llvm/Transforms/Scalar.h"

namespace {
  /// ObjCARCExpand - Early ARC transformations.
  class ObjCARCExpand : public FunctionPass {
    virtual void getAnalysisUsage(AnalysisUsage &AU) const;
    virtual bool doInitialization(Module &M);
    virtual bool runOnFunction(Function &F);

    /// Run - A flag indicating whether this optimization pass should run.
    bool Run;

  public:
    static char ID;
    ObjCARCExpand() : FunctionPass(ID) {
      initializeObjCARCExpandPass(*PassRegistry::getPassRegistry());
    }
  };
}

char ObjCARCExpand::ID = 0;
INITIALIZE_PASS(ObjCARCExpand,
                "objc-arc-expand", "ObjC ARC expansion", false, false)

Pass *llvm::createObjCARCExpandPass() {
  return new ObjCARCExpand();
}

void ObjCARCExpand::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesCFG();
}

bool ObjCARCExpand::doInitialization(Module &M) {
  Run = ModuleHasARC(M);
  return false;
}

bool ObjCARCExpand::runOnFunction(Function &F) {
  if (!EnableARCOpts)
    return false;

  // If nothing in the Module uses ARC, don't do anything.
  if (!Run)
    return false;

  bool Changed = false;

  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ++I) {
    Instruction *Inst = &*I;

    switch (GetBasicInstructionClass(Inst)) {
    case IC_Retain:
    case IC_RetainRV:
    case IC_Autorelease:
    case IC_AutoreleaseRV:
    case IC_FusedRetainAutorelease:
    case IC_FusedRetainAutoreleaseRV:
      // These calls return their argument verbatim, as a low-level
      // optimization. However, this makes high-level optimizations
      // harder. Undo any uses of this optimization that the front-end
      // emitted here. We'll redo them in a later pass.
      Changed = true;
      Inst->replaceAllUsesWith(cast<CallInst>(Inst)->getArgOperand(0));
      break;
    default:
      break;
    }
  }

  return Changed;
}

//===----------------------------------------------------------------------===//
// ARC optimization.
//===----------------------------------------------------------------------===//

// TODO: On code like this:
//
// objc_retain(%x)
// stuff_that_cannot_release()
// objc_autorelease(%x)
// stuff_that_cannot_release()
// objc_retain(%x)
// stuff_that_cannot_release()
// objc_autorelease(%x)
//
// The second retain and autorelease can be deleted.

// TODO: It should be possible to delete
// objc_autoreleasePoolPush and objc_autoreleasePoolPop
// pairs if nothing is actually autoreleased between them. Also, autorelease
// calls followed by objc_autoreleasePoolPop calls (perhaps in ObjC++ code
// after inlining) can be turned into plain release calls.

// TODO: Critical-edge splitting. If the optimal insertion point is
// a critical edge, the current algorithm has to fail, because it doesn't
// know how to split edges. It should be possible to make the optimizer
// think in terms of edges, rather than blocks, and then split critical
// edges on demand.

// TODO: OptimizeSequences could be generalized to be Interprocedural.

// TODO: Recognize that a bunch of other objc runtime calls have
// non-escaping arguments and non-releasing arguments, and may be
// non-autoreleasing.

// TODO: Sink autorelease calls as far as possible. Unfortunately we
// usually can't sink them past other calls, which would be the main
// case where it would be useful.

// TODO: The pointer returned from objc_loadWeakRetained is retained.

// TODO: Delete release+retain pairs (rare).

#include "llvm/GlobalAlias.h"
#include "llvm/Constants.h"
#include "llvm/LLVMContext.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/CFG.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/Statistic.h"

STATISTIC(NumNoops,        "Number of no-op objc calls eliminated");
STATISTIC(NumPartialNoops, "Number of partially no-op objc calls eliminated");
STATISTIC(NumAutoreleases, "Number of autoreleases converted to releases");
STATISTIC(NumRets,         "Number of return value forwarding "
                           "retain+autoreleases eliminated");
STATISTIC(NumRRs,          "Number of retain+release paths eliminated");
STATISTIC(NumPeeps,        "Number of calls peephole-optimized");

namespace {
  /// ProvenanceAnalysis - This is similar to BasicAliasAnalysis, and it
  /// uses many of the same techniques, except it uses special ObjC-specific
  /// reasoning about pointer relationships.
  class ProvenanceAnalysis {
    AliasAnalysis *AA;

    typedef std::pair<const Value *, const Value *> ValuePairTy;
    typedef DenseMap<ValuePairTy, bool> CachedResultsTy;
    CachedResultsTy CachedResults;

    bool relatedCheck(const Value *A, const Value *B);
    bool relatedSelect(const SelectInst *A, const Value *B);
    bool relatedPHI(const PHINode *A, const Value *B);

    // Do not implement.
    void operator=(const ProvenanceAnalysis &);
    ProvenanceAnalysis(const ProvenanceAnalysis &);

  public:
    ProvenanceAnalysis() {}

    void setAA(AliasAnalysis *aa) { AA = aa; }

    AliasAnalysis *getAA() const { return AA; }

    bool related(const Value *A, const Value *B);

    void clear() {
      CachedResults.clear();
    }
  };
}

bool ProvenanceAnalysis::relatedSelect(const SelectInst *A, const Value *B) {
  // If the values are Selects with the same condition, we can do a more precise
  // check: just check for relations between the values on corresponding arms.
  if (const SelectInst *SB = dyn_cast<SelectInst>(B))
    if (A->getCondition() == SB->getCondition()) {
      if (related(A->getTrueValue(), SB->getTrueValue()))
        return true;
      if (related(A->getFalseValue(), SB->getFalseValue()))
        return true;
      return false;
    }

  // Check both arms of the Select node individually.
  if (related(A->getTrueValue(), B))
    return true;
  if (related(A->getFalseValue(), B))
    return true;

  // The arms both checked out.
  return false;
}

bool ProvenanceAnalysis::relatedPHI(const PHINode *A, const Value *B) {
  // If the values are PHIs in the same block, we can do a more precise as well
  // as efficient check: just check for relations between the values on
  // corresponding edges.
  if (const PHINode *PNB = dyn_cast<PHINode>(B))
    if (PNB->getParent() == A->getParent()) {
      for (unsigned i = 0, e = A->getNumIncomingValues(); i != e; ++i)
        if (related(A->getIncomingValue(i),
                    PNB->getIncomingValueForBlock(A->getIncomingBlock(i))))
          return true;
      return false;
    }

  // Check each unique source of the PHI node against B.
  SmallPtrSet<const Value *, 4> UniqueSrc;
  for (unsigned i = 0, e = A->getNumIncomingValues(); i != e; ++i) {
    const Value *PV1 = A->getIncomingValue(i);
    if (UniqueSrc.insert(PV1) && related(PV1, B))
      return true;
  }

  // All of the arms checked out.
  return false;
}

/// isStoredObjCPointer - Test if the value of P, or any value covered by its
/// provenance, is ever stored within the function (not counting callees).
static bool isStoredObjCPointer(const Value *P) {
  SmallPtrSet<const Value *, 8> Visited;
  SmallVector<const Value *, 8> Worklist;
  Worklist.push_back(P);
  Visited.insert(P);
  do {
    P = Worklist.pop_back_val();
    for (Value::const_use_iterator UI = P->use_begin(), UE = P->use_end();
         UI != UE; ++UI) {
      const User *Ur = *UI;
      if (isa<StoreInst>(Ur)) {
        if (UI.getOperandNo() == 0)
          // The pointer is stored.
          return true;
        // The pointer is stored through.
        continue;
      }
      if (isa<CallInst>(Ur))
        // The pointer is passed as an argument, ignore this.
        continue;
      if (isa<PtrToIntInst>(P))
        // Assume the worst.
        return true;
      if (Visited.insert(Ur))
        Worklist.push_back(Ur);
    }
  } while (!Worklist.empty());

  // Everything checked out.
  return false;
}

bool ProvenanceAnalysis::relatedCheck(const Value *A, const Value *B) {
  // Skip past provenance pass-throughs.
  A = GetUnderlyingObjCPtr(A);
  B = GetUnderlyingObjCPtr(B);

  // Quick check.
  if (A == B)
    return true;

  // Ask regular AliasAnalysis, for a first approximation.
  switch (AA->alias(A, B)) {
  case AliasAnalysis::NoAlias:
    return false;
  case AliasAnalysis::MustAlias:
  case AliasAnalysis::PartialAlias:
    return true;
  case AliasAnalysis::MayAlias:
    break;
  }

  bool AIsIdentified = IsObjCIdentifiedObject(A);
  bool BIsIdentified = IsObjCIdentifiedObject(B);

  // An ObjC-Identified object can't alias a load if it is never locally stored.
  if (AIsIdentified) {
    if (BIsIdentified) {
      // If both pointers have provenance, they can be directly compared.
      return A == B;
    }
    if (isa<LoadInst>(B))
      return isStoredObjCPointer(A);
  } else {
    if (BIsIdentified && isa<LoadInst>(A))
      return isStoredObjCPointer(B);
  }

  // Special handling for PHI and Select.
  if (const PHINode *PN = dyn_cast<PHINode>(A))
    return relatedPHI(PN, B);
  if (const PHINode *PN = dyn_cast<PHINode>(B))
    return relatedPHI(PN, A);
  if (const SelectInst *S = dyn_cast<SelectInst>(A))
    return relatedSelect(S, B);
  if (const SelectInst *S = dyn_cast<SelectInst>(B))
    return relatedSelect(S, A);

  // Conservative.
  return true;
}

bool ProvenanceAnalysis::related(const Value *A, const Value *B) {
  // Begin by inserting a conservative value into the map. If the insertion
  // fails, we have the answer already. If it succeeds, leave it there until we
  // compute the real answer to guard against recursive queries.
  if (A > B) std::swap(A, B);
  std::pair<CachedResultsTy::iterator, bool> Pair =
    CachedResults.insert(std::make_pair(ValuePairTy(A, B), true));
  if (!Pair.second)
    return Pair.first->second;

  bool Result = relatedCheck(A, B);
  CachedResults[ValuePairTy(A, B)] = Result;
  return Result;
}

namespace {
  // Sequence - A sequence of states that a pointer may go through in which an
  // objc_retain and objc_release are actually needed.
  enum Sequence {
    S_None,
    S_Retain,         ///< objc_retain(x)
    S_CanRelease,     ///< foo(x) -- x could possibly see a ref count decrement
    S_Use,            ///< any use of x
    S_Stop,           ///< like S_Release, but code motion is stopped
    S_Release,        ///< objc_release(x)
    S_MovableRelease  ///< objc_release(x), !clang.imprecise_release
  };
}
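// For example (illustrative), in a top-down scan a pointer %x might step
// through these states:
//   %0 = call i8* @objc_retain(i8* %x)   ; S_Retain
//   call void @opaque()                  ; S_CanRelease: could decrement
//   %1 = icmp eq i8* %x, %y              ; S_Use: uses %x without releasing
//   call void @objc_release(i8* %x)      ; S_Release, or S_MovableRelease
//                                        ; when tagged !clang.imprecise_release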

static Sequence MergeSeqs(Sequence A, Sequence B, bool TopDown) {
  // The easy cases.
  if (A == B)
    return A;
  if (A == S_None || B == S_None)
    return S_None;

  if (A > B) std::swap(A, B);
  if (TopDown) {
    // Choose the side which is further along in the sequence.
    if ((A == S_Retain || A == S_CanRelease) &&
        (B == S_CanRelease || B == S_Use))
      return B;
  } else {
    // Choose the side which is further along in the sequence.
    if ((A == S_Use || A == S_CanRelease) &&
        (B == S_Use || B == S_Release || B == S_Stop || B == S_MovableRelease))
      return A;
    // If both sides are releases, choose the more conservative one.
    if (A == S_Stop && (B == S_Release || B == S_MovableRelease))
      return A;
    if (A == S_Release && B == S_MovableRelease)
      return A;
  }

  return S_None;
}
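// For example, merging S_Retain with S_Use top-down yields S_Use (the state
// further along the sequence), while merging S_Stop with S_MovableRelease
// bottom-up yields S_Stop, the more conservative of the two release states.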

namespace {
  /// RRInfo - Unidirectional information about either a
  /// retain-decrement-use-release sequence or release-use-decrement-retain
  /// reverse sequence.
  struct RRInfo {
    /// KnownSafe - After an objc_retain, the reference count of the referenced
    /// object is known to be positive. Similarly, before an objc_release, the
    /// reference count of the referenced object is known to be positive. If
    /// there are retain-release pairs in code regions where the retain count
    /// is known to be positive, they can be eliminated, regardless of any side
    /// effects between them.
    ///
    /// Also, a retain+release pair nested within another retain+release
    /// pair all on the known same pointer value can be eliminated, regardless
    /// of any intervening side effects.
    ///
    /// KnownSafe is true when either of these conditions is satisfied.
    bool KnownSafe;

    /// IsRetainBlock - True if the Calls are objc_retainBlock calls (as
    /// opposed to objc_retain calls).
    bool IsRetainBlock;

    /// IsTailCallRelease - True if the objc_release calls are all marked
    /// with the "tail" keyword.
    bool IsTailCallRelease;

    /// ReleaseMetadata - If the Calls are objc_release calls and they all have
    /// a clang.imprecise_release tag, this is the metadata tag.
    MDNode *ReleaseMetadata;

    /// Calls - For a top-down sequence, the set of objc_retains or
    /// objc_retainBlocks. For bottom-up, the set of objc_releases.
    SmallPtrSet<Instruction *, 2> Calls;

    /// ReverseInsertPts - The set of optimal insert positions for
    /// moving calls in the opposite sequence.
    SmallPtrSet<Instruction *, 2> ReverseInsertPts;

    RRInfo() :
      KnownSafe(false), IsRetainBlock(false), IsTailCallRelease(false),
      ReleaseMetadata(0) {}

    void clear();
  };
}

void RRInfo::clear() {
  KnownSafe = false;
  IsRetainBlock = false;
  IsTailCallRelease = false;
  ReleaseMetadata = 0;
  Calls.clear();
  ReverseInsertPts.clear();
}

namespace {
  /// PtrState - This class summarizes several per-pointer runtime properties
  /// which are propagated through the flow graph.
  class PtrState {
    /// RefCount - The known minimum number of reference count increments.
    unsigned RefCount;

    /// NestCount - The known minimum level of retain+release nesting.
    unsigned NestCount;

    /// Seq - The current position in the sequence.
    Sequence Seq;

  public:
    /// RRI - Unidirectional information about the current sequence.
    /// TODO: Encapsulate this better.
    RRInfo RRI;

    PtrState() : RefCount(0), NestCount(0), Seq(S_None) {}

    void SetAtLeastOneRefCount() {
      if (RefCount == 0) RefCount = 1;
    }

    void IncrementRefCount() {
      if (RefCount != UINT_MAX) ++RefCount;
    }

    void DecrementRefCount() {
      if (RefCount != 0) --RefCount;
    }

    bool IsKnownIncremented() const {
      return RefCount > 0;
    }

    void IncrementNestCount() {
      if (NestCount != UINT_MAX) ++NestCount;
    }

    void DecrementNestCount() {
      if (NestCount != 0) --NestCount;
    }

    bool IsKnownNested() const {
      return NestCount > 0;
    }

    void SetSeq(Sequence NewSeq) {
      Seq = NewSeq;
    }

    void SetSeqToRelease(MDNode *M) {
      if (Seq == S_None || Seq == S_Use) {
        Seq = M ? S_MovableRelease : S_Release;
        RRI.ReleaseMetadata = M;
      } else if (Seq != S_MovableRelease || RRI.ReleaseMetadata != M) {
        Seq = S_Release;
        RRI.ReleaseMetadata = 0;
      }
    }

    Sequence GetSeq() const {
      return Seq;
    }

    void ClearSequenceProgress() {
      Seq = S_None;
      RRI.clear();
    }

    void Merge(const PtrState &Other, bool TopDown);
  };
}

void
PtrState::Merge(const PtrState &Other, bool TopDown) {
  Seq = MergeSeqs(Seq, Other.Seq, TopDown);
  RefCount = std::min(RefCount, Other.RefCount);
  NestCount = std::min(NestCount, Other.NestCount);

  // We can't merge a plain objc_retain with an objc_retainBlock.
  if (RRI.IsRetainBlock != Other.RRI.IsRetainBlock)
    Seq = S_None;

  if (Seq == S_None) {
    RRI.clear();
  } else {
    // Conservatively merge the ReleaseMetadata information.
    if (RRI.ReleaseMetadata != Other.RRI.ReleaseMetadata)
      RRI.ReleaseMetadata = 0;

    RRI.KnownSafe = RRI.KnownSafe && Other.RRI.KnownSafe;
    RRI.IsTailCallRelease = RRI.IsTailCallRelease && Other.RRI.IsTailCallRelease;
    RRI.Calls.insert(Other.RRI.Calls.begin(), Other.RRI.Calls.end());
    RRI.ReverseInsertPts.insert(Other.RRI.ReverseInsertPts.begin(),
                                Other.RRI.ReverseInsertPts.end());
  }
}

namespace {
  /// BBState - Per-BasicBlock state.
  class BBState {
    /// TopDownPathCount - The number of unique control paths from the entry
    /// which can reach this block.
    unsigned TopDownPathCount;

    /// BottomUpPathCount - The number of unique control paths to exits
    /// from this block.
    unsigned BottomUpPathCount;

    /// MapTy - A type for PerPtrTopDown and PerPtrBottomUp.
    typedef MapVector<const Value *, PtrState> MapTy;

    /// PerPtrTopDown - The top-down traversal uses this to record information
    /// known about a pointer at the bottom of each block.
    MapTy PerPtrTopDown;

    /// PerPtrBottomUp - The bottom-up traversal uses this to record information
    /// known about a pointer at the top of each block.
    MapTy PerPtrBottomUp;

  public:
    BBState() : TopDownPathCount(0), BottomUpPathCount(0) {}

    typedef MapTy::iterator ptr_iterator;
    typedef MapTy::const_iterator ptr_const_iterator;

    ptr_iterator top_down_ptr_begin() { return PerPtrTopDown.begin(); }
    ptr_iterator top_down_ptr_end() { return PerPtrTopDown.end(); }
    ptr_const_iterator top_down_ptr_begin() const {
      return PerPtrTopDown.begin();
    }
    ptr_const_iterator top_down_ptr_end() const {
      return PerPtrTopDown.end();
    }

    ptr_iterator bottom_up_ptr_begin() { return PerPtrBottomUp.begin(); }
    ptr_iterator bottom_up_ptr_end() { return PerPtrBottomUp.end(); }
    ptr_const_iterator bottom_up_ptr_begin() const {
      return PerPtrBottomUp.begin();
    }
    ptr_const_iterator bottom_up_ptr_end() const {
      return PerPtrBottomUp.end();
    }

    /// SetAsEntry - Mark this block as being an entry block, which has one
    /// path from the entry by definition.
    void SetAsEntry() { TopDownPathCount = 1; }

    /// SetAsExit - Mark this block as being an exit block, which has one
    /// path to an exit by definition.
    void SetAsExit() { BottomUpPathCount = 1; }

    PtrState &getPtrTopDownState(const Value *Arg) {
      return PerPtrTopDown[Arg];
    }

    PtrState &getPtrBottomUpState(const Value *Arg) {
      return PerPtrBottomUp[Arg];
    }

    void clearBottomUpPointers() {
      PerPtrBottomUp.clear();
    }

    void clearTopDownPointers() {
      PerPtrTopDown.clear();
    }

    void InitFromPred(const BBState &Other);
    void InitFromSucc(const BBState &Other);
    void MergePred(const BBState &Other);
    void MergeSucc(const BBState &Other);

    /// GetAllPathCount - Return the number of possible unique paths from an
    /// entry to an exit which pass through this block. This is only valid
    /// after both the top-down and bottom-up traversals are complete.
    unsigned GetAllPathCount() const {
      return TopDownPathCount * BottomUpPathCount;
    }
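
    // For example, in a diamond CFG (an entry block branching to two blocks
    // that rejoin at one exit), the join block has TopDownPathCount == 2 and
    // BottomUpPathCount == 1, so GetAllPathCount() returns 2: both
    // entry-to-exit paths pass through it.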

    /// IsVisitedTopDown - Test whether the block for this BBState has been
    /// visited by the top-down portion of the algorithm.
    bool isVisitedTopDown() const {
      return TopDownPathCount != 0;
    }
  };
}

void BBState::InitFromPred(const BBState &Other) {
  PerPtrTopDown = Other.PerPtrTopDown;
  TopDownPathCount = Other.TopDownPathCount;
}

void BBState::InitFromSucc(const BBState &Other) {
  PerPtrBottomUp = Other.PerPtrBottomUp;
  BottomUpPathCount = Other.BottomUpPathCount;
}

/// MergePred - The top-down traversal uses this to merge information about
/// predecessors to form the initial state for a new block.
void BBState::MergePred(const BBState &Other) {
  // Other.TopDownPathCount can be 0, in which case it is either dead or a
  // loop backedge. Loop backedges are special.
  TopDownPathCount += Other.TopDownPathCount;

  // For each entry in the other set, if our set has an entry with the same key,
  // merge the entries. Otherwise, copy the entry and merge it with an empty
  // entry.
  for (ptr_const_iterator MI = Other.top_down_ptr_begin(),
       ME = Other.top_down_ptr_end(); MI != ME; ++MI) {
    std::pair<ptr_iterator, bool> Pair = PerPtrTopDown.insert(*MI);
    Pair.first->second.Merge(Pair.second ? PtrState() : MI->second,
                             /*TopDown=*/true);
  }

  // For each entry in our set, if the other set doesn't have an entry with the
  // same key, force it to merge with an empty entry.
  for (ptr_iterator MI = top_down_ptr_begin(),
       ME = top_down_ptr_end(); MI != ME; ++MI)
    if (Other.PerPtrTopDown.find(MI->first) == Other.PerPtrTopDown.end())
      MI->second.Merge(PtrState(), /*TopDown=*/true);
}

/// MergeSucc - The bottom-up traversal uses this to merge information about
/// successors to form the initial state for a new block.
void BBState::MergeSucc(const BBState &Other) {
  // Other.BottomUpPathCount can be 0, in which case it is either dead or a
  // loop backedge. Loop backedges are special.
  BottomUpPathCount += Other.BottomUpPathCount;

  // For each entry in the other set, if our set has an entry with the
  // same key, merge the entries. Otherwise, copy the entry and merge
  // it with an empty entry.
  for (ptr_const_iterator MI = Other.bottom_up_ptr_begin(),
       ME = Other.bottom_up_ptr_end(); MI != ME; ++MI) {
    std::pair<ptr_iterator, bool> Pair = PerPtrBottomUp.insert(*MI);
    Pair.first->second.Merge(Pair.second ? PtrState() : MI->second,
                             /*TopDown=*/false);
  }

  // For each entry in our set, if the other set doesn't have an entry
  // with the same key, force it to merge with an empty entry.
  for (ptr_iterator MI = bottom_up_ptr_begin(),
       ME = bottom_up_ptr_end(); MI != ME; ++MI)
    if (Other.PerPtrBottomUp.find(MI->first) == Other.PerPtrBottomUp.end())
      MI->second.Merge(PtrState(), /*TopDown=*/false);
}

namespace {
  /// ObjCARCOpt - The main ARC optimization pass.
  class ObjCARCOpt : public FunctionPass {
    bool Changed;
    ProvenanceAnalysis PA;

    /// Run - A flag indicating whether this optimization pass should run.
    bool Run;

    /// RetainRVCallee, etc. - Declarations for ObjC runtime
    /// functions, for use in creating calls to them. These are initialized
    /// lazily to avoid cluttering up the Module with unused declarations.
    Constant *RetainRVCallee, *AutoreleaseRVCallee, *ReleaseCallee,
             *RetainCallee, *RetainBlockCallee, *AutoreleaseCallee;

    /// UsedInThisFunction - Flags which determine whether each of the
    /// interesting runtime functions is in fact used in the current function.
    unsigned UsedInThisFunction;

    /// ImpreciseReleaseMDKind - The Metadata Kind for clang.imprecise_release
    /// metadata.
    unsigned ImpreciseReleaseMDKind;

    Constant *getRetainRVCallee(Module *M);
    Constant *getAutoreleaseRVCallee(Module *M);
    Constant *getReleaseCallee(Module *M);
    Constant *getRetainCallee(Module *M);
    Constant *getRetainBlockCallee(Module *M);
    Constant *getAutoreleaseCallee(Module *M);

    void OptimizeRetainCall(Function &F, Instruction *Retain);
    bool OptimizeRetainRVCall(Function &F, Instruction *RetainRV);
    void OptimizeAutoreleaseRVCall(Function &F, Instruction *AutoreleaseRV);
    void OptimizeIndividualCalls(Function &F);

    void CheckForCFGHazards(const BasicBlock *BB,
                            DenseMap<const BasicBlock *, BBState> &BBStates,
                            BBState &MyStates) const;
    bool VisitBottomUp(BasicBlock *BB,
                       DenseMap<const BasicBlock *, BBState> &BBStates,
                       MapVector<Value *, RRInfo> &Retains);
    bool VisitTopDown(BasicBlock *BB,
                      DenseMap<const BasicBlock *, BBState> &BBStates,
                      DenseMap<Value *, RRInfo> &Releases);
    bool Visit(Function &F,
               DenseMap<const BasicBlock *, BBState> &BBStates,
               MapVector<Value *, RRInfo> &Retains,
               DenseMap<Value *, RRInfo> &Releases);

    void MoveCalls(Value *Arg, RRInfo &RetainsToMove, RRInfo &ReleasesToMove,
                   MapVector<Value *, RRInfo> &Retains,
                   DenseMap<Value *, RRInfo> &Releases,
                   SmallVectorImpl<Instruction *> &DeadInsts,
                   Module *M);

    bool PerformCodePlacement(DenseMap<const BasicBlock *, BBState> &BBStates,
                              MapVector<Value *, RRInfo> &Retains,
                              DenseMap<Value *, RRInfo> &Releases,
                              Module *M);

    void OptimizeWeakCalls(Function &F);

    bool OptimizeSequences(Function &F);

    void OptimizeReturns(Function &F);

    virtual void getAnalysisUsage(AnalysisUsage &AU) const;
    virtual bool doInitialization(Module &M);
    virtual bool runOnFunction(Function &F);
    virtual void releaseMemory();

  public:
    static char ID;
    ObjCARCOpt() : FunctionPass(ID) {
      initializeObjCARCOptPass(*PassRegistry::getPassRegistry());
    }
  };
}

char ObjCARCOpt::ID = 0;
INITIALIZE_PASS_BEGIN(ObjCARCOpt,
                      "objc-arc", "ObjC ARC optimization", false, false)
INITIALIZE_PASS_DEPENDENCY(ObjCARCAliasAnalysis)
INITIALIZE_PASS_END(ObjCARCOpt,
                    "objc-arc", "ObjC ARC optimization", false, false)

Pass *llvm::createObjCARCOptPass() {
  return new ObjCARCOpt();
}

void ObjCARCOpt::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<ObjCARCAliasAnalysis>();
  AU.addRequired<AliasAnalysis>();
  // ARC optimization doesn't currently split critical edges.
  AU.setPreservesCFG();
}

Constant *ObjCARCOpt::getRetainRVCallee(Module *M) {
  if (!RetainRVCallee) {
    LLVMContext &C = M->getContext();
    Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
    std::vector<Type *> Params;
    Params.push_back(I8X);
    FunctionType *FTy =
      FunctionType::get(I8X, Params, /*isVarArg=*/false);
    AttrListPtr Attributes;
    Attributes.addAttr(~0u, Attribute::NoUnwind);
    RetainRVCallee =
      M->getOrInsertFunction("objc_retainAutoreleasedReturnValue", FTy,
                             Attributes);
  }
  return RetainRVCallee;
}

Constant *ObjCARCOpt::getAutoreleaseRVCallee(Module *M) {
  if (!AutoreleaseRVCallee) {
    LLVMContext &C = M->getContext();
    Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
    std::vector<Type *> Params;
    Params.push_back(I8X);
    FunctionType *FTy =
      FunctionType::get(I8X, Params, /*isVarArg=*/false);
    AttrListPtr Attributes;
    Attributes.addAttr(~0u, Attribute::NoUnwind);
    AutoreleaseRVCallee =
      M->getOrInsertFunction("objc_autoreleaseReturnValue", FTy,
                             Attributes);
  }
  return AutoreleaseRVCallee;
}

Constant *ObjCARCOpt::getReleaseCallee(Module *M) {
  if (!ReleaseCallee) {
    LLVMContext &C = M->getContext();
    std::vector<Type *> Params;
    Params.push_back(PointerType::getUnqual(Type::getInt8Ty(C)));
    AttrListPtr Attributes;
    Attributes.addAttr(~0u, Attribute::NoUnwind);
    ReleaseCallee =
      M->getOrInsertFunction(
        "objc_release",
        FunctionType::get(Type::getVoidTy(C), Params, /*isVarArg=*/false),
        Attributes);
  }
  return ReleaseCallee;
}

Constant *ObjCARCOpt::getRetainCallee(Module *M) {
  if (!RetainCallee) {
    LLVMContext &C = M->getContext();
    std::vector<Type *> Params;
    Params.push_back(PointerType::getUnqual(Type::getInt8Ty(C)));
    AttrListPtr Attributes;
    Attributes.addAttr(~0u, Attribute::NoUnwind);
    RetainCallee =
      M->getOrInsertFunction(
        "objc_retain",
        FunctionType::get(Params[0], Params, /*isVarArg=*/false),
        Attributes);
  }
  return RetainCallee;
}
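// For example (a sketch): the first call to getRetainCallee adds a declaration
// equivalent to
//   declare i8* @objc_retain(i8*) nounwind
// to the module; subsequent calls return the cached Constant.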

Constant *ObjCARCOpt::getRetainBlockCallee(Module *M) {
  if (!RetainBlockCallee) {
    LLVMContext &C = M->getContext();
    std::vector<Type *> Params;
    Params.push_back(PointerType::getUnqual(Type::getInt8Ty(C)));
    AttrListPtr Attributes;
    Attributes.addAttr(~0u, Attribute::NoUnwind);
    RetainBlockCallee =
      M->getOrInsertFunction(
        "objc_retainBlock",
        FunctionType::get(Params[0], Params, /*isVarArg=*/false),
        Attributes);
  }
  return RetainBlockCallee;
}

Constant *ObjCARCOpt::getAutoreleaseCallee(Module *M) {
  if (!AutoreleaseCallee) {
    LLVMContext &C = M->getContext();
    std::vector<Type *> Params;
    Params.push_back(PointerType::getUnqual(Type::getInt8Ty(C)));
    AttrListPtr Attributes;
    Attributes.addAttr(~0u, Attribute::NoUnwind);
    AutoreleaseCallee =
      M->getOrInsertFunction(
        "objc_autorelease",
        FunctionType::get(Params[0], Params, /*isVarArg=*/false),
        Attributes);
  }
  return AutoreleaseCallee;
}

/// CanAlterRefCount - Test whether the given instruction can result in a
/// reference count modification (positive or negative) for the pointer's
/// object.
static bool
CanAlterRefCount(const Instruction *Inst, const Value *Ptr,
                 ProvenanceAnalysis &PA, InstructionClass Class) {
  switch (Class) {
  case IC_Autorelease:
  case IC_AutoreleaseRV:
  case IC_User:
    // These operations never directly modify a reference count.
    return false;
  default: break;
  }

  ImmutableCallSite CS = static_cast<const Value *>(Inst);
  assert(CS && "Only calls can alter reference counts!");

  // See if AliasAnalysis can help us with the call.
  AliasAnalysis::ModRefBehavior MRB = PA.getAA()->getModRefBehavior(CS);
  if (AliasAnalysis::onlyReadsMemory(MRB))
    return false;
  if (AliasAnalysis::onlyAccessesArgPointees(MRB)) {
    for (ImmutableCallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end();
         I != E; ++I) {
      const Value *Op = *I;
      if (IsPotentialUse(Op) && PA.related(Ptr, Op))
        return true;
    }
    return false;
  }

  // Assume the worst.
  return true;
}

/// CanUse - Test whether the given instruction can "use" the given pointer's
/// object in a way that requires the reference count to be positive.
static bool
CanUse(const Instruction *Inst, const Value *Ptr, ProvenanceAnalysis &PA,
       InstructionClass Class) {
  // IC_Call operations (as opposed to IC_CallOrUser) never "use" objc pointers.
  if (Class == IC_Call)
    return false;

  // Consider various instructions which may have pointer arguments which are
  // not "uses".
  if (const ICmpInst *ICI = dyn_cast<ICmpInst>(Inst)) {
    // Comparing a pointer with null, or any other constant, isn't really a use,
    // because we don't care what the pointer points to, or about the values
    // of any other dynamic reference-counted pointers.
    if (!IsPotentialUse(ICI->getOperand(1)))
      return false;
  } else if (ImmutableCallSite CS = static_cast<const Value *>(Inst)) {
    // For calls, just check the arguments (and not the callee operand).
    for (ImmutableCallSite::arg_iterator OI = CS.arg_begin(),
         OE = CS.arg_end(); OI != OE; ++OI) {
      const Value *Op = *OI;
      if (IsPotentialUse(Op) && PA.related(Ptr, Op))
        return true;
    }
    return false;
  } else if (const StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
    // Special-case stores, because we don't care about the stored value, just
    // the store address.
    const Value *Op = GetUnderlyingObjCPtr(SI->getPointerOperand());
    // If we can't tell what the underlying object was, assume there is a
    // dependence.
    return IsPotentialUse(Op) && PA.related(Op, Ptr);
  }

  // Check each operand for a match.
  for (User::const_op_iterator OI = Inst->op_begin(), OE = Inst->op_end();
       OI != OE; ++OI) {
    const Value *Op = *OI;
    if (IsPotentialUse(Op) && PA.related(Ptr, Op))
      return true;
  }
  return false;
}

/// CanInterruptRV - Test whether the given instruction can autorelease
/// any pointer or cause an autoreleasepool pop.
static bool
CanInterruptRV(InstructionClass Class) {
  switch (Class) {
  case IC_AutoreleasepoolPop:
  case IC_CallOrUser:
  case IC_Call:
  case IC_Autorelease:
  case IC_AutoreleaseRV:
  case IC_FusedRetainAutorelease:
  case IC_FusedRetainAutoreleaseRV:
    return true;
  default:
    return false;
  }
}

namespace {
  /// DependenceKind - There are several kinds of dependence-like concepts in
  /// use here.
  enum DependenceKind {
    NeedsPositiveRetainCount,
    CanChangeRetainCount,
    RetainAutoreleaseDep,       ///< Blocks objc_retainAutorelease.
    RetainAutoreleaseRVDep,     ///< Blocks objc_retainAutoreleaseReturnValue.
    RetainRVDep                 ///< Blocks objc_retainAutoreleasedReturnValue.
  };
}

/// Depends - Test if there can be dependencies on Inst through Arg. This
/// function only tests dependencies relevant for removing pairs of calls.
static bool
Depends(DependenceKind Flavor, Instruction *Inst, const Value *Arg,
        ProvenanceAnalysis &PA) {
  // If we've reached the definition of Arg, stop.
  if (Inst == Arg)
    return true;

  switch (Flavor) {
  case NeedsPositiveRetainCount: {
    InstructionClass Class = GetInstructionClass(Inst);
    switch (Class) {
    case IC_AutoreleasepoolPop:
    case IC_AutoreleasepoolPush:
    case IC_None:
      return false;
    default:
      return CanUse(Inst, Arg, PA, Class);
    }
  }

  case CanChangeRetainCount: {
    InstructionClass Class = GetInstructionClass(Inst);
    switch (Class) {
    case IC_AutoreleasepoolPop:
      // Conservatively assume this can decrement any count.
      return true;
    case IC_AutoreleasepoolPush:
    case IC_None:
      return false;
    default:
      return CanAlterRefCount(Inst, Arg, PA, Class);
    }
  }

  case RetainAutoreleaseDep:
    switch (GetBasicInstructionClass(Inst)) {
    case IC_AutoreleasepoolPop:
      // Don't merge an objc_autorelease with an objc_retain inside a different
      // autoreleasepool scope.
      return true;
    case IC_Retain:
    case IC_RetainRV:
      // Check for a retain of the same pointer for merging.
      return GetObjCArg(Inst) == Arg;
    default:
      // Nothing else matters for objc_retainAutorelease formation.
      return false;
    }
    break;

  case RetainAutoreleaseRVDep: {
    InstructionClass Class = GetBasicInstructionClass(Inst);
    switch (Class) {
    case IC_Retain:
    case IC_RetainRV:
      // Check for a retain of the same pointer for merging.
      return GetObjCArg(Inst) == Arg;
    default:
      // Anything that can autorelease interrupts
      // retainAutoreleaseReturnValue formation.
      return CanInterruptRV(Class);
    }
    break;
  }

  case RetainRVDep:
    return CanInterruptRV(GetBasicInstructionClass(Inst));
  }

  llvm_unreachable("Invalid dependence flavor");
  return true;
}
1819 /// FindDependencies - Walk up the CFG from StartPos (which is in StartBB) and
1820 /// find local and non-local dependencies on Arg.
1821 /// TODO: Cache results?
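///
/// As an illustrative sketch (the @use_pointer callee is hypothetical), a
/// NeedsPositiveRetainCount query for %x starting at the objc_release walks
/// upwards and stops at the nearest instruction on each path which "uses" %x:
///
///   %0 = call i8* @objc_retain(i8* %x)
///   call void @use_pointer(i8* %x)        ; <-- found as the dependency
///   call void @objc_release(i8* %x)       ; <-- StartInst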
1823 FindDependencies(DependenceKind Flavor,
1825 BasicBlock *StartBB, Instruction *StartInst,
1826 SmallPtrSet<Instruction *, 4> &DependingInstructions,
1827 SmallPtrSet<const BasicBlock *, 4> &Visited,
1828 ProvenanceAnalysis &PA) {
1829 BasicBlock::iterator StartPos = StartInst;
1831 SmallVector<std::pair<BasicBlock *, BasicBlock::iterator>, 4> Worklist;
1832 Worklist.push_back(std::make_pair(StartBB, StartPos));
1834 std::pair<BasicBlock *, BasicBlock::iterator> Pair =
1835 Worklist.pop_back_val();
1836 BasicBlock *LocalStartBB = Pair.first;
1837 BasicBlock::iterator LocalStartPos = Pair.second;
1838 BasicBlock::iterator StartBBBegin = LocalStartBB->begin();
1840 if (LocalStartPos == StartBBBegin) {
1841 pred_iterator PI(LocalStartBB), PE(LocalStartBB, false);
1843 // If we've reached the function entry, produce a null dependence.
1844 DependingInstructions.insert(0);
1846 // Add the predecessors to the worklist.
1848 BasicBlock *PredBB = *PI;
1849 if (Visited.insert(PredBB))
1850 Worklist.push_back(std::make_pair(PredBB, PredBB->end()));
1851 } while (++PI != PE);
1855 Instruction *Inst = --LocalStartPos;
1856 if (Depends(Flavor, Inst, Arg, PA)) {
1857 DependingInstructions.insert(Inst);
1861 } while (!Worklist.empty());
1863 // Determine whether the original StartBB post-dominates all of the blocks we
1864   // visited. If not, insert a sentinel indicating that most optimizations are
  // not safe.
1866 for (SmallPtrSet<const BasicBlock *, 4>::const_iterator I = Visited.begin(),
1867 E = Visited.end(); I != E; ++I) {
1868 const BasicBlock *BB = *I;
1871 const TerminatorInst *TI = cast<TerminatorInst>(&BB->back());
1872 for (succ_const_iterator SI(TI), SE(TI, false); SI != SE; ++SI) {
1873 const BasicBlock *Succ = *SI;
1874 if (Succ != StartBB && !Visited.count(Succ)) {
1875 DependingInstructions.insert(reinterpret_cast<Instruction *>(-1));
1882 static bool isNullOrUndef(const Value *V) {
1883 return isa<ConstantPointerNull>(V) || isa<UndefValue>(V);
1886 static bool isNoopInstruction(const Instruction *I) {
1887 return isa<BitCastInst>(I) ||
1888 (isa<GetElementPtrInst>(I) &&
1889 cast<GetElementPtrInst>(I)->hasAllZeroIndices());
1892 /// OptimizeRetainCall - Turn objc_retain into
1893 /// objc_retainAutoreleasedReturnValue if the operand is a return value.
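///
/// For example (an illustrative sketch; @foo is a hypothetical callee):
///
///   %call = call i8* @foo()
///   %0 = call i8* @objc_retain(i8* %call)
/// =>
///   %call = call i8* @foo()
///   %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call)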
1895 ObjCARCOpt::OptimizeRetainCall(Function &F, Instruction *Retain) {
1896 CallSite CS(GetObjCArg(Retain));
1897 Instruction *Call = CS.getInstruction();
1899 if (Call->getParent() != Retain->getParent()) return;
1901 // Check that the call is next to the retain.
1902 BasicBlock::iterator I = Call;
1904 while (isNoopInstruction(I)) ++I;
1908   // Turn it into an objc_retainAutoreleasedReturnValue.
1911 cast<CallInst>(Retain)->setCalledFunction(getRetainRVCallee(F.getParent()));
1914 /// OptimizeRetainRVCall - Turn objc_retainAutoreleasedReturnValue into
1915 /// objc_retain if the operand is not a return value. Or, if it can be
1916 /// paired with an objc_autoreleaseReturnValue, delete the pair and return true.
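///
/// For example (illustrative), an adjacent pair on the same pointer:
///
///   %0 = call i8* @objc_autoreleaseReturnValue(i8* %x)
///   %1 = call i8* @objc_retainAutoreleasedReturnValue(i8* %x)
///
/// cancels out, and both calls can be deleted.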
1919 ObjCARCOpt::OptimizeRetainRVCall(Function &F, Instruction *RetainRV) {
1920 // Check for the argument being from an immediately preceding call.
1921 Value *Arg = GetObjCArg(RetainRV);
1923 if (Instruction *Call = CS.getInstruction())
1924 if (Call->getParent() == RetainRV->getParent()) {
1925 BasicBlock::iterator I = Call;
1927 while (isNoopInstruction(I)) ++I;
1928 if (&*I == RetainRV)
1932 // Check for being preceded by an objc_autoreleaseReturnValue on the same
1933 // pointer. In this case, we can delete the pair.
1934 BasicBlock::iterator I = RetainRV, Begin = RetainRV->getParent()->begin();
1936 do --I; while (I != Begin && isNoopInstruction(I));
1937 if (GetBasicInstructionClass(I) == IC_AutoreleaseRV &&
1938 GetObjCArg(I) == Arg) {
1941 EraseInstruction(I);
1942 EraseInstruction(RetainRV);
1947   // Turn it into a plain objc_retain.
1950 cast<CallInst>(RetainRV)->setCalledFunction(getRetainCallee(F.getParent()));
1954 /// OptimizeAutoreleaseRVCall - Turn objc_autoreleaseReturnValue into
1955 /// objc_autorelease if the result is not used as a return value.
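///
/// For example (an illustrative sketch; @use_pointer is hypothetical), when
/// no use of the pointer reaches a ret:
///
///   %0 = call i8* @objc_autoreleaseReturnValue(i8* %x)
///   call void @use_pointer(i8* %x)
/// =>
///   %0 = call i8* @objc_autorelease(i8* %x)
///   call void @use_pointer(i8* %x)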
1957 ObjCARCOpt::OptimizeAutoreleaseRVCall(Function &F, Instruction *AutoreleaseRV) {
1958 // Check for a return of the pointer value.
1959 const Value *Ptr = GetObjCArg(AutoreleaseRV);
1960 SmallVector<const Value *, 2> Users;
1961 Users.push_back(Ptr);
1963 Ptr = Users.pop_back_val();
1964 for (Value::const_use_iterator UI = Ptr->use_begin(), UE = Ptr->use_end();
1966 const User *I = *UI;
1967 if (isa<ReturnInst>(I) || GetBasicInstructionClass(I) == IC_RetainRV)
1969 if (isa<BitCastInst>(I))
1972 } while (!Users.empty());
1976 cast<CallInst>(AutoreleaseRV)->
1977 setCalledFunction(getAutoreleaseCallee(F.getParent()));
1980 /// OptimizeIndividualCalls - Visit each call, one at a time, and make
1981 /// simplifications without doing any additional analysis.
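///
/// For example (illustrative), an ARC call on a null pointer is a no-op and
/// is simply deleted:
///
///   call void @objc_release(i8* null)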
1982 void ObjCARCOpt::OptimizeIndividualCalls(Function &F) {
1983 // Reset all the flags in preparation for recomputing them.
1984 UsedInThisFunction = 0;
1986 // Visit all objc_* calls in F.
1987 for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
1988 Instruction *Inst = &*I++;
1989 InstructionClass Class = GetBasicInstructionClass(Inst);
1994 // Delete no-op casts. These function calls have special semantics, but
1995 // the semantics are entirely implemented via lowering in the front-end,
1996 // so by the time they reach the optimizer, they are just no-op calls
1997 // which return their argument.
1999       // There are gray areas here, as the ability to cast reference-counted
2000       // pointers to raw void* and back allows code to break ARC assumptions;
2001       // however, these are currently considered to be unimportant.
2005 EraseInstruction(Inst);
2008 // If the pointer-to-weak-pointer is null, it's undefined behavior.
2011 case IC_LoadWeakRetained:
2013 case IC_DestroyWeak: {
2014 CallInst *CI = cast<CallInst>(Inst);
2015 if (isNullOrUndef(CI->getArgOperand(0))) {
2016 Type *Ty = CI->getArgOperand(0)->getType();
2017 new StoreInst(UndefValue::get(cast<PointerType>(Ty)->getElementType()),
2018 Constant::getNullValue(Ty),
2020 CI->replaceAllUsesWith(UndefValue::get(CI->getType()));
2021 CI->eraseFromParent();
2028 CallInst *CI = cast<CallInst>(Inst);
2029 if (isNullOrUndef(CI->getArgOperand(0)) ||
2030 isNullOrUndef(CI->getArgOperand(1))) {
2031 Type *Ty = CI->getArgOperand(0)->getType();
2032 new StoreInst(UndefValue::get(cast<PointerType>(Ty)->getElementType()),
2033 Constant::getNullValue(Ty),
2035 CI->replaceAllUsesWith(UndefValue::get(CI->getType()));
2036 CI->eraseFromParent();
2042 OptimizeRetainCall(F, Inst);
2045 if (OptimizeRetainRVCall(F, Inst))
2048 case IC_AutoreleaseRV:
2049 OptimizeAutoreleaseRVCall(F, Inst);
2053 // objc_autorelease(x) -> objc_release(x) if x is otherwise unused.
2054 if (IsAutorelease(Class) && Inst->use_empty()) {
2055 CallInst *Call = cast<CallInst>(Inst);
2056 const Value *Arg = Call->getArgOperand(0);
2057 Arg = FindSingleUseIdentifiedObject(Arg);
2062 // Create the declaration lazily.
2063 LLVMContext &C = Inst->getContext();
2065 CallInst::Create(getReleaseCallee(F.getParent()),
2066 Call->getArgOperand(0), "", Call);
2067 NewCall->setMetadata(ImpreciseReleaseMDKind,
2068 MDNode::get(C, ArrayRef<Value *>()));
2069 EraseInstruction(Call);
2075     // For functions which can never be passed stack arguments, add
    // a tail keyword.
2077 if (IsAlwaysTail(Class)) {
2079 cast<CallInst>(Inst)->setTailCall();
2082 // Set nounwind as needed.
2083 if (IsNoThrow(Class)) {
2085 cast<CallInst>(Inst)->setDoesNotThrow();
2088 if (!IsNoopOnNull(Class)) {
2089 UsedInThisFunction |= 1 << Class;
2093 const Value *Arg = GetObjCArg(Inst);
2095 // ARC calls with null are no-ops. Delete them.
2096 if (isNullOrUndef(Arg)) {
2099 EraseInstruction(Inst);
2103 // Keep track of which of retain, release, autorelease, and retain_block
2104 // are actually present in this function.
2105 UsedInThisFunction |= 1 << Class;
2107 // If Arg is a PHI, and one or more incoming values to the
2108 // PHI are null, and the call is control-equivalent to the PHI, and there
2109 // are no relevant side effects between the PHI and the call, the call
2110 // could be pushed up to just those paths with non-null incoming values.
2111 // For now, don't bother splitting critical edges for this.
2112 SmallVector<std::pair<Instruction *, const Value *>, 4> Worklist;
2113 Worklist.push_back(std::make_pair(Inst, Arg));
2115 std::pair<Instruction *, const Value *> Pair = Worklist.pop_back_val();
2119 const PHINode *PN = dyn_cast<PHINode>(Arg);
2122       // Determine if the PHI has any null operands, or any incoming
      // critical edges.
2124 bool HasNull = false;
2125 bool HasCriticalEdges = false;
2126 for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
2128 StripPointerCastsAndObjCCalls(PN->getIncomingValue(i));
2129 if (isNullOrUndef(Incoming))
2131 else if (cast<TerminatorInst>(PN->getIncomingBlock(i)->back())
2132 .getNumSuccessors() != 1) {
2133 HasCriticalEdges = true;
2137 // If we have null operands and no critical edges, optimize.
2138 if (!HasCriticalEdges && HasNull) {
2139 SmallPtrSet<Instruction *, 4> DependingInstructions;
2140 SmallPtrSet<const BasicBlock *, 4> Visited;
2142 // Check that there is nothing that cares about the reference
2143 // count between the call and the phi.
2144 FindDependencies(NeedsPositiveRetainCount, Arg,
2145 Inst->getParent(), Inst,
2146 DependingInstructions, Visited, PA);
2147 if (DependingInstructions.size() == 1 &&
2148 *DependingInstructions.begin() == PN) {
2151 // Clone the call into each predecessor that has a non-null value.
2152 CallInst *CInst = cast<CallInst>(Inst);
2153 Type *ParamTy = CInst->getArgOperand(0)->getType();
2154 for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
2156 StripPointerCastsAndObjCCalls(PN->getIncomingValue(i));
2157 if (!isNullOrUndef(Incoming)) {
2158 CallInst *Clone = cast<CallInst>(CInst->clone());
2159 Value *Op = PN->getIncomingValue(i);
2160 Instruction *InsertPos = &PN->getIncomingBlock(i)->back();
2161 if (Op->getType() != ParamTy)
2162 Op = new BitCastInst(Op, ParamTy, "", InsertPos);
2163 Clone->setArgOperand(0, Op);
2164 Clone->insertBefore(InsertPos);
2165 Worklist.push_back(std::make_pair(Clone, Incoming));
2168 // Erase the original call.
2169 EraseInstruction(CInst);
2173 } while (!Worklist.empty());
2177 /// CheckForCFGHazards - Check for critical edges, loop boundaries, irreducible
2178 /// control flow, or other CFG structures where moving code across the edge
2179 /// would result in it being executed more often.
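///
/// For example (an illustrative sketch), when a retain sequence for %x is
/// live across a loop back-edge:
///
///   loop:                                   ; preds = %entry, %loop
///     ...                                   ; sequence state for %x here
///     br i1 %c, label %loop, label %exit
///
/// the state on the back-edge successor matches the current state, so all
/// successor edges are required to match before any code motion is permitted.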
2181 ObjCARCOpt::CheckForCFGHazards(const BasicBlock *BB,
2182 DenseMap<const BasicBlock *, BBState> &BBStates,
2183 BBState &MyStates) const {
2184 // If any top-down local-use or possible-dec has a succ which is earlier in
2185 // the sequence, forget it.
2186 for (BBState::ptr_const_iterator I = MyStates.top_down_ptr_begin(),
2187 E = MyStates.top_down_ptr_end(); I != E; ++I)
2188 switch (I->second.GetSeq()) {
2191 const Value *Arg = I->first;
2192 const TerminatorInst *TI = cast<TerminatorInst>(&BB->back());
2193 bool SomeSuccHasSame = false;
2194 bool AllSuccsHaveSame = true;
2195 PtrState &S = MyStates.getPtrTopDownState(Arg);
2196 for (succ_const_iterator SI(TI), SE(TI, false); SI != SE; ++SI) {
2197 PtrState &SuccS = BBStates[*SI].getPtrBottomUpState(Arg);
2198 switch (SuccS.GetSeq()) {
2200 case S_CanRelease: {
2201 if (!S.RRI.KnownSafe && !SuccS.RRI.KnownSafe)
2202 S.ClearSequenceProgress();
2206 SomeSuccHasSame = true;
2210 case S_MovableRelease:
2211 if (!S.RRI.KnownSafe && !SuccS.RRI.KnownSafe)
2212 AllSuccsHaveSame = false;
2215 llvm_unreachable("bottom-up pointer in retain state!");
2218 // If the state at the other end of any of the successor edges
2219 // matches the current state, require all edges to match. This
2220 // guards against loops in the middle of a sequence.
2221 if (SomeSuccHasSame && !AllSuccsHaveSame)
2222 S.ClearSequenceProgress();
2224 case S_CanRelease: {
2225 const Value *Arg = I->first;
2226 const TerminatorInst *TI = cast<TerminatorInst>(&BB->back());
2227 bool SomeSuccHasSame = false;
2228 bool AllSuccsHaveSame = true;
2229 PtrState &S = MyStates.getPtrTopDownState(Arg);
2230 for (succ_const_iterator SI(TI), SE(TI, false); SI != SE; ++SI) {
2231 PtrState &SuccS = BBStates[*SI].getPtrBottomUpState(Arg);
2232 switch (SuccS.GetSeq()) {
2234 if (!S.RRI.KnownSafe && !SuccS.RRI.KnownSafe)
2235 S.ClearSequenceProgress();
2239 SomeSuccHasSame = true;
2243 case S_MovableRelease:
2245 if (!S.RRI.KnownSafe && !SuccS.RRI.KnownSafe)
2246 AllSuccsHaveSame = false;
2249 llvm_unreachable("bottom-up pointer in retain state!");
2252 // If the state at the other end of any of the successor edges
2253 // matches the current state, require all edges to match. This
2254 // guards against loops in the middle of a sequence.
2255 if (SomeSuccHasSame && !AllSuccsHaveSame)
2256 S.ClearSequenceProgress();
2262 ObjCARCOpt::VisitBottomUp(BasicBlock *BB,
2263 DenseMap<const BasicBlock *, BBState> &BBStates,
2264 MapVector<Value *, RRInfo> &Retains) {
2265 bool NestingDetected = false;
2266 BBState &MyStates = BBStates[BB];
2268 // Merge the states from each successor to compute the initial state
2269 // for the current block.
2270 const TerminatorInst *TI = cast<TerminatorInst>(&BB->back());
2271 succ_const_iterator SI(TI), SE(TI, false);
2273 MyStates.SetAsExit();
2276 const BasicBlock *Succ = *SI++;
2279 DenseMap<const BasicBlock *, BBState>::iterator I = BBStates.find(Succ);
2280 // If we haven't seen this node yet, then we've found a CFG cycle.
2281       // Be optimistic here; it's CheckForCFGHazards' job to detect trouble.
2282 if (I == BBStates.end())
2284 MyStates.InitFromSucc(I->second);
2288 I = BBStates.find(Succ);
2289 if (I != BBStates.end())
2290 MyStates.MergeSucc(I->second);
2296 // Visit all the instructions, bottom-up.
2297 for (BasicBlock::iterator I = BB->end(), E = BB->begin(); I != E; --I) {
2298 Instruction *Inst = llvm::prior(I);
2299 InstructionClass Class = GetInstructionClass(Inst);
2300 const Value *Arg = 0;
2304 Arg = GetObjCArg(Inst);
2306 PtrState &S = MyStates.getPtrBottomUpState(Arg);
2308       // If we see two releases in a row on the same pointer, make
2309       // a note, and we'll circle back to revisit it after we've
2310       // hopefully eliminated the second release, which may allow us to
2311       // eliminate the first release too.
2312 // Theoretically we could implement removal of nested retain+release
2313 // pairs by making PtrState hold a stack of states, but this is
2314 // simple and avoids adding overhead for the non-nested case.
2315 if (S.GetSeq() == S_Release || S.GetSeq() == S_MovableRelease)
2316 NestingDetected = true;
2318 S.SetSeqToRelease(Inst->getMetadata(ImpreciseReleaseMDKind));
2320 S.RRI.KnownSafe = S.IsKnownNested() || S.IsKnownIncremented();
2321 S.RRI.IsTailCallRelease = cast<CallInst>(Inst)->isTailCall();
2322 S.RRI.Calls.insert(Inst);
2324 S.IncrementRefCount();
2325 S.IncrementNestCount();
2328 case IC_RetainBlock:
2331 Arg = GetObjCArg(Inst);
2333 PtrState &S = MyStates.getPtrBottomUpState(Arg);
2334 S.DecrementRefCount();
2335 S.SetAtLeastOneRefCount();
2336 S.DecrementNestCount();
2338 switch (S.GetSeq()) {
2341 case S_MovableRelease:
2343 S.RRI.ReverseInsertPts.clear();
2346 // Don't do retain+release tracking for IC_RetainRV, because it's
2347 // better to let it remain as the first instruction after a call.
2348 if (Class != IC_RetainRV) {
2349 S.RRI.IsRetainBlock = Class == IC_RetainBlock;
2350 Retains[Inst] = S.RRI;
2352 S.ClearSequenceProgress();
2357 llvm_unreachable("bottom-up pointer in retain state!");
2361 case IC_AutoreleasepoolPop:
2362 // Conservatively, clear MyStates for all known pointers.
2363 MyStates.clearBottomUpPointers();
2365 case IC_AutoreleasepoolPush:
2367 // These are irrelevant.
2373 // Consider any other possible effects of this instruction on each
2374 // pointer being tracked.
2375 for (BBState::ptr_iterator MI = MyStates.bottom_up_ptr_begin(),
2376 ME = MyStates.bottom_up_ptr_end(); MI != ME; ++MI) {
2377 const Value *Ptr = MI->first;
2379 continue; // Handled above.
2380 PtrState &S = MI->second;
2381 Sequence Seq = S.GetSeq();
2383 // Check for possible releases.
2384 if (CanAlterRefCount(Inst, Ptr, PA, Class)) {
2385 S.DecrementRefCount();
2388 S.SetSeq(S_CanRelease);
2392 case S_MovableRelease:
2397 llvm_unreachable("bottom-up pointer in retain state!");
2401 // Check for possible direct uses.
2404 case S_MovableRelease:
2405 if (CanUse(Inst, Ptr, PA, Class)) {
2406 S.RRI.ReverseInsertPts.clear();
2407 S.RRI.ReverseInsertPts.insert(Inst);
2409 } else if (Seq == S_Release &&
2410 (Class == IC_User || Class == IC_CallOrUser)) {
2411 // Non-movable releases depend on any possible objc pointer use.
2413 S.RRI.ReverseInsertPts.clear();
2414 S.RRI.ReverseInsertPts.insert(Inst);
2418 if (CanUse(Inst, Ptr, PA, Class))
2426 llvm_unreachable("bottom-up pointer in retain state!");
2431 return NestingDetected;
2435 ObjCARCOpt::VisitTopDown(BasicBlock *BB,
2436 DenseMap<const BasicBlock *, BBState> &BBStates,
2437 DenseMap<Value *, RRInfo> &Releases) {
2438 bool NestingDetected = false;
2439 BBState &MyStates = BBStates[BB];
2441 // Merge the states from each predecessor to compute the initial state
2442 // for the current block.
2443 const_pred_iterator PI(BB), PE(BB, false);
2445 MyStates.SetAsEntry();
2448 const BasicBlock *Pred = *PI++;
2451 DenseMap<const BasicBlock *, BBState>::iterator I = BBStates.find(Pred);
2452 assert(I != BBStates.end());
2453 // If we haven't seen this node yet, then we've found a CFG cycle.
2454       // Be optimistic here; it's CheckForCFGHazards' job to detect trouble.
2455 if (!I->second.isVisitedTopDown())
2457 MyStates.InitFromPred(I->second);
2461 I = BBStates.find(Pred);
2462 assert(I != BBStates.end());
2463 if (I->second.isVisitedTopDown())
2464 MyStates.MergePred(I->second);
2470 // Visit all the instructions, top-down.
2471 for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
2472 Instruction *Inst = I;
2473 InstructionClass Class = GetInstructionClass(Inst);
2474 const Value *Arg = 0;
2477 case IC_RetainBlock:
2480 Arg = GetObjCArg(Inst);
2482 PtrState &S = MyStates.getPtrTopDownState(Arg);
2484 // Don't do retain+release tracking for IC_RetainRV, because it's
2485 // better to let it remain as the first instruction after a call.
2486 if (Class != IC_RetainRV) {
2487         // If we see two retains in a row on the same pointer, make
2488         // a note, and we'll circle back to revisit it after we've
2489         // hopefully eliminated the second retain, which may allow us to
2490         // eliminate the first retain too.
2491 // Theoretically we could implement removal of nested retain+release
2492 // pairs by making PtrState hold a stack of states, but this is
2493 // simple and avoids adding overhead for the non-nested case.
2494 if (S.GetSeq() == S_Retain)
2495 NestingDetected = true;
2499 S.RRI.IsRetainBlock = Class == IC_RetainBlock;
2500         // Don't check S.IsKnownIncremented() here because it's not
        // sufficient.
2502 S.RRI.KnownSafe = S.IsKnownNested();
2503 S.RRI.Calls.insert(Inst);
2506 S.SetAtLeastOneRefCount();
2507 S.IncrementRefCount();
2508 S.IncrementNestCount();
2512 Arg = GetObjCArg(Inst);
2514 PtrState &S = MyStates.getPtrTopDownState(Arg);
2515 S.DecrementRefCount();
2516 S.DecrementNestCount();
2518 switch (S.GetSeq()) {
2521 S.RRI.ReverseInsertPts.clear();
2524 S.RRI.ReleaseMetadata = Inst->getMetadata(ImpreciseReleaseMDKind);
2525 S.RRI.IsTailCallRelease = cast<CallInst>(Inst)->isTailCall();
2526 Releases[Inst] = S.RRI;
2527 S.ClearSequenceProgress();
2533 case S_MovableRelease:
2534 llvm_unreachable("top-down pointer in release state!");
2538 case IC_AutoreleasepoolPop:
2539 // Conservatively, clear MyStates for all known pointers.
2540 MyStates.clearTopDownPointers();
2542 case IC_AutoreleasepoolPush:
2544 // These are irrelevant.
2550 // Consider any other possible effects of this instruction on each
2551 // pointer being tracked.
2552 for (BBState::ptr_iterator MI = MyStates.top_down_ptr_begin(),
2553 ME = MyStates.top_down_ptr_end(); MI != ME; ++MI) {
2554 const Value *Ptr = MI->first;
2556 continue; // Handled above.
2557 PtrState &S = MI->second;
2558 Sequence Seq = S.GetSeq();
2560 // Check for possible releases.
2561 if (CanAlterRefCount(Inst, Ptr, PA, Class)) {
2562 S.DecrementRefCount();
2565 S.SetSeq(S_CanRelease);
2566 S.RRI.ReverseInsertPts.clear();
2567 S.RRI.ReverseInsertPts.insert(Inst);
2569 // One call can't cause a transition from S_Retain to S_CanRelease
2570         // and S_CanRelease to S_Use. If we've made the first transition,
        // we're done.
2579 case S_MovableRelease:
2580 llvm_unreachable("top-down pointer in release state!");
2584 // Check for possible direct uses.
2587 if (CanUse(Inst, Ptr, PA, Class))
2596 case S_MovableRelease:
2597 llvm_unreachable("top-down pointer in release state!");
2602 CheckForCFGHazards(BB, BBStates, MyStates);
2603 return NestingDetected;
2606 // Visit - Visit the function both top-down and bottom-up.
2608 ObjCARCOpt::Visit(Function &F,
2609 DenseMap<const BasicBlock *, BBState> &BBStates,
2610 MapVector<Value *, RRInfo> &Retains,
2611 DenseMap<Value *, RRInfo> &Releases) {
2612 // Use reverse-postorder on the reverse CFG for bottom-up, because we
2613 // magically know that loops will be well behaved, i.e. they won't repeatedly
2614 // call retain on a single pointer without doing a release. We can't use
2615 // ReversePostOrderTraversal here because we want to walk up from each
2616 // function exit point.
2617 SmallPtrSet<BasicBlock *, 16> Visited;
2618 SmallVector<std::pair<BasicBlock *, pred_iterator>, 16> Stack;
2619 SmallVector<BasicBlock *, 16> Order;
2620 for (Function::iterator I = F.begin(), E = F.end(); I != E; ++I) {
2622 if (BB->getTerminator()->getNumSuccessors() == 0)
2623 Stack.push_back(std::make_pair(BB, pred_begin(BB)));
2625 while (!Stack.empty()) {
2626 pred_iterator End = pred_end(Stack.back().first);
2627 while (Stack.back().second != End) {
2628 BasicBlock *BB = *Stack.back().second++;
2629 if (Visited.insert(BB))
2630 Stack.push_back(std::make_pair(BB, pred_begin(BB)));
2632 Order.push_back(Stack.pop_back_val().first);
2634 bool BottomUpNestingDetected = false;
2635 for (SmallVectorImpl<BasicBlock *>::const_reverse_iterator I =
2636 Order.rbegin(), E = Order.rend(); I != E; ++I) {
2637 BasicBlock *BB = *I;
2638 BottomUpNestingDetected |= VisitBottomUp(BB, BBStates, Retains);
2641 // Use regular reverse-postorder for top-down.
2642 bool TopDownNestingDetected = false;
2643 typedef ReversePostOrderTraversal<Function *> RPOTType;
2645 for (RPOTType::rpo_iterator I = RPOT.begin(), E = RPOT.end(); I != E; ++I) {
2646 BasicBlock *BB = *I;
2647 TopDownNestingDetected |= VisitTopDown(BB, BBStates, Releases);
2650 return TopDownNestingDetected && BottomUpNestingDetected;
2653 /// MoveCalls - Move the calls in RetainsToMove and ReleasesToMove.
2654 void ObjCARCOpt::MoveCalls(Value *Arg,
2655 RRInfo &RetainsToMove,
2656 RRInfo &ReleasesToMove,
2657 MapVector<Value *, RRInfo> &Retains,
2658 DenseMap<Value *, RRInfo> &Releases,
2659 SmallVectorImpl<Instruction *> &DeadInsts,
2661 Type *ArgTy = Arg->getType();
2662 Type *ParamTy = PointerType::getUnqual(Type::getInt8Ty(ArgTy->getContext()));
2664 // Insert the new retain and release calls.
2665 for (SmallPtrSet<Instruction *, 2>::const_iterator
2666 PI = ReleasesToMove.ReverseInsertPts.begin(),
2667 PE = ReleasesToMove.ReverseInsertPts.end(); PI != PE; ++PI) {
2668 Instruction *InsertPt = *PI;
2669 Value *MyArg = ArgTy == ParamTy ? Arg :
2670 new BitCastInst(Arg, ParamTy, "", InsertPt);
2672 CallInst::Create(RetainsToMove.IsRetainBlock ?
2673 getRetainBlockCallee(M) : getRetainCallee(M),
2674 MyArg, "", InsertPt);
2675 Call->setDoesNotThrow();
2676 if (!RetainsToMove.IsRetainBlock)
2677 Call->setTailCall();
2679 for (SmallPtrSet<Instruction *, 2>::const_iterator
2680 PI = RetainsToMove.ReverseInsertPts.begin(),
2681 PE = RetainsToMove.ReverseInsertPts.end(); PI != PE; ++PI) {
2682 Instruction *LastUse = *PI;
2683 Instruction *InsertPts[] = { 0, 0, 0 };
2684 if (InvokeInst *II = dyn_cast<InvokeInst>(LastUse)) {
2685 // We can't insert code immediately after an invoke instruction, so
2686 // insert code at the beginning of both successor blocks instead.
2687 // The invoke's return value isn't available in the unwind block,
2688 // but our releases will never depend on it, because they must be
2689 // paired with retains from before the invoke.
2690 InsertPts[0] = II->getNormalDest()->getFirstInsertionPt();
2691 InsertPts[1] = II->getUnwindDest()->getFirstInsertionPt();
2693 // Insert code immediately after the last use.
2694 InsertPts[0] = llvm::next(BasicBlock::iterator(LastUse));
2697 for (Instruction **I = InsertPts; *I; ++I) {
2698 Instruction *InsertPt = *I;
2699 Value *MyArg = ArgTy == ParamTy ? Arg :
2700 new BitCastInst(Arg, ParamTy, "", InsertPt);
2701 CallInst *Call = CallInst::Create(getReleaseCallee(M), MyArg,
2703 // Attach a clang.imprecise_release metadata tag, if appropriate.
2704 if (MDNode *M = ReleasesToMove.ReleaseMetadata)
2705 Call->setMetadata(ImpreciseReleaseMDKind, M);
2706 Call->setDoesNotThrow();
2707 if (ReleasesToMove.IsTailCallRelease)
2708 Call->setTailCall();
2712 // Delete the original retain and release calls.
2713 for (SmallPtrSet<Instruction *, 2>::const_iterator
2714 AI = RetainsToMove.Calls.begin(),
2715 AE = RetainsToMove.Calls.end(); AI != AE; ++AI) {
2716 Instruction *OrigRetain = *AI;
2717 Retains.blot(OrigRetain);
2718 DeadInsts.push_back(OrigRetain);
2720 for (SmallPtrSet<Instruction *, 2>::const_iterator
2721 AI = ReleasesToMove.Calls.begin(),
2722 AE = ReleasesToMove.Calls.end(); AI != AE; ++AI) {
2723 Instruction *OrigRelease = *AI;
2724 Releases.erase(OrigRelease);
2725 DeadInsts.push_back(OrigRelease);
2730 ObjCARCOpt::PerformCodePlacement(DenseMap<const BasicBlock *, BBState>
2732 MapVector<Value *, RRInfo> &Retains,
2733 DenseMap<Value *, RRInfo> &Releases,
2735 bool AnyPairsCompletelyEliminated = false;
2736 RRInfo RetainsToMove;
2737 RRInfo ReleasesToMove;
2738 SmallVector<Instruction *, 4> NewRetains;
2739 SmallVector<Instruction *, 4> NewReleases;
2740 SmallVector<Instruction *, 8> DeadInsts;
2742 for (MapVector<Value *, RRInfo>::const_iterator I = Retains.begin(),
2743 E = Retains.end(); I != E; ) {
2744 Value *V = (I++)->first;
2745 if (!V) continue; // blotted
2747 Instruction *Retain = cast<Instruction>(V);
2748 Value *Arg = GetObjCArg(Retain);
2750 // If the object being released is in static or stack storage, we know it's
2751 // not being managed by ObjC reference counting, so we can delete pairs
2752 // regardless of what possible decrements or uses lie between them.
2753 bool KnownSafe = isa<Constant>(Arg) || isa<AllocaInst>(Arg);
2755 // A constant pointer can't be pointing to an object on the heap. It may
2756 // be reference-counted, but it won't be deleted.
2757 if (const LoadInst *LI = dyn_cast<LoadInst>(Arg))
2758 if (const GlobalVariable *GV =
2759 dyn_cast<GlobalVariable>(
2760 StripPointerCastsAndObjCCalls(LI->getPointerOperand())))
2761 if (GV->isConstant())
2764 // If a pair happens in a region where it is known that the reference count
2765 // is already incremented, we can similarly ignore possible decrements.
2766 bool KnownSafeTD = true, KnownSafeBU = true;
2768 // Connect the dots between the top-down-collected RetainsToMove and
2769 // bottom-up-collected ReleasesToMove to form sets of related calls.
2770 // This is an iterative process so that we connect multiple releases
2771 // to multiple retains if needed.
2772 unsigned OldDelta = 0;
2773 unsigned NewDelta = 0;
2774 unsigned OldCount = 0;
2775 unsigned NewCount = 0;
2776 bool FirstRelease = true;
2777 bool FirstRetain = true;
2778 NewRetains.push_back(Retain);
2780 for (SmallVectorImpl<Instruction *>::const_iterator
2781 NI = NewRetains.begin(), NE = NewRetains.end(); NI != NE; ++NI) {
2782 Instruction *NewRetain = *NI;
2783 MapVector<Value *, RRInfo>::const_iterator It = Retains.find(NewRetain);
2784 assert(It != Retains.end());
2785 const RRInfo &NewRetainRRI = It->second;
2786 KnownSafeTD &= NewRetainRRI.KnownSafe;
2787 for (SmallPtrSet<Instruction *, 2>::const_iterator
2788 LI = NewRetainRRI.Calls.begin(),
2789 LE = NewRetainRRI.Calls.end(); LI != LE; ++LI) {
2790 Instruction *NewRetainRelease = *LI;
2791 DenseMap<Value *, RRInfo>::const_iterator Jt =
2792 Releases.find(NewRetainRelease);
2793 if (Jt == Releases.end())
2795 const RRInfo &NewRetainReleaseRRI = Jt->second;
2796 assert(NewRetainReleaseRRI.Calls.count(NewRetain));
2797 if (ReleasesToMove.Calls.insert(NewRetainRelease)) {
2799 BBStates[NewRetainRelease->getParent()].GetAllPathCount();
2801 // Merge the ReleaseMetadata and IsTailCallRelease values.
2803 ReleasesToMove.ReleaseMetadata =
2804 NewRetainReleaseRRI.ReleaseMetadata;
2805 ReleasesToMove.IsTailCallRelease =
2806 NewRetainReleaseRRI.IsTailCallRelease;
2807 FirstRelease = false;
2809 if (ReleasesToMove.ReleaseMetadata !=
2810 NewRetainReleaseRRI.ReleaseMetadata)
2811 ReleasesToMove.ReleaseMetadata = 0;
2812 if (ReleasesToMove.IsTailCallRelease !=
2813 NewRetainReleaseRRI.IsTailCallRelease)
2814 ReleasesToMove.IsTailCallRelease = false;
2817 // Collect the optimal insertion points.
2819 for (SmallPtrSet<Instruction *, 2>::const_iterator
2820 RI = NewRetainReleaseRRI.ReverseInsertPts.begin(),
2821 RE = NewRetainReleaseRRI.ReverseInsertPts.end();
2823 Instruction *RIP = *RI;
2824 if (ReleasesToMove.ReverseInsertPts.insert(RIP))
2825 NewDelta -= BBStates[RIP->getParent()].GetAllPathCount();
2827 NewReleases.push_back(NewRetainRelease);
2832 if (NewReleases.empty()) break;
2834 // Back the other way.
2835 for (SmallVectorImpl<Instruction *>::const_iterator
2836 NI = NewReleases.begin(), NE = NewReleases.end(); NI != NE; ++NI) {
2837 Instruction *NewRelease = *NI;
2838 DenseMap<Value *, RRInfo>::const_iterator It =
2839 Releases.find(NewRelease);
2840 assert(It != Releases.end());
2841 const RRInfo &NewReleaseRRI = It->second;
2842 KnownSafeBU &= NewReleaseRRI.KnownSafe;
2843 for (SmallPtrSet<Instruction *, 2>::const_iterator
2844 LI = NewReleaseRRI.Calls.begin(),
2845 LE = NewReleaseRRI.Calls.end(); LI != LE; ++LI) {
2846 Instruction *NewReleaseRetain = *LI;
2847 MapVector<Value *, RRInfo>::const_iterator Jt =
2848 Retains.find(NewReleaseRetain);
2849 if (Jt == Retains.end())
2851 const RRInfo &NewReleaseRetainRRI = Jt->second;
2852 assert(NewReleaseRetainRRI.Calls.count(NewRelease));
2853 if (RetainsToMove.Calls.insert(NewReleaseRetain)) {
2854 unsigned PathCount =
2855 BBStates[NewReleaseRetain->getParent()].GetAllPathCount();
2856 OldDelta += PathCount;
2857 OldCount += PathCount;
2859 // Merge the IsRetainBlock values.
2861 RetainsToMove.IsRetainBlock = NewReleaseRetainRRI.IsRetainBlock;
2862 FirstRetain = false;
2863           } else if (RetainsToMove.IsRetainBlock !=
2864                      NewReleaseRetainRRI.IsRetainBlock)
2865 // It's not possible to merge the sequences if one uses
2866 // objc_retain and the other uses objc_retainBlock.
2869 // Collect the optimal insertion points.
2871 for (SmallPtrSet<Instruction *, 2>::const_iterator
2872 RI = NewReleaseRetainRRI.ReverseInsertPts.begin(),
2873 RE = NewReleaseRetainRRI.ReverseInsertPts.end();
2875 Instruction *RIP = *RI;
2876 if (RetainsToMove.ReverseInsertPts.insert(RIP)) {
2877 PathCount = BBStates[RIP->getParent()].GetAllPathCount();
2878 NewDelta += PathCount;
2879 NewCount += PathCount;
2882 NewRetains.push_back(NewReleaseRetain);
2886 NewReleases.clear();
2887 if (NewRetains.empty()) break;
2890 // If the pointer is known incremented or nested, we can safely delete the
2891 // pair regardless of what's between them.
2892 if (KnownSafeTD || KnownSafeBU) {
2893 RetainsToMove.ReverseInsertPts.clear();
2894 ReleasesToMove.ReverseInsertPts.clear();
2897 // Determine whether the new insertion points we computed preserve the
2898 // balance of retain and release calls through the program.
2899 // TODO: If the fully aggressive solution isn't valid, try to find a
2900 // less aggressive solution which is.
2905 // Determine whether the original call points are balanced in the retain and
2906     // release calls through the program. If not, conservatively don't touch
    // them.
2908 // TODO: It's theoretically possible to do code motion in this case, as
2909 // long as the existing imbalances are maintained.
2913 // Ok, everything checks out and we're all set. Let's move some code!
2915 AnyPairsCompletelyEliminated = NewCount == 0;
2916 NumRRs += OldCount - NewCount;
2917 MoveCalls(Arg, RetainsToMove, ReleasesToMove,
2918 Retains, Releases, DeadInsts, M);
2921 NewReleases.clear();
2923 RetainsToMove.clear();
2924 ReleasesToMove.clear();
2927 // Now that we're done moving everything, we can delete the newly dead
2928 // instructions, as we no longer need them as insert points.
2929 while (!DeadInsts.empty())
2930 EraseInstruction(DeadInsts.pop_back_val());
2932 return AnyPairsCompletelyEliminated;
2935 /// OptimizeWeakCalls - Weak pointer optimizations.
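///
/// For example (illustrative), a weak load following a weak store to the same
/// pointer can be forwarded from the stored value:
///
///   call i8* @objc_storeWeak(i8** %p, i8* %x)
///   %1 = call i8* @objc_loadWeak(i8** %p)
/// =>
///   call i8* @objc_storeWeak(i8** %p, i8* %x)
///   ; ... uses of %1 are replaced with %x, and the load is deleted.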
2936 void ObjCARCOpt::OptimizeWeakCalls(Function &F) {
2937   // First, do memdep-style RLE (redundant load elimination) and S2L
2938   // (store-to-load forwarding) optimizations. We can't use memdep itself
  // because it uses AliasAnalysis and we need to do provenance queries instead.
2940 for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
2941 Instruction *Inst = &*I++;
2942 InstructionClass Class = GetBasicInstructionClass(Inst);
2943 if (Class != IC_LoadWeak && Class != IC_LoadWeakRetained)
2946 // Delete objc_loadWeak calls with no users.
2947 if (Class == IC_LoadWeak && Inst->use_empty()) {
2948 Inst->eraseFromParent();
2952 // TODO: For now, just look for an earlier available version of this value
2953 // within the same block. Theoretically, we could do memdep-style non-local
2954 // analysis too, but that would want caching. A better approach would be to
2955 // use the technique that EarlyCSE uses.
2956 inst_iterator Current = llvm::prior(I);
2957 BasicBlock *CurrentBB = Current.getBasicBlockIterator();
2958 for (BasicBlock::iterator B = CurrentBB->begin(),
2959 J = Current.getInstructionIterator();
2961 Instruction *EarlierInst = &*llvm::prior(J);
2962 InstructionClass EarlierClass = GetInstructionClass(EarlierInst);
2963 switch (EarlierClass) {
2965 case IC_LoadWeakRetained: {
2966         // If this is loading from the same pointer, replace this load's value
        // with that one.
2968 CallInst *Call = cast<CallInst>(Inst);
2969 CallInst *EarlierCall = cast<CallInst>(EarlierInst);
2970 Value *Arg = Call->getArgOperand(0);
2971 Value *EarlierArg = EarlierCall->getArgOperand(0);
2972 switch (PA.getAA()->alias(Arg, EarlierArg)) {
2973 case AliasAnalysis::MustAlias:
2975 // If the load has a builtin retain, insert a plain retain for it.
2976 if (Class == IC_LoadWeakRetained) {
2978 CallInst::Create(getRetainCallee(F.getParent()), EarlierCall,
2982 // Zap the fully redundant load.
2983 Call->replaceAllUsesWith(EarlierCall);
2984 Call->eraseFromParent();
2986 case AliasAnalysis::MayAlias:
2987 case AliasAnalysis::PartialAlias:
2989 case AliasAnalysis::NoAlias:
2996         // If this is storing to the same pointer and has the same size etc.,
2997 // replace this load's value with the stored value.
2998 CallInst *Call = cast<CallInst>(Inst);
2999 CallInst *EarlierCall = cast<CallInst>(EarlierInst);
3000 Value *Arg = Call->getArgOperand(0);
3001 Value *EarlierArg = EarlierCall->getArgOperand(0);
3002 switch (PA.getAA()->alias(Arg, EarlierArg)) {
3003 case AliasAnalysis::MustAlias:
3005 // If the load has a builtin retain, insert a plain retain for it.
3006 if (Class == IC_LoadWeakRetained) {
3008 CallInst::Create(getRetainCallee(F.getParent()), EarlierCall,
3012 // Zap the fully redundant load.
3013 Call->replaceAllUsesWith(EarlierCall->getArgOperand(1));
3014 Call->eraseFromParent();
3016 case AliasAnalysis::MayAlias:
3017 case AliasAnalysis::PartialAlias:
3019 case AliasAnalysis::NoAlias:
3026         // TODO: Grab the copied value.
3028 case IC_AutoreleasepoolPush:
3031 // Weak pointers are only modified through the weak entry points
3032 // (and arbitrary calls, which could call the weak entry points).
3035 // Anything else could modify the weak pointer.
3042 // Then, for each destroyWeak with an alloca operand, check to see if
3043 // the alloca and all its users can be zapped.
3044 for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
3045 Instruction *Inst = &*I++;
3046 InstructionClass Class = GetBasicInstructionClass(Inst);
3047 if (Class != IC_DestroyWeak)
3050 CallInst *Call = cast<CallInst>(Inst);
3051 Value *Arg = Call->getArgOperand(0);
3052 if (AllocaInst *Alloca = dyn_cast<AllocaInst>(Arg)) {
3053 for (Value::use_iterator UI = Alloca->use_begin(),
3054 UE = Alloca->use_end(); UI != UE; ++UI) {
3055 Instruction *UserInst = cast<Instruction>(*UI);
3056 switch (GetBasicInstructionClass(UserInst)) {
3059 case IC_DestroyWeak:
3066 for (Value::use_iterator UI = Alloca->use_begin(),
3067 UE = Alloca->use_end(); UI != UE; ) {
3068 CallInst *UserInst = cast<CallInst>(*UI++);
3069 if (!UserInst->use_empty())
3070 UserInst->replaceAllUsesWith(UserInst->getOperand(1));
3071 UserInst->eraseFromParent();
3073 Alloca->eraseFromParent();
3079 /// OptimizeSequences - Identify program paths which execute sequences of
3080 /// retains and releases which can be eliminated.
3081 bool ObjCARCOpt::OptimizeSequences(Function &F) {
3082 /// Releases, Retains - These are used to store the results of the main flow
3083 /// analysis. These use Value* as the key instead of Instruction* so that the
3084 /// map stays valid when we get around to rewriting code and calls get
3085 /// replaced by arguments.
3086 DenseMap<Value *, RRInfo> Releases;
3087 MapVector<Value *, RRInfo> Retains;
3089   /// BBStates - This is used during the traversal of the function to track the
3090 /// states for each identified object at each block.
3091 DenseMap<const BasicBlock *, BBState> BBStates;
3093 // Analyze the CFG of the function, and all instructions.
3094 bool NestingDetected = Visit(F, BBStates, Retains, Releases);
3097   return PerformCodePlacement(BBStates, Retains, Releases, F.getParent()) &&
         NestingDetected;
3101 /// OptimizeReturns - Look for this pattern:
3103 /// %call = call i8* @something(...)
3104 /// %2 = call i8* @objc_retain(i8* %call)
3105 /// %3 = call i8* @objc_autorelease(i8* %2)
3108 /// And delete the retain and autorelease.
3110 /// Otherwise if it's just this:
3112 /// %3 = call i8* @objc_autorelease(i8* %2)
3115 /// convert the autorelease to autoreleaseRV.
3116 void ObjCARCOpt::OptimizeReturns(Function &F) {
3117 if (!F.getReturnType()->isPointerTy())
3120 SmallPtrSet<Instruction *, 4> DependingInstructions;
3121 SmallPtrSet<const BasicBlock *, 4> Visited;
3122 for (Function::iterator FI = F.begin(), FE = F.end(); FI != FE; ++FI) {
3123 BasicBlock *BB = FI;
3124 ReturnInst *Ret = dyn_cast<ReturnInst>(&BB->back());
3127 const Value *Arg = StripPointerCastsAndObjCCalls(Ret->getOperand(0));
3128 FindDependencies(NeedsPositiveRetainCount, Arg,
3129 BB, Ret, DependingInstructions, Visited, PA);
3130 if (DependingInstructions.size() != 1)
3134 CallInst *Autorelease =
3135 dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
3138 InstructionClass AutoreleaseClass =
3139 GetBasicInstructionClass(Autorelease);
3140 if (!IsAutorelease(AutoreleaseClass))
3142 if (GetObjCArg(Autorelease) != Arg)
3145 DependingInstructions.clear();
3148 // Check that there is nothing that can affect the reference
3149 // count between the autorelease and the retain.
3150 FindDependencies(CanChangeRetainCount, Arg,
3151 BB, Autorelease, DependingInstructions, Visited, PA);
3152 if (DependingInstructions.size() != 1)
3157 dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
3159 // Check that we found a retain with the same argument.
3161 !IsRetain(GetBasicInstructionClass(Retain)) ||
3162 GetObjCArg(Retain) != Arg)
3165 DependingInstructions.clear();
3168 // Convert the autorelease to an autoreleaseRV, since it's
3169 // returning the value.
3170 if (AutoreleaseClass == IC_Autorelease) {
3171 Autorelease->setCalledFunction(getAutoreleaseRVCallee(F.getParent()));
3172 AutoreleaseClass = IC_AutoreleaseRV;
3175 // Check that there is nothing that can affect the reference
3176 // count between the retain and the call.
3177 FindDependencies(CanChangeRetainCount, Arg, BB, Retain,
3178 DependingInstructions, Visited, PA);
3179 if (DependingInstructions.size() != 1)
3184 dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
3186 // Check that the pointer is the return value of the call.
3187 if (!Call || Arg != Call)
3190 // Check that the call is a regular call.
3191 InstructionClass Class = GetBasicInstructionClass(Call);
3192 if (Class != IC_CallOrUser && Class != IC_Call)
3195 // If so, we can zap the retain and autorelease.
3198 EraseInstruction(Retain);
3199 EraseInstruction(Autorelease);
3205 DependingInstructions.clear();
3210 bool ObjCARCOpt::doInitialization(Module &M) {
3214 Run = ModuleHasARC(M);
3218 // Identify the imprecise release metadata kind.
3219 ImpreciseReleaseMDKind =
3220 M.getContext().getMDKindID("clang.imprecise_release");
3222   // Intuitively, objc_retain and others are nocapture; however, in practice
3223 // they are not, because they return their argument value. And objc_release
3224 // calls finalizers.
3226 // These are initialized lazily.
3228 AutoreleaseRVCallee = 0;
3231 RetainBlockCallee = 0;
3232 AutoreleaseCallee = 0;
3237 bool ObjCARCOpt::runOnFunction(Function &F) {
3241 // If nothing in the Module uses ARC, don't do anything.
3247 PA.setAA(&getAnalysis<AliasAnalysis>());
3249 // This pass performs several distinct transformations. As a compile-time aid
3250 // when compiling code that isn't ObjC, skip these if the relevant ObjC
3251 // library functions aren't declared.
3253   // Preliminary optimizations. This also computes UsedInThisFunction.
3254 OptimizeIndividualCalls(F);
3256 // Optimizations for weak pointers.
3257 if (UsedInThisFunction & ((1 << IC_LoadWeak) |
3258 (1 << IC_LoadWeakRetained) |
3259 (1 << IC_StoreWeak) |
3260 (1 << IC_InitWeak) |
3261 (1 << IC_CopyWeak) |
3262 (1 << IC_MoveWeak) |
3263 (1 << IC_DestroyWeak)))
3264 OptimizeWeakCalls(F);
3266 // Optimizations for retain+release pairs.
3267 if (UsedInThisFunction & ((1 << IC_Retain) |
3268 (1 << IC_RetainRV) |
3269 (1 << IC_RetainBlock)))
3270 if (UsedInThisFunction & (1 << IC_Release))
3271 // Run OptimizeSequences until it either stops making changes or
3272 // no retain+release pair nesting is detected.
3273 while (OptimizeSequences(F)) {}
3275 // Optimizations if objc_autorelease is used.
3276 if (UsedInThisFunction &
3277 ((1 << IC_Autorelease) | (1 << IC_AutoreleaseRV)))
3283 void ObjCARCOpt::releaseMemory() {
3287 //===----------------------------------------------------------------------===//
3289 //===----------------------------------------------------------------------===//
3291 // TODO: ObjCARCContract could insert PHI nodes when uses aren't
3292 // dominated by single calls.
3294 #include "llvm/Operator.h"
3295 #include "llvm/InlineAsm.h"
3296 #include "llvm/Analysis/Dominators.h"
3298 STATISTIC(NumStoreStrongs, "Number of objc_storeStrong calls formed");
3301 /// ObjCARCContract - Late ARC optimizations. These change the IR in a way
3302 /// that makes it difficult to be analyzed by ObjCARCOpt, so it's run late.
3303 class ObjCARCContract : public FunctionPass {
3307 ProvenanceAnalysis PA;
3309 /// Run - A flag indicating whether this optimization pass should run.
3312 /// StoreStrongCallee, etc. - Declarations for ObjC runtime
3313 /// functions, for use in creating calls to them. These are initialized
3314 /// lazily to avoid cluttering up the Module with unused declarations.
3315 Constant *StoreStrongCallee,
3316 *RetainAutoreleaseCallee, *RetainAutoreleaseRVCallee;
3318 /// RetainRVMarker - The inline asm string to insert between calls and
3319 /// RetainRV calls to make the optimization work on targets which need it.
3320 const MDString *RetainRVMarker;
3322 Constant *getStoreStrongCallee(Module *M);
3323 Constant *getRetainAutoreleaseCallee(Module *M);
3324 Constant *getRetainAutoreleaseRVCallee(Module *M);
3326 bool ContractAutorelease(Function &F, Instruction *Autorelease,
3327 InstructionClass Class,
3328 SmallPtrSet<Instruction *, 4>
3329 &DependingInstructions,
3330 SmallPtrSet<const BasicBlock *, 4>
3333 void ContractRelease(Instruction *Release,
3334 inst_iterator &Iter);
3336 virtual void getAnalysisUsage(AnalysisUsage &AU) const;
3337 virtual bool doInitialization(Module &M);
3338 virtual bool runOnFunction(Function &F);
3342 ObjCARCContract() : FunctionPass(ID) {
3343 initializeObjCARCContractPass(*PassRegistry::getPassRegistry());
3348 char ObjCARCContract::ID = 0;
3349 INITIALIZE_PASS_BEGIN(ObjCARCContract,
3350 "objc-arc-contract", "ObjC ARC contraction", false, false)
3351 INITIALIZE_AG_DEPENDENCY(AliasAnalysis)
3352 INITIALIZE_PASS_DEPENDENCY(DominatorTree)
3353 INITIALIZE_PASS_END(ObjCARCContract,
3354 "objc-arc-contract", "ObjC ARC contraction", false, false)
3356 Pass *llvm::createObjCARCContractPass() {
3357 return new ObjCARCContract();
3360 void ObjCARCContract::getAnalysisUsage(AnalysisUsage &AU) const {
3361 AU.addRequired<AliasAnalysis>();
3362 AU.addRequired<DominatorTree>();
3363 AU.setPreservesCFG();
3366 Constant *ObjCARCContract::getStoreStrongCallee(Module *M) {
3367 if (!StoreStrongCallee) {
3368 LLVMContext &C = M->getContext();
3369 Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
3370 Type *I8XX = PointerType::getUnqual(I8X);
3371 std::vector<Type *> Params;
3372 Params.push_back(I8XX);
3373 Params.push_back(I8X);
3375 AttrListPtr Attributes;
3376 Attributes.addAttr(~0u, Attribute::NoUnwind);
3377 Attributes.addAttr(1, Attribute::NoCapture);
3380 M->getOrInsertFunction(
3382 FunctionType::get(Type::getVoidTy(C), Params, /*isVarArg=*/false),
3385 return StoreStrongCallee;
3388 Constant *ObjCARCContract::getRetainAutoreleaseCallee(Module *M) {
3389 if (!RetainAutoreleaseCallee) {
3390 LLVMContext &C = M->getContext();
3391 Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
3392 std::vector<Type *> Params;
3393 Params.push_back(I8X);
3395 FunctionType::get(I8X, Params, /*isVarArg=*/false);
3396 AttrListPtr Attributes;
3397 Attributes.addAttr(~0u, Attribute::NoUnwind);
3398 RetainAutoreleaseCallee =
3399 M->getOrInsertFunction("objc_retainAutorelease", FTy, Attributes);
3401 return RetainAutoreleaseCallee;
3404 Constant *ObjCARCContract::getRetainAutoreleaseRVCallee(Module *M) {
3405 if (!RetainAutoreleaseRVCallee) {
3406 LLVMContext &C = M->getContext();
3407 Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
3408 std::vector<Type *> Params;
3409 Params.push_back(I8X);
3411 FunctionType::get(I8X, Params, /*isVarArg=*/false);
3412 AttrListPtr Attributes;
3413 Attributes.addAttr(~0u, Attribute::NoUnwind);
3414 RetainAutoreleaseRVCallee =
3415 M->getOrInsertFunction("objc_retainAutoreleaseReturnValue", FTy,
3418 return RetainAutoreleaseRVCallee;
3421 /// ContractAutorelease - Merge an autorelease with a retain into a fused call.
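///
/// For example (illustrative):
///
///   %0 = call i8* @objc_retain(i8* %x)
///   %1 = call i8* @objc_autorelease(i8* %x)
/// =>
///   %0 = call i8* @objc_retainAutorelease(i8* %x)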
3424 ObjCARCContract::ContractAutorelease(Function &F, Instruction *Autorelease,
3425 InstructionClass Class,
3426 SmallPtrSet<Instruction *, 4>
3427 &DependingInstructions,
3428 SmallPtrSet<const BasicBlock *, 4>
3430 const Value *Arg = GetObjCArg(Autorelease);
3432 // Check that there are no instructions between the retain and the autorelease
3433   // (such as an objc_autoreleasePoolPop) which may change the count.
3434 CallInst *Retain = 0;
3435 if (Class == IC_AutoreleaseRV)
3436 FindDependencies(RetainAutoreleaseRVDep, Arg,
3437 Autorelease->getParent(), Autorelease,
3438 DependingInstructions, Visited, PA);
3440 FindDependencies(RetainAutoreleaseDep, Arg,
3441 Autorelease->getParent(), Autorelease,
3442 DependingInstructions, Visited, PA);
3445 if (DependingInstructions.size() != 1) {
3446 DependingInstructions.clear();
3450 Retain = dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
3451 DependingInstructions.clear();
3454 GetBasicInstructionClass(Retain) != IC_Retain ||
3455 GetObjCArg(Retain) != Arg)
3461 if (Class == IC_AutoreleaseRV)
3462 Retain->setCalledFunction(getRetainAutoreleaseRVCallee(F.getParent()));
3464 Retain->setCalledFunction(getRetainAutoreleaseCallee(F.getParent()));
3466 EraseInstruction(Autorelease);
3470 /// ContractRelease - Attempt to merge an objc_release with a store, load, and
3471 /// objc_retain to form an objc_storeStrong. This can be a little tricky because
3472 /// the instructions don't always appear in order, and there may be unrelated
3473 /// intervening instructions.
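///
/// For example (an illustrative sketch):
///
///   %old = load i8** %p
///   %0 = call i8* @objc_retain(i8* %new)
///   store i8* %new, i8** %p
///   call void @objc_release(i8* %old)
/// =>
///   call void @objc_storeStrong(i8** %p, i8* %new)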
3474 void ObjCARCContract::ContractRelease(Instruction *Release,
3475 inst_iterator &Iter) {
3476 LoadInst *Load = dyn_cast<LoadInst>(GetObjCArg(Release));
3477 if (!Load || Load->isVolatile()) return;
3479 // For now, require everything to be in one basic block.
3480 BasicBlock *BB = Release->getParent();
3481 if (Load->getParent() != BB) return;
3483 // Walk down to find the store.
3484 BasicBlock::iterator I = Load, End = BB->end();
3486 AliasAnalysis::Location Loc = AA->getLocation(Load);
3489 IsRetain(GetBasicInstructionClass(I)) ||
3490 !(AA->getModRefInfo(I, Loc) & AliasAnalysis::Mod)))
3492 StoreInst *Store = dyn_cast<StoreInst>(I);
3493 if (!Store || Store->isVolatile()) return;
3494 if (Store->getPointerOperand() != Loc.Ptr) return;
3496 Value *New = StripPointerCastsAndObjCCalls(Store->getValueOperand());
3498 // Walk up to find the retain.
3500 BasicBlock::iterator Begin = BB->begin();
3501 while (I != Begin && GetBasicInstructionClass(I) != IC_Retain)
3503 Instruction *Retain = I;
3504 if (GetBasicInstructionClass(Retain) != IC_Retain) return;
3505 if (GetObjCArg(Retain) != New) return;
3510 LLVMContext &C = Release->getContext();
3511 Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
3512 Type *I8XX = PointerType::getUnqual(I8X);
3514 Value *Args[] = { Load->getPointerOperand(), New };
3515 if (Args[0]->getType() != I8XX)
3516 Args[0] = new BitCastInst(Args[0], I8XX, "", Store);
3517 if (Args[1]->getType() != I8X)
3518 Args[1] = new BitCastInst(Args[1], I8X, "", Store);
3519 CallInst *StoreStrong =
3520 CallInst::Create(getStoreStrongCallee(BB->getParent()->getParent()),
3522 StoreStrong->setDoesNotThrow();
3523 StoreStrong->setDebugLoc(Store->getDebugLoc());
3525 if (&*Iter == Store) ++Iter;
3526 Store->eraseFromParent();
3527 Release->eraseFromParent();
3528 EraseInstruction(Retain);
3529 if (Load->use_empty())
3530 Load->eraseFromParent();
3533 bool ObjCARCContract::doInitialization(Module &M) {
3534 Run = ModuleHasARC(M);
3538 // These are initialized lazily.
3539 StoreStrongCallee = 0;
3540 RetainAutoreleaseCallee = 0;
3541 RetainAutoreleaseRVCallee = 0;
3543 // Initialize RetainRVMarker.
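  // For example (illustrative; the exact marker string is target-specific),
  // the front-end may emit module-level metadata such as:
  //
  //   !clang.arc.retainAutoreleasedReturnValueMarker = !{!0}
  //   !0 = metadata !{metadata !"mov\09r7, r7"}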
3545 if (NamedMDNode *NMD =
3546 M.getNamedMetadata("clang.arc.retainAutoreleasedReturnValueMarker"))
3547 if (NMD->getNumOperands() == 1) {
3548 const MDNode *N = NMD->getOperand(0);
3549 if (N->getNumOperands() == 1)
3550 if (const MDString *S = dyn_cast<MDString>(N->getOperand(0)))
3557 bool ObjCARCContract::runOnFunction(Function &F) {
3561 // If nothing in the Module uses ARC, don't do anything.
3566 AA = &getAnalysis<AliasAnalysis>();
3567 DT = &getAnalysis<DominatorTree>();
3569 PA.setAA(&getAnalysis<AliasAnalysis>());
3571 // For ObjC library calls which return their argument, replace uses of the
3572 // argument with uses of the call return value, if it dominates the use. This
3573 // reduces register pressure.
3574 SmallPtrSet<Instruction *, 4> DependingInstructions;
3575 SmallPtrSet<const BasicBlock *, 4> Visited;
3576 for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
3577 Instruction *Inst = &*I++;
3579 // Only these library routines return their argument. In particular,
3580 // objc_retainBlock does not necessarily return its argument.
3581 InstructionClass Class = GetBasicInstructionClass(Inst);
3584 case IC_FusedRetainAutorelease:
3585 case IC_FusedRetainAutoreleaseRV:
3587 case IC_Autorelease:
3588 case IC_AutoreleaseRV:
3589 if (ContractAutorelease(F, Inst, Class, DependingInstructions, Visited))
3593 // If we're compiling for a target which needs a special inline-asm
3594       // marker to do the retainAutoreleasedReturnValue optimization,
      // insert it now.
3596 if (!RetainRVMarker)
3598 BasicBlock::iterator BBI = Inst;
3600 while (isNoopInstruction(BBI)) --BBI;
3601 if (&*BBI == GetObjCArg(Inst)) {
3603 InlineAsm::get(FunctionType::get(Type::getVoidTy(Inst->getContext()),
3604 /*isVarArg=*/false),
3605 RetainRVMarker->getString(),
3606 /*Constraints=*/"", /*hasSideEffects=*/true);
3607 CallInst::Create(IA, "", Inst);
3612 // objc_initWeak(p, null) => *p = null
3613 CallInst *CI = cast<CallInst>(Inst);
3614 if (isNullOrUndef(CI->getArgOperand(1))) {
3616 ConstantPointerNull::get(cast<PointerType>(CI->getType()));
3618 new StoreInst(Null, CI->getArgOperand(0), CI);
3619 CI->replaceAllUsesWith(Null);
3620 CI->eraseFromParent();
3625 ContractRelease(Inst, I);
3631 // Don't use GetObjCArg because we don't want to look through bitcasts
3632 // and such; to do the replacement, the argument must have type i8*.
3633 const Value *Arg = cast<CallInst>(Inst)->getArgOperand(0);
3635 // If we're compiling bugpointed code, don't get in trouble.
3636 if (!isa<Instruction>(Arg) && !isa<Argument>(Arg))
3638 // Look through the uses of the pointer.
3639 for (Value::const_use_iterator UI = Arg->use_begin(), UE = Arg->use_end();
3641 Use &U = UI.getUse();
3642 unsigned OperandNo = UI.getOperandNo();
3643 ++UI; // Increment UI now, because we may unlink its element.
3644 if (Instruction *UserInst = dyn_cast<Instruction>(U.getUser()))
3645 if (Inst != UserInst && DT->dominates(Inst, UserInst)) {
3647 Instruction *Replacement = Inst;
3648 Type *UseTy = U.get()->getType();
3649 if (PHINode *PHI = dyn_cast<PHINode>(UserInst)) {
3650 // For PHI nodes, insert the bitcast in the predecessor block.
3652 PHINode::getIncomingValueNumForOperand(OperandNo);
3654 PHI->getIncomingBlock(ValNo);
3655 if (Replacement->getType() != UseTy)
3656 Replacement = new BitCastInst(Replacement, UseTy, "",
3658 for (unsigned i = 0, e = PHI->getNumIncomingValues();
3660 if (PHI->getIncomingBlock(i) == BB) {
3661 // Keep the UI iterator valid.
3662 if (&PHI->getOperandUse(
3663 PHINode::getOperandNumForIncomingValue(i)) ==
3666 PHI->setIncomingValue(i, Replacement);
3669 if (Replacement->getType() != UseTy)
3670 Replacement = new BitCastInst(Replacement, UseTy, "", UserInst);
3676     // If Arg is a no-op casted pointer, strip one level of casts and
    // iterate.
3678 if (const BitCastInst *BI = dyn_cast<BitCastInst>(Arg))
3679 Arg = BI->getOperand(0);
3680 else if (isa<GEPOperator>(Arg) &&
3681 cast<GEPOperator>(Arg)->hasAllZeroIndices())
3682 Arg = cast<GEPOperator>(Arg)->getPointerOperand();
3683 else if (isa<GlobalAlias>(Arg) &&
3684 !cast<GlobalAlias>(Arg)->mayBeOverridden())
3685 Arg = cast<GlobalAlias>(Arg)->getAliasee();