blob: 900ad61e9e370222c372b75dc3cefd10cec90d32 [file] [log] [blame]
Michael Gottesman79d8d812013-01-28 01:35:51 +00001//===- ObjCARCOpts.cpp - ObjC ARC Optimization ----------------------------===//
John McCalld935e9c2011-06-15 23:37:01 +00002//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
Michael Gottesman97e3df02013-01-14 00:35:14 +00009/// \file
10/// This file defines ObjC ARC optimizations. ARC stands for Automatic
11/// Reference Counting and is a system for managing reference counts for objects
12/// in Objective C.
13///
14/// The optimizations performed include elimination of redundant, partially
15/// redundant, and inconsequential reference count operations, elimination of
16/// redundant weak pointer operations, pattern-matching and replacement of
17/// low-level operations into higher-level operations, and numerous minor
18/// simplifications.
19///
20/// This file also defines a simple ARC-aware AliasAnalysis.
21///
22/// WARNING: This file knows about certain library functions. It recognizes them
23/// by name, and hardwires knowledge of their semantics.
24///
25/// WARNING: This file knows about how certain Objective-C library functions are
26/// used. Naive LLVM IR transformations which would otherwise be
27/// behavior-preserving may break these assumptions.
28///
John McCalld935e9c2011-06-15 23:37:01 +000029//===----------------------------------------------------------------------===//
30
Michael Gottesman08904e32013-01-28 03:28:38 +000031#define DEBUG_TYPE "objc-arc-opts"
32#include "ObjCARC.h"
Michael Gottesmanfa0939f2013-01-28 04:12:07 +000033
John McCalld935e9c2011-06-15 23:37:01 +000034#include "llvm/ADT/DenseMap.h"
Michael Gottesmanf15c0bb2013-01-13 22:12:06 +000035#include "llvm/ADT/SmallPtrSet.h"
Michael Gottesmanfa0939f2013-01-28 04:12:07 +000036#include "llvm/ADT/STLExtras.h"
37
John McCalld935e9c2011-06-15 23:37:01 +000038using namespace llvm;
Michael Gottesman08904e32013-01-28 03:28:38 +000039using namespace llvm::objcarc;
John McCalld935e9c2011-06-15 23:37:01 +000040
Michael Gottesman97e3df02013-01-14 00:35:14 +000041/// \defgroup MiscUtils Miscellaneous utilities that are not ARC specific.
42/// @{
John McCalld935e9c2011-06-15 23:37:01 +000043
44namespace {
Michael Gottesman97e3df02013-01-14 00:35:14 +000045 /// \brief An associative container with fast insertion-order (deterministic)
46 /// iteration over its elements. Plus the special blot operation.
John McCalld935e9c2011-06-15 23:37:01 +000047 template<class KeyT, class ValueT>
48 class MapVector {
Michael Gottesman97e3df02013-01-14 00:35:14 +000049 /// Map keys to indices in Vector.
John McCalld935e9c2011-06-15 23:37:01 +000050 typedef DenseMap<KeyT, size_t> MapTy;
51 MapTy Map;
52
John McCalld935e9c2011-06-15 23:37:01 +000053 typedef std::vector<std::pair<KeyT, ValueT> > VectorTy;
Michael Gottesman97e3df02013-01-14 00:35:14 +000054 /// Keys and values.
John McCalld935e9c2011-06-15 23:37:01 +000055 VectorTy Vector;
56
57 public:
58 typedef typename VectorTy::iterator iterator;
59 typedef typename VectorTy::const_iterator const_iterator;
60 iterator begin() { return Vector.begin(); }
61 iterator end() { return Vector.end(); }
62 const_iterator begin() const { return Vector.begin(); }
63 const_iterator end() const { return Vector.end(); }
64
65#ifdef XDEBUG
66 ~MapVector() {
67 assert(Vector.size() >= Map.size()); // May differ due to blotting.
68 for (typename MapTy::const_iterator I = Map.begin(), E = Map.end();
69 I != E; ++I) {
70 assert(I->second < Vector.size());
71 assert(Vector[I->second].first == I->first);
72 }
73 for (typename VectorTy::const_iterator I = Vector.begin(),
74 E = Vector.end(); I != E; ++I)
75 assert(!I->first ||
76 (Map.count(I->first) &&
77 Map[I->first] == size_t(I - Vector.begin())));
78 }
79#endif
80
Dan Gohman55b06742012-03-02 01:13:53 +000081 ValueT &operator[](const KeyT &Arg) {
John McCalld935e9c2011-06-15 23:37:01 +000082 std::pair<typename MapTy::iterator, bool> Pair =
83 Map.insert(std::make_pair(Arg, size_t(0)));
84 if (Pair.second) {
Dan Gohman55b06742012-03-02 01:13:53 +000085 size_t Num = Vector.size();
86 Pair.first->second = Num;
John McCalld935e9c2011-06-15 23:37:01 +000087 Vector.push_back(std::make_pair(Arg, ValueT()));
Dan Gohman55b06742012-03-02 01:13:53 +000088 return Vector[Num].second;
John McCalld935e9c2011-06-15 23:37:01 +000089 }
90 return Vector[Pair.first->second].second;
91 }
92
93 std::pair<iterator, bool>
94 insert(const std::pair<KeyT, ValueT> &InsertPair) {
95 std::pair<typename MapTy::iterator, bool> Pair =
96 Map.insert(std::make_pair(InsertPair.first, size_t(0)));
97 if (Pair.second) {
Dan Gohman55b06742012-03-02 01:13:53 +000098 size_t Num = Vector.size();
99 Pair.first->second = Num;
John McCalld935e9c2011-06-15 23:37:01 +0000100 Vector.push_back(InsertPair);
Dan Gohman55b06742012-03-02 01:13:53 +0000101 return std::make_pair(Vector.begin() + Num, true);
John McCalld935e9c2011-06-15 23:37:01 +0000102 }
103 return std::make_pair(Vector.begin() + Pair.first->second, false);
104 }
105
Dan Gohman55b06742012-03-02 01:13:53 +0000106 const_iterator find(const KeyT &Key) const {
John McCalld935e9c2011-06-15 23:37:01 +0000107 typename MapTy::const_iterator It = Map.find(Key);
108 if (It == Map.end()) return Vector.end();
109 return Vector.begin() + It->second;
110 }
111
Michael Gottesman97e3df02013-01-14 00:35:14 +0000112 /// This is similar to erase, but instead of removing the element from the
113 /// vector, it just zeros out the key in the vector. This leaves iterators
114 /// intact, but clients must be prepared for zeroed-out keys when iterating.
Dan Gohman55b06742012-03-02 01:13:53 +0000115 void blot(const KeyT &Key) {
John McCalld935e9c2011-06-15 23:37:01 +0000116 typename MapTy::iterator It = Map.find(Key);
117 if (It == Map.end()) return;
118 Vector[It->second].first = KeyT();
119 Map.erase(It);
120 }
121
122 void clear() {
123 Map.clear();
124 Vector.clear();
125 }
126 };
127}
128
Michael Gottesman97e3df02013-01-14 00:35:14 +0000129/// @}
130///
131/// \defgroup ARCUtilities Utility declarations/definitions specific to ARC.
132/// @{
John McCalld935e9c2011-06-15 23:37:01 +0000133
Chandler Carruthed0881b2012-12-03 16:50:05 +0000134#include "llvm/Analysis/ValueTracking.h"
Chandler Carruth9fb823b2013-01-02 11:36:10 +0000135#include "llvm/IR/Intrinsics.h"
Dan Gohman41375a32012-05-08 23:39:44 +0000136#include "llvm/Support/CallSite.h"
Chandler Carruthed0881b2012-12-03 16:50:05 +0000137#include "llvm/Transforms/Utils/Local.h"
Dan Gohman41375a32012-05-08 23:39:44 +0000138
Michael Gottesman5300cdd2013-01-27 06:19:48 +0000139/// \brief Test whether the given value is possible a retainable object pointer.
140static bool IsPotentialRetainableObjPtr(const Value *Op) {
141 // Pointers to static or stack storage are not valid retainable object pointers.
John McCalld935e9c2011-06-15 23:37:01 +0000142 if (isa<Constant>(Op) || isa<AllocaInst>(Op))
143 return false;
Michael Gottesman5300cdd2013-01-27 06:19:48 +0000144 // Special arguments can not be a valid retainable object pointer.
John McCalld935e9c2011-06-15 23:37:01 +0000145 if (const Argument *Arg = dyn_cast<Argument>(Op))
146 if (Arg->hasByValAttr() ||
147 Arg->hasNestAttr() ||
148 Arg->hasStructRetAttr())
149 return false;
Dan Gohmanbd944b42011-12-14 19:10:53 +0000150 // Only consider values with pointer types.
Michael Gottesman5300cdd2013-01-27 06:19:48 +0000151 //
Dan Gohmanbd944b42011-12-14 19:10:53 +0000152 // It seemes intuitive to exclude function pointer types as well, since
Michael Gottesman5300cdd2013-01-27 06:19:48 +0000153 // functions are never retainable object pointers, however clang occasionally
154 // bitcasts retainable object pointers to function-pointer type temporarily.
Chris Lattner229907c2011-07-18 04:54:35 +0000155 PointerType *Ty = dyn_cast<PointerType>(Op->getType());
Dan Gohmanbd944b42011-12-14 19:10:53 +0000156 if (!Ty)
John McCalld935e9c2011-06-15 23:37:01 +0000157 return false;
Michael Gottesman5300cdd2013-01-27 06:19:48 +0000158 // Conservatively assume anything else is a potential retainable object pointer.
John McCalld935e9c2011-06-15 23:37:01 +0000159 return true;
160}
161
Michael Gottesman4385edf2013-01-14 01:47:53 +0000162/// \brief Helper for GetInstructionClass. Determines what kind of construct CS
163/// is.
John McCalld935e9c2011-06-15 23:37:01 +0000164static InstructionClass GetCallSiteClass(ImmutableCallSite CS) {
165 for (ImmutableCallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end();
166 I != E; ++I)
Michael Gottesman5300cdd2013-01-27 06:19:48 +0000167 if (IsPotentialRetainableObjPtr(*I))
John McCalld935e9c2011-06-15 23:37:01 +0000168 return CS.onlyReadsMemory() ? IC_User : IC_CallOrUser;
169
170 return CS.onlyReadsMemory() ? IC_None : IC_Call;
171}
172
/// \brief Determine what kind of construct V is.
static InstructionClass GetInstructionClass(const Value *V) {
  if (const Instruction *I = dyn_cast<Instruction>(V)) {
    // Any instruction other than bitcast and gep with a pointer operand have a
    // use of an objc pointer. Bitcasts, GEPs, Selects, PHIs transfer a pointer
    // to a subsequent use, rather than using it themselves, in this sense.
    // As a short cut, several other opcodes are known to have no pointer
    // operands of interest. And ret is never followed by a release, so it's
    // not interesting to examine.
    switch (I->getOpcode()) {
    case Instruction::Call: {
      const CallInst *CI = cast<CallInst>(I);
      // Check for calls to special functions (objc_retain etc.); those are
      // classified by name.
      if (const Function *F = CI->getCalledFunction()) {
        InstructionClass Class = GetFunctionClass(F);
        if (Class != IC_CallOrUser)
          return Class;

        // None of the intrinsic functions do objc_release. For intrinsics, the
        // only question is whether or not they may be users.
        switch (F->getIntrinsicID()) {
        case Intrinsic::returnaddress: case Intrinsic::frameaddress:
        case Intrinsic::stacksave: case Intrinsic::stackrestore:
        case Intrinsic::vastart: case Intrinsic::vacopy: case Intrinsic::vaend:
        case Intrinsic::objectsize: case Intrinsic::prefetch:
        case Intrinsic::stackprotector:
        case Intrinsic::eh_return_i32: case Intrinsic::eh_return_i64:
        case Intrinsic::eh_typeid_for: case Intrinsic::eh_dwarf_cfa:
        case Intrinsic::eh_sjlj_lsda: case Intrinsic::eh_sjlj_functioncontext:
        case Intrinsic::init_trampoline: case Intrinsic::adjust_trampoline:
        case Intrinsic::lifetime_start: case Intrinsic::lifetime_end:
        case Intrinsic::invariant_start: case Intrinsic::invariant_end:
        // Don't let dbg info affect our results.
        case Intrinsic::dbg_declare: case Intrinsic::dbg_value:
          // Short cut: Some intrinsics obviously don't use ObjC pointers.
          return IC_None;
        default:
          break;
        }
      }
      // Indirect calls, and direct calls to unrecognized functions, are
      // classified by their arguments.
      return GetCallSiteClass(CI);
    }
    case Instruction::Invoke:
      return GetCallSiteClass(cast<InvokeInst>(I));
    case Instruction::BitCast:
    case Instruction::GetElementPtr:
    case Instruction::Select: case Instruction::PHI:
    case Instruction::Ret: case Instruction::Br:
    case Instruction::Switch: case Instruction::IndirectBr:
    case Instruction::Alloca: case Instruction::VAArg:
    case Instruction::Add: case Instruction::FAdd:
    case Instruction::Sub: case Instruction::FSub:
    case Instruction::Mul: case Instruction::FMul:
    case Instruction::SDiv: case Instruction::UDiv: case Instruction::FDiv:
    case Instruction::SRem: case Instruction::URem: case Instruction::FRem:
    case Instruction::Shl: case Instruction::LShr: case Instruction::AShr:
    case Instruction::And: case Instruction::Or: case Instruction::Xor:
    case Instruction::SExt: case Instruction::ZExt: case Instruction::Trunc:
    case Instruction::IntToPtr: case Instruction::FCmp:
    case Instruction::FPTrunc: case Instruction::FPExt:
    case Instruction::FPToUI: case Instruction::FPToSI:
    case Instruction::UIToFP: case Instruction::SIToFP:
    case Instruction::InsertElement: case Instruction::ExtractElement:
    case Instruction::ShuffleVector:
    case Instruction::ExtractValue:
      // These either forward pointers or are known not to have interesting
      // pointer operands; fall through to IC_None below.
      break;
    case Instruction::ICmp:
      // Comparing a pointer with null, or any other constant, isn't an
      // interesting use, because we don't care what the pointer points to, or
      // about the values of any other dynamic reference-counted pointers.
      if (IsPotentialRetainableObjPtr(I->getOperand(1)))
        return IC_User;
      break;
    default:
      // For anything else, check all the operands.
      // Note that this includes both operands of a Store: while the first
      // operand isn't actually being dereferenced, it is being stored to
      // memory where we can no longer track who might read it and dereference
      // it, so we have to consider it potentially used.
      for (User::const_op_iterator OI = I->op_begin(), OE = I->op_end();
           OI != OE; ++OI)
        if (IsPotentialRetainableObjPtr(*OI))
          return IC_User;
    }
  }

  // Otherwise, it's totally inert for ARC purposes.
  return IC_None;
}
262
Michael Gottesman97e3df02013-01-14 00:35:14 +0000263/// \brief Test if the given class is objc_retain or equivalent.
John McCalld935e9c2011-06-15 23:37:01 +0000264static bool IsRetain(InstructionClass Class) {
265 return Class == IC_Retain ||
266 Class == IC_RetainRV;
267}
268
Michael Gottesman97e3df02013-01-14 00:35:14 +0000269/// \brief Test if the given class is objc_autorelease or equivalent.
John McCalld935e9c2011-06-15 23:37:01 +0000270static bool IsAutorelease(InstructionClass Class) {
271 return Class == IC_Autorelease ||
272 Class == IC_AutoreleaseRV;
273}
274
Michael Gottesman97e3df02013-01-14 00:35:14 +0000275/// \brief Test if the given class represents instructions which return their
276/// argument verbatim.
John McCalld935e9c2011-06-15 23:37:01 +0000277static bool IsForwarding(InstructionClass Class) {
278 // objc_retainBlock technically doesn't always return its argument
279 // verbatim, but it doesn't matter for our purposes here.
280 return Class == IC_Retain ||
281 Class == IC_RetainRV ||
282 Class == IC_Autorelease ||
283 Class == IC_AutoreleaseRV ||
284 Class == IC_RetainBlock ||
285 Class == IC_NoopCast;
286}
287
Michael Gottesman97e3df02013-01-14 00:35:14 +0000288/// \brief Test if the given class represents instructions which do nothing if
289/// passed a null pointer.
John McCalld935e9c2011-06-15 23:37:01 +0000290static bool IsNoopOnNull(InstructionClass Class) {
291 return Class == IC_Retain ||
292 Class == IC_RetainRV ||
293 Class == IC_Release ||
294 Class == IC_Autorelease ||
295 Class == IC_AutoreleaseRV ||
296 Class == IC_RetainBlock;
297}
298
Michael Gottesman4385edf2013-01-14 01:47:53 +0000299/// \brief Test if the given class represents instructions which are always safe
300/// to mark with the "tail" keyword.
John McCalld935e9c2011-06-15 23:37:01 +0000301static bool IsAlwaysTail(InstructionClass Class) {
302 // IC_RetainBlock may be given a stack argument.
303 return Class == IC_Retain ||
304 Class == IC_RetainRV ||
John McCalld935e9c2011-06-15 23:37:01 +0000305 Class == IC_AutoreleaseRV;
306}
307
/// \brief Test if the given class represents instructions which are never safe
/// to mark with the "tail" keyword.
static bool IsNeverTail(InstructionClass Class) {
  /// It is never safe to tail call objc_autorelease since by tail calling
  /// objc_autorelease, we also tail call -[NSObject autorelease] which supports
  /// fast autoreleasing causing our object to be potentially reclaimed from the
  /// autorelease pool which violates the semantics of __autoreleasing types in
  /// ARC.
  return Class == IC_Autorelease;
}
318
Michael Gottesman97e3df02013-01-14 00:35:14 +0000319/// \brief Test if the given class represents instructions which are always safe
320/// to mark with the nounwind attribute.
John McCalld935e9c2011-06-15 23:37:01 +0000321static bool IsNoThrow(InstructionClass Class) {
Dan Gohmanfca43c22011-09-14 18:33:34 +0000322 // objc_retainBlock is not nounwind because it calls user copy constructors
323 // which could theoretically throw.
John McCalld935e9c2011-06-15 23:37:01 +0000324 return Class == IC_Retain ||
325 Class == IC_RetainRV ||
John McCalld935e9c2011-06-15 23:37:01 +0000326 Class == IC_Release ||
327 Class == IC_Autorelease ||
328 Class == IC_AutoreleaseRV ||
329 Class == IC_AutoreleasepoolPush ||
330 Class == IC_AutoreleasepoolPop;
331}
332
Michael Gottesman97e3df02013-01-14 00:35:14 +0000333/// \brief Erase the given instruction.
334///
335/// Many ObjC calls return their argument verbatim,
336/// so if it's such a call and the return value has users, replace them with the
337/// argument value.
338///
John McCalld935e9c2011-06-15 23:37:01 +0000339static void EraseInstruction(Instruction *CI) {
340 Value *OldArg = cast<CallInst>(CI)->getArgOperand(0);
341
342 bool Unused = CI->use_empty();
343
344 if (!Unused) {
345 // Replace the return value with the argument.
346 assert(IsForwarding(GetBasicInstructionClass(CI)) &&
347 "Can't delete non-forwarding instruction with users!");
348 CI->replaceAllUsesWith(OldArg);
349 }
350
351 CI->eraseFromParent();
352
353 if (Unused)
354 RecursivelyDeleteTriviallyDeadInstructions(OldArg);
355}
356
Michael Gottesman97e3df02013-01-14 00:35:14 +0000357/// \brief This is a wrapper around getUnderlyingObject which also knows how to
358/// look through objc_retain and objc_autorelease calls, which we know to return
359/// their argument verbatim.
John McCalld935e9c2011-06-15 23:37:01 +0000360static const Value *GetUnderlyingObjCPtr(const Value *V) {
361 for (;;) {
362 V = GetUnderlyingObject(V);
363 if (!IsForwarding(GetBasicInstructionClass(V)))
364 break;
365 V = cast<CallInst>(V)->getArgOperand(0);
366 }
367
368 return V;
369}
370
Michael Gottesman97e3df02013-01-14 00:35:14 +0000371/// \brief This is a wrapper around Value::stripPointerCasts which also knows
372/// how to look through objc_retain and objc_autorelease calls, which we know to
373/// return their argument verbatim.
John McCalld935e9c2011-06-15 23:37:01 +0000374static const Value *StripPointerCastsAndObjCCalls(const Value *V) {
375 for (;;) {
376 V = V->stripPointerCasts();
377 if (!IsForwarding(GetBasicInstructionClass(V)))
378 break;
379 V = cast<CallInst>(V)->getArgOperand(0);
380 }
381 return V;
382}
383
Michael Gottesman97e3df02013-01-14 00:35:14 +0000384/// \brief This is a wrapper around Value::stripPointerCasts which also knows
385/// how to look through objc_retain and objc_autorelease calls, which we know to
386/// return their argument verbatim.
John McCalld935e9c2011-06-15 23:37:01 +0000387static Value *StripPointerCastsAndObjCCalls(Value *V) {
388 for (;;) {
389 V = V->stripPointerCasts();
390 if (!IsForwarding(GetBasicInstructionClass(V)))
391 break;
392 V = cast<CallInst>(V)->getArgOperand(0);
393 }
394 return V;
395}
396
Michael Gottesman97e3df02013-01-14 00:35:14 +0000397/// \brief Assuming the given instruction is one of the special calls such as
398/// objc_retain or objc_release, return the argument value, stripped of no-op
John McCalld935e9c2011-06-15 23:37:01 +0000399/// casts and forwarding calls.
400static Value *GetObjCArg(Value *Inst) {
401 return StripPointerCastsAndObjCCalls(cast<CallInst>(Inst)->getArgOperand(0));
402}
403
Michael Gottesman87db35752013-01-18 23:02:45 +0000404/// \brief Return true if this value refers to a distinct and identifiable
405/// object.
406///
407/// This is similar to AliasAnalysis's isIdentifiedObject, except that it uses
408/// special knowledge of ObjC conventions.
John McCalld935e9c2011-06-15 23:37:01 +0000409static bool IsObjCIdentifiedObject(const Value *V) {
410 // Assume that call results and arguments have their own "provenance".
411 // Constants (including GlobalVariables) and Allocas are never
412 // reference-counted.
413 if (isa<CallInst>(V) || isa<InvokeInst>(V) ||
414 isa<Argument>(V) || isa<Constant>(V) ||
415 isa<AllocaInst>(V))
416 return true;
417
418 if (const LoadInst *LI = dyn_cast<LoadInst>(V)) {
419 const Value *Pointer =
420 StripPointerCastsAndObjCCalls(LI->getPointerOperand());
421 if (const GlobalVariable *GV = dyn_cast<GlobalVariable>(Pointer)) {
Dan Gohman56e1cef2011-08-22 17:29:11 +0000422 // A constant pointer can't be pointing to an object on the heap. It may
423 // be reference-counted, but it won't be deleted.
424 if (GV->isConstant())
425 return true;
John McCalld935e9c2011-06-15 23:37:01 +0000426 StringRef Name = GV->getName();
427 // These special variables are known to hold values which are not
428 // reference-counted pointers.
429 if (Name.startswith("\01L_OBJC_SELECTOR_REFERENCES_") ||
430 Name.startswith("\01L_OBJC_CLASSLIST_REFERENCES_") ||
431 Name.startswith("\01L_OBJC_CLASSLIST_SUP_REFS_$_") ||
432 Name.startswith("\01L_OBJC_METH_VAR_NAME_") ||
433 Name.startswith("\01l_objc_msgSend_fixup_"))
434 return true;
435 }
436 }
437
438 return false;
439}
440
Michael Gottesman97e3df02013-01-14 00:35:14 +0000441/// \brief This is similar to StripPointerCastsAndObjCCalls but it stops as soon
442/// as it finds a value with multiple uses.
John McCalld935e9c2011-06-15 23:37:01 +0000443static const Value *FindSingleUseIdentifiedObject(const Value *Arg) {
444 if (Arg->hasOneUse()) {
445 if (const BitCastInst *BC = dyn_cast<BitCastInst>(Arg))
446 return FindSingleUseIdentifiedObject(BC->getOperand(0));
447 if (const GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(Arg))
448 if (GEP->hasAllZeroIndices())
449 return FindSingleUseIdentifiedObject(GEP->getPointerOperand());
450 if (IsForwarding(GetBasicInstructionClass(Arg)))
451 return FindSingleUseIdentifiedObject(
452 cast<CallInst>(Arg)->getArgOperand(0));
453 if (!IsObjCIdentifiedObject(Arg))
454 return 0;
455 return Arg;
456 }
457
Dan Gohman41375a32012-05-08 23:39:44 +0000458 // If we found an identifiable object but it has multiple uses, but they are
459 // trivial uses, we can still consider this to be a single-use value.
John McCalld935e9c2011-06-15 23:37:01 +0000460 if (IsObjCIdentifiedObject(Arg)) {
461 for (Value::const_use_iterator UI = Arg->use_begin(), UE = Arg->use_end();
462 UI != UE; ++UI) {
463 const User *U = *UI;
464 if (!U->use_empty() || StripPointerCastsAndObjCCalls(U) != Arg)
465 return 0;
466 }
467
468 return Arg;
469 }
470
471 return 0;
472}
473
Michael Gottesman4385edf2013-01-14 01:47:53 +0000474/// \brief Test whether the given pointer, which is an Objective C block
475/// pointer, does not "escape".
Michael Gottesman97e3df02013-01-14 00:35:14 +0000476///
477/// This differs from regular escape analysis in that a use as an
478/// argument to a call is not considered an escape.
479///
Dan Gohman728db492012-01-13 00:39:07 +0000480static bool DoesObjCBlockEscape(const Value *BlockPtr) {
Michael Gottesman1a89fe52013-01-13 07:47:32 +0000481
482 DEBUG(dbgs() << "DoesObjCBlockEscape: Target: " << *BlockPtr << "\n");
483
Dan Gohman728db492012-01-13 00:39:07 +0000484 // Walk the def-use chains.
485 SmallVector<const Value *, 4> Worklist;
486 Worklist.push_back(BlockPtr);
Michael Gottesmanf15c0bb2013-01-13 22:12:06 +0000487
488 // Ensure we do not visit any value twice.
489 SmallPtrSet<const Value *, 4> VisitedSet;
490
Dan Gohman728db492012-01-13 00:39:07 +0000491 do {
492 const Value *V = Worklist.pop_back_val();
Michael Gottesman1a89fe52013-01-13 07:47:32 +0000493
494 DEBUG(dbgs() << "DoesObjCBlockEscape: Visiting: " << *V << "\n");
495
Dan Gohman728db492012-01-13 00:39:07 +0000496 for (Value::const_use_iterator UI = V->use_begin(), UE = V->use_end();
497 UI != UE; ++UI) {
498 const User *UUser = *UI;
Michael Gottesman1a89fe52013-01-13 07:47:32 +0000499
500 DEBUG(dbgs() << "DoesObjCBlockEscape: User: " << *UUser << "\n");
501
Dan Gohman728db492012-01-13 00:39:07 +0000502 // Special - Use by a call (callee or argument) is not considered
503 // to be an escape.
Dan Gohmane1e352a2012-04-13 18:28:58 +0000504 switch (GetBasicInstructionClass(UUser)) {
505 case IC_StoreWeak:
506 case IC_InitWeak:
507 case IC_StoreStrong:
508 case IC_Autorelease:
Michael Gottesman1a89fe52013-01-13 07:47:32 +0000509 case IC_AutoreleaseRV: {
510 DEBUG(dbgs() << "DoesObjCBlockEscape: User copies pointer arguments. "
511 "Block Escapes!\n");
Dan Gohmane1e352a2012-04-13 18:28:58 +0000512 // These special functions make copies of their pointer arguments.
513 return true;
Michael Gottesman1a89fe52013-01-13 07:47:32 +0000514 }
Dan Gohmane1e352a2012-04-13 18:28:58 +0000515 case IC_User:
516 case IC_None:
517 // Use by an instruction which copies the value is an escape if the
518 // result is an escape.
519 if (isa<BitCastInst>(UUser) || isa<GetElementPtrInst>(UUser) ||
520 isa<PHINode>(UUser) || isa<SelectInst>(UUser)) {
Michael Gottesmanf15c0bb2013-01-13 22:12:06 +0000521
Michael Gottesmane9145d32013-01-14 19:18:39 +0000522 if (!VisitedSet.insert(UUser)) {
Michael Gottesman4385edf2013-01-14 01:47:53 +0000523 DEBUG(dbgs() << "DoesObjCBlockEscape: User copies value. Escapes "
524 "if result escapes. Adding to list.\n");
Michael Gottesmanf15c0bb2013-01-13 22:12:06 +0000525 Worklist.push_back(UUser);
526 } else {
527 DEBUG(dbgs() << "DoesObjCBlockEscape: Already visited node.\n");
528 }
Dan Gohmane1e352a2012-04-13 18:28:58 +0000529 continue;
530 }
531 // Use by a load is not an escape.
532 if (isa<LoadInst>(UUser))
533 continue;
534 // Use by a store is not an escape if the use is the address.
535 if (const StoreInst *SI = dyn_cast<StoreInst>(UUser))
536 if (V != SI->getValueOperand())
537 continue;
538 break;
539 default:
540 // Regular calls and other stuff are not considered escapes.
Dan Gohman728db492012-01-13 00:39:07 +0000541 continue;
542 }
Dan Gohmaneb6e0152012-02-13 22:57:02 +0000543 // Otherwise, conservatively assume an escape.
Michael Gottesman1a89fe52013-01-13 07:47:32 +0000544 DEBUG(dbgs() << "DoesObjCBlockEscape: Assuming block escapes.\n");
Dan Gohman728db492012-01-13 00:39:07 +0000545 return true;
546 }
547 } while (!Worklist.empty());
548
549 // No escapes found.
Michael Gottesman1a89fe52013-01-13 07:47:32 +0000550 DEBUG(dbgs() << "DoesObjCBlockEscape: Block does not escape.\n");
Dan Gohman728db492012-01-13 00:39:07 +0000551 return false;
552}
553
Michael Gottesman97e3df02013-01-14 00:35:14 +0000554/// @}
555///
Michael Gottesman4385edf2013-01-14 01:47:53 +0000556/// \defgroup ARCAA Extends alias analysis using ObjC specific knowledge.
Michael Gottesman97e3df02013-01-14 00:35:14 +0000557/// @{
John McCalld935e9c2011-06-15 23:37:01 +0000558
namespace {
  /// \brief This is a simple alias analysis implementation that uses knowledge
  /// of ARC constructs to answer queries.
  ///
  /// TODO: This class could be generalized to know about other ObjC-specific
  /// tricks. Such as knowing that ivars in the non-fragile ABI are non-aliasing
  /// even though their offsets are dynamic.
  class ObjCARCAliasAnalysis : public ImmutablePass,
                               public AliasAnalysis {
  public:
    static char ID; // Class identification, replacement for typeinfo
    ObjCARCAliasAnalysis() : ImmutablePass(ID) {
      initializeObjCARCAliasAnalysisPass(*PassRegistry::getPassRegistry());
    }

  private:
    // Hook this pass into the AliasAnalysis chain when the pass manager
    // initializes it.
    virtual void initializePass() {
      InitializeAliasAnalysis(this);
    }

    /// This method is used when a pass implements an analysis interface through
    /// multiple inheritance. If needed, it should override this to adjust the
    /// this pointer as needed for the specified pass info.
    virtual void *getAdjustedAnalysisPointer(const void *PI) {
      if (PI == &AliasAnalysis::ID)
        return static_cast<AliasAnalysis *>(this);
      return this;
    }

    // AliasAnalysis interface; definitions follow below in this file.
    virtual void getAnalysisUsage(AnalysisUsage &AU) const;
    virtual AliasResult alias(const Location &LocA, const Location &LocB);
    virtual bool pointsToConstantMemory(const Location &Loc, bool OrLocal);
    virtual ModRefBehavior getModRefBehavior(ImmutableCallSite CS);
    virtual ModRefBehavior getModRefBehavior(const Function *F);
    virtual ModRefResult getModRefInfo(ImmutableCallSite CS,
                                       const Location &Loc);
    virtual ModRefResult getModRefInfo(ImmutableCallSite CS1,
                                       ImmutableCallSite CS2);
  };
} // End of anonymous namespace
599
// Register this pass with the pass registry as an alias-analysis
// implementation ("objc-arc-aa").
char ObjCARCAliasAnalysis::ID = 0;
INITIALIZE_AG_PASS(ObjCARCAliasAnalysis, AliasAnalysis, "objc-arc-aa",
                   "ObjC-ARC-Based Alias Analysis", false, true, false)

// Factory function used by pass-pipeline setup code.
ImmutablePass *llvm::createObjCARCAliasAnalysisPass() {
  return new ObjCARCAliasAnalysis();
}
608
void
ObjCARCAliasAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  // This analysis transforms nothing; it only answers queries, chaining to
  // the next AliasAnalysis in the group.
  AU.setPreservesAll();
  AliasAnalysis::getAnalysisUsage(AU);
}
614
615AliasAnalysis::AliasResult
616ObjCARCAliasAnalysis::alias(const Location &LocA, const Location &LocB) {
617 if (!EnableARCOpts)
618 return AliasAnalysis::alias(LocA, LocB);
619
620 // First, strip off no-ops, including ObjC-specific no-ops, and try making a
621 // precise alias query.
622 const Value *SA = StripPointerCastsAndObjCCalls(LocA.Ptr);
623 const Value *SB = StripPointerCastsAndObjCCalls(LocB.Ptr);
624 AliasResult Result =
625 AliasAnalysis::alias(Location(SA, LocA.Size, LocA.TBAATag),
626 Location(SB, LocB.Size, LocB.TBAATag));
627 if (Result != MayAlias)
628 return Result;
629
630 // If that failed, climb to the underlying object, including climbing through
631 // ObjC-specific no-ops, and try making an imprecise alias query.
632 const Value *UA = GetUnderlyingObjCPtr(SA);
633 const Value *UB = GetUnderlyingObjCPtr(SB);
634 if (UA != SA || UB != SB) {
635 Result = AliasAnalysis::alias(Location(UA), Location(UB));
636 // We can't use MustAlias or PartialAlias results here because
637 // GetUnderlyingObjCPtr may return an offsetted pointer value.
638 if (Result == NoAlias)
639 return NoAlias;
640 }
641
642 // If that failed, fail. We don't need to chain here, since that's covered
643 // by the earlier precise query.
644 return MayAlias;
645}
646
647bool
648ObjCARCAliasAnalysis::pointsToConstantMemory(const Location &Loc,
649 bool OrLocal) {
650 if (!EnableARCOpts)
651 return AliasAnalysis::pointsToConstantMemory(Loc, OrLocal);
652
653 // First, strip off no-ops, including ObjC-specific no-ops, and try making
654 // a precise alias query.
655 const Value *S = StripPointerCastsAndObjCCalls(Loc.Ptr);
656 if (AliasAnalysis::pointsToConstantMemory(Location(S, Loc.Size, Loc.TBAATag),
657 OrLocal))
658 return true;
659
660 // If that failed, climb to the underlying object, including climbing through
661 // ObjC-specific no-ops, and try making an imprecise alias query.
662 const Value *U = GetUnderlyingObjCPtr(S);
663 if (U != S)
664 return AliasAnalysis::pointsToConstantMemory(Location(U), OrLocal);
665
666 // If that failed, fail. We don't need to chain here, since that's covered
667 // by the earlier precise query.
668 return false;
669}
670
AliasAnalysis::ModRefBehavior
ObjCARCAliasAnalysis::getModRefBehavior(ImmutableCallSite CS) {
  // We have nothing to do. Just chain to the next AliasAnalysis.
  return AliasAnalysis::getModRefBehavior(CS);
}
676
677AliasAnalysis::ModRefBehavior
678ObjCARCAliasAnalysis::getModRefBehavior(const Function *F) {
679 if (!EnableARCOpts)
680 return AliasAnalysis::getModRefBehavior(F);
681
682 switch (GetFunctionClass(F)) {
683 case IC_NoopCast:
684 return DoesNotAccessMemory;
685 default:
686 break;
687 }
688
689 return AliasAnalysis::getModRefBehavior(F);
690}
691
/// Refine mod/ref information for calls to the ObjC runtime: several of the
/// runtime entry points are known not to touch any compiler-visible memory,
/// so calls to them can be reported as NoModRef against any location.
AliasAnalysis::ModRefResult
ObjCARCAliasAnalysis::getModRefInfo(ImmutableCallSite CS, const Location &Loc) {
  // When ARC optimizations are disabled, defer to the chained analysis.
  if (!EnableARCOpts)
    return AliasAnalysis::getModRefInfo(CS, Loc);

  switch (GetBasicInstructionClass(CS.getInstruction())) {
  case IC_Retain:
  case IC_RetainRV:
  case IC_Autorelease:
  case IC_AutoreleaseRV:
  case IC_NoopCast:
  case IC_AutoreleasepoolPush:
  case IC_FusedRetainAutorelease:
  case IC_FusedRetainAutoreleaseRV:
    // These functions don't access any memory visible to the compiler.
    // Note that this doesn't include objc_retainBlock, because it updates
    // pointers when it copies block data.
    return NoModRef;
  default:
    break;
  }

  // Any other call: defer to the next AliasAnalysis in the chain.
  return AliasAnalysis::getModRefInfo(CS, Loc);
}
716
717AliasAnalysis::ModRefResult
718ObjCARCAliasAnalysis::getModRefInfo(ImmutableCallSite CS1,
719 ImmutableCallSite CS2) {
720 // TODO: Theoretically we could check for dependencies between objc_* calls
721 // and OnlyAccessesArgumentPointees calls or other well-behaved calls.
722 return AliasAnalysis::getModRefInfo(CS1, CS2);
723}
724
Michael Gottesman97e3df02013-01-14 00:35:14 +0000725/// @}
726///
Michael Gottesman97e3df02013-01-14 00:35:14 +0000727/// \defgroup ARCOpt ARC Optimization.
728/// @{
John McCalld935e9c2011-06-15 23:37:01 +0000729
730// TODO: On code like this:
731//
732// objc_retain(%x)
733// stuff_that_cannot_release()
734// objc_autorelease(%x)
735// stuff_that_cannot_release()
736// objc_retain(%x)
737// stuff_that_cannot_release()
738// objc_autorelease(%x)
739//
740// The second retain and autorelease can be deleted.
741
742// TODO: It should be possible to delete
743// objc_autoreleasePoolPush and objc_autoreleasePoolPop
744// pairs if nothing is actually autoreleased between them. Also, autorelease
745// calls followed by objc_autoreleasePoolPop calls (perhaps in ObjC++ code
746// after inlining) can be turned into plain release calls.
747
// TODO: Critical-edge splitting. If the optimal insertion point is
749// a critical edge, the current algorithm has to fail, because it doesn't
750// know how to split edges. It should be possible to make the optimizer
751// think in terms of edges, rather than blocks, and then split critical
752// edges on demand.
753
// TODO: OptimizeSequences could be generalized to be Interprocedural.
755
756// TODO: Recognize that a bunch of other objc runtime calls have
757// non-escaping arguments and non-releasing arguments, and may be
758// non-autoreleasing.
759
760// TODO: Sink autorelease calls as far as possible. Unfortunately we
761// usually can't sink them past other calls, which would be the main
762// case where it would be useful.
763
Dan Gohmanb3894012011-08-19 00:26:36 +0000764// TODO: The pointer returned from objc_loadWeakRetained is retained.
765
766// TODO: Delete release+retain pairs (rare).
Dan Gohmanceaac7c2011-06-20 23:20:43 +0000767
Chandler Carruthed0881b2012-12-03 16:50:05 +0000768#include "llvm/ADT/SmallPtrSet.h"
769#include "llvm/ADT/Statistic.h"
Chandler Carruth9fb823b2013-01-02 11:36:10 +0000770#include "llvm/IR/LLVMContext.h"
John McCalld935e9c2011-06-15 23:37:01 +0000771#include "llvm/Support/CFG.h"
John McCalld935e9c2011-06-15 23:37:01 +0000772
773STATISTIC(NumNoops, "Number of no-op objc calls eliminated");
774STATISTIC(NumPartialNoops, "Number of partially no-op objc calls eliminated");
775STATISTIC(NumAutoreleases,"Number of autoreleases converted to releases");
776STATISTIC(NumRets, "Number of return value forwarding "
777 "retain+autoreleaes eliminated");
778STATISTIC(NumRRs, "Number of retain+release paths eliminated");
779STATISTIC(NumPeeps, "Number of calls peephole-optimized");
780
namespace {
  /// \brief This is similar to BasicAliasAnalysis, and it uses many of the same
  /// techniques, except it uses special ObjC-specific reasoning about pointer
  /// relationships.
  ///
  /// In this context ``Provenance'' is defined as the history of an object's
  /// ownership. Thus ``Provenance Analysis'' is defined by using the notion of
  /// an ``independent provenance source'' of a pointer to determine whether or
  /// not two pointers have the same provenance source and thus could
  /// potentially be related.
  class ProvenanceAnalysis {
    /// The regular AliasAnalysis used for first-approximation queries.
    /// Must be set via setAA() before related() is called.
    AliasAnalysis *AA;

    /// Cache of previously computed relatedness results, keyed by the pair of
    /// values queried (canonicalized by pointer order in related()).
    typedef std::pair<const Value *, const Value *> ValuePairTy;
    typedef DenseMap<ValuePairTy, bool> CachedResultsTy;
    CachedResultsTy CachedResults;

    /// The uncached implementation behind related().
    bool relatedCheck(const Value *A, const Value *B);
    /// More precise handling when A is a select instruction.
    bool relatedSelect(const SelectInst *A, const Value *B);
    /// More precise handling when A is a PHI node.
    bool relatedPHI(const PHINode *A, const Value *B);

    void operator=(const ProvenanceAnalysis &) LLVM_DELETED_FUNCTION;
    ProvenanceAnalysis(const ProvenanceAnalysis &) LLVM_DELETED_FUNCTION;

  public:
    ProvenanceAnalysis() {}

    void setAA(AliasAnalysis *aa) { AA = aa; }

    AliasAnalysis *getAA() const { return AA; }

    /// Test whether A and B could share provenance; conservatively answers
    /// true when unsure. Results are memoized in CachedResults.
    bool related(const Value *A, const Value *B);

    /// Discard all memoized results, e.g. when moving to a new function.
    void clear() {
      CachedResults.clear();
    }
  };
}
819
820bool ProvenanceAnalysis::relatedSelect(const SelectInst *A, const Value *B) {
821 // If the values are Selects with the same condition, we can do a more precise
822 // check: just check for relations between the values on corresponding arms.
823 if (const SelectInst *SB = dyn_cast<SelectInst>(B))
Dan Gohmandae33492012-04-27 18:56:31 +0000824 if (A->getCondition() == SB->getCondition())
825 return related(A->getTrueValue(), SB->getTrueValue()) ||
826 related(A->getFalseValue(), SB->getFalseValue());
John McCalld935e9c2011-06-15 23:37:01 +0000827
828 // Check both arms of the Select node individually.
Dan Gohmandae33492012-04-27 18:56:31 +0000829 return related(A->getTrueValue(), B) ||
830 related(A->getFalseValue(), B);
John McCalld935e9c2011-06-15 23:37:01 +0000831}
832
833bool ProvenanceAnalysis::relatedPHI(const PHINode *A, const Value *B) {
834 // If the values are PHIs in the same block, we can do a more precise as well
835 // as efficient check: just check for relations between the values on
836 // corresponding edges.
837 if (const PHINode *PNB = dyn_cast<PHINode>(B))
838 if (PNB->getParent() == A->getParent()) {
839 for (unsigned i = 0, e = A->getNumIncomingValues(); i != e; ++i)
840 if (related(A->getIncomingValue(i),
841 PNB->getIncomingValueForBlock(A->getIncomingBlock(i))))
842 return true;
843 return false;
844 }
845
846 // Check each unique source of the PHI node against B.
847 SmallPtrSet<const Value *, 4> UniqueSrc;
848 for (unsigned i = 0, e = A->getNumIncomingValues(); i != e; ++i) {
849 const Value *PV1 = A->getIncomingValue(i);
850 if (UniqueSrc.insert(PV1) && related(PV1, B))
851 return true;
852 }
853
854 // All of the arms checked out.
855 return false;
856}
857
Michael Gottesman97e3df02013-01-14 00:35:14 +0000858/// Test if the value of P, or any value covered by its provenance, is ever
859/// stored within the function (not counting callees).
John McCalld935e9c2011-06-15 23:37:01 +0000860static bool isStoredObjCPointer(const Value *P) {
861 SmallPtrSet<const Value *, 8> Visited;
862 SmallVector<const Value *, 8> Worklist;
863 Worklist.push_back(P);
864 Visited.insert(P);
865 do {
866 P = Worklist.pop_back_val();
867 for (Value::const_use_iterator UI = P->use_begin(), UE = P->use_end();
868 UI != UE; ++UI) {
869 const User *Ur = *UI;
870 if (isa<StoreInst>(Ur)) {
871 if (UI.getOperandNo() == 0)
872 // The pointer is stored.
873 return true;
874 // The pointed is stored through.
875 continue;
876 }
877 if (isa<CallInst>(Ur))
878 // The pointer is passed as an argument, ignore this.
879 continue;
880 if (isa<PtrToIntInst>(P))
881 // Assume the worst.
882 return true;
883 if (Visited.insert(Ur))
884 Worklist.push_back(Ur);
885 }
886 } while (!Worklist.empty());
887
888 // Everything checked out.
889 return false;
890}
891
/// The uncached implementation of related(): determine whether A and B could
/// share provenance, answering true conservatively.
bool ProvenanceAnalysis::relatedCheck(const Value *A, const Value *B) {
  // Skip past provenance pass-throughs.
  A = GetUnderlyingObjCPtr(A);
  B = GetUnderlyingObjCPtr(B);

  // Quick check.
  if (A == B)
    return true;

  // Ask regular AliasAnalysis, for a first approximation.
  switch (AA->alias(A, B)) {
  case AliasAnalysis::NoAlias:
    return false;
  case AliasAnalysis::MustAlias:
  case AliasAnalysis::PartialAlias:
    return true;
  case AliasAnalysis::MayAlias:
    // Inconclusive; fall through to the ObjC-specific reasoning below.
    break;
  }

  bool AIsIdentified = IsObjCIdentifiedObject(A);
  bool BIsIdentified = IsObjCIdentifiedObject(B);

  // An ObjC-Identified object can't alias a load if it is never locally stored.
  if (AIsIdentified) {
    // Check for an obvious escape.
    if (isa<LoadInst>(B))
      return isStoredObjCPointer(A);
    if (BIsIdentified) {
      // Check for an obvious escape.
      if (isa<LoadInst>(A))
        return isStoredObjCPointer(B);
      // Both pointers are identified and escapes aren't an evident problem.
      return false;
    }
  } else if (BIsIdentified) {
    // Check for an obvious escape.
    if (isa<LoadInst>(A))
      return isStoredObjCPointer(B);
  }

  // Special handling for PHI and Select: compare arm-by-arm or edge-by-edge
  // for a more precise answer than treating the node as opaque.
  if (const PHINode *PN = dyn_cast<PHINode>(A))
    return relatedPHI(PN, B);
  if (const PHINode *PN = dyn_cast<PHINode>(B))
    return relatedPHI(PN, A);
  if (const SelectInst *S = dyn_cast<SelectInst>(A))
    return relatedSelect(S, B);
  if (const SelectInst *S = dyn_cast<SelectInst>(B))
    return relatedSelect(S, A);

  // Conservative.
  return true;
}
946
/// Memoizing wrapper around relatedCheck().
bool ProvenanceAnalysis::related(const Value *A, const Value *B) {
  // Begin by inserting a conservative value into the map. If the insertion
  // fails, we have the answer already. If it succeeds, leave it there until we
  // compute the real answer to guard against recursive queries.
  //
  // Canonicalize the pair by pointer order so (A,B) and (B,A) share a cache
  // entry.
  if (A > B) std::swap(A, B);
  std::pair<CachedResultsTy::iterator, bool> Pair =
    CachedResults.insert(std::make_pair(ValuePairTy(A, B), true));
  if (!Pair.second)
    return Pair.first->second;

  bool Result = relatedCheck(A, B);
  // Deliberately re-look-up rather than writing through Pair.first:
  // relatedCheck can recurse back into related() (via relatedPHI /
  // relatedSelect), and those insertions may have invalidated the iterator.
  CachedResults[ValuePairTy(A, B)] = Result;
  return Result;
}
961
namespace {
  /// \enum Sequence
  ///
  /// \brief A sequence of states that a pointer may go through in which an
  /// objc_retain and objc_release are actually needed.
  ///
  /// Note that the relative numeric order of these enumerators is
  /// significant: MergeSeqs canonicalizes its arguments with an "A > B"
  /// comparison before applying its merge rules.
  enum Sequence {
    S_None,
    S_Retain, ///< objc_retain(x)
    S_CanRelease, ///< foo(x) -- x could possibly see a ref count decrement
    S_Use, ///< any use of x
    S_Stop, ///< like S_Release, but code motion is stopped
    S_Release, ///< objc_release(x)
    S_MovableRelease ///< objc_release(x), !clang.imprecise_release
  };
}
977
/// Merge the sequence states A and B, reached along different paths, into the
/// single state that conservatively summarizes both. TopDown selects between
/// the top-down and bottom-up merge rules. Returns S_None when the states are
/// incompatible.
static Sequence MergeSeqs(Sequence A, Sequence B, bool TopDown) {
  // The easy cases.
  if (A == B)
    return A;
  if (A == S_None || B == S_None)
    return S_None;

  // Canonicalize so A <= B in enum order, halving the case analysis below.
  if (A > B) std::swap(A, B);
  if (TopDown) {
    // Choose the side which is further along in the sequence.
    if ((A == S_Retain || A == S_CanRelease) &&
        (B == S_CanRelease || B == S_Use))
      return B;
  } else {
    // Choose the side which is further along in the sequence.
    if ((A == S_Use || A == S_CanRelease) &&
        (B == S_Use || B == S_Release || B == S_Stop || B == S_MovableRelease))
      return A;
    // If both sides are releases, choose the more conservative one.
    if (A == S_Stop && (B == S_Release || B == S_MovableRelease))
      return A;
    if (A == S_Release && B == S_MovableRelease)
      return A;
  }

  // No compatible merge exists; reset to S_None.
  return S_None;
}
1005
namespace {
  /// \brief Unidirectional information about either a
  /// retain-decrement-use-release sequence or release-use-decrement-retain
  /// reverse sequence.
  struct RRInfo {
    /// After an objc_retain, the reference count of the referenced
    /// object is known to be positive. Similarly, before an objc_release, the
    /// reference count of the referenced object is known to be positive. If
    /// there are retain-release pairs in code regions where the retain count
    /// is known to be positive, they can be eliminated, regardless of any side
    /// effects between them.
    ///
    /// Also, a retain+release pair nested within another retain+release
    /// pair all on the known same pointer value can be eliminated, regardless
    /// of any intervening side effects.
    ///
    /// KnownSafe is true when either of these conditions is satisfied.
    bool KnownSafe;

    /// True if the Calls are objc_retainBlock calls (as opposed to objc_retain
    /// calls).
    bool IsRetainBlock;

    /// True if the objc_release calls are all marked with the "tail" keyword.
    bool IsTailCallRelease;

    /// If the Calls are objc_release calls and they all have a
    /// clang.imprecise_release tag, this is the metadata tag.
    MDNode *ReleaseMetadata;

    /// For a top-down sequence, the set of objc_retains or
    /// objc_retainBlocks. For bottom-up, the set of objc_releases.
    SmallPtrSet<Instruction *, 2> Calls;

    /// The set of optimal insert positions for moving calls in the opposite
    /// sequence.
    SmallPtrSet<Instruction *, 2> ReverseInsertPts;

    RRInfo() :
      KnownSafe(false), IsRetainBlock(false),
      IsTailCallRelease(false),
      ReleaseMetadata(0) {}

    /// Reset every field to its default-constructed state.
    void clear();
  };
}
1052
1053void RRInfo::clear() {
Dan Gohmanb3894012011-08-19 00:26:36 +00001054 KnownSafe = false;
John McCalld935e9c2011-06-15 23:37:01 +00001055 IsRetainBlock = false;
1056 IsTailCallRelease = false;
1057 ReleaseMetadata = 0;
1058 Calls.clear();
1059 ReverseInsertPts.clear();
1060}
1061
namespace {
  /// \brief This class summarizes several per-pointer runtime properties which
  /// are propagated through the flow graph.
  class PtrState {
    /// True if the reference count is known to be incremented.
    bool KnownPositiveRefCount;

    /// True if we've seen an opportunity for partial RR elimination, such as
    /// pushing calls into a CFG triangle or into one side of a CFG diamond.
    bool Partial;

    /// The current position in the sequence.
    Sequence Seq : 8;

  public:
    /// Unidirectional information about the current sequence.
    ///
    /// TODO: Encapsulate this better.
    RRInfo RRI;

    PtrState() : KnownPositiveRefCount(false), Partial(false),
                 Seq(S_None) {}

    /// Record that the reference count is known to be positive here.
    void SetKnownPositiveRefCount() {
      KnownPositiveRefCount = true;
    }

    /// Forget any knowledge about the reference count.
    void ClearRefCount() {
      KnownPositiveRefCount = false;
    }

    bool IsKnownIncremented() const {
      return KnownPositiveRefCount;
    }

    void SetSeq(Sequence NewSeq) {
      Seq = NewSeq;
    }

    Sequence GetSeq() const {
      return Seq;
    }

    /// Abandon the current sequence entirely (equivalent to resetting to
    /// S_None).
    void ClearSequenceProgress() {
      ResetSequenceProgress(S_None);
    }

    /// Start over at NewSeq, discarding the Partial flag and all accumulated
    /// RRInfo.
    void ResetSequenceProgress(Sequence NewSeq) {
      Seq = NewSeq;
      Partial = false;
      RRI.clear();
    }

    /// Conservatively merge Other's state into this one at a CFG merge point;
    /// defined out of line below.
    void Merge(const PtrState &Other, bool TopDown);
  };
}
1118
/// Conservatively merge Other's per-pointer state into this state, where the
/// two states were computed along different CFG paths. TopDown selects the
/// merge rules for the top-down vs. bottom-up traversal.
void
PtrState::Merge(const PtrState &Other, bool TopDown) {
  Seq = MergeSeqs(Seq, Other.Seq, TopDown);
  // Positive ref count is only known if it was known along both paths.
  KnownPositiveRefCount = KnownPositiveRefCount && Other.KnownPositiveRefCount;

  // We can't merge a plain objc_retain with an objc_retainBlock.
  if (RRI.IsRetainBlock != Other.RRI.IsRetainBlock)
    Seq = S_None;

  // If we're not in a sequence (anymore), drop all associated state.
  if (Seq == S_None) {
    Partial = false;
    RRI.clear();
  } else if (Partial || Other.Partial) {
    // If we're doing a merge on a path that's previously seen a partial
    // merge, conservatively drop the sequence, to avoid doing partial
    // RR elimination. If the branch predicates for the two merges differ,
    // mixing them is unsafe.
    ClearSequenceProgress();
  } else {
    // Conservatively merge the ReleaseMetadata information.
    if (RRI.ReleaseMetadata != Other.RRI.ReleaseMetadata)
      RRI.ReleaseMetadata = 0;

    // Both of these are conjunctions: the merged state has the property only
    // if both incoming states do.
    RRI.KnownSafe = RRI.KnownSafe && Other.RRI.KnownSafe;
    RRI.IsTailCallRelease = RRI.IsTailCallRelease &&
                            Other.RRI.IsTailCallRelease;
    RRI.Calls.insert(Other.RRI.Calls.begin(), Other.RRI.Calls.end());

    // Merge the insert point sets. If there are any differences,
    // that makes this a partial merge.
    Partial = RRI.ReverseInsertPts.size() != Other.RRI.ReverseInsertPts.size();
    for (SmallPtrSet<Instruction *, 2>::const_iterator
         I = Other.RRI.ReverseInsertPts.begin(),
         E = Other.RRI.ReverseInsertPts.end(); I != E; ++I)
      Partial |= RRI.ReverseInsertPts.insert(*I);
  }
}
1157
namespace {
  /// \brief Per-BasicBlock state.
  ///
  /// Holds the per-pointer PtrState maps maintained by the top-down and
  /// bottom-up dataflow traversals, the path counts used to bound code
  /// motion, and the pass's specialized view of the block's CFG edges.
  class BBState {
    /// The number of unique control paths from the entry which can reach this
    /// block.
    unsigned TopDownPathCount;

    /// The number of unique control paths to exits from this block.
    unsigned BottomUpPathCount;

    /// A type for PerPtrTopDown and PerPtrBottomUp.
    typedef MapVector<const Value *, PtrState> MapTy;

    /// The top-down traversal uses this to record information known about a
    /// pointer at the bottom of each block.
    MapTy PerPtrTopDown;

    /// The bottom-up traversal uses this to record information known about a
    /// pointer at the top of each block.
    MapTy PerPtrBottomUp;

    /// Effective predecessors of the current block ignoring ignorable edges and
    /// ignored backedges.
    SmallVector<BasicBlock *, 2> Preds;
    /// Effective successors of the current block ignoring ignorable edges and
    /// ignored backedges.
    SmallVector<BasicBlock *, 2> Succs;

  public:
    BBState() : TopDownPathCount(0), BottomUpPathCount(0) {}

    typedef MapTy::iterator ptr_iterator;
    typedef MapTy::const_iterator ptr_const_iterator;

    ptr_iterator top_down_ptr_begin() { return PerPtrTopDown.begin(); }
    ptr_iterator top_down_ptr_end() { return PerPtrTopDown.end(); }
    ptr_const_iterator top_down_ptr_begin() const {
      return PerPtrTopDown.begin();
    }
    ptr_const_iterator top_down_ptr_end() const {
      return PerPtrTopDown.end();
    }

    ptr_iterator bottom_up_ptr_begin() { return PerPtrBottomUp.begin(); }
    ptr_iterator bottom_up_ptr_end() { return PerPtrBottomUp.end(); }
    ptr_const_iterator bottom_up_ptr_begin() const {
      return PerPtrBottomUp.begin();
    }
    ptr_const_iterator bottom_up_ptr_end() const {
      return PerPtrBottomUp.end();
    }

    /// Mark this block as being an entry block, which has one path from the
    /// entry by definition.
    void SetAsEntry() { TopDownPathCount = 1; }

    /// Mark this block as being an exit block, which has one path to an exit by
    /// definition.
    void SetAsExit() { BottomUpPathCount = 1; }

    /// Look up Arg's top-down state, default-constructing an entry if absent.
    PtrState &getPtrTopDownState(const Value *Arg) {
      return PerPtrTopDown[Arg];
    }

    /// Look up Arg's bottom-up state, default-constructing an entry if absent.
    PtrState &getPtrBottomUpState(const Value *Arg) {
      return PerPtrBottomUp[Arg];
    }

    void clearBottomUpPointers() {
      PerPtrBottomUp.clear();
    }

    void clearTopDownPointers() {
      PerPtrTopDown.clear();
    }

    void InitFromPred(const BBState &Other);
    void InitFromSucc(const BBState &Other);
    void MergePred(const BBState &Other);
    void MergeSucc(const BBState &Other);

    /// Return the number of possible unique paths from an entry to an exit
    /// which pass through this block. This is only valid after both the
    /// top-down and bottom-up traversals are complete.
    unsigned GetAllPathCount() const {
      assert(TopDownPathCount != 0);
      assert(BottomUpPathCount != 0);
      return TopDownPathCount * BottomUpPathCount;
    }

    // Specialized CFG utilities.
    typedef SmallVectorImpl<BasicBlock *>::const_iterator edge_iterator;
    edge_iterator pred_begin() { return Preds.begin(); }
    edge_iterator pred_end() { return Preds.end(); }
    edge_iterator succ_begin() { return Succs.begin(); }
    edge_iterator succ_end() { return Succs.end(); }

    void addSucc(BasicBlock *Succ) { Succs.push_back(Succ); }
    void addPred(BasicBlock *Pred) { Preds.push_back(Pred); }

    bool isExit() const { return Succs.empty(); }
  };
}
1261
1262void BBState::InitFromPred(const BBState &Other) {
1263 PerPtrTopDown = Other.PerPtrTopDown;
1264 TopDownPathCount = Other.TopDownPathCount;
1265}
1266
1267void BBState::InitFromSucc(const BBState &Other) {
1268 PerPtrBottomUp = Other.PerPtrBottomUp;
1269 BottomUpPathCount = Other.BottomUpPathCount;
1270}
1271
Michael Gottesman97e3df02013-01-14 00:35:14 +00001272/// The top-down traversal uses this to merge information about predecessors to
1273/// form the initial state for a new block.
John McCalld935e9c2011-06-15 23:37:01 +00001274void BBState::MergePred(const BBState &Other) {
1275 // Other.TopDownPathCount can be 0, in which case it is either dead or a
1276 // loop backedge. Loop backedges are special.
1277 TopDownPathCount += Other.TopDownPathCount;
1278
Michael Gottesman4385edf2013-01-14 01:47:53 +00001279 // Check for overflow. If we have overflow, fall back to conservative
1280 // behavior.
Dan Gohman7c84dad2012-09-12 20:45:17 +00001281 if (TopDownPathCount < Other.TopDownPathCount) {
1282 clearTopDownPointers();
1283 return;
1284 }
1285
John McCalld935e9c2011-06-15 23:37:01 +00001286 // For each entry in the other set, if our set has an entry with the same key,
1287 // merge the entries. Otherwise, copy the entry and merge it with an empty
1288 // entry.
1289 for (ptr_const_iterator MI = Other.top_down_ptr_begin(),
1290 ME = Other.top_down_ptr_end(); MI != ME; ++MI) {
1291 std::pair<ptr_iterator, bool> Pair = PerPtrTopDown.insert(*MI);
1292 Pair.first->second.Merge(Pair.second ? PtrState() : MI->second,
1293 /*TopDown=*/true);
1294 }
1295
Dan Gohman7e315fc32011-08-11 21:06:32 +00001296 // For each entry in our set, if the other set doesn't have an entry with the
John McCalld935e9c2011-06-15 23:37:01 +00001297 // same key, force it to merge with an empty entry.
1298 for (ptr_iterator MI = top_down_ptr_begin(),
1299 ME = top_down_ptr_end(); MI != ME; ++MI)
1300 if (Other.PerPtrTopDown.find(MI->first) == Other.PerPtrTopDown.end())
1301 MI->second.Merge(PtrState(), /*TopDown=*/true);
1302}
1303
Michael Gottesman97e3df02013-01-14 00:35:14 +00001304/// The bottom-up traversal uses this to merge information about successors to
1305/// form the initial state for a new block.
John McCalld935e9c2011-06-15 23:37:01 +00001306void BBState::MergeSucc(const BBState &Other) {
1307 // Other.BottomUpPathCount can be 0, in which case it is either dead or a
1308 // loop backedge. Loop backedges are special.
1309 BottomUpPathCount += Other.BottomUpPathCount;
1310
Michael Gottesman4385edf2013-01-14 01:47:53 +00001311 // Check for overflow. If we have overflow, fall back to conservative
1312 // behavior.
Dan Gohman7c84dad2012-09-12 20:45:17 +00001313 if (BottomUpPathCount < Other.BottomUpPathCount) {
1314 clearBottomUpPointers();
1315 return;
1316 }
1317
John McCalld935e9c2011-06-15 23:37:01 +00001318 // For each entry in the other set, if our set has an entry with the
1319 // same key, merge the entries. Otherwise, copy the entry and merge
1320 // it with an empty entry.
1321 for (ptr_const_iterator MI = Other.bottom_up_ptr_begin(),
1322 ME = Other.bottom_up_ptr_end(); MI != ME; ++MI) {
1323 std::pair<ptr_iterator, bool> Pair = PerPtrBottomUp.insert(*MI);
1324 Pair.first->second.Merge(Pair.second ? PtrState() : MI->second,
1325 /*TopDown=*/false);
1326 }
1327
Dan Gohman7e315fc32011-08-11 21:06:32 +00001328 // For each entry in our set, if the other set doesn't have an entry
John McCalld935e9c2011-06-15 23:37:01 +00001329 // with the same key, force it to merge with an empty entry.
1330 for (ptr_iterator MI = bottom_up_ptr_begin(),
1331 ME = bottom_up_ptr_end(); MI != ME; ++MI)
1332 if (Other.PerPtrBottomUp.find(MI->first) == Other.PerPtrBottomUp.end())
1333 MI->second.Merge(PtrState(), /*TopDown=*/false);
1334}
1335
1336namespace {
Michael Gottesman97e3df02013-01-14 00:35:14 +00001337 /// \brief The main ARC optimization pass.
John McCalld935e9c2011-06-15 23:37:01 +00001338 class ObjCARCOpt : public FunctionPass {
1339 bool Changed;
1340 ProvenanceAnalysis PA;
1341
Michael Gottesman97e3df02013-01-14 00:35:14 +00001342 /// A flag indicating whether this optimization pass should run.
Dan Gohmanceaac7c2011-06-20 23:20:43 +00001343 bool Run;
1344
Michael Gottesman97e3df02013-01-14 00:35:14 +00001345 /// Declarations for ObjC runtime functions, for use in creating calls to
1346 /// them. These are initialized lazily to avoid cluttering up the Module
1347 /// with unused declarations.
John McCalld935e9c2011-06-15 23:37:01 +00001348
Michael Gottesman97e3df02013-01-14 00:35:14 +00001349 /// Declaration for ObjC runtime function
1350 /// objc_retainAutoreleasedReturnValue.
1351 Constant *RetainRVCallee;
1352 /// Declaration for ObjC runtime function objc_autoreleaseReturnValue.
1353 Constant *AutoreleaseRVCallee;
1354 /// Declaration for ObjC runtime function objc_release.
1355 Constant *ReleaseCallee;
1356 /// Declaration for ObjC runtime function objc_retain.
1357 Constant *RetainCallee;
1358 /// Declaration for ObjC runtime function objc_retainBlock.
1359 Constant *RetainBlockCallee;
1360 /// Declaration for ObjC runtime function objc_autorelease.
1361 Constant *AutoreleaseCallee;
1362
1363 /// Flags which determine whether each of the interesting runtine functions
1364 /// is in fact used in the current function.
John McCalld935e9c2011-06-15 23:37:01 +00001365 unsigned UsedInThisFunction;
1366
Michael Gottesman97e3df02013-01-14 00:35:14 +00001367 /// The Metadata Kind for clang.imprecise_release metadata.
John McCalld935e9c2011-06-15 23:37:01 +00001368 unsigned ImpreciseReleaseMDKind;
1369
Michael Gottesman97e3df02013-01-14 00:35:14 +00001370 /// The Metadata Kind for clang.arc.copy_on_escape metadata.
Dan Gohmana7107f92011-10-17 22:53:25 +00001371 unsigned CopyOnEscapeMDKind;
1372
Michael Gottesman97e3df02013-01-14 00:35:14 +00001373 /// The Metadata Kind for clang.arc.no_objc_arc_exceptions metadata.
Dan Gohman0155f302012-02-17 18:59:53 +00001374 unsigned NoObjCARCExceptionsMDKind;
1375
John McCalld935e9c2011-06-15 23:37:01 +00001376 Constant *getRetainRVCallee(Module *M);
1377 Constant *getAutoreleaseRVCallee(Module *M);
1378 Constant *getReleaseCallee(Module *M);
1379 Constant *getRetainCallee(Module *M);
Dan Gohman6320f522011-07-22 22:29:21 +00001380 Constant *getRetainBlockCallee(Module *M);
John McCalld935e9c2011-06-15 23:37:01 +00001381 Constant *getAutoreleaseCallee(Module *M);
1382
Dan Gohman728db492012-01-13 00:39:07 +00001383 bool IsRetainBlockOptimizable(const Instruction *Inst);
1384
John McCalld935e9c2011-06-15 23:37:01 +00001385 void OptimizeRetainCall(Function &F, Instruction *Retain);
1386 bool OptimizeRetainRVCall(Function &F, Instruction *RetainRV);
Michael Gottesman556ff612013-01-12 01:25:19 +00001387 void OptimizeAutoreleaseRVCall(Function &F, Instruction *AutoreleaseRV,
1388 InstructionClass &Class);
John McCalld935e9c2011-06-15 23:37:01 +00001389 void OptimizeIndividualCalls(Function &F);
1390
1391 void CheckForCFGHazards(const BasicBlock *BB,
1392 DenseMap<const BasicBlock *, BBState> &BBStates,
1393 BBState &MyStates) const;
Dan Gohman817a7c62012-03-22 18:24:56 +00001394 bool VisitInstructionBottomUp(Instruction *Inst,
Dan Gohman5c70fad2012-03-23 17:47:54 +00001395 BasicBlock *BB,
Dan Gohman817a7c62012-03-22 18:24:56 +00001396 MapVector<Value *, RRInfo> &Retains,
1397 BBState &MyStates);
John McCalld935e9c2011-06-15 23:37:01 +00001398 bool VisitBottomUp(BasicBlock *BB,
1399 DenseMap<const BasicBlock *, BBState> &BBStates,
1400 MapVector<Value *, RRInfo> &Retains);
Dan Gohman817a7c62012-03-22 18:24:56 +00001401 bool VisitInstructionTopDown(Instruction *Inst,
1402 DenseMap<Value *, RRInfo> &Releases,
1403 BBState &MyStates);
John McCalld935e9c2011-06-15 23:37:01 +00001404 bool VisitTopDown(BasicBlock *BB,
1405 DenseMap<const BasicBlock *, BBState> &BBStates,
1406 DenseMap<Value *, RRInfo> &Releases);
1407 bool Visit(Function &F,
1408 DenseMap<const BasicBlock *, BBState> &BBStates,
1409 MapVector<Value *, RRInfo> &Retains,
1410 DenseMap<Value *, RRInfo> &Releases);
1411
1412 void MoveCalls(Value *Arg, RRInfo &RetainsToMove, RRInfo &ReleasesToMove,
1413 MapVector<Value *, RRInfo> &Retains,
1414 DenseMap<Value *, RRInfo> &Releases,
Dan Gohman6320f522011-07-22 22:29:21 +00001415 SmallVectorImpl<Instruction *> &DeadInsts,
1416 Module *M);
John McCalld935e9c2011-06-15 23:37:01 +00001417
Michael Gottesman9de6f962013-01-22 21:49:00 +00001418 bool ConnectTDBUTraversals(DenseMap<const BasicBlock *, BBState> &BBStates,
1419 MapVector<Value *, RRInfo> &Retains,
1420 DenseMap<Value *, RRInfo> &Releases,
1421 Module *M,
1422 SmallVector<Instruction *, 4> &NewRetains,
1423 SmallVector<Instruction *, 4> &NewReleases,
1424 SmallVector<Instruction *, 8> &DeadInsts,
1425 RRInfo &RetainsToMove,
1426 RRInfo &ReleasesToMove,
1427 Value *Arg,
1428 bool KnownSafe,
1429 bool &AnyPairsCompletelyEliminated);
1430
John McCalld935e9c2011-06-15 23:37:01 +00001431 bool PerformCodePlacement(DenseMap<const BasicBlock *, BBState> &BBStates,
1432 MapVector<Value *, RRInfo> &Retains,
Dan Gohman6320f522011-07-22 22:29:21 +00001433 DenseMap<Value *, RRInfo> &Releases,
1434 Module *M);
John McCalld935e9c2011-06-15 23:37:01 +00001435
1436 void OptimizeWeakCalls(Function &F);
1437
1438 bool OptimizeSequences(Function &F);
1439
1440 void OptimizeReturns(Function &F);
1441
1442 virtual void getAnalysisUsage(AnalysisUsage &AU) const;
1443 virtual bool doInitialization(Module &M);
1444 virtual bool runOnFunction(Function &F);
1445 virtual void releaseMemory();
1446
1447 public:
1448 static char ID;
1449 ObjCARCOpt() : FunctionPass(ID) {
1450 initializeObjCARCOptPass(*PassRegistry::getPassRegistry());
1451 }
1452 };
1453}
1454
// Pass identification, replacement for typeid.
char ObjCARCOpt::ID = 0;
// Register the pass, declaring a dependency on the ARC-aware alias analysis
// so the pass manager schedules it before this pass runs.
INITIALIZE_PASS_BEGIN(ObjCARCOpt,
                      "objc-arc", "ObjC ARC optimization", false, false)
INITIALIZE_PASS_DEPENDENCY(ObjCARCAliasAnalysis)
INITIALIZE_PASS_END(ObjCARCOpt,
                    "objc-arc", "ObjC ARC optimization", false, false)
1461
1462Pass *llvm::createObjCARCOptPass() {
1463 return new ObjCARCOpt();
1464}
1465
/// Declare the analyses this pass requires and what it preserves.
void ObjCARCOpt::getAnalysisUsage(AnalysisUsage &AU) const {
  // Both the ARC-specific alias analysis and the generic one are queried
  // (via ProvenanceAnalysis and direct calls) during optimization.
  AU.addRequired<ObjCARCAliasAnalysis>();
  AU.addRequired<AliasAnalysis>();
  // ARC optimization doesn't currently split critical edges.
  AU.setPreservesCFG();
}
1472
Dan Gohman728db492012-01-13 00:39:07 +00001473bool ObjCARCOpt::IsRetainBlockOptimizable(const Instruction *Inst) {
1474 // Without the magic metadata tag, we have to assume this might be an
1475 // objc_retainBlock call inserted to convert a block pointer to an id,
1476 // in which case it really is needed.
1477 if (!Inst->getMetadata(CopyOnEscapeMDKind))
1478 return false;
1479
1480 // If the pointer "escapes" (not including being used in a call),
1481 // the copy may be needed.
1482 if (DoesObjCBlockEscape(Inst))
1483 return false;
1484
1485 // Otherwise, it's not needed.
1486 return true;
1487}
1488
John McCalld935e9c2011-06-15 23:37:01 +00001489Constant *ObjCARCOpt::getRetainRVCallee(Module *M) {
1490 if (!RetainRVCallee) {
1491 LLVMContext &C = M->getContext();
Jay Foadb804a2b2011-07-12 14:06:48 +00001492 Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
Dan Gohman41375a32012-05-08 23:39:44 +00001493 Type *Params[] = { I8X };
1494 FunctionType *FTy = FunctionType::get(I8X, Params, /*isVarArg=*/false);
Bill Wendling3d7b0b82012-12-19 07:18:57 +00001495 AttributeSet Attribute =
Bill Wendling09175b32013-01-22 21:15:51 +00001496 AttributeSet().addAttribute(M->getContext(), AttributeSet::FunctionIndex,
1497 Attribute::NoUnwind);
John McCalld935e9c2011-06-15 23:37:01 +00001498 RetainRVCallee =
1499 M->getOrInsertFunction("objc_retainAutoreleasedReturnValue", FTy,
Bill Wendling3d7b0b82012-12-19 07:18:57 +00001500 Attribute);
John McCalld935e9c2011-06-15 23:37:01 +00001501 }
1502 return RetainRVCallee;
1503}
1504
1505Constant *ObjCARCOpt::getAutoreleaseRVCallee(Module *M) {
1506 if (!AutoreleaseRVCallee) {
1507 LLVMContext &C = M->getContext();
Jay Foadb804a2b2011-07-12 14:06:48 +00001508 Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
Dan Gohman41375a32012-05-08 23:39:44 +00001509 Type *Params[] = { I8X };
1510 FunctionType *FTy = FunctionType::get(I8X, Params, /*isVarArg=*/false);
Bill Wendling3d7b0b82012-12-19 07:18:57 +00001511 AttributeSet Attribute =
Bill Wendling09175b32013-01-22 21:15:51 +00001512 AttributeSet().addAttribute(M->getContext(), AttributeSet::FunctionIndex,
1513 Attribute::NoUnwind);
John McCalld935e9c2011-06-15 23:37:01 +00001514 AutoreleaseRVCallee =
1515 M->getOrInsertFunction("objc_autoreleaseReturnValue", FTy,
Bill Wendling3d7b0b82012-12-19 07:18:57 +00001516 Attribute);
John McCalld935e9c2011-06-15 23:37:01 +00001517 }
1518 return AutoreleaseRVCallee;
1519}
1520
1521Constant *ObjCARCOpt::getReleaseCallee(Module *M) {
1522 if (!ReleaseCallee) {
1523 LLVMContext &C = M->getContext();
Dan Gohman41375a32012-05-08 23:39:44 +00001524 Type *Params[] = { PointerType::getUnqual(Type::getInt8Ty(C)) };
Bill Wendling3d7b0b82012-12-19 07:18:57 +00001525 AttributeSet Attribute =
Bill Wendling09175b32013-01-22 21:15:51 +00001526 AttributeSet().addAttribute(M->getContext(), AttributeSet::FunctionIndex,
1527 Attribute::NoUnwind);
John McCalld935e9c2011-06-15 23:37:01 +00001528 ReleaseCallee =
1529 M->getOrInsertFunction(
1530 "objc_release",
1531 FunctionType::get(Type::getVoidTy(C), Params, /*isVarArg=*/false),
Bill Wendling3d7b0b82012-12-19 07:18:57 +00001532 Attribute);
John McCalld935e9c2011-06-15 23:37:01 +00001533 }
1534 return ReleaseCallee;
1535}
1536
1537Constant *ObjCARCOpt::getRetainCallee(Module *M) {
1538 if (!RetainCallee) {
1539 LLVMContext &C = M->getContext();
Dan Gohman41375a32012-05-08 23:39:44 +00001540 Type *Params[] = { PointerType::getUnqual(Type::getInt8Ty(C)) };
Bill Wendling3d7b0b82012-12-19 07:18:57 +00001541 AttributeSet Attribute =
Bill Wendling09175b32013-01-22 21:15:51 +00001542 AttributeSet().addAttribute(M->getContext(), AttributeSet::FunctionIndex,
1543 Attribute::NoUnwind);
John McCalld935e9c2011-06-15 23:37:01 +00001544 RetainCallee =
1545 M->getOrInsertFunction(
1546 "objc_retain",
1547 FunctionType::get(Params[0], Params, /*isVarArg=*/false),
Bill Wendling3d7b0b82012-12-19 07:18:57 +00001548 Attribute);
John McCalld935e9c2011-06-15 23:37:01 +00001549 }
1550 return RetainCallee;
1551}
1552
Dan Gohman6320f522011-07-22 22:29:21 +00001553Constant *ObjCARCOpt::getRetainBlockCallee(Module *M) {
1554 if (!RetainBlockCallee) {
1555 LLVMContext &C = M->getContext();
Dan Gohman41375a32012-05-08 23:39:44 +00001556 Type *Params[] = { PointerType::getUnqual(Type::getInt8Ty(C)) };
Dan Gohmanfca43c22011-09-14 18:33:34 +00001557 // objc_retainBlock is not nounwind because it calls user copy constructors
1558 // which could theoretically throw.
Dan Gohman6320f522011-07-22 22:29:21 +00001559 RetainBlockCallee =
1560 M->getOrInsertFunction(
1561 "objc_retainBlock",
1562 FunctionType::get(Params[0], Params, /*isVarArg=*/false),
Bill Wendlinge94d8432012-12-07 23:16:57 +00001563 AttributeSet());
Dan Gohman6320f522011-07-22 22:29:21 +00001564 }
1565 return RetainBlockCallee;
1566}
1567
John McCalld935e9c2011-06-15 23:37:01 +00001568Constant *ObjCARCOpt::getAutoreleaseCallee(Module *M) {
1569 if (!AutoreleaseCallee) {
1570 LLVMContext &C = M->getContext();
Dan Gohman41375a32012-05-08 23:39:44 +00001571 Type *Params[] = { PointerType::getUnqual(Type::getInt8Ty(C)) };
Bill Wendling3d7b0b82012-12-19 07:18:57 +00001572 AttributeSet Attribute =
Bill Wendling09175b32013-01-22 21:15:51 +00001573 AttributeSet().addAttribute(M->getContext(), AttributeSet::FunctionIndex,
1574 Attribute::NoUnwind);
John McCalld935e9c2011-06-15 23:37:01 +00001575 AutoreleaseCallee =
1576 M->getOrInsertFunction(
1577 "objc_autorelease",
1578 FunctionType::get(Params[0], Params, /*isVarArg=*/false),
Bill Wendling3d7b0b82012-12-19 07:18:57 +00001579 Attribute);
John McCalld935e9c2011-06-15 23:37:01 +00001580 }
1581 return AutoreleaseCallee;
1582}
1583
/// Test whether the given value is possibly a reference-counted pointer,
/// including tests which utilize AliasAnalysis.
///
/// This overload augments the rudimentary (syntactic) overload with
/// AliasAnalysis queries: anything provably in constant memory, or loaded
/// from constant memory, cannot be reference-counted.
static bool IsPotentialRetainableObjPtr(const Value *Op, AliasAnalysis &AA) {
  // First make the rudimentary check. This is cheap, so do it before the
  // AliasAnalysis queries below.
  if (!IsPotentialRetainableObjPtr(Op))
    return false;

  // Objects in constant memory are not reference-counted.
  if (AA.pointsToConstantMemory(Op))
    return false;

  // Pointers in constant memory are not pointing to reference-counted objects.
  if (const LoadInst *LI = dyn_cast<LoadInst>(Op))
    if (AA.pointsToConstantMemory(LI->getPointerOperand()))
      return false;

  // Otherwise assume the worst.
  return true;
}
1603
/// Test whether the given instruction can result in a reference count
/// modification (positive or negative) for the pointer's object.
///
/// \param Inst  The instruction being tested; anything reaching the call-site
///              path below must be a call (asserted).
/// \param Ptr   The ARC pointer whose object's count might change.
/// \param PA    Provenance analysis used to relate \p Ptr to call arguments.
/// \param Class The precomputed instruction class of \p Inst.
static bool
CanAlterRefCount(const Instruction *Inst, const Value *Ptr,
                 ProvenanceAnalysis &PA, InstructionClass Class) {
  switch (Class) {
  case IC_Autorelease:
  case IC_AutoreleaseRV:
  case IC_User:
    // These operations never directly modify a reference count.
    return false;
  default: break;
  }

  ImmutableCallSite CS = static_cast<const Value *>(Inst);
  assert(CS && "Only calls can alter reference counts!");

  // See if AliasAnalysis can help us with the call.
  AliasAnalysis::ModRefBehavior MRB = PA.getAA()->getModRefBehavior(CS);
  if (AliasAnalysis::onlyReadsMemory(MRB))
    return false;
  if (AliasAnalysis::onlyAccessesArgPointees(MRB)) {
    // The call touches only its arguments' pointees, so it can only alter
    // Ptr's count if some argument may refer to Ptr's object.
    for (ImmutableCallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end();
         I != E; ++I) {
      const Value *Op = *I;
      if (IsPotentialRetainableObjPtr(Op, *PA.getAA()) && PA.related(Ptr, Op))
        return true;
    }
    return false;
  }

  // Assume the worst.
  return true;
}
1638
/// Test whether the given instruction can "use" the given pointer's object in a
/// way that requires the reference count to be positive.
///
/// Special cases (compares, calls, stores) are checked before the generic
/// operand scan at the bottom, since they can rule a "use" in or out more
/// precisely.
static bool
CanUse(const Instruction *Inst, const Value *Ptr, ProvenanceAnalysis &PA,
       InstructionClass Class) {
  // IC_Call operations (as opposed to IC_CallOrUser) never "use" objc pointers.
  if (Class == IC_Call)
    return false;

  // Consider various instructions which may have pointer arguments which are
  // not "uses".
  if (const ICmpInst *ICI = dyn_cast<ICmpInst>(Inst)) {
    // Comparing a pointer with null, or any other constant, isn't really a use,
    // because we don't care what the pointer points to, or about the values
    // of any other dynamic reference-counted pointers.
    if (!IsPotentialRetainableObjPtr(ICI->getOperand(1), *PA.getAA()))
      return false;
  } else if (ImmutableCallSite CS = static_cast<const Value *>(Inst)) {
    // For calls, just check the arguments (and not the callee operand).
    for (ImmutableCallSite::arg_iterator OI = CS.arg_begin(),
         OE = CS.arg_end(); OI != OE; ++OI) {
      const Value *Op = *OI;
      if (IsPotentialRetainableObjPtr(Op, *PA.getAA()) && PA.related(Ptr, Op))
        return true;
    }
    return false;
  } else if (const StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
    // Special-case stores, because we don't care about the stored value, just
    // the store address.
    const Value *Op = GetUnderlyingObjCPtr(SI->getPointerOperand());
    // If we can't tell what the underlying object was, assume there is a
    // dependence.
    return IsPotentialRetainableObjPtr(Op, *PA.getAA()) && PA.related(Op, Ptr);
  }

  // Check each operand for a match.
  for (User::const_op_iterator OI = Inst->op_begin(), OE = Inst->op_end();
       OI != OE; ++OI) {
    const Value *Op = *OI;
    if (IsPotentialRetainableObjPtr(Op, *PA.getAA()) && PA.related(Ptr, Op))
      return true;
  }
  return false;
}
1683
Michael Gottesman97e3df02013-01-14 00:35:14 +00001684/// Test whether the given instruction can autorelease any pointer or cause an
1685/// autoreleasepool pop.
John McCalld935e9c2011-06-15 23:37:01 +00001686static bool
1687CanInterruptRV(InstructionClass Class) {
1688 switch (Class) {
1689 case IC_AutoreleasepoolPop:
1690 case IC_CallOrUser:
1691 case IC_Call:
1692 case IC_Autorelease:
1693 case IC_AutoreleaseRV:
1694 case IC_FusedRetainAutorelease:
1695 case IC_FusedRetainAutoreleaseRV:
1696 return true;
1697 default:
1698 return false;
1699 }
1700}
1701
namespace {
  /// \enum DependenceKind
  /// \brief Defines different dependence kinds among various ARC constructs.
  ///
  /// There are several kinds of dependence-like concepts in use here.
  ///
  enum DependenceKind {
    NeedsPositiveRetainCount, ///< Blocked by uses needing a positive count.
    AutoreleasePoolBoundary,  ///< Blocked only by autoreleasepool push/pop.
    CanChangeRetainCount,     ///< Blocked by anything that may alter a count.
    RetainAutoreleaseDep,     ///< Blocks objc_retainAutorelease.
    RetainAutoreleaseRVDep,   ///< Blocks objc_retainAutoreleaseReturnValue.
    RetainRVDep               ///< Blocks objc_retainAutoreleasedReturnValue.
  };
}
1717
/// Test if there can be dependencies on Inst through Arg. This function only
/// tests dependencies relevant for removing pairs of calls.
///
/// \param Flavor Which kind of dependence is being queried.
/// \param Inst   The instruction being tested for a dependence.
/// \param Arg    The ARC pointer the dependence is tracked through.
/// \param PA     Provenance analysis used by the use/alter queries.
/// \returns true if \p Inst blocks the optimization associated with
///          \p Flavor (or if \p Inst is the definition of \p Arg itself).
static bool
Depends(DependenceKind Flavor, Instruction *Inst, const Value *Arg,
        ProvenanceAnalysis &PA) {
  // If we've reached the definition of Arg, stop.
  if (Inst == Arg)
    return true;

  switch (Flavor) {
  case NeedsPositiveRetainCount: {
    InstructionClass Class = GetInstructionClass(Inst);
    switch (Class) {
    case IC_AutoreleasepoolPop:
    case IC_AutoreleasepoolPush:
    case IC_None:
      // Pool operations and non-calls don't require a positive count.
      return false;
    default:
      return CanUse(Inst, Arg, PA, Class);
    }
  }

  case AutoreleasePoolBoundary: {
    InstructionClass Class = GetInstructionClass(Inst);
    switch (Class) {
    case IC_AutoreleasepoolPop:
    case IC_AutoreleasepoolPush:
      // These mark the end and begin of an autorelease pool scope.
      return true;
    default:
      // Nothing else does this.
      return false;
    }
  }

  case CanChangeRetainCount: {
    InstructionClass Class = GetInstructionClass(Inst);
    switch (Class) {
    case IC_AutoreleasepoolPop:
      // Conservatively assume this can decrement any count.
      return true;
    case IC_AutoreleasepoolPush:
    case IC_None:
      return false;
    default:
      return CanAlterRefCount(Inst, Arg, PA, Class);
    }
  }

  case RetainAutoreleaseDep:
    switch (GetBasicInstructionClass(Inst)) {
    case IC_AutoreleasepoolPop:
    case IC_AutoreleasepoolPush:
      // Don't merge an objc_autorelease with an objc_retain inside a different
      // autoreleasepool scope.
      return true;
    case IC_Retain:
    case IC_RetainRV:
      // Check for a retain of the same pointer for merging.
      return GetObjCArg(Inst) == Arg;
    default:
      // Nothing else matters for objc_retainAutorelease formation.
      return false;
    }

  case RetainAutoreleaseRVDep: {
    InstructionClass Class = GetBasicInstructionClass(Inst);
    switch (Class) {
    case IC_Retain:
    case IC_RetainRV:
      // Check for a retain of the same pointer for merging.
      return GetObjCArg(Inst) == Arg;
    default:
      // Anything that can autorelease interrupts
      // retainAutoreleaseReturnValue formation.
      return CanInterruptRV(Class);
    }
  }

  case RetainRVDep:
    return CanInterruptRV(GetBasicInstructionClass(Inst));
  }

  llvm_unreachable("Invalid dependence flavor");
}
1803
/// Walk up the CFG from StartPos (which is in StartBB) and find local and
/// non-local dependencies on Arg.
///
/// Results are reported through \p DependingInstructions using two sentinels:
/// a null entry means the search reached the function entry, and an all-ones
/// pointer means the walk visited blocks that \p StartBB does not
/// post-dominate, so callers must treat the result conservatively.
/// \p Visited collects every block the walk entered.
///
/// TODO: Cache results?
static void
FindDependencies(DependenceKind Flavor,
                 const Value *Arg,
                 BasicBlock *StartBB, Instruction *StartInst,
                 SmallPtrSet<Instruction *, 4> &DependingInstructions,
                 SmallPtrSet<const BasicBlock *, 4> &Visited,
                 ProvenanceAnalysis &PA) {
  BasicBlock::iterator StartPos = StartInst;

  // Worklist of (block, position) pairs; each entry is scanned backwards from
  // its position toward the block's beginning.
  SmallVector<std::pair<BasicBlock *, BasicBlock::iterator>, 4> Worklist;
  Worklist.push_back(std::make_pair(StartBB, StartPos));
  do {
    std::pair<BasicBlock *, BasicBlock::iterator> Pair =
      Worklist.pop_back_val();
    BasicBlock *LocalStartBB = Pair.first;
    BasicBlock::iterator LocalStartPos = Pair.second;
    BasicBlock::iterator StartBBBegin = LocalStartBB->begin();
    for (;;) {
      if (LocalStartPos == StartBBBegin) {
        pred_iterator PI(LocalStartBB), PE(LocalStartBB, false);
        if (PI == PE)
          // If we've reached the function entry, produce a null dependence.
          DependingInstructions.insert(0);
        else
          // Add the predecessors to the worklist.
          do {
            BasicBlock *PredBB = *PI;
            if (Visited.insert(PredBB))
              Worklist.push_back(std::make_pair(PredBB, PredBB->end()));
          } while (++PI != PE);
        break;
      }

      // Step backwards one instruction and test it for a dependence.
      Instruction *Inst = --LocalStartPos;
      if (Depends(Flavor, Inst, Arg, PA)) {
        DependingInstructions.insert(Inst);
        break;
      }
    }
  } while (!Worklist.empty());

  // Determine whether the original StartBB post-dominates all of the blocks we
  // visited. If not, insert a sentinel indicating that most optimizations are
  // not safe.
  for (SmallPtrSet<const BasicBlock *, 4>::const_iterator I = Visited.begin(),
       E = Visited.end(); I != E; ++I) {
    const BasicBlock *BB = *I;
    if (BB == StartBB)
      continue;
    const TerminatorInst *TI = cast<TerminatorInst>(&BB->back());
    for (succ_const_iterator SI(TI), SE(TI, false); SI != SE; ++SI) {
      const BasicBlock *Succ = *SI;
      if (Succ != StartBB && !Visited.count(Succ)) {
        // A visited block has an unvisited successor other than StartBB:
        // StartBB does not post-dominate the walk, so report the sentinel.
        DependingInstructions.insert(reinterpret_cast<Instruction *>(-1));
        return;
      }
    }
  }
}
1867
1868static bool isNullOrUndef(const Value *V) {
1869 return isa<ConstantPointerNull>(V) || isa<UndefValue>(V);
1870}
1871
1872static bool isNoopInstruction(const Instruction *I) {
1873 return isa<BitCastInst>(I) ||
1874 (isa<GetElementPtrInst>(I) &&
1875 cast<GetElementPtrInst>(I)->hasAllZeroIndices());
1876}
1877
/// Turn objc_retain into objc_retainAutoreleasedReturnValue if the operand is a
/// return value.
///
/// The transformation applies only when the retain's operand is produced by a
/// call in the same block and the retain immediately follows that call
/// (ignoring no-op casts/GEPs in between).
void
ObjCARCOpt::OptimizeRetainCall(Function &F, Instruction *Retain) {
  ImmutableCallSite CS(GetObjCArg(Retain));
  const Instruction *Call = CS.getInstruction();
  if (!Call) return;
  if (Call->getParent() != Retain->getParent()) return;

  // Check that the call is next to the retain.
  BasicBlock::const_iterator I = Call;
  ++I;
  while (isNoopInstruction(I)) ++I;
  if (&*I != Retain)
    return;

  // Turn it into an objc_retainAutoreleasedReturnValue.
  Changed = true;
  ++NumPeeps;

  DEBUG(dbgs() << "ObjCARCOpt::OptimizeRetainCall: Transforming "
                  "objc_retain => objc_retainAutoreleasedReturnValue"
                  " since the operand is a return value.\n"
                  "                                Old: "
               << *Retain << "\n");

  cast<CallInst>(Retain)->setCalledFunction(getRetainRVCallee(F.getParent()));

  DEBUG(dbgs() << "                                New: "
               << *Retain << "\n");
}
1909
/// Turn objc_retainAutoreleasedReturnValue into objc_retain if the operand is
/// not a return value. Or, if it can be paired with an
/// objc_autoreleaseReturnValue, delete the pair and return true.
bool
ObjCARCOpt::OptimizeRetainRVCall(Function &F, Instruction *RetainRV) {
  // Check for the argument being from an immediately preceding call or invoke.
  const Value *Arg = GetObjCArg(RetainRV);
  ImmutableCallSite CS(Arg);
  if (const Instruction *Call = CS.getInstruction()) {
    if (Call->getParent() == RetainRV->getParent()) {
      // Same-block call: the retainRV counts as "immediately following" if
      // only no-op casts/GEPs separate it from the call. Leave it alone then.
      BasicBlock::const_iterator I = Call;
      ++I;
      while (isNoopInstruction(I)) ++I;
      if (&*I == RetainRV)
        return false;
    } else if (const InvokeInst *II = dyn_cast<InvokeInst>(Call)) {
      // Invoke: the retainRV counts as "immediately following" if it begins
      // the invoke's normal destination block (modulo no-ops).
      BasicBlock *RetainRVParent = RetainRV->getParent();
      if (II->getNormalDest() == RetainRVParent) {
        BasicBlock::const_iterator I = RetainRVParent->begin();
        while (isNoopInstruction(I)) ++I;
        if (&*I == RetainRV)
          return false;
      }
    }
  }

  // Check for being preceded by an objc_autoreleaseReturnValue on the same
  // pointer. In this case, we can delete the pair.
  BasicBlock::iterator I = RetainRV, Begin = RetainRV->getParent()->begin();
  if (I != Begin) {
    // Scan backwards over no-op instructions to the candidate autoreleaseRV.
    do --I; while (I != Begin && isNoopInstruction(I));
    if (GetBasicInstructionClass(I) == IC_AutoreleaseRV &&
        GetObjCArg(I) == Arg) {
      Changed = true;
      ++NumPeeps;

      DEBUG(dbgs() << "ObjCARCOpt::OptimizeRetainRVCall: Erasing " << *I << "\n"
                   << "                                  Erasing " << *RetainRV
                   << "\n");

      EraseInstruction(I);
      EraseInstruction(RetainRV);
      return true;
    }
  }

  // Turn it to a plain objc_retain.
  Changed = true;
  ++NumPeeps;

  DEBUG(dbgs() << "ObjCARCOpt::OptimizeRetainRVCall: Transforming "
                  "objc_retainAutoreleasedReturnValue => "
                  "objc_retain since the operand is not a return value.\n"
                  "                                  Old: "
               << *RetainRV << "\n");

  cast<CallInst>(RetainRV)->setCalledFunction(getRetainCallee(F.getParent()));

  DEBUG(dbgs() << "                                  New: "
               << *RetainRV << "\n");

  return false;
}
1973
/// Turn objc_autoreleaseReturnValue into objc_autorelease if the result is not
/// used as a return value.
///
/// \p Class is updated to IC_Autorelease when the transformation fires so the
/// caller sees the instruction's new classification.
void
ObjCARCOpt::OptimizeAutoreleaseRVCall(Function &F, Instruction *AutoreleaseRV,
                                      InstructionClass &Class) {
  // Check for a return of the pointer value, looking through bitcasts.
  const Value *Ptr = GetObjCArg(AutoreleaseRV);
  SmallVector<const Value *, 2> Users;
  Users.push_back(Ptr);
  do {
    Ptr = Users.pop_back_val();
    for (Value::const_use_iterator UI = Ptr->use_begin(), UE = Ptr->use_end();
         UI != UE; ++UI) {
      const User *I = *UI;
      // A return or retainRV user means the value is used as a return value;
      // in that case, leave the autoreleaseRV alone.
      if (isa<ReturnInst>(I) || GetBasicInstructionClass(I) == IC_RetainRV)
        return;
      if (isa<BitCastInst>(I))
        Users.push_back(I);
    }
  } while (!Users.empty());

  Changed = true;
  ++NumPeeps;

  DEBUG(dbgs() << "ObjCARCOpt::OptimizeAutoreleaseRVCall: Transforming "
                  "objc_autoreleaseReturnValue => "
                  "objc_autorelease since its operand is not used as a return "
                  "value.\n"
                  "                                       Old: "
               << *AutoreleaseRV << "\n");

  CallInst *AutoreleaseRVCI = cast<CallInst>(AutoreleaseRV);
  AutoreleaseRVCI->
    setCalledFunction(getAutoreleaseCallee(F.getParent()));
  AutoreleaseRVCI->setTailCall(false); // Never tail call objc_autorelease.
  Class = IC_Autorelease;

  DEBUG(dbgs() << "                                       New: "
               << *AutoreleaseRV << "\n");

}
2015
Michael Gottesman97e3df02013-01-14 00:35:14 +00002016/// Visit each call, one at a time, and make simplifications without doing any
2017/// additional analysis.
John McCalld935e9c2011-06-15 23:37:01 +00002018void ObjCARCOpt::OptimizeIndividualCalls(Function &F) {
2019 // Reset all the flags in preparation for recomputing them.
2020 UsedInThisFunction = 0;
2021
2022 // Visit all objc_* calls in F.
2023 for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
2024 Instruction *Inst = &*I++;
Michael Gottesman3f146e22013-01-01 16:05:48 +00002025
John McCalld935e9c2011-06-15 23:37:01 +00002026 InstructionClass Class = GetBasicInstructionClass(Inst);
2027
Michael Gottesmand359e062013-01-18 03:08:39 +00002028 DEBUG(dbgs() << "ObjCARCOpt::OptimizeIndividualCalls: Visiting: Class: "
2029 << Class << "; " << *Inst << "\n");
Michael Gottesman782e3442013-01-17 18:32:34 +00002030
John McCalld935e9c2011-06-15 23:37:01 +00002031 switch (Class) {
2032 default: break;
2033
2034 // Delete no-op casts. These function calls have special semantics, but
2035 // the semantics are entirely implemented via lowering in the front-end,
2036 // so by the time they reach the optimizer, they are just no-op calls
2037 // which return their argument.
2038 //
2039 // There are gray areas here, as the ability to cast reference-counted
2040 // pointers to raw void* and back allows code to break ARC assumptions,
2041 // however these are currently considered to be unimportant.
2042 case IC_NoopCast:
2043 Changed = true;
2044 ++NumNoops;
Michael Gottesmandc042f02013-01-06 21:07:15 +00002045 DEBUG(dbgs() << "ObjCARCOpt::OptimizeIndividualCalls: Erasing no-op cast:"
2046 " " << *Inst << "\n");
John McCalld935e9c2011-06-15 23:37:01 +00002047 EraseInstruction(Inst);
2048 continue;
2049
2050 // If the pointer-to-weak-pointer is null, it's undefined behavior.
2051 case IC_StoreWeak:
2052 case IC_LoadWeak:
2053 case IC_LoadWeakRetained:
2054 case IC_InitWeak:
2055 case IC_DestroyWeak: {
2056 CallInst *CI = cast<CallInst>(Inst);
2057 if (isNullOrUndef(CI->getArgOperand(0))) {
Dan Gohman670f9372012-04-13 18:57:48 +00002058 Changed = true;
Chris Lattner229907c2011-07-18 04:54:35 +00002059 Type *Ty = CI->getArgOperand(0)->getType();
John McCalld935e9c2011-06-15 23:37:01 +00002060 new StoreInst(UndefValue::get(cast<PointerType>(Ty)->getElementType()),
2061 Constant::getNullValue(Ty),
2062 CI);
Michael Gottesman10426b52013-01-07 21:26:07 +00002063 llvm::Value *NewValue = UndefValue::get(CI->getType());
Michael Gottesmanfec61c02013-01-06 21:54:30 +00002064 DEBUG(dbgs() << "ObjCARCOpt::OptimizeIndividualCalls: A null "
2065 "pointer-to-weak-pointer is undefined behavior.\n"
2066 " Old = " << *CI <<
2067 "\n New = " <<
Michael Gottesman10426b52013-01-07 21:26:07 +00002068 *NewValue << "\n");
Michael Gottesmanfec61c02013-01-06 21:54:30 +00002069 CI->replaceAllUsesWith(NewValue);
John McCalld935e9c2011-06-15 23:37:01 +00002070 CI->eraseFromParent();
2071 continue;
2072 }
2073 break;
2074 }
2075 case IC_CopyWeak:
2076 case IC_MoveWeak: {
2077 CallInst *CI = cast<CallInst>(Inst);
2078 if (isNullOrUndef(CI->getArgOperand(0)) ||
2079 isNullOrUndef(CI->getArgOperand(1))) {
Dan Gohman670f9372012-04-13 18:57:48 +00002080 Changed = true;
Chris Lattner229907c2011-07-18 04:54:35 +00002081 Type *Ty = CI->getArgOperand(0)->getType();
John McCalld935e9c2011-06-15 23:37:01 +00002082 new StoreInst(UndefValue::get(cast<PointerType>(Ty)->getElementType()),
2083 Constant::getNullValue(Ty),
2084 CI);
Michael Gottesmanfec61c02013-01-06 21:54:30 +00002085
2086 llvm::Value *NewValue = UndefValue::get(CI->getType());
2087 DEBUG(dbgs() << "ObjCARCOpt::OptimizeIndividualCalls: A null "
2088 "pointer-to-weak-pointer is undefined behavior.\n"
2089 " Old = " << *CI <<
2090 "\n New = " <<
2091 *NewValue << "\n");
Michael Gottesman10426b52013-01-07 21:26:07 +00002092
Michael Gottesmanfec61c02013-01-06 21:54:30 +00002093 CI->replaceAllUsesWith(NewValue);
John McCalld935e9c2011-06-15 23:37:01 +00002094 CI->eraseFromParent();
2095 continue;
2096 }
2097 break;
2098 }
2099 case IC_Retain:
2100 OptimizeRetainCall(F, Inst);
2101 break;
2102 case IC_RetainRV:
2103 if (OptimizeRetainRVCall(F, Inst))
2104 continue;
2105 break;
2106 case IC_AutoreleaseRV:
Michael Gottesman556ff612013-01-12 01:25:19 +00002107 OptimizeAutoreleaseRVCall(F, Inst, Class);
John McCalld935e9c2011-06-15 23:37:01 +00002108 break;
2109 }
2110
2111 // objc_autorelease(x) -> objc_release(x) if x is otherwise unused.
2112 if (IsAutorelease(Class) && Inst->use_empty()) {
2113 CallInst *Call = cast<CallInst>(Inst);
2114 const Value *Arg = Call->getArgOperand(0);
2115 Arg = FindSingleUseIdentifiedObject(Arg);
2116 if (Arg) {
2117 Changed = true;
2118 ++NumAutoreleases;
2119
2120 // Create the declaration lazily.
2121 LLVMContext &C = Inst->getContext();
2122 CallInst *NewCall =
2123 CallInst::Create(getReleaseCallee(F.getParent()),
2124 Call->getArgOperand(0), "", Call);
2125 NewCall->setMetadata(ImpreciseReleaseMDKind,
2126 MDNode::get(C, ArrayRef<Value *>()));
Michael Gottesman10426b52013-01-07 21:26:07 +00002127
Michael Gottesmana6a1dad2013-01-06 22:56:50 +00002128 DEBUG(dbgs() << "ObjCARCOpt::OptimizeIndividualCalls: Replacing "
2129 "objc_autorelease(x) with objc_release(x) since x is "
2130 "otherwise unused.\n"
Michael Gottesman4bf6e752013-01-06 22:56:54 +00002131 " Old: " << *Call <<
Michael Gottesmana6a1dad2013-01-06 22:56:50 +00002132 "\n New: " <<
2133 *NewCall << "\n");
Michael Gottesman10426b52013-01-07 21:26:07 +00002134
John McCalld935e9c2011-06-15 23:37:01 +00002135 EraseInstruction(Call);
2136 Inst = NewCall;
2137 Class = IC_Release;
2138 }
2139 }
2140
2141 // For functions which can never be passed stack arguments, add
2142 // a tail keyword.
2143 if (IsAlwaysTail(Class)) {
2144 Changed = true;
Michael Gottesman2d763312013-01-06 23:39:09 +00002145 DEBUG(dbgs() << "ObjCARCOpt::OptimizeIndividualCalls: Adding tail keyword"
2146 " to function since it can never be passed stack args: " << *Inst <<
2147 "\n");
John McCalld935e9c2011-06-15 23:37:01 +00002148 cast<CallInst>(Inst)->setTailCall();
2149 }
2150
Michael Gottesmanc9656fa2013-01-12 01:25:15 +00002151 // Ensure that functions that can never have a "tail" keyword due to the
2152 // semantics of ARC truly do not do so.
2153 if (IsNeverTail(Class)) {
2154 Changed = true;
Michael Gottesman4385edf2013-01-14 01:47:53 +00002155 DEBUG(dbgs() << "ObjCARCOpt::OptimizeIndividualCalls: Removing tail "
2156 "keyword from function: " << *Inst <<
Michael Gottesmanc9656fa2013-01-12 01:25:15 +00002157 "\n");
2158 cast<CallInst>(Inst)->setTailCall(false);
2159 }
2160
John McCalld935e9c2011-06-15 23:37:01 +00002161 // Set nounwind as needed.
2162 if (IsNoThrow(Class)) {
2163 Changed = true;
Michael Gottesman8800a512013-01-06 23:39:13 +00002164 DEBUG(dbgs() << "ObjCARCOpt::OptimizeIndividualCalls: Found no throw"
2165 " class. Setting nounwind on: " << *Inst << "\n");
John McCalld935e9c2011-06-15 23:37:01 +00002166 cast<CallInst>(Inst)->setDoesNotThrow();
2167 }
2168
2169 if (!IsNoopOnNull(Class)) {
2170 UsedInThisFunction |= 1 << Class;
2171 continue;
2172 }
2173
2174 const Value *Arg = GetObjCArg(Inst);
2175
2176 // ARC calls with null are no-ops. Delete them.
2177 if (isNullOrUndef(Arg)) {
2178 Changed = true;
2179 ++NumNoops;
Michael Gottesman5b970e12013-01-07 00:04:52 +00002180 DEBUG(dbgs() << "ObjCARCOpt::OptimizeIndividualCalls: ARC calls with "
2181 " null are no-ops. Erasing: " << *Inst << "\n");
John McCalld935e9c2011-06-15 23:37:01 +00002182 EraseInstruction(Inst);
2183 continue;
2184 }
2185
2186 // Keep track of which of retain, release, autorelease, and retain_block
2187 // are actually present in this function.
2188 UsedInThisFunction |= 1 << Class;
2189
2190 // If Arg is a PHI, and one or more incoming values to the
2191 // PHI are null, and the call is control-equivalent to the PHI, and there
2192 // are no relevant side effects between the PHI and the call, the call
2193 // could be pushed up to just those paths with non-null incoming values.
2194 // For now, don't bother splitting critical edges for this.
2195 SmallVector<std::pair<Instruction *, const Value *>, 4> Worklist;
2196 Worklist.push_back(std::make_pair(Inst, Arg));
2197 do {
2198 std::pair<Instruction *, const Value *> Pair = Worklist.pop_back_val();
2199 Inst = Pair.first;
2200 Arg = Pair.second;
2201
2202 const PHINode *PN = dyn_cast<PHINode>(Arg);
2203 if (!PN) continue;
2204
2205 // Determine if the PHI has any null operands, or any incoming
2206 // critical edges.
2207 bool HasNull = false;
2208 bool HasCriticalEdges = false;
2209 for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
2210 Value *Incoming =
2211 StripPointerCastsAndObjCCalls(PN->getIncomingValue(i));
2212 if (isNullOrUndef(Incoming))
2213 HasNull = true;
2214 else if (cast<TerminatorInst>(PN->getIncomingBlock(i)->back())
2215 .getNumSuccessors() != 1) {
2216 HasCriticalEdges = true;
2217 break;
2218 }
2219 }
2220 // If we have null operands and no critical edges, optimize.
2221 if (!HasCriticalEdges && HasNull) {
2222 SmallPtrSet<Instruction *, 4> DependingInstructions;
2223 SmallPtrSet<const BasicBlock *, 4> Visited;
2224
2225 // Check that there is nothing that cares about the reference
2226 // count between the call and the phi.
Dan Gohman8478d762012-04-13 00:59:57 +00002227 switch (Class) {
2228 case IC_Retain:
2229 case IC_RetainBlock:
2230 // These can always be moved up.
2231 break;
2232 case IC_Release:
Dan Gohman41375a32012-05-08 23:39:44 +00002233 // These can't be moved across things that care about the retain
2234 // count.
Dan Gohman8478d762012-04-13 00:59:57 +00002235 FindDependencies(NeedsPositiveRetainCount, Arg,
2236 Inst->getParent(), Inst,
2237 DependingInstructions, Visited, PA);
2238 break;
2239 case IC_Autorelease:
2240 // These can't be moved across autorelease pool scope boundaries.
2241 FindDependencies(AutoreleasePoolBoundary, Arg,
2242 Inst->getParent(), Inst,
2243 DependingInstructions, Visited, PA);
2244 break;
2245 case IC_RetainRV:
2246 case IC_AutoreleaseRV:
2247 // Don't move these; the RV optimization depends on the autoreleaseRV
2248 // being tail called, and the retainRV being immediately after a call
2249 // (which might still happen if we get lucky with codegen layout, but
2250 // it's not worth taking the chance).
2251 continue;
2252 default:
2253 llvm_unreachable("Invalid dependence flavor");
2254 }
2255
John McCalld935e9c2011-06-15 23:37:01 +00002256 if (DependingInstructions.size() == 1 &&
2257 *DependingInstructions.begin() == PN) {
2258 Changed = true;
2259 ++NumPartialNoops;
2260 // Clone the call into each predecessor that has a non-null value.
2261 CallInst *CInst = cast<CallInst>(Inst);
Chris Lattner229907c2011-07-18 04:54:35 +00002262 Type *ParamTy = CInst->getArgOperand(0)->getType();
John McCalld935e9c2011-06-15 23:37:01 +00002263 for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
2264 Value *Incoming =
2265 StripPointerCastsAndObjCCalls(PN->getIncomingValue(i));
2266 if (!isNullOrUndef(Incoming)) {
2267 CallInst *Clone = cast<CallInst>(CInst->clone());
2268 Value *Op = PN->getIncomingValue(i);
2269 Instruction *InsertPos = &PN->getIncomingBlock(i)->back();
2270 if (Op->getType() != ParamTy)
2271 Op = new BitCastInst(Op, ParamTy, "", InsertPos);
2272 Clone->setArgOperand(0, Op);
2273 Clone->insertBefore(InsertPos);
Michael Gottesmanc189a392013-01-09 19:23:24 +00002274
2275 DEBUG(dbgs() << "ObjCARCOpt::OptimizeIndividualCalls: Cloning "
2276 << *CInst << "\n"
2277 " And inserting "
2278 "clone at " << *InsertPos << "\n");
John McCalld935e9c2011-06-15 23:37:01 +00002279 Worklist.push_back(std::make_pair(Clone, Incoming));
2280 }
2281 }
2282 // Erase the original call.
Michael Gottesmanc189a392013-01-09 19:23:24 +00002283 DEBUG(dbgs() << "Erasing: " << *CInst << "\n");
John McCalld935e9c2011-06-15 23:37:01 +00002284 EraseInstruction(CInst);
2285 continue;
2286 }
2287 }
2288 } while (!Worklist.empty());
2289 }
Michael Gottesmanb24bdef2013-01-12 02:57:16 +00002290 DEBUG(dbgs() << "ObjCARCOpt::OptimizeIndividualCalls: Finished List.\n");
John McCalld935e9c2011-06-15 23:37:01 +00002291}
2292
/// Check for critical edges, loop boundaries, irreducible control flow, or
/// other CFG structures where moving code across the edge would result in it
/// being executed more.
///
/// For each pointer in this block's top-down state that is mid-sequence
/// (S_Use or S_CanRelease), this compares against the bottom-up state
/// recorded in BBStates for the start of each successor and forgets the
/// sequence whenever the two sides disagree in a way that would make code
/// motion across the edge unsafe.
void
ObjCARCOpt::CheckForCFGHazards(const BasicBlock *BB,
                               DenseMap<const BasicBlock *, BBState> &BBStates,
                               BBState &MyStates) const {
  // If any top-down local-use or possible-dec has a succ which is earlier in
  // the sequence, forget it.
  for (BBState::ptr_iterator I = MyStates.top_down_ptr_begin(),
       E = MyStates.top_down_ptr_end(); I != E; ++I)
    switch (I->second.GetSeq()) {
    default: break;
    case S_Use: {
      const Value *Arg = I->first;
      const TerminatorInst *TI = cast<TerminatorInst>(&BB->back());
      bool SomeSuccHasSame = false;
      bool AllSuccsHaveSame = true;
      PtrState &S = I->second;
      succ_const_iterator SI(TI), SE(TI, false);

      // Examine the bottom-up state of Arg at the start of each successor.
      for (; SI != SE; ++SI) {
        Sequence SuccSSeq = S_None;
        bool SuccSRRIKnownSafe = false;
        // If VisitBottomUp has pointer information for this successor, take
        // what we know about it.
        DenseMap<const BasicBlock *, BBState>::iterator BBI =
          BBStates.find(*SI);
        assert(BBI != BBStates.end());
        const PtrState &SuccS = BBI->second.getPtrBottomUpState(Arg);
        SuccSSeq = SuccS.GetSeq();
        SuccSRRIKnownSafe = SuccS.RRI.KnownSafe;
        switch (SuccSSeq) {
        case S_None:
        case S_CanRelease: {
          // The successor is earlier in the sequence. Unless both sides are
          // known safe, abandon tracking for this pointer entirely.
          if (!S.RRI.KnownSafe && !SuccSRRIKnownSafe) {
            S.ClearSequenceProgress();
            break;
          }
          continue;
        }
        case S_Use:
          // This successor's state matches ours.
          SomeSuccHasSame = true;
          break;
        case S_Stop:
        case S_Release:
        case S_MovableRelease:
          // The successor is further along; only acceptable on every edge
          // unless the pairing is known safe on both sides.
          if (!S.RRI.KnownSafe && !SuccSRRIKnownSafe)
            AllSuccsHaveSame = false;
          break;
        case S_Retain:
          llvm_unreachable("bottom-up pointer in retain state!");
        }
      }
      // If the state at the other end of any of the successor edges
      // matches the current state, require all edges to match. This
      // guards against loops in the middle of a sequence.
      if (SomeSuccHasSame && !AllSuccsHaveSame)
        S.ClearSequenceProgress();
      break;
    }
    case S_CanRelease: {
      // Same structure as the S_Use case above, but here a successor in
      // S_Use (in addition to the release states) counts as being further
      // along in the sequence, and only S_None forces an abandon.
      const Value *Arg = I->first;
      const TerminatorInst *TI = cast<TerminatorInst>(&BB->back());
      bool SomeSuccHasSame = false;
      bool AllSuccsHaveSame = true;
      PtrState &S = I->second;
      succ_const_iterator SI(TI), SE(TI, false);

      for (; SI != SE; ++SI) {
        Sequence SuccSSeq = S_None;
        bool SuccSRRIKnownSafe = false;
        // If VisitBottomUp has pointer information for this successor, take
        // what we know about it.
        DenseMap<const BasicBlock *, BBState>::iterator BBI =
          BBStates.find(*SI);
        assert(BBI != BBStates.end());
        const PtrState &SuccS = BBI->second.getPtrBottomUpState(Arg);
        SuccSSeq = SuccS.GetSeq();
        SuccSRRIKnownSafe = SuccS.RRI.KnownSafe;
        switch (SuccSSeq) {
        case S_None: {
          if (!S.RRI.KnownSafe && !SuccSRRIKnownSafe) {
            S.ClearSequenceProgress();
            break;
          }
          continue;
        }
        case S_CanRelease:
          SomeSuccHasSame = true;
          break;
        case S_Stop:
        case S_Release:
        case S_MovableRelease:
        case S_Use:
          if (!S.RRI.KnownSafe && !SuccSRRIKnownSafe)
            AllSuccsHaveSame = false;
          break;
        case S_Retain:
          llvm_unreachable("bottom-up pointer in retain state!");
        }
      }
      // If the state at the other end of any of the successor edges
      // matches the current state, require all edges to match. This
      // guards against loops in the middle of a sequence.
      if (SomeSuccHasSame && !AllSuccsHaveSame)
        S.ClearSequenceProgress();
      break;
    }
    }
}
2404
/// Analyze a single instruction for the bottom-up dataflow walk of \p BB,
/// updating the per-pointer sequence states in \p MyStates and recording
/// completed retain candidates in \p Retains. Returns true when a nested
/// release pair is observed, signaling the caller to iterate again.
bool
ObjCARCOpt::VisitInstructionBottomUp(Instruction *Inst,
                                     BasicBlock *BB,
                                     MapVector<Value *, RRInfo> &Retains,
                                     BBState &MyStates) {
  bool NestingDetected = false;
  InstructionClass Class = GetInstructionClass(Inst);
  // The ObjC pointer operand of Inst when it is an ARC call handled
  // specially below; stays null otherwise.
  const Value *Arg = 0;

  switch (Class) {
  case IC_Release: {
    Arg = GetObjCArg(Inst);

    PtrState &S = MyStates.getPtrBottomUpState(Arg);

    // Check for two releases in a row on the same pointer. If we see one,
    // make a note, and we'll cycle back to revisit it after we've
    // hopefully eliminated the second release, which may allow us to
    // eliminate the first release too.
    // Theoretically we could implement removal of nested retain+release
    // pairs by making PtrState hold a stack of states, but this is
    // simple and avoids adding overhead for the non-nested case.
    if (S.GetSeq() == S_Release || S.GetSeq() == S_MovableRelease) {
      DEBUG(dbgs() << "ObjCARCOpt::VisitInstructionBottomUp: Found nested "
                      "releases (i.e. a release pair)\n");
      NestingDetected = true;
    }

    // Begin a new bottom-up sequence at this release. Releases carrying
    // imprecise-release metadata are movable; plain releases are not.
    MDNode *ReleaseMetadata = Inst->getMetadata(ImpreciseReleaseMDKind);
    S.ResetSequenceProgress(ReleaseMetadata ? S_MovableRelease : S_Release);
    S.RRI.ReleaseMetadata = ReleaseMetadata;
    S.RRI.KnownSafe = S.IsKnownIncremented();
    S.RRI.IsTailCallRelease = cast<CallInst>(Inst)->isTailCall();
    S.RRI.Calls.insert(Inst);

    S.SetKnownPositiveRefCount();
    break;
  }
  case IC_RetainBlock:
    // An objc_retainBlock call with just a use may need to be kept,
    // because it may be copying a block from the stack to the heap.
    if (!IsRetainBlockOptimizable(Inst))
      break;
    // FALLTHROUGH
  case IC_Retain:
  case IC_RetainRV: {
    Arg = GetObjCArg(Inst);

    PtrState &S = MyStates.getPtrBottomUpState(Arg);
    S.SetKnownPositiveRefCount();

    // A retain seen while a release sequence is in progress (bottom-up)
    // completes a candidate retain+release pair.
    switch (S.GetSeq()) {
    case S_Stop:
    case S_Release:
    case S_MovableRelease:
    case S_Use:
      S.RRI.ReverseInsertPts.clear();
      // FALL THROUGH
    case S_CanRelease:
      // Don't do retain+release tracking for IC_RetainRV, because it's
      // better to let it remain as the first instruction after a call.
      if (Class != IC_RetainRV) {
        S.RRI.IsRetainBlock = Class == IC_RetainBlock;
        Retains[Inst] = S.RRI;
      }
      S.ClearSequenceProgress();
      break;
    case S_None:
      break;
    case S_Retain:
      llvm_unreachable("bottom-up pointer in retain state!");
    }
    // Retains never reach the generic effect-tracking loop below.
    return NestingDetected;
  }
  case IC_AutoreleasepoolPop:
    // Conservatively, clear MyStates for all known pointers.
    MyStates.clearBottomUpPointers();
    return NestingDetected;
  case IC_AutoreleasepoolPush:
  case IC_None:
    // These are irrelevant.
    return NestingDetected;
  default:
    break;
  }

  // Consider any other possible effects of this instruction on each
  // pointer being tracked.
  for (BBState::ptr_iterator MI = MyStates.bottom_up_ptr_begin(),
       ME = MyStates.bottom_up_ptr_end(); MI != ME; ++MI) {
    const Value *Ptr = MI->first;
    if (Ptr == Arg)
      continue; // Handled above.
    PtrState &S = MI->second;
    Sequence Seq = S.GetSeq();

    // Check for possible releases.
    if (CanAlterRefCount(Inst, Ptr, PA, Class)) {
      S.ClearRefCount();
      switch (Seq) {
      case S_Use:
        // A potential decrement after a use moves the sequence along.
        S.SetSeq(S_CanRelease);
        continue;
      case S_CanRelease:
      case S_Release:
      case S_MovableRelease:
      case S_Stop:
      case S_None:
        break;
      case S_Retain:
        llvm_unreachable("bottom-up pointer in retain state!");
      }
    }

    // Check for possible direct uses.
    switch (Seq) {
    case S_Release:
    case S_MovableRelease:
      if (CanUse(Inst, Ptr, PA, Class)) {
        assert(S.RRI.ReverseInsertPts.empty());
        // If this is an invoke instruction, we're scanning it as part of
        // one of its successor blocks, since we can't insert code after it
        // in its own block, and we don't want to split critical edges.
        if (isa<InvokeInst>(Inst))
          S.RRI.ReverseInsertPts.insert(BB->getFirstInsertionPt());
        else
          S.RRI.ReverseInsertPts.insert(llvm::next(BasicBlock::iterator(Inst)));
        S.SetSeq(S_Use);
      } else if (Seq == S_Release &&
                 (Class == IC_User || Class == IC_CallOrUser)) {
        // Non-movable releases depend on any possible objc pointer use.
        S.SetSeq(S_Stop);
        assert(S.RRI.ReverseInsertPts.empty());
        // As above; handle invoke specially.
        if (isa<InvokeInst>(Inst))
          S.RRI.ReverseInsertPts.insert(BB->getFirstInsertionPt());
        else
          S.RRI.ReverseInsertPts.insert(llvm::next(BasicBlock::iterator(Inst)));
      }
      break;
    case S_Stop:
      if (CanUse(Inst, Ptr, PA, Class))
        S.SetSeq(S_Use);
      break;
    case S_CanRelease:
    case S_Use:
    case S_None:
      break;
    case S_Retain:
      llvm_unreachable("bottom-up pointer in retain state!");
    }
  }

  return NestingDetected;
}
2560
2561bool
John McCalld935e9c2011-06-15 23:37:01 +00002562ObjCARCOpt::VisitBottomUp(BasicBlock *BB,
2563 DenseMap<const BasicBlock *, BBState> &BBStates,
2564 MapVector<Value *, RRInfo> &Retains) {
2565 bool NestingDetected = false;
2566 BBState &MyStates = BBStates[BB];
2567
2568 // Merge the states from each successor to compute the initial state
2569 // for the current block.
Dan Gohman10c82ce2012-08-27 18:31:36 +00002570 BBState::edge_iterator SI(MyStates.succ_begin()),
2571 SE(MyStates.succ_end());
2572 if (SI != SE) {
Dan Gohmanc24c66f2012-04-24 22:53:18 +00002573 const BasicBlock *Succ = *SI;
2574 DenseMap<const BasicBlock *, BBState>::iterator I = BBStates.find(Succ);
2575 assert(I != BBStates.end());
2576 MyStates.InitFromSucc(I->second);
2577 ++SI;
2578 for (; SI != SE; ++SI) {
2579 Succ = *SI;
2580 I = BBStates.find(Succ);
2581 assert(I != BBStates.end());
2582 MyStates.MergeSucc(I->second);
2583 }
Dan Gohman0155f302012-02-17 18:59:53 +00002584 }
John McCalld935e9c2011-06-15 23:37:01 +00002585
2586 // Visit all the instructions, bottom-up.
2587 for (BasicBlock::iterator I = BB->end(), E = BB->begin(); I != E; --I) {
2588 Instruction *Inst = llvm::prior(I);
Dan Gohman5c70fad2012-03-23 17:47:54 +00002589
2590 // Invoke instructions are visited as part of their successors (below).
2591 if (isa<InvokeInst>(Inst))
2592 continue;
2593
Michael Gottesmanaf2113f2013-01-13 07:00:51 +00002594 DEBUG(dbgs() << "ObjCARCOpt::VisitButtonUp: Visiting " << *Inst << "\n");
2595
Dan Gohman5c70fad2012-03-23 17:47:54 +00002596 NestingDetected |= VisitInstructionBottomUp(Inst, BB, Retains, MyStates);
2597 }
2598
Dan Gohmandae33492012-04-27 18:56:31 +00002599 // If there's a predecessor with an invoke, visit the invoke as if it were
2600 // part of this block, since we can't insert code after an invoke in its own
2601 // block, and we don't want to split critical edges.
Dan Gohmanc24c66f2012-04-24 22:53:18 +00002602 for (BBState::edge_iterator PI(MyStates.pred_begin()),
2603 PE(MyStates.pred_end()); PI != PE; ++PI) {
Dan Gohman5c70fad2012-03-23 17:47:54 +00002604 BasicBlock *Pred = *PI;
Dan Gohmandae33492012-04-27 18:56:31 +00002605 if (InvokeInst *II = dyn_cast<InvokeInst>(&Pred->back()))
2606 NestingDetected |= VisitInstructionBottomUp(II, BB, Retains, MyStates);
Dan Gohman817a7c62012-03-22 18:24:56 +00002607 }
John McCalld935e9c2011-06-15 23:37:01 +00002608
Dan Gohman817a7c62012-03-22 18:24:56 +00002609 return NestingDetected;
2610}
John McCalld935e9c2011-06-15 23:37:01 +00002611
/// Analyze a single instruction for the top-down dataflow walk, updating the
/// per-pointer sequence states in \p MyStates and recording completed release
/// candidates in \p Releases. Returns true when a nested retain pair is
/// observed, signaling the caller to iterate again.
bool
ObjCARCOpt::VisitInstructionTopDown(Instruction *Inst,
                                    DenseMap<Value *, RRInfo> &Releases,
                                    BBState &MyStates) {
  bool NestingDetected = false;
  InstructionClass Class = GetInstructionClass(Inst);
  // The ObjC pointer operand of Inst when it is an ARC call handled
  // specially below; stays null otherwise.
  const Value *Arg = 0;

  switch (Class) {
  case IC_RetainBlock:
    // An objc_retainBlock call with just a use may need to be kept,
    // because it may be copying a block from the stack to the heap.
    if (!IsRetainBlockOptimizable(Inst))
      break;
    // FALLTHROUGH
  case IC_Retain:
  case IC_RetainRV: {
    Arg = GetObjCArg(Inst);

    PtrState &S = MyStates.getPtrTopDownState(Arg);

    // Don't do retain+release tracking for IC_RetainRV, because it's
    // better to let it remain as the first instruction after a call.
    if (Class != IC_RetainRV) {
      // Check for two retains in a row on the same pointer. If we see one,
      // make a note, and we'll cycle back to revisit it after we've
      // hopefully eliminated the second retain, which may allow us to
      // eliminate the first retain too.
      // Theoretically we could implement removal of nested retain+release
      // pairs by making PtrState hold a stack of states, but this is
      // simple and avoids adding overhead for the non-nested case.
      if (S.GetSeq() == S_Retain)
        NestingDetected = true;

      // Begin a new top-down sequence at this retain.
      S.ResetSequenceProgress(S_Retain);
      S.RRI.IsRetainBlock = Class == IC_RetainBlock;
      S.RRI.KnownSafe = S.IsKnownIncremented();
      S.RRI.Calls.insert(Inst);
    }

    S.SetKnownPositiveRefCount();

    // A retain can be a potential use; proceed to the generic checking
    // code below.
    break;
  }
  case IC_Release: {
    Arg = GetObjCArg(Inst);

    PtrState &S = MyStates.getPtrTopDownState(Arg);
    S.ClearRefCount();

    // A release seen while a retain sequence is in progress (top-down)
    // completes a candidate retain+release pair.
    switch (S.GetSeq()) {
    case S_Retain:
    case S_CanRelease:
      S.RRI.ReverseInsertPts.clear();
      // FALL THROUGH
    case S_Use:
      S.RRI.ReleaseMetadata = Inst->getMetadata(ImpreciseReleaseMDKind);
      S.RRI.IsTailCallRelease = cast<CallInst>(Inst)->isTailCall();
      Releases[Inst] = S.RRI;
      S.ClearSequenceProgress();
      break;
    case S_None:
      break;
    case S_Stop:
    case S_Release:
    case S_MovableRelease:
      llvm_unreachable("top-down pointer in release state!");
    }
    break;
  }
  case IC_AutoreleasepoolPop:
    // Conservatively, clear MyStates for all known pointers.
    MyStates.clearTopDownPointers();
    return NestingDetected;
  case IC_AutoreleasepoolPush:
  case IC_None:
    // These are irrelevant.
    return NestingDetected;
  default:
    break;
  }

  // Consider any other possible effects of this instruction on each
  // pointer being tracked.
  for (BBState::ptr_iterator MI = MyStates.top_down_ptr_begin(),
       ME = MyStates.top_down_ptr_end(); MI != ME; ++MI) {
    const Value *Ptr = MI->first;
    if (Ptr == Arg)
      continue; // Handled above.
    PtrState &S = MI->second;
    Sequence Seq = S.GetSeq();

    // Check for possible releases.
    if (CanAlterRefCount(Inst, Ptr, PA, Class)) {
      S.ClearRefCount();
      switch (Seq) {
      case S_Retain:
        S.SetSeq(S_CanRelease);
        assert(S.RRI.ReverseInsertPts.empty());
        S.RRI.ReverseInsertPts.insert(Inst);

        // One call can't cause a transition from S_Retain to S_CanRelease
        // and S_CanRelease to S_Use. If we've made the first transition,
        // we're done.
        continue;
      case S_Use:
      case S_CanRelease:
      case S_None:
        break;
      case S_Stop:
      case S_Release:
      case S_MovableRelease:
        llvm_unreachable("top-down pointer in release state!");
      }
    }

    // Check for possible direct uses.
    switch (Seq) {
    case S_CanRelease:
      if (CanUse(Inst, Ptr, PA, Class))
        S.SetSeq(S_Use);
      break;
    case S_Retain:
    case S_Use:
    case S_None:
      break;
    case S_Stop:
    case S_Release:
    case S_MovableRelease:
      llvm_unreachable("top-down pointer in release state!");
    }
  }

  return NestingDetected;
}
2749
2750bool
2751ObjCARCOpt::VisitTopDown(BasicBlock *BB,
2752 DenseMap<const BasicBlock *, BBState> &BBStates,
2753 DenseMap<Value *, RRInfo> &Releases) {
2754 bool NestingDetected = false;
2755 BBState &MyStates = BBStates[BB];
2756
2757 // Merge the states from each predecessor to compute the initial state
2758 // for the current block.
Dan Gohman10c82ce2012-08-27 18:31:36 +00002759 BBState::edge_iterator PI(MyStates.pred_begin()),
2760 PE(MyStates.pred_end());
2761 if (PI != PE) {
Dan Gohmanc24c66f2012-04-24 22:53:18 +00002762 const BasicBlock *Pred = *PI;
2763 DenseMap<const BasicBlock *, BBState>::iterator I = BBStates.find(Pred);
2764 assert(I != BBStates.end());
2765 MyStates.InitFromPred(I->second);
2766 ++PI;
2767 for (; PI != PE; ++PI) {
2768 Pred = *PI;
2769 I = BBStates.find(Pred);
2770 assert(I != BBStates.end());
2771 MyStates.MergePred(I->second);
2772 }
Dan Gohmanc24c66f2012-04-24 22:53:18 +00002773 }
John McCalld935e9c2011-06-15 23:37:01 +00002774
2775 // Visit all the instructions, top-down.
2776 for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
2777 Instruction *Inst = I;
Michael Gottesmanaf2113f2013-01-13 07:00:51 +00002778
2779 DEBUG(dbgs() << "ObjCARCOpt::VisitTopDown: Visiting " << *Inst << "\n");
2780
Dan Gohman817a7c62012-03-22 18:24:56 +00002781 NestingDetected |= VisitInstructionTopDown(Inst, Releases, MyStates);
John McCalld935e9c2011-06-15 23:37:01 +00002782 }
2783
2784 CheckForCFGHazards(BB, BBStates, MyStates);
2785 return NestingDetected;
2786}
2787
Dan Gohmana53a12c2011-12-12 19:42:25 +00002788static void
2789ComputePostOrders(Function &F,
2790 SmallVectorImpl<BasicBlock *> &PostOrder,
Dan Gohmanc24c66f2012-04-24 22:53:18 +00002791 SmallVectorImpl<BasicBlock *> &ReverseCFGPostOrder,
2792 unsigned NoObjCARCExceptionsMDKind,
2793 DenseMap<const BasicBlock *, BBState> &BBStates) {
Michael Gottesman97e3df02013-01-14 00:35:14 +00002794 /// The visited set, for doing DFS walks.
Dan Gohmana53a12c2011-12-12 19:42:25 +00002795 SmallPtrSet<BasicBlock *, 16> Visited;
2796
2797 // Do DFS, computing the PostOrder.
2798 SmallPtrSet<BasicBlock *, 16> OnStack;
2799 SmallVector<std::pair<BasicBlock *, succ_iterator>, 16> SuccStack;
Dan Gohmanc24c66f2012-04-24 22:53:18 +00002800
2801 // Functions always have exactly one entry block, and we don't have
2802 // any other block that we treat like an entry block.
Dan Gohmana53a12c2011-12-12 19:42:25 +00002803 BasicBlock *EntryBB = &F.getEntryBlock();
Dan Gohman41375a32012-05-08 23:39:44 +00002804 BBState &MyStates = BBStates[EntryBB];
2805 MyStates.SetAsEntry();
2806 TerminatorInst *EntryTI = cast<TerminatorInst>(&EntryBB->back());
2807 SuccStack.push_back(std::make_pair(EntryBB, succ_iterator(EntryTI)));
Dan Gohmana53a12c2011-12-12 19:42:25 +00002808 Visited.insert(EntryBB);
2809 OnStack.insert(EntryBB);
2810 do {
2811 dfs_next_succ:
Dan Gohmanc24c66f2012-04-24 22:53:18 +00002812 BasicBlock *CurrBB = SuccStack.back().first;
2813 TerminatorInst *TI = cast<TerminatorInst>(&CurrBB->back());
2814 succ_iterator SE(TI, false);
Dan Gohman41375a32012-05-08 23:39:44 +00002815
Dan Gohmanc24c66f2012-04-24 22:53:18 +00002816 while (SuccStack.back().second != SE) {
2817 BasicBlock *SuccBB = *SuccStack.back().second++;
2818 if (Visited.insert(SuccBB)) {
Dan Gohman41375a32012-05-08 23:39:44 +00002819 TerminatorInst *TI = cast<TerminatorInst>(&SuccBB->back());
2820 SuccStack.push_back(std::make_pair(SuccBB, succ_iterator(TI)));
Dan Gohmanc24c66f2012-04-24 22:53:18 +00002821 BBStates[CurrBB].addSucc(SuccBB);
Dan Gohman41375a32012-05-08 23:39:44 +00002822 BBState &SuccStates = BBStates[SuccBB];
2823 SuccStates.addPred(CurrBB);
Dan Gohmanc24c66f2012-04-24 22:53:18 +00002824 OnStack.insert(SuccBB);
Dan Gohmana53a12c2011-12-12 19:42:25 +00002825 goto dfs_next_succ;
2826 }
Dan Gohmanc24c66f2012-04-24 22:53:18 +00002827
2828 if (!OnStack.count(SuccBB)) {
2829 BBStates[CurrBB].addSucc(SuccBB);
2830 BBStates[SuccBB].addPred(CurrBB);
2831 }
Dan Gohmana53a12c2011-12-12 19:42:25 +00002832 }
Dan Gohmanc24c66f2012-04-24 22:53:18 +00002833 OnStack.erase(CurrBB);
2834 PostOrder.push_back(CurrBB);
2835 SuccStack.pop_back();
Dan Gohmana53a12c2011-12-12 19:42:25 +00002836 } while (!SuccStack.empty());
2837
2838 Visited.clear();
2839
Dan Gohmana53a12c2011-12-12 19:42:25 +00002840 // Do reverse-CFG DFS, computing the reverse-CFG PostOrder.
Dan Gohmanc24c66f2012-04-24 22:53:18 +00002841 // Functions may have many exits, and there also blocks which we treat
2842 // as exits due to ignored edges.
2843 SmallVector<std::pair<BasicBlock *, BBState::edge_iterator>, 16> PredStack;
2844 for (Function::iterator I = F.begin(), E = F.end(); I != E; ++I) {
2845 BasicBlock *ExitBB = I;
2846 BBState &MyStates = BBStates[ExitBB];
2847 if (!MyStates.isExit())
2848 continue;
2849
Dan Gohmandae33492012-04-27 18:56:31 +00002850 MyStates.SetAsExit();
Dan Gohmanc24c66f2012-04-24 22:53:18 +00002851
2852 PredStack.push_back(std::make_pair(ExitBB, MyStates.pred_begin()));
Dan Gohmana53a12c2011-12-12 19:42:25 +00002853 Visited.insert(ExitBB);
2854 while (!PredStack.empty()) {
2855 reverse_dfs_next_succ:
Dan Gohmanc24c66f2012-04-24 22:53:18 +00002856 BBState::edge_iterator PE = BBStates[PredStack.back().first].pred_end();
2857 while (PredStack.back().second != PE) {
Dan Gohmana53a12c2011-12-12 19:42:25 +00002858 BasicBlock *BB = *PredStack.back().second++;
Dan Gohmana53a12c2011-12-12 19:42:25 +00002859 if (Visited.insert(BB)) {
Dan Gohmanc24c66f2012-04-24 22:53:18 +00002860 PredStack.push_back(std::make_pair(BB, BBStates[BB].pred_begin()));
Dan Gohmana53a12c2011-12-12 19:42:25 +00002861 goto reverse_dfs_next_succ;
2862 }
2863 }
2864 ReverseCFGPostOrder.push_back(PredStack.pop_back_val().first);
2865 }
2866 }
2867}
2868
Michael Gottesman97e3df02013-01-14 00:35:14 +00002869// Visit the function both top-down and bottom-up.
John McCalld935e9c2011-06-15 23:37:01 +00002870bool
2871ObjCARCOpt::Visit(Function &F,
2872 DenseMap<const BasicBlock *, BBState> &BBStates,
2873 MapVector<Value *, RRInfo> &Retains,
2874 DenseMap<Value *, RRInfo> &Releases) {
Dan Gohmana53a12c2011-12-12 19:42:25 +00002875
2876 // Use reverse-postorder traversals, because we magically know that loops
2877 // will be well behaved, i.e. they won't repeatedly call retain on a single
2878 // pointer without doing a release. We can't use the ReversePostOrderTraversal
2879 // class here because we want the reverse-CFG postorder to consider each
2880 // function exit point, and we want to ignore selected cycle edges.
2881 SmallVector<BasicBlock *, 16> PostOrder;
2882 SmallVector<BasicBlock *, 16> ReverseCFGPostOrder;
Dan Gohmanc24c66f2012-04-24 22:53:18 +00002883 ComputePostOrders(F, PostOrder, ReverseCFGPostOrder,
2884 NoObjCARCExceptionsMDKind,
2885 BBStates);
Dan Gohmana53a12c2011-12-12 19:42:25 +00002886
2887 // Use reverse-postorder on the reverse CFG for bottom-up.
John McCalld935e9c2011-06-15 23:37:01 +00002888 bool BottomUpNestingDetected = false;
Dan Gohmanc57b58c2011-08-18 21:27:42 +00002889 for (SmallVectorImpl<BasicBlock *>::const_reverse_iterator I =
Dan Gohmana53a12c2011-12-12 19:42:25 +00002890 ReverseCFGPostOrder.rbegin(), E = ReverseCFGPostOrder.rend();
2891 I != E; ++I)
2892 BottomUpNestingDetected |= VisitBottomUp(*I, BBStates, Retains);
John McCalld935e9c2011-06-15 23:37:01 +00002893
Dan Gohmana53a12c2011-12-12 19:42:25 +00002894 // Use reverse-postorder for top-down.
John McCalld935e9c2011-06-15 23:37:01 +00002895 bool TopDownNestingDetected = false;
Dan Gohmana53a12c2011-12-12 19:42:25 +00002896 for (SmallVectorImpl<BasicBlock *>::const_reverse_iterator I =
2897 PostOrder.rbegin(), E = PostOrder.rend();
2898 I != E; ++I)
2899 TopDownNestingDetected |= VisitTopDown(*I, BBStates, Releases);
John McCalld935e9c2011-06-15 23:37:01 +00002900
2901 return TopDownNestingDetected && BottomUpNestingDetected;
2902}
2903
Michael Gottesman97e3df02013-01-14 00:35:14 +00002904/// Move the calls in RetainsToMove and ReleasesToMove.
John McCalld935e9c2011-06-15 23:37:01 +00002905void ObjCARCOpt::MoveCalls(Value *Arg,
2906 RRInfo &RetainsToMove,
2907 RRInfo &ReleasesToMove,
2908 MapVector<Value *, RRInfo> &Retains,
2909 DenseMap<Value *, RRInfo> &Releases,
Dan Gohman6320f522011-07-22 22:29:21 +00002910 SmallVectorImpl<Instruction *> &DeadInsts,
2911 Module *M) {
Chris Lattner229907c2011-07-18 04:54:35 +00002912 Type *ArgTy = Arg->getType();
Dan Gohman6320f522011-07-22 22:29:21 +00002913 Type *ParamTy = PointerType::getUnqual(Type::getInt8Ty(ArgTy->getContext()));
John McCalld935e9c2011-06-15 23:37:01 +00002914
2915 // Insert the new retain and release calls.
2916 for (SmallPtrSet<Instruction *, 2>::const_iterator
2917 PI = ReleasesToMove.ReverseInsertPts.begin(),
2918 PE = ReleasesToMove.ReverseInsertPts.end(); PI != PE; ++PI) {
2919 Instruction *InsertPt = *PI;
2920 Value *MyArg = ArgTy == ParamTy ? Arg :
2921 new BitCastInst(Arg, ParamTy, "", InsertPt);
2922 CallInst *Call =
2923 CallInst::Create(RetainsToMove.IsRetainBlock ?
Dan Gohman6320f522011-07-22 22:29:21 +00002924 getRetainBlockCallee(M) : getRetainCallee(M),
John McCalld935e9c2011-06-15 23:37:01 +00002925 MyArg, "", InsertPt);
2926 Call->setDoesNotThrow();
Dan Gohman728db492012-01-13 00:39:07 +00002927 if (RetainsToMove.IsRetainBlock)
Dan Gohmana7107f92011-10-17 22:53:25 +00002928 Call->setMetadata(CopyOnEscapeMDKind,
2929 MDNode::get(M->getContext(), ArrayRef<Value *>()));
Dan Gohman728db492012-01-13 00:39:07 +00002930 else
John McCalld935e9c2011-06-15 23:37:01 +00002931 Call->setTailCall();
Michael Gottesmanc189a392013-01-09 19:23:24 +00002932
2933 DEBUG(dbgs() << "ObjCARCOpt::MoveCalls: Inserting new Release: " << *Call
2934 << "\n"
2935 " At insertion point: " << *InsertPt
2936 << "\n");
John McCalld935e9c2011-06-15 23:37:01 +00002937 }
2938 for (SmallPtrSet<Instruction *, 2>::const_iterator
2939 PI = RetainsToMove.ReverseInsertPts.begin(),
2940 PE = RetainsToMove.ReverseInsertPts.end(); PI != PE; ++PI) {
Dan Gohman5c70fad2012-03-23 17:47:54 +00002941 Instruction *InsertPt = *PI;
2942 Value *MyArg = ArgTy == ParamTy ? Arg :
2943 new BitCastInst(Arg, ParamTy, "", InsertPt);
2944 CallInst *Call = CallInst::Create(getReleaseCallee(M), MyArg,
2945 "", InsertPt);
2946 // Attach a clang.imprecise_release metadata tag, if appropriate.
2947 if (MDNode *M = ReleasesToMove.ReleaseMetadata)
2948 Call->setMetadata(ImpreciseReleaseMDKind, M);
2949 Call->setDoesNotThrow();
2950 if (ReleasesToMove.IsTailCallRelease)
2951 Call->setTailCall();
Michael Gottesmanc189a392013-01-09 19:23:24 +00002952
2953 DEBUG(dbgs() << "ObjCARCOpt::MoveCalls: Inserting new Retain: " << *Call
2954 << "\n"
2955 " At insertion point: " << *InsertPt
2956 << "\n");
John McCalld935e9c2011-06-15 23:37:01 +00002957 }
2958
2959 // Delete the original retain and release calls.
2960 for (SmallPtrSet<Instruction *, 2>::const_iterator
2961 AI = RetainsToMove.Calls.begin(),
2962 AE = RetainsToMove.Calls.end(); AI != AE; ++AI) {
2963 Instruction *OrigRetain = *AI;
2964 Retains.blot(OrigRetain);
2965 DeadInsts.push_back(OrigRetain);
Michael Gottesmanc189a392013-01-09 19:23:24 +00002966 DEBUG(dbgs() << "ObjCARCOpt::MoveCalls: Deleting retain: " << *OrigRetain <<
2967 "\n");
John McCalld935e9c2011-06-15 23:37:01 +00002968 }
2969 for (SmallPtrSet<Instruction *, 2>::const_iterator
2970 AI = ReleasesToMove.Calls.begin(),
2971 AE = ReleasesToMove.Calls.end(); AI != AE; ++AI) {
2972 Instruction *OrigRelease = *AI;
2973 Releases.erase(OrigRelease);
2974 DeadInsts.push_back(OrigRelease);
Michael Gottesmanc189a392013-01-09 19:23:24 +00002975 DEBUG(dbgs() << "ObjCARCOpt::MoveCalls: Deleting release: " << *OrigRelease
2976 << "\n");
John McCalld935e9c2011-06-15 23:37:01 +00002977 }
2978}
2979
Michael Gottesman9de6f962013-01-22 21:49:00 +00002980bool
2981ObjCARCOpt::ConnectTDBUTraversals(DenseMap<const BasicBlock *, BBState>
2982 &BBStates,
2983 MapVector<Value *, RRInfo> &Retains,
2984 DenseMap<Value *, RRInfo> &Releases,
2985 Module *M,
2986 SmallVector<Instruction *, 4> &NewRetains,
2987 SmallVector<Instruction *, 4> &NewReleases,
2988 SmallVector<Instruction *, 8> &DeadInsts,
2989 RRInfo &RetainsToMove,
2990 RRInfo &ReleasesToMove,
2991 Value *Arg,
2992 bool KnownSafe,
2993 bool &AnyPairsCompletelyEliminated) {
2994 // If a pair happens in a region where it is known that the reference count
2995 // is already incremented, we can similarly ignore possible decrements.
2996 bool KnownSafeTD = true, KnownSafeBU = true;
2997
2998 // Connect the dots between the top-down-collected RetainsToMove and
2999 // bottom-up-collected ReleasesToMove to form sets of related calls.
3000 // This is an iterative process so that we connect multiple releases
3001 // to multiple retains if needed.
3002 unsigned OldDelta = 0;
3003 unsigned NewDelta = 0;
3004 unsigned OldCount = 0;
3005 unsigned NewCount = 0;
3006 bool FirstRelease = true;
3007 bool FirstRetain = true;
3008 for (;;) {
3009 for (SmallVectorImpl<Instruction *>::const_iterator
3010 NI = NewRetains.begin(), NE = NewRetains.end(); NI != NE; ++NI) {
3011 Instruction *NewRetain = *NI;
3012 MapVector<Value *, RRInfo>::const_iterator It = Retains.find(NewRetain);
3013 assert(It != Retains.end());
3014 const RRInfo &NewRetainRRI = It->second;
3015 KnownSafeTD &= NewRetainRRI.KnownSafe;
3016 for (SmallPtrSet<Instruction *, 2>::const_iterator
3017 LI = NewRetainRRI.Calls.begin(),
3018 LE = NewRetainRRI.Calls.end(); LI != LE; ++LI) {
3019 Instruction *NewRetainRelease = *LI;
3020 DenseMap<Value *, RRInfo>::const_iterator Jt =
3021 Releases.find(NewRetainRelease);
3022 if (Jt == Releases.end())
3023 return false;
3024 const RRInfo &NewRetainReleaseRRI = Jt->second;
3025 assert(NewRetainReleaseRRI.Calls.count(NewRetain));
3026 if (ReleasesToMove.Calls.insert(NewRetainRelease)) {
3027 OldDelta -=
3028 BBStates[NewRetainRelease->getParent()].GetAllPathCount();
3029
3030 // Merge the ReleaseMetadata and IsTailCallRelease values.
3031 if (FirstRelease) {
3032 ReleasesToMove.ReleaseMetadata =
3033 NewRetainReleaseRRI.ReleaseMetadata;
3034 ReleasesToMove.IsTailCallRelease =
3035 NewRetainReleaseRRI.IsTailCallRelease;
3036 FirstRelease = false;
3037 } else {
3038 if (ReleasesToMove.ReleaseMetadata !=
3039 NewRetainReleaseRRI.ReleaseMetadata)
3040 ReleasesToMove.ReleaseMetadata = 0;
3041 if (ReleasesToMove.IsTailCallRelease !=
3042 NewRetainReleaseRRI.IsTailCallRelease)
3043 ReleasesToMove.IsTailCallRelease = false;
3044 }
3045
3046 // Collect the optimal insertion points.
3047 if (!KnownSafe)
3048 for (SmallPtrSet<Instruction *, 2>::const_iterator
3049 RI = NewRetainReleaseRRI.ReverseInsertPts.begin(),
3050 RE = NewRetainReleaseRRI.ReverseInsertPts.end();
3051 RI != RE; ++RI) {
3052 Instruction *RIP = *RI;
3053 if (ReleasesToMove.ReverseInsertPts.insert(RIP))
3054 NewDelta -= BBStates[RIP->getParent()].GetAllPathCount();
3055 }
3056 NewReleases.push_back(NewRetainRelease);
3057 }
3058 }
3059 }
3060 NewRetains.clear();
3061 if (NewReleases.empty()) break;
3062
3063 // Back the other way.
3064 for (SmallVectorImpl<Instruction *>::const_iterator
3065 NI = NewReleases.begin(), NE = NewReleases.end(); NI != NE; ++NI) {
3066 Instruction *NewRelease = *NI;
3067 DenseMap<Value *, RRInfo>::const_iterator It =
3068 Releases.find(NewRelease);
3069 assert(It != Releases.end());
3070 const RRInfo &NewReleaseRRI = It->second;
3071 KnownSafeBU &= NewReleaseRRI.KnownSafe;
3072 for (SmallPtrSet<Instruction *, 2>::const_iterator
3073 LI = NewReleaseRRI.Calls.begin(),
3074 LE = NewReleaseRRI.Calls.end(); LI != LE; ++LI) {
3075 Instruction *NewReleaseRetain = *LI;
3076 MapVector<Value *, RRInfo>::const_iterator Jt =
3077 Retains.find(NewReleaseRetain);
3078 if (Jt == Retains.end())
3079 return false;
3080 const RRInfo &NewReleaseRetainRRI = Jt->second;
3081 assert(NewReleaseRetainRRI.Calls.count(NewRelease));
3082 if (RetainsToMove.Calls.insert(NewReleaseRetain)) {
3083 unsigned PathCount =
3084 BBStates[NewReleaseRetain->getParent()].GetAllPathCount();
3085 OldDelta += PathCount;
3086 OldCount += PathCount;
3087
3088 // Merge the IsRetainBlock values.
3089 if (FirstRetain) {
3090 RetainsToMove.IsRetainBlock = NewReleaseRetainRRI.IsRetainBlock;
3091 FirstRetain = false;
3092 } else if (ReleasesToMove.IsRetainBlock !=
3093 NewReleaseRetainRRI.IsRetainBlock)
3094 // It's not possible to merge the sequences if one uses
3095 // objc_retain and the other uses objc_retainBlock.
3096 return false;
3097
3098 // Collect the optimal insertion points.
3099 if (!KnownSafe)
3100 for (SmallPtrSet<Instruction *, 2>::const_iterator
3101 RI = NewReleaseRetainRRI.ReverseInsertPts.begin(),
3102 RE = NewReleaseRetainRRI.ReverseInsertPts.end();
3103 RI != RE; ++RI) {
3104 Instruction *RIP = *RI;
3105 if (RetainsToMove.ReverseInsertPts.insert(RIP)) {
3106 PathCount = BBStates[RIP->getParent()].GetAllPathCount();
3107 NewDelta += PathCount;
3108 NewCount += PathCount;
3109 }
3110 }
3111 NewRetains.push_back(NewReleaseRetain);
3112 }
3113 }
3114 }
3115 NewReleases.clear();
3116 if (NewRetains.empty()) break;
3117 }
3118
3119 // If the pointer is known incremented or nested, we can safely delete the
3120 // pair regardless of what's between them.
3121 if (KnownSafeTD || KnownSafeBU) {
3122 RetainsToMove.ReverseInsertPts.clear();
3123 ReleasesToMove.ReverseInsertPts.clear();
3124 NewCount = 0;
3125 } else {
3126 // Determine whether the new insertion points we computed preserve the
3127 // balance of retain and release calls through the program.
3128 // TODO: If the fully aggressive solution isn't valid, try to find a
3129 // less aggressive solution which is.
3130 if (NewDelta != 0)
3131 return false;
3132 }
3133
3134 // Determine whether the original call points are balanced in the retain and
3135 // release calls through the program. If not, conservatively don't touch
3136 // them.
3137 // TODO: It's theoretically possible to do code motion in this case, as
3138 // long as the existing imbalances are maintained.
3139 if (OldDelta != 0)
3140 return false;
3141
3142 Changed = true;
3143 assert(OldCount != 0 && "Unreachable code?");
3144 NumRRs += OldCount - NewCount;
Michael Gottesman9de6f962013-01-22 21:49:00 +00003145 // Set to true if we completely removed any RR pairs.
Michael Gottesman8b5515f2013-01-22 21:53:43 +00003146 AnyPairsCompletelyEliminated = NewCount == 0;
Michael Gottesman9de6f962013-01-22 21:49:00 +00003147
3148 // We can move calls!
3149 return true;
3150}
3151
Michael Gottesman97e3df02013-01-14 00:35:14 +00003152/// Identify pairings between the retains and releases, and delete and/or move
3153/// them.
John McCalld935e9c2011-06-15 23:37:01 +00003154bool
3155ObjCARCOpt::PerformCodePlacement(DenseMap<const BasicBlock *, BBState>
3156 &BBStates,
3157 MapVector<Value *, RRInfo> &Retains,
Dan Gohman6320f522011-07-22 22:29:21 +00003158 DenseMap<Value *, RRInfo> &Releases,
3159 Module *M) {
John McCalld935e9c2011-06-15 23:37:01 +00003160 bool AnyPairsCompletelyEliminated = false;
3161 RRInfo RetainsToMove;
3162 RRInfo ReleasesToMove;
3163 SmallVector<Instruction *, 4> NewRetains;
3164 SmallVector<Instruction *, 4> NewReleases;
3165 SmallVector<Instruction *, 8> DeadInsts;
3166
Dan Gohman670f9372012-04-13 18:57:48 +00003167 // Visit each retain.
John McCalld935e9c2011-06-15 23:37:01 +00003168 for (MapVector<Value *, RRInfo>::const_iterator I = Retains.begin(),
Dan Gohman2053a5d2011-09-29 22:25:23 +00003169 E = Retains.end(); I != E; ++I) {
3170 Value *V = I->first;
John McCalld935e9c2011-06-15 23:37:01 +00003171 if (!V) continue; // blotted
3172
3173 Instruction *Retain = cast<Instruction>(V);
Michael Gottesmanc189a392013-01-09 19:23:24 +00003174
3175 DEBUG(dbgs() << "ObjCARCOpt::PerformCodePlacement: Visiting: " << *Retain
3176 << "\n");
3177
John McCalld935e9c2011-06-15 23:37:01 +00003178 Value *Arg = GetObjCArg(Retain);
3179
Dan Gohman728db492012-01-13 00:39:07 +00003180 // If the object being released is in static or stack storage, we know it's
John McCalld935e9c2011-06-15 23:37:01 +00003181 // not being managed by ObjC reference counting, so we can delete pairs
3182 // regardless of what possible decrements or uses lie between them.
Dan Gohman728db492012-01-13 00:39:07 +00003183 bool KnownSafe = isa<Constant>(Arg) || isa<AllocaInst>(Arg);
Dan Gohman41375a32012-05-08 23:39:44 +00003184
Dan Gohman56e1cef2011-08-22 17:29:11 +00003185 // A constant pointer can't be pointing to an object on the heap. It may
3186 // be reference-counted, but it won't be deleted.
3187 if (const LoadInst *LI = dyn_cast<LoadInst>(Arg))
3188 if (const GlobalVariable *GV =
3189 dyn_cast<GlobalVariable>(
3190 StripPointerCastsAndObjCCalls(LI->getPointerOperand())))
3191 if (GV->isConstant())
3192 KnownSafe = true;
3193
John McCalld935e9c2011-06-15 23:37:01 +00003194 // Connect the dots between the top-down-collected RetainsToMove and
3195 // bottom-up-collected ReleasesToMove to form sets of related calls.
John McCalld935e9c2011-06-15 23:37:01 +00003196 NewRetains.push_back(Retain);
Michael Gottesman9de6f962013-01-22 21:49:00 +00003197 bool PerformMoveCalls =
3198 ConnectTDBUTraversals(BBStates, Retains, Releases, M, NewRetains,
3199 NewReleases, DeadInsts, RetainsToMove,
3200 ReleasesToMove, Arg, KnownSafe,
3201 AnyPairsCompletelyEliminated);
John McCalld935e9c2011-06-15 23:37:01 +00003202
Michael Gottesman9de6f962013-01-22 21:49:00 +00003203 if (PerformMoveCalls) {
3204 // Ok, everything checks out and we're all set. Let's move/delete some
3205 // code!
3206 MoveCalls(Arg, RetainsToMove, ReleasesToMove,
3207 Retains, Releases, DeadInsts, M);
John McCalld935e9c2011-06-15 23:37:01 +00003208 }
3209
Michael Gottesman9de6f962013-01-22 21:49:00 +00003210 // Clean up state for next retain.
John McCalld935e9c2011-06-15 23:37:01 +00003211 NewReleases.clear();
3212 NewRetains.clear();
3213 RetainsToMove.clear();
3214 ReleasesToMove.clear();
3215 }
3216
3217 // Now that we're done moving everything, we can delete the newly dead
3218 // instructions, as we no longer need them as insert points.
3219 while (!DeadInsts.empty())
3220 EraseInstruction(DeadInsts.pop_back_val());
3221
3222 return AnyPairsCompletelyEliminated;
3223}
3224
/// Weak pointer optimizations.
///
/// Two independent transformations over the weak runtime calls in \p F:
///  1. Block-local redundant-load elimination / store-to-load forwarding for
///     objc_loadWeak / objc_loadWeakRetained, plus deletion of dead
///     objc_loadWeak calls.
///  2. Removal of allocas whose only users are weak entry points
///     (objc_initWeak / objc_storeWeak / objc_destroyWeak), together with
///     those calls.
/// Sets the pass's Changed flag whenever anything is rewritten.
void ObjCARCOpt::OptimizeWeakCalls(Function &F) {
  // First, do memdep-style RLE and S2L optimizations. We can't use memdep
  // itself because it uses AliasAnalysis and we need to do provenance
  // queries instead.
  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
    // Advance before any possible erasure of Inst below.
    Instruction *Inst = &*I++;

    DEBUG(dbgs() << "ObjCARCOpt::OptimizeWeakCalls: Visiting: " << *Inst <<
          "\n");

    InstructionClass Class = GetBasicInstructionClass(Inst);
    if (Class != IC_LoadWeak && Class != IC_LoadWeakRetained)
      continue;

    // Delete objc_loadWeak calls with no users.
    if (Class == IC_LoadWeak && Inst->use_empty()) {
      Inst->eraseFromParent();
      continue;
    }

    // TODO: For now, just look for an earlier available version of this value
    // within the same block. Theoretically, we could do memdep-style non-local
    // analysis too, but that would want caching. A better approach would be to
    // use the technique that EarlyCSE uses.
    inst_iterator Current = llvm::prior(I);
    BasicBlock *CurrentBB = Current.getBasicBlockIterator();
    // Walk backwards from the load to the start of its block, looking for an
    // earlier call that makes the load redundant; bail out (via 'clobbered')
    // at anything that could modify the weak pointer.
    for (BasicBlock::iterator B = CurrentBB->begin(),
                              J = Current.getInstructionIterator();
         J != B; --J) {
      Instruction *EarlierInst = &*llvm::prior(J);
      InstructionClass EarlierClass = GetInstructionClass(EarlierInst);
      switch (EarlierClass) {
      case IC_LoadWeak:
      case IC_LoadWeakRetained: {
        // If this is loading from the same pointer, replace this load's value
        // with that one.
        CallInst *Call = cast<CallInst>(Inst);
        CallInst *EarlierCall = cast<CallInst>(EarlierInst);
        Value *Arg = Call->getArgOperand(0);
        Value *EarlierArg = EarlierCall->getArgOperand(0);
        switch (PA.getAA()->alias(Arg, EarlierArg)) {
        case AliasAnalysis::MustAlias:
          Changed = true;
          // If the load has a builtin retain, insert a plain retain for it.
          if (Class == IC_LoadWeakRetained) {
            CallInst *CI =
              CallInst::Create(getRetainCallee(F.getParent()), EarlierCall,
                               "", Call);
            CI->setTailCall();
          }
          // Zap the fully redundant load.
          Call->replaceAllUsesWith(EarlierCall);
          Call->eraseFromParent();
          goto clobbered;
        case AliasAnalysis::MayAlias:
        case AliasAnalysis::PartialAlias:
          // A possible alias may have changed the weak pointer; give up.
          goto clobbered;
        case AliasAnalysis::NoAlias:
          break;
        }
        break;
      }
      case IC_StoreWeak:
      case IC_InitWeak: {
        // If this is storing to the same pointer and has the same size etc.
        // replace this load's value with the stored value.
        CallInst *Call = cast<CallInst>(Inst);
        CallInst *EarlierCall = cast<CallInst>(EarlierInst);
        Value *Arg = Call->getArgOperand(0);
        Value *EarlierArg = EarlierCall->getArgOperand(0);
        switch (PA.getAA()->alias(Arg, EarlierArg)) {
        case AliasAnalysis::MustAlias:
          Changed = true;
          // If the load has a builtin retain, insert a plain retain for it.
          if (Class == IC_LoadWeakRetained) {
            CallInst *CI =
              CallInst::Create(getRetainCallee(F.getParent()), EarlierCall,
                               "", Call);
            CI->setTailCall();
          }
          // Zap the fully redundant load; argument 1 of the earlier
          // storeWeak/initWeak is the value that was stored.
          Call->replaceAllUsesWith(EarlierCall->getArgOperand(1));
          Call->eraseFromParent();
          goto clobbered;
        case AliasAnalysis::MayAlias:
        case AliasAnalysis::PartialAlias:
          goto clobbered;
        case AliasAnalysis::NoAlias:
          break;
        }
        break;
      }
      case IC_MoveWeak:
      case IC_CopyWeak:
        // TODO: Grab the copied value.
        goto clobbered;
      case IC_AutoreleasepoolPush:
      case IC_None:
      case IC_User:
        // Weak pointers are only modified through the weak entry points
        // (and arbitrary calls, which could call the weak entry points).
        break;
      default:
        // Anything else could modify the weak pointer.
        goto clobbered;
      }
    }
    clobbered:;
  }

  // Then, for each destroyWeak with an alloca operand, check to see if
  // the alloca and all its users can be zapped.
  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
    Instruction *Inst = &*I++;
    InstructionClass Class = GetBasicInstructionClass(Inst);
    if (Class != IC_DestroyWeak)
      continue;

    CallInst *Call = cast<CallInst>(Inst);
    Value *Arg = Call->getArgOperand(0);
    if (AllocaInst *Alloca = dyn_cast<AllocaInst>(Arg)) {
      // First pass: verify every user is one of the removable weak entry
      // points; otherwise the alloca escapes and must be kept ('done').
      for (Value::use_iterator UI = Alloca->use_begin(),
           UE = Alloca->use_end(); UI != UE; ++UI) {
        const Instruction *UserInst = cast<Instruction>(*UI);
        switch (GetBasicInstructionClass(UserInst)) {
        case IC_InitWeak:
        case IC_StoreWeak:
        case IC_DestroyWeak:
          continue;
        default:
          goto done;
        }
      }
      Changed = true;
      // Second pass: erase all the users (advancing the use iterator before
      // each erasure), then the alloca itself.
      for (Value::use_iterator UI = Alloca->use_begin(),
           UE = Alloca->use_end(); UI != UE; ) {
        CallInst *UserInst = cast<CallInst>(*UI++);
        switch (GetBasicInstructionClass(UserInst)) {
        case IC_InitWeak:
        case IC_StoreWeak:
          // These functions return their second argument.
          UserInst->replaceAllUsesWith(UserInst->getArgOperand(1));
          break;
        case IC_DestroyWeak:
          // No return value.
          break;
        default:
          llvm_unreachable("alloca really is used!");
        }
        UserInst->eraseFromParent();
      }
      Alloca->eraseFromParent();
    done:;
    }
  }

  DEBUG(dbgs() << "ObjCARCOpt::OptimizeWeakCalls: Finished List.\n\n");

}
3385
Michael Gottesman97e3df02013-01-14 00:35:14 +00003386/// Identify program paths which execute sequences of retains and releases which
3387/// can be eliminated.
John McCalld935e9c2011-06-15 23:37:01 +00003388bool ObjCARCOpt::OptimizeSequences(Function &F) {
3389 /// Releases, Retains - These are used to store the results of the main flow
3390 /// analysis. These use Value* as the key instead of Instruction* so that the
3391 /// map stays valid when we get around to rewriting code and calls get
3392 /// replaced by arguments.
3393 DenseMap<Value *, RRInfo> Releases;
3394 MapVector<Value *, RRInfo> Retains;
3395
Michael Gottesman97e3df02013-01-14 00:35:14 +00003396 /// This is used during the traversal of the function to track the
John McCalld935e9c2011-06-15 23:37:01 +00003397 /// states for each identified object at each block.
3398 DenseMap<const BasicBlock *, BBState> BBStates;
3399
3400 // Analyze the CFG of the function, and all instructions.
3401 bool NestingDetected = Visit(F, BBStates, Retains, Releases);
3402
3403 // Transform.
Dan Gohman6320f522011-07-22 22:29:21 +00003404 return PerformCodePlacement(BBStates, Retains, Releases, F.getParent()) &&
3405 NestingDetected;
John McCalld935e9c2011-06-15 23:37:01 +00003406}
3407
Michael Gottesman97e3df02013-01-14 00:35:14 +00003408/// Look for this pattern:
Dmitri Gribenko5485acd2012-09-14 14:57:36 +00003409/// \code
John McCalld935e9c2011-06-15 23:37:01 +00003410/// %call = call i8* @something(...)
3411/// %2 = call i8* @objc_retain(i8* %call)
3412/// %3 = call i8* @objc_autorelease(i8* %2)
3413/// ret i8* %3
Dmitri Gribenko5485acd2012-09-14 14:57:36 +00003414/// \endcode
John McCalld935e9c2011-06-15 23:37:01 +00003415/// And delete the retain and autorelease.
3416///
3417/// Otherwise if it's just this:
Dmitri Gribenko5485acd2012-09-14 14:57:36 +00003418/// \code
John McCalld935e9c2011-06-15 23:37:01 +00003419/// %3 = call i8* @objc_autorelease(i8* %2)
3420/// ret i8* %3
Dmitri Gribenko5485acd2012-09-14 14:57:36 +00003421/// \endcode
John McCalld935e9c2011-06-15 23:37:01 +00003422/// convert the autorelease to autoreleaseRV.
3423void ObjCARCOpt::OptimizeReturns(Function &F) {
3424 if (!F.getReturnType()->isPointerTy())
3425 return;
3426
3427 SmallPtrSet<Instruction *, 4> DependingInstructions;
3428 SmallPtrSet<const BasicBlock *, 4> Visited;
3429 for (Function::iterator FI = F.begin(), FE = F.end(); FI != FE; ++FI) {
3430 BasicBlock *BB = FI;
3431 ReturnInst *Ret = dyn_cast<ReturnInst>(&BB->back());
Michael Gottesman3f146e22013-01-01 16:05:48 +00003432
Michael Gottesman9f848ae2013-01-04 21:29:57 +00003433 DEBUG(dbgs() << "ObjCARCOpt::OptimizeReturns: Visiting: " << *Ret << "\n");
Michael Gottesman3f146e22013-01-01 16:05:48 +00003434
John McCalld935e9c2011-06-15 23:37:01 +00003435 if (!Ret) continue;
3436
3437 const Value *Arg = StripPointerCastsAndObjCCalls(Ret->getOperand(0));
3438 FindDependencies(NeedsPositiveRetainCount, Arg,
3439 BB, Ret, DependingInstructions, Visited, PA);
3440 if (DependingInstructions.size() != 1)
3441 goto next_block;
3442
3443 {
3444 CallInst *Autorelease =
3445 dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
3446 if (!Autorelease)
3447 goto next_block;
Dan Gohman41375a32012-05-08 23:39:44 +00003448 InstructionClass AutoreleaseClass = GetBasicInstructionClass(Autorelease);
John McCalld935e9c2011-06-15 23:37:01 +00003449 if (!IsAutorelease(AutoreleaseClass))
3450 goto next_block;
3451 if (GetObjCArg(Autorelease) != Arg)
3452 goto next_block;
3453
3454 DependingInstructions.clear();
3455 Visited.clear();
3456
3457 // Check that there is nothing that can affect the reference
3458 // count between the autorelease and the retain.
3459 FindDependencies(CanChangeRetainCount, Arg,
3460 BB, Autorelease, DependingInstructions, Visited, PA);
3461 if (DependingInstructions.size() != 1)
3462 goto next_block;
3463
3464 {
3465 CallInst *Retain =
3466 dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
3467
3468 // Check that we found a retain with the same argument.
3469 if (!Retain ||
3470 !IsRetain(GetBasicInstructionClass(Retain)) ||
3471 GetObjCArg(Retain) != Arg)
3472 goto next_block;
3473
3474 DependingInstructions.clear();
3475 Visited.clear();
3476
3477 // Convert the autorelease to an autoreleaseRV, since it's
3478 // returning the value.
3479 if (AutoreleaseClass == IC_Autorelease) {
Michael Gottesmana6cb0182013-01-10 02:03:50 +00003480 DEBUG(dbgs() << "ObjCARCOpt::OptimizeReturns: Converting autorelease "
3481 "=> autoreleaseRV since it's returning a value.\n"
3482 " In: " << *Autorelease
3483 << "\n");
John McCalld935e9c2011-06-15 23:37:01 +00003484 Autorelease->setCalledFunction(getAutoreleaseRVCallee(F.getParent()));
Michael Gottesmana6cb0182013-01-10 02:03:50 +00003485 DEBUG(dbgs() << " Out: " << *Autorelease
3486 << "\n");
Michael Gottesmanc9656fa2013-01-12 01:25:15 +00003487 Autorelease->setTailCall(); // Always tail call autoreleaseRV.
John McCalld935e9c2011-06-15 23:37:01 +00003488 AutoreleaseClass = IC_AutoreleaseRV;
3489 }
3490
3491 // Check that there is nothing that can affect the reference
3492 // count between the retain and the call.
Dan Gohman4ac148d2011-09-29 22:27:34 +00003493 // Note that Retain need not be in BB.
3494 FindDependencies(CanChangeRetainCount, Arg, Retain->getParent(), Retain,
John McCalld935e9c2011-06-15 23:37:01 +00003495 DependingInstructions, Visited, PA);
3496 if (DependingInstructions.size() != 1)
3497 goto next_block;
3498
3499 {
3500 CallInst *Call =
3501 dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
3502
3503 // Check that the pointer is the return value of the call.
3504 if (!Call || Arg != Call)
3505 goto next_block;
3506
3507 // Check that the call is a regular call.
3508 InstructionClass Class = GetBasicInstructionClass(Call);
3509 if (Class != IC_CallOrUser && Class != IC_Call)
3510 goto next_block;
3511
3512 // If so, we can zap the retain and autorelease.
3513 Changed = true;
3514 ++NumRets;
Michael Gottesmand61a3b22013-01-07 00:04:56 +00003515 DEBUG(dbgs() << "ObjCARCOpt::OptimizeReturns: Erasing: " << *Retain
3516 << "\n Erasing: "
3517 << *Autorelease << "\n");
John McCalld935e9c2011-06-15 23:37:01 +00003518 EraseInstruction(Retain);
3519 EraseInstruction(Autorelease);
3520 }
3521 }
3522 }
3523
3524 next_block:
3525 DependingInstructions.clear();
3526 Visited.clear();
3527 }
Michael Gottesman10426b52013-01-07 21:26:07 +00003528
Michael Gottesman9f848ae2013-01-04 21:29:57 +00003529 DEBUG(dbgs() << "ObjCARCOpt::OptimizeReturns: Finished List.\n\n");
Michael Gottesman10426b52013-01-07 21:26:07 +00003530
John McCalld935e9c2011-06-15 23:37:01 +00003531}
3532
3533bool ObjCARCOpt::doInitialization(Module &M) {
3534 if (!EnableARCOpts)
3535 return false;
3536
Dan Gohman670f9372012-04-13 18:57:48 +00003537 // If nothing in the Module uses ARC, don't do anything.
Dan Gohmanceaac7c2011-06-20 23:20:43 +00003538 Run = ModuleHasARC(M);
3539 if (!Run)
3540 return false;
3541
John McCalld935e9c2011-06-15 23:37:01 +00003542 // Identify the imprecise release metadata kind.
3543 ImpreciseReleaseMDKind =
3544 M.getContext().getMDKindID("clang.imprecise_release");
Dan Gohmana7107f92011-10-17 22:53:25 +00003545 CopyOnEscapeMDKind =
3546 M.getContext().getMDKindID("clang.arc.copy_on_escape");
Dan Gohman0155f302012-02-17 18:59:53 +00003547 NoObjCARCExceptionsMDKind =
3548 M.getContext().getMDKindID("clang.arc.no_objc_arc_exceptions");
John McCalld935e9c2011-06-15 23:37:01 +00003549
John McCalld935e9c2011-06-15 23:37:01 +00003550 // Intuitively, objc_retain and others are nocapture, however in practice
3551 // they are not, because they return their argument value. And objc_release
Dan Gohmandae33492012-04-27 18:56:31 +00003552 // calls finalizers which can have arbitrary side effects.
John McCalld935e9c2011-06-15 23:37:01 +00003553
3554 // These are initialized lazily.
3555 RetainRVCallee = 0;
3556 AutoreleaseRVCallee = 0;
3557 ReleaseCallee = 0;
3558 RetainCallee = 0;
Dan Gohman6320f522011-07-22 22:29:21 +00003559 RetainBlockCallee = 0;
John McCalld935e9c2011-06-15 23:37:01 +00003560 AutoreleaseCallee = 0;
3561
3562 return false;
3563}
3564
3565bool ObjCARCOpt::runOnFunction(Function &F) {
3566 if (!EnableARCOpts)
3567 return false;
3568
Dan Gohmanceaac7c2011-06-20 23:20:43 +00003569 // If nothing in the Module uses ARC, don't do anything.
3570 if (!Run)
3571 return false;
3572
John McCalld935e9c2011-06-15 23:37:01 +00003573 Changed = false;
3574
Michael Gottesmanb24bdef2013-01-12 02:57:16 +00003575 DEBUG(dbgs() << "ObjCARCOpt: Visiting Function: " << F.getName() << "\n");
3576
John McCalld935e9c2011-06-15 23:37:01 +00003577 PA.setAA(&getAnalysis<AliasAnalysis>());
3578
3579 // This pass performs several distinct transformations. As a compile-time aid
3580 // when compiling code that isn't ObjC, skip these if the relevant ObjC
3581 // library functions aren't declared.
3582
3583 // Preliminary optimizations. This also computs UsedInThisFunction.
3584 OptimizeIndividualCalls(F);
3585
3586 // Optimizations for weak pointers.
3587 if (UsedInThisFunction & ((1 << IC_LoadWeak) |
3588 (1 << IC_LoadWeakRetained) |
3589 (1 << IC_StoreWeak) |
3590 (1 << IC_InitWeak) |
3591 (1 << IC_CopyWeak) |
3592 (1 << IC_MoveWeak) |
3593 (1 << IC_DestroyWeak)))
3594 OptimizeWeakCalls(F);
3595
3596 // Optimizations for retain+release pairs.
3597 if (UsedInThisFunction & ((1 << IC_Retain) |
3598 (1 << IC_RetainRV) |
3599 (1 << IC_RetainBlock)))
3600 if (UsedInThisFunction & (1 << IC_Release))
3601 // Run OptimizeSequences until it either stops making changes or
3602 // no retain+release pair nesting is detected.
3603 while (OptimizeSequences(F)) {}
3604
3605 // Optimizations if objc_autorelease is used.
Dan Gohman41375a32012-05-08 23:39:44 +00003606 if (UsedInThisFunction & ((1 << IC_Autorelease) |
3607 (1 << IC_AutoreleaseRV)))
John McCalld935e9c2011-06-15 23:37:01 +00003608 OptimizeReturns(F);
3609
Michael Gottesmanb24bdef2013-01-12 02:57:16 +00003610 DEBUG(dbgs() << "\n");
3611
John McCalld935e9c2011-06-15 23:37:01 +00003612 return Changed;
3613}
3614
/// Drop the cached provenance-analysis results so no per-function state is
/// retained between runs.
void ObjCARCOpt::releaseMemory() {
  PA.clear();
}
3618
Michael Gottesman97e3df02013-01-14 00:35:14 +00003619/// @}
3620///
3621/// \defgroup ARCContract ARC Contraction.
3622/// @{
John McCalld935e9c2011-06-15 23:37:01 +00003623
3624// TODO: ObjCARCContract could insert PHI nodes when uses aren't
3625// dominated by single calls.
3626
John McCalld935e9c2011-06-15 23:37:01 +00003627#include "llvm/Analysis/Dominators.h"
Chandler Carruth9fb823b2013-01-02 11:36:10 +00003628#include "llvm/IR/InlineAsm.h"
3629#include "llvm/IR/Operator.h"
John McCalld935e9c2011-06-15 23:37:01 +00003630
3631STATISTIC(NumStoreStrongs, "Number objc_storeStrong calls formed");
3632
namespace {
  /// \brief Late ARC optimizations
  ///
  /// These change the IR in a way that makes it difficult to be analyzed by
  /// ObjCARCOpt, so it's run late.
  class ObjCARCContract : public FunctionPass {
    /// Whether the current invocation has modified the IR.
    bool Changed;
    /// Analyses cached in runOnFunction for use by the helpers.
    AliasAnalysis *AA;
    DominatorTree *DT;
    /// ARC-aware pointer-provenance queries.
    ProvenanceAnalysis PA;

    /// A flag indicating whether this optimization pass should run.
    bool Run;

    /// Declarations for ObjC runtime functions, for use in creating calls to
    /// them. These are initialized lazily to avoid cluttering up the Module
    /// with unused declarations.

    /// Declaration for objc_storeStrong().
    Constant *StoreStrongCallee;
    /// Declaration for objc_retainAutorelease().
    Constant *RetainAutoreleaseCallee;
    /// Declaration for objc_retainAutoreleaseReturnValue().
    Constant *RetainAutoreleaseRVCallee;

    /// The inline asm string to insert between calls and RetainRV calls to make
    /// the optimization work on targets which need it.
    const MDString *RetainRVMarker;

    /// The set of inserted objc_storeStrong calls. If at the end of walking the
    /// function we have found no alloca instructions, these calls can be marked
    /// "tail".
    SmallPtrSet<CallInst *, 8> StoreStrongCalls;

    /// Lazy accessors for the runtime-function declarations above.
    Constant *getStoreStrongCallee(Module *M);
    Constant *getRetainAutoreleaseCallee(Module *M);
    Constant *getRetainAutoreleaseRVCallee(Module *M);

    /// Try to fuse a retain with \p Autorelease into a single
    /// objc_retainAutorelease(ReturnValue) call; the two sets are
    /// caller-provided scratch space.
    bool ContractAutorelease(Function &F, Instruction *Autorelease,
                             InstructionClass Class,
                             SmallPtrSet<Instruction *, 4>
                               &DependingInstructions,
                             SmallPtrSet<const BasicBlock *, 4>
                               &Visited);

    /// Try to merge \p Release with a nearby load/store/retain into an
    /// objc_storeStrong call, keeping \p Iter valid across erasures.
    void ContractRelease(Instruction *Release,
                         inst_iterator &Iter);

    virtual void getAnalysisUsage(AnalysisUsage &AU) const;
    virtual bool doInitialization(Module &M);
    virtual bool runOnFunction(Function &F);

  public:
    static char ID;
    ObjCARCContract() : FunctionPass(ID) {
      initializeObjCARCContractPass(*PassRegistry::getPassRegistry());
    }
  };
}
3692
char ObjCARCContract::ID = 0;
// Register the pass and its analysis-group/pass dependencies with the
// PassRegistry so -objc-arc-contract is available on the command line.
INITIALIZE_PASS_BEGIN(ObjCARCContract,
                      "objc-arc-contract", "ObjC ARC contraction", false, false)
INITIALIZE_AG_DEPENDENCY(AliasAnalysis)
INITIALIZE_PASS_DEPENDENCY(DominatorTree)
INITIALIZE_PASS_END(ObjCARCContract,
                    "objc-arc-contract", "ObjC ARC contraction", false, false)

/// Public factory for the contraction pass.
Pass *llvm::createObjCARCContractPass() {
  return new ObjCARCContract();
}
3704
3705void ObjCARCContract::getAnalysisUsage(AnalysisUsage &AU) const {
3706 AU.addRequired<AliasAnalysis>();
3707 AU.addRequired<DominatorTree>();
3708 AU.setPreservesCFG();
3709}
3710
3711Constant *ObjCARCContract::getStoreStrongCallee(Module *M) {
3712 if (!StoreStrongCallee) {
3713 LLVMContext &C = M->getContext();
Jay Foadb804a2b2011-07-12 14:06:48 +00003714 Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
3715 Type *I8XX = PointerType::getUnqual(I8X);
Dan Gohman41375a32012-05-08 23:39:44 +00003716 Type *Params[] = { I8XX, I8X };
John McCalld935e9c2011-06-15 23:37:01 +00003717
Bill Wendling09175b32013-01-22 21:15:51 +00003718 AttributeSet Attr = AttributeSet()
3719 .addAttribute(M->getContext(), AttributeSet::FunctionIndex,
3720 Attribute::NoUnwind)
3721 .addAttribute(M->getContext(), 1, Attribute::NoCapture);
John McCalld935e9c2011-06-15 23:37:01 +00003722
3723 StoreStrongCallee =
3724 M->getOrInsertFunction(
3725 "objc_storeStrong",
3726 FunctionType::get(Type::getVoidTy(C), Params, /*isVarArg=*/false),
Bill Wendling09175b32013-01-22 21:15:51 +00003727 Attr);
John McCalld935e9c2011-06-15 23:37:01 +00003728 }
3729 return StoreStrongCallee;
3730}
3731
3732Constant *ObjCARCContract::getRetainAutoreleaseCallee(Module *M) {
3733 if (!RetainAutoreleaseCallee) {
3734 LLVMContext &C = M->getContext();
Jay Foadb804a2b2011-07-12 14:06:48 +00003735 Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
Dan Gohman41375a32012-05-08 23:39:44 +00003736 Type *Params[] = { I8X };
3737 FunctionType *FTy = FunctionType::get(I8X, Params, /*isVarArg=*/false);
Bill Wendling3d7b0b82012-12-19 07:18:57 +00003738 AttributeSet Attribute =
Bill Wendling09175b32013-01-22 21:15:51 +00003739 AttributeSet().addAttribute(M->getContext(), AttributeSet::FunctionIndex,
3740 Attribute::NoUnwind);
John McCalld935e9c2011-06-15 23:37:01 +00003741 RetainAutoreleaseCallee =
Bill Wendling3d7b0b82012-12-19 07:18:57 +00003742 M->getOrInsertFunction("objc_retainAutorelease", FTy, Attribute);
John McCalld935e9c2011-06-15 23:37:01 +00003743 }
3744 return RetainAutoreleaseCallee;
3745}
3746
3747Constant *ObjCARCContract::getRetainAutoreleaseRVCallee(Module *M) {
3748 if (!RetainAutoreleaseRVCallee) {
3749 LLVMContext &C = M->getContext();
Jay Foadb804a2b2011-07-12 14:06:48 +00003750 Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
Dan Gohman41375a32012-05-08 23:39:44 +00003751 Type *Params[] = { I8X };
3752 FunctionType *FTy = FunctionType::get(I8X, Params, /*isVarArg=*/false);
Bill Wendling3d7b0b82012-12-19 07:18:57 +00003753 AttributeSet Attribute =
Bill Wendling09175b32013-01-22 21:15:51 +00003754 AttributeSet().addAttribute(M->getContext(), AttributeSet::FunctionIndex,
3755 Attribute::NoUnwind);
John McCalld935e9c2011-06-15 23:37:01 +00003756 RetainAutoreleaseRVCallee =
3757 M->getOrInsertFunction("objc_retainAutoreleaseReturnValue", FTy,
Bill Wendling3d7b0b82012-12-19 07:18:57 +00003758 Attribute);
John McCalld935e9c2011-06-15 23:37:01 +00003759 }
3760 return RetainAutoreleaseRVCallee;
3761}
3762
/// Merge an autorelease with a retain into a fused call.
///
/// Searches upward from \p Autorelease for an objc_retain of the same object
/// with no interfering instruction in between; if one is found, the retain is
/// rewritten into objc_retainAutorelease (or objc_retainAutoreleaseReturnValue
/// when \p Class is IC_AutoreleaseRV) and the autorelease is erased.
///
/// \returns true if the fusion was performed.
/// \p DependingInstructions and \p Visited are caller-provided scratch sets;
/// both are left empty on return.
bool
ObjCARCContract::ContractAutorelease(Function &F, Instruction *Autorelease,
                                     InstructionClass Class,
                                     SmallPtrSet<Instruction *, 4>
                                       &DependingInstructions,
                                     SmallPtrSet<const BasicBlock *, 4>
                                       &Visited) {
  const Value *Arg = GetObjCArg(Autorelease);

  // Check that there are no instructions between the retain and the autorelease
  // (such as an autorelease_pop) which may change the count.
  // The dependence kind differs for the RV flavor because its pairing rules
  // with the preceding call are stricter.
  CallInst *Retain = 0;
  if (Class == IC_AutoreleaseRV)
    FindDependencies(RetainAutoreleaseRVDep, Arg,
                     Autorelease->getParent(), Autorelease,
                     DependingInstructions, Visited, PA);
  else
    FindDependencies(RetainAutoreleaseDep, Arg,
                     Autorelease->getParent(), Autorelease,
                     DependingInstructions, Visited, PA);

  // Exactly one dependency is required: the candidate retain itself.
  Visited.clear();
  if (DependingInstructions.size() != 1) {
    DependingInstructions.clear();
    return false;
  }

  Retain = dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
  DependingInstructions.clear();

  // The dependency must be a plain objc_retain of the same object.
  if (!Retain ||
      GetBasicInstructionClass(Retain) != IC_Retain ||
      GetObjCArg(Retain) != Arg)
    return false;

  Changed = true;
  ++NumPeeps;

  DEBUG(dbgs() << "ObjCARCContract::ContractAutorelease: Fusing "
                  "retain/autorelease. Erasing: " << *Autorelease << "\n"
                  "                                      Old Retain: "
               << *Retain << "\n");

  // Rewrite the retain in place into the fused runtime call, then drop the
  // now-redundant autorelease.
  if (Class == IC_AutoreleaseRV)
    Retain->setCalledFunction(getRetainAutoreleaseRVCallee(F.getParent()));
  else
    Retain->setCalledFunction(getRetainAutoreleaseCallee(F.getParent()));

  DEBUG(dbgs() << "                                      New Retain: "
               << *Retain << "\n");

  EraseInstruction(Autorelease);
  return true;
}
3818
/// Attempt to merge an objc_release with a store, load, and objc_retain to form
/// an objc_storeStrong. This can be a little tricky because the instructions
/// don't always appear in order, and there may be unrelated intervening
/// instructions.
void ObjCARCContract::ContractRelease(Instruction *Release,
                                      inst_iterator &Iter) {
  // The released value must come from a simple (non-volatile, non-atomic)
  // load, or the load/store pair cannot be rewritten.
  LoadInst *Load = dyn_cast<LoadInst>(GetObjCArg(Release));
  if (!Load || !Load->isSimple()) return;

  // For now, require everything to be in one basic block.
  BasicBlock *BB = Release->getParent();
  if (Load->getParent() != BB) return;

  // Walk down to find the store and the release, which may be in either order.
  BasicBlock::iterator I = Load, End = BB->end();
  ++I;
  AliasAnalysis::Location Loc = AA->getLocation(Load);
  StoreInst *Store = 0;
  bool SawRelease = false;
  for (; !Store || !SawRelease; ++I) {
    // Ran off the end of the block without finding both: give up.
    if (I == End)
      return;

    Instruction *Inst = I;
    if (Inst == Release) {
      SawRelease = true;
      continue;
    }

    InstructionClass Class = GetBasicInstructionClass(Inst);

    // Unrelated retains are harmless.
    if (IsRetain(Class))
      continue;

    if (Store) {
      // The store is the point where we're going to put the objc_storeStrong,
      // so make sure there are no uses after it.
      if (CanUse(Inst, Load, PA, Class))
        return;
    } else if (AA->getModRefInfo(Inst, Loc) & AliasAnalysis::Mod) {
      // We are moving the load down to the store, so check for anything
      // else which writes to the memory between the load and the store.
      Store = dyn_cast<StoreInst>(Inst);
      if (!Store || !Store->isSimple()) return;
      if (Store->getPointerOperand() != Loc.Ptr) return;
    }
  }

  // The value being stored, stripped of casts and ARC calls, is the object
  // the matching retain must operate on.
  Value *New = StripPointerCastsAndObjCCalls(Store->getValueOperand());

  // Walk up to find the retain.
  I = Store;
  BasicBlock::iterator Begin = BB->begin();
  while (I != Begin && GetBasicInstructionClass(I) != IC_Retain)
    --I;
  Instruction *Retain = I;
  if (GetBasicInstructionClass(Retain) != IC_Retain) return;
  if (GetObjCArg(Retain) != New) return;

  Changed = true;
  ++NumStoreStrongs;

  LLVMContext &C = Release->getContext();
  Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
  Type *I8XX = PointerType::getUnqual(I8X);

  // Build objc_storeStrong(i8** addr, i8* new) at the store's position,
  // bitcasting the operands to the expected types when necessary.
  Value *Args[] = { Load->getPointerOperand(), New };
  if (Args[0]->getType() != I8XX)
    Args[0] = new BitCastInst(Args[0], I8XX, "", Store);
  if (Args[1]->getType() != I8X)
    Args[1] = new BitCastInst(Args[1], I8X, "", Store);
  CallInst *StoreStrong =
    CallInst::Create(getStoreStrongCallee(BB->getParent()->getParent()),
                     Args, "", Store);
  StoreStrong->setDoesNotThrow();
  StoreStrong->setDebugLoc(Store->getDebugLoc());

  // We can't set the tail flag yet, because we haven't yet determined
  // whether there are any escaping allocas. Remember this call, so that
  // we can set the tail flag once we know it's safe.
  StoreStrongCalls.insert(StoreStrong);

  // Delete the now-redundant instructions, keeping the caller's iterator
  // valid if it currently points at the store being erased.
  if (&*Iter == Store) ++Iter;
  Store->eraseFromParent();
  Release->eraseFromParent();
  EraseInstruction(Retain);
  if (Load->use_empty())
    Load->eraseFromParent();
}
3909
3910bool ObjCARCContract::doInitialization(Module &M) {
Dan Gohman670f9372012-04-13 18:57:48 +00003911 // If nothing in the Module uses ARC, don't do anything.
Dan Gohmanceaac7c2011-06-20 23:20:43 +00003912 Run = ModuleHasARC(M);
3913 if (!Run)
3914 return false;
3915
John McCalld935e9c2011-06-15 23:37:01 +00003916 // These are initialized lazily.
3917 StoreStrongCallee = 0;
3918 RetainAutoreleaseCallee = 0;
3919 RetainAutoreleaseRVCallee = 0;
3920
3921 // Initialize RetainRVMarker.
3922 RetainRVMarker = 0;
3923 if (NamedMDNode *NMD =
3924 M.getNamedMetadata("clang.arc.retainAutoreleasedReturnValueMarker"))
3925 if (NMD->getNumOperands() == 1) {
3926 const MDNode *N = NMD->getOperand(0);
3927 if (N->getNumOperands() == 1)
3928 if (const MDString *S = dyn_cast<MDString>(N->getOperand(0)))
3929 RetainRVMarker = S;
3930 }
3931
3932 return false;
3933}
3934
3935bool ObjCARCContract::runOnFunction(Function &F) {
3936 if (!EnableARCOpts)
3937 return false;
3938
Dan Gohmanceaac7c2011-06-20 23:20:43 +00003939 // If nothing in the Module uses ARC, don't do anything.
3940 if (!Run)
3941 return false;
3942
John McCalld935e9c2011-06-15 23:37:01 +00003943 Changed = false;
3944 AA = &getAnalysis<AliasAnalysis>();
3945 DT = &getAnalysis<DominatorTree>();
3946
3947 PA.setAA(&getAnalysis<AliasAnalysis>());
3948
Dan Gohman8ee108b2012-01-19 19:14:36 +00003949 // Track whether it's ok to mark objc_storeStrong calls with the "tail"
3950 // keyword. Be conservative if the function has variadic arguments.
3951 // It seems that functions which "return twice" are also unsafe for the
3952 // "tail" argument, because they are setjmp, which could need to
3953 // return to an earlier stack state.
Dan Gohman41375a32012-05-08 23:39:44 +00003954 bool TailOkForStoreStrongs = !F.isVarArg() &&
3955 !F.callsFunctionThatReturnsTwice();
Dan Gohman8ee108b2012-01-19 19:14:36 +00003956
John McCalld935e9c2011-06-15 23:37:01 +00003957 // For ObjC library calls which return their argument, replace uses of the
3958 // argument with uses of the call return value, if it dominates the use. This
3959 // reduces register pressure.
3960 SmallPtrSet<Instruction *, 4> DependingInstructions;
3961 SmallPtrSet<const BasicBlock *, 4> Visited;
3962 for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
3963 Instruction *Inst = &*I++;
Michael Gottesman10426b52013-01-07 21:26:07 +00003964
Michael Gottesman3f146e22013-01-01 16:05:48 +00003965 DEBUG(dbgs() << "ObjCARCContract: Visiting: " << *Inst << "\n");
Michael Gottesman10426b52013-01-07 21:26:07 +00003966
John McCalld935e9c2011-06-15 23:37:01 +00003967 // Only these library routines return their argument. In particular,
3968 // objc_retainBlock does not necessarily return its argument.
3969 InstructionClass Class = GetBasicInstructionClass(Inst);
3970 switch (Class) {
3971 case IC_Retain:
3972 case IC_FusedRetainAutorelease:
3973 case IC_FusedRetainAutoreleaseRV:
3974 break;
3975 case IC_Autorelease:
3976 case IC_AutoreleaseRV:
3977 if (ContractAutorelease(F, Inst, Class, DependingInstructions, Visited))
3978 continue;
3979 break;
3980 case IC_RetainRV: {
3981 // If we're compiling for a target which needs a special inline-asm
3982 // marker to do the retainAutoreleasedReturnValue optimization,
3983 // insert it now.
3984 if (!RetainRVMarker)
3985 break;
3986 BasicBlock::iterator BBI = Inst;
Dan Gohman5f725cd2012-06-25 19:47:37 +00003987 BasicBlock *InstParent = Inst->getParent();
3988
3989 // Step up to see if the call immediately precedes the RetainRV call.
3990 // If it's an invoke, we have to cross a block boundary. And we have
3991 // to carefully dodge no-op instructions.
3992 do {
3993 if (&*BBI == InstParent->begin()) {
3994 BasicBlock *Pred = InstParent->getSinglePredecessor();
3995 if (!Pred)
3996 goto decline_rv_optimization;
3997 BBI = Pred->getTerminator();
3998 break;
3999 }
4000 --BBI;
4001 } while (isNoopInstruction(BBI));
4002
John McCalld935e9c2011-06-15 23:37:01 +00004003 if (&*BBI == GetObjCArg(Inst)) {
Michael Gottesman00d1f962013-01-03 07:32:41 +00004004 DEBUG(dbgs() << "ObjCARCContract: Adding inline asm marker for "
Michael Gottesman9f848ae2013-01-04 21:29:57 +00004005 "retainAutoreleasedReturnValue optimization.\n");
Dan Gohman670f9372012-04-13 18:57:48 +00004006 Changed = true;
John McCalld935e9c2011-06-15 23:37:01 +00004007 InlineAsm *IA =
4008 InlineAsm::get(FunctionType::get(Type::getVoidTy(Inst->getContext()),
4009 /*isVarArg=*/false),
4010 RetainRVMarker->getString(),
4011 /*Constraints=*/"", /*hasSideEffects=*/true);
4012 CallInst::Create(IA, "", Inst);
4013 }
Dan Gohman5f725cd2012-06-25 19:47:37 +00004014 decline_rv_optimization:
John McCalld935e9c2011-06-15 23:37:01 +00004015 break;
4016 }
4017 case IC_InitWeak: {
4018 // objc_initWeak(p, null) => *p = null
4019 CallInst *CI = cast<CallInst>(Inst);
4020 if (isNullOrUndef(CI->getArgOperand(1))) {
4021 Value *Null =
4022 ConstantPointerNull::get(cast<PointerType>(CI->getType()));
4023 Changed = true;
4024 new StoreInst(Null, CI->getArgOperand(0), CI);
Michael Gottesman10426b52013-01-07 21:26:07 +00004025
Michael Gottesman416dc002013-01-03 07:32:53 +00004026 DEBUG(dbgs() << "OBJCARCContract: Old = " << *CI << "\n"
4027 << " New = " << *Null << "\n");
Michael Gottesman10426b52013-01-07 21:26:07 +00004028
John McCalld935e9c2011-06-15 23:37:01 +00004029 CI->replaceAllUsesWith(Null);
4030 CI->eraseFromParent();
4031 }
4032 continue;
4033 }
4034 case IC_Release:
4035 ContractRelease(Inst, I);
4036 continue;
Dan Gohman8ee108b2012-01-19 19:14:36 +00004037 case IC_User:
4038 // Be conservative if the function has any alloca instructions.
4039 // Technically we only care about escaping alloca instructions,
4040 // but this is sufficient to handle some interesting cases.
4041 if (isa<AllocaInst>(Inst))
4042 TailOkForStoreStrongs = false;
4043 continue;
John McCalld935e9c2011-06-15 23:37:01 +00004044 default:
4045 continue;
4046 }
4047
Michael Gottesman50ae5b22013-01-03 08:09:27 +00004048 DEBUG(dbgs() << "ObjCARCContract: Finished List.\n\n");
Michael Gottesman3f146e22013-01-01 16:05:48 +00004049
John McCalld935e9c2011-06-15 23:37:01 +00004050 // Don't use GetObjCArg because we don't want to look through bitcasts
4051 // and such; to do the replacement, the argument must have type i8*.
4052 const Value *Arg = cast<CallInst>(Inst)->getArgOperand(0);
4053 for (;;) {
4054 // If we're compiling bugpointed code, don't get in trouble.
4055 if (!isa<Instruction>(Arg) && !isa<Argument>(Arg))
4056 break;
4057 // Look through the uses of the pointer.
4058 for (Value::const_use_iterator UI = Arg->use_begin(), UE = Arg->use_end();
4059 UI != UE; ) {
4060 Use &U = UI.getUse();
4061 unsigned OperandNo = UI.getOperandNo();
4062 ++UI; // Increment UI now, because we may unlink its element.
Dan Gohman670f9372012-04-13 18:57:48 +00004063
4064 // If the call's return value dominates a use of the call's argument
4065 // value, rewrite the use to use the return value. We check for
4066 // reachability here because an unreachable call is considered to
4067 // trivially dominate itself, which would lead us to rewriting its
4068 // argument in terms of its return value, which would lead to
4069 // infinite loops in GetObjCArg.
Dan Gohman41375a32012-05-08 23:39:44 +00004070 if (DT->isReachableFromEntry(U) && DT->dominates(Inst, U)) {
Rafael Espindolaf5892782012-03-15 15:52:59 +00004071 Changed = true;
4072 Instruction *Replacement = Inst;
4073 Type *UseTy = U.get()->getType();
Dan Gohmande8d2c42012-04-13 01:08:28 +00004074 if (PHINode *PHI = dyn_cast<PHINode>(U.getUser())) {
Rafael Espindolaf5892782012-03-15 15:52:59 +00004075 // For PHI nodes, insert the bitcast in the predecessor block.
Dan Gohman41375a32012-05-08 23:39:44 +00004076 unsigned ValNo = PHINode::getIncomingValueNumForOperand(OperandNo);
4077 BasicBlock *BB = PHI->getIncomingBlock(ValNo);
Rafael Espindolaf5892782012-03-15 15:52:59 +00004078 if (Replacement->getType() != UseTy)
4079 Replacement = new BitCastInst(Replacement, UseTy, "",
4080 &BB->back());
Dan Gohman670f9372012-04-13 18:57:48 +00004081 // While we're here, rewrite all edges for this PHI, rather
4082 // than just one use at a time, to minimize the number of
4083 // bitcasts we emit.
Dan Gohmandae33492012-04-27 18:56:31 +00004084 for (unsigned i = 0, e = PHI->getNumIncomingValues(); i != e; ++i)
Rafael Espindolaf5892782012-03-15 15:52:59 +00004085 if (PHI->getIncomingBlock(i) == BB) {
4086 // Keep the UI iterator valid.
4087 if (&PHI->getOperandUse(
4088 PHINode::getOperandNumForIncomingValue(i)) ==
4089 &UI.getUse())
4090 ++UI;
4091 PHI->setIncomingValue(i, Replacement);
4092 }
4093 } else {
4094 if (Replacement->getType() != UseTy)
Dan Gohmande8d2c42012-04-13 01:08:28 +00004095 Replacement = new BitCastInst(Replacement, UseTy, "",
4096 cast<Instruction>(U.getUser()));
Rafael Espindolaf5892782012-03-15 15:52:59 +00004097 U.set(Replacement);
John McCalld935e9c2011-06-15 23:37:01 +00004098 }
Rafael Espindolaf5892782012-03-15 15:52:59 +00004099 }
John McCalld935e9c2011-06-15 23:37:01 +00004100 }
4101
Dan Gohmandae33492012-04-27 18:56:31 +00004102 // If Arg is a no-op casted pointer, strip one level of casts and iterate.
John McCalld935e9c2011-06-15 23:37:01 +00004103 if (const BitCastInst *BI = dyn_cast<BitCastInst>(Arg))
4104 Arg = BI->getOperand(0);
4105 else if (isa<GEPOperator>(Arg) &&
4106 cast<GEPOperator>(Arg)->hasAllZeroIndices())
4107 Arg = cast<GEPOperator>(Arg)->getPointerOperand();
4108 else if (isa<GlobalAlias>(Arg) &&
4109 !cast<GlobalAlias>(Arg)->mayBeOverridden())
4110 Arg = cast<GlobalAlias>(Arg)->getAliasee();
4111 else
4112 break;
4113 }
4114 }
4115
Dan Gohman8ee108b2012-01-19 19:14:36 +00004116 // If this function has no escaping allocas or suspicious vararg usage,
4117 // objc_storeStrong calls can be marked with the "tail" keyword.
4118 if (TailOkForStoreStrongs)
Dan Gohman41375a32012-05-08 23:39:44 +00004119 for (SmallPtrSet<CallInst *, 8>::iterator I = StoreStrongCalls.begin(),
Dan Gohman8ee108b2012-01-19 19:14:36 +00004120 E = StoreStrongCalls.end(); I != E; ++I)
4121 (*I)->setTailCall();
4122 StoreStrongCalls.clear();
4123
John McCalld935e9c2011-06-15 23:37:01 +00004124 return Changed;
4125}
Michael Gottesman97e3df02013-01-14 00:35:14 +00004126
4127/// @}
4128///